1e8e9920 1/* Lowering pass for OpenMP directives. Converts OpenMP directives
2 into explicit calls to the runtime library (libgomp) and data
3 marshalling to implement data sharing and copying clauses.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
4a04f4b4 6 Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
1e8e9920 7
8This file is part of GCC.
9
10GCC is free software; you can redistribute it and/or modify it under
11the terms of the GNU General Public License as published by the Free
8c4c00c1 12Software Foundation; either version 3, or (at your option) any later
1e8e9920 13version.
14
15GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16WARRANTY; without even the implied warranty of MERCHANTABILITY or
17FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18for more details.
19
20You should have received a copy of the GNU General Public License
8c4c00c1 21along with GCC; see the file COPYING3. If not see
22<http://www.gnu.org/licenses/>. */
1e8e9920 23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "tm.h"
28#include "tree.h"
29#include "rtl.h"
75a70cf9 30#include "gimple.h"
31#include "tree-iterator.h"
1e8e9920 32#include "tree-inline.h"
33#include "langhooks.h"
34#include "diagnostic.h"
35#include "tree-flow.h"
36#include "timevar.h"
37#include "flags.h"
38#include "function.h"
39#include "expr.h"
40#include "toplev.h"
41#include "tree-pass.h"
42#include "ggc.h"
43#include "except.h"
e3022db7 44#include "splay-tree.h"
cb7f680b 45#include "optabs.h"
46#include "cfgloop.h"
1e8e9920 47
75a70cf9 48
48e1416a 49/* Lowering of OpenMP parallel and workshare constructs proceeds in two
1e8e9920 50 phases. The first phase scans the function looking for OMP statements
51 and then for variables that must be replaced to satisfy data sharing
52 clauses. The second phase expands code for the constructs, as well as
334ec2d8 53 re-gimplifying things when variables have been replaced with complex
1e8e9920 54 expressions.
55
d134bccc 56 Final code generation is done by pass_expand_omp. The flowgraph is
57 scanned for parallel regions which are then moved to a new
58 function, to be invoked by the thread library. */
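/* For instance (an illustrative sketch, not literal output of this
   pass), a region such as

     #pragma omp parallel shared (n)
       body;

   is outlined into a child function taking the shared-data record,
   and the directive itself becomes, in essence, calls into libgomp:

     static void foo._omp_fn.0 (void *.omp_data_i)
     {
       ... body, reading N through ((struct .omp_data_s *) .omp_data_i)->n ...
     }

     .omp_data_o.n = n;
     __builtin_GOMP_parallel_start (foo._omp_fn.0, &.omp_data_o, 0);
     foo._omp_fn.0 (&.omp_data_o);
     __builtin_GOMP_parallel_end ();  */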
1e8e9920 59
60/* Context structure. Used to store information about each parallel
61 directive in the code. */
62
63typedef struct omp_context
64{
65 /* This field must be at the beginning, as we do "inheritance": Some
66 callback functions for tree-inline.c (e.g., omp_copy_decl)
67 receive a copy_body_data pointer that is up-casted to an
68 omp_context pointer. */
69 copy_body_data cb;
70
71 /* The tree of contexts corresponding to the encountered constructs. */
72 struct omp_context *outer;
75a70cf9 73 gimple stmt;
1e8e9920 74
48e1416a 75 /* Map variables to fields in a structure that allows communication
1e8e9920 76 between sending and receiving threads. */
77 splay_tree field_map;
78 tree record_type;
79 tree sender_decl;
80 tree receiver_decl;
81
fd6481cf 82 /* These are used just by task contexts, if the task firstprivate fn is
83 needed. srecord_type is used to communicate from the thread that
84 encountered the task construct to the task firstprivate fn;
85 record_type is allocated by GOMP_task, initialized by the task
86 firstprivate fn, and passed to the task body fn. */
87 splay_tree sfield_map;
88 tree srecord_type;
89
1e8e9920 90 /* A chain of variables to add to the top-level block surrounding the
91 construct. In the case of a parallel, this is in the child function. */
92 tree block_vars;
93
94 /* What to do with variables with implicitly determined sharing
95 attributes. */
96 enum omp_clause_default_kind default_kind;
97
 98 /* Nesting depth of this context. Used to beautify error messages about
99 invalid gotos. The outermost ctx is depth 1, with depth 0 being
100 reserved for the main body of the function. */
101 int depth;
102
1e8e9920 103 /* True if this parallel directive is nested within another. */
104 bool is_nested;
1e8e9920 105} omp_context;
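/* In a lowered parallel (a sketch), SENDER_DECL is the .omp_data_o
   object the encountering thread fills in and RECEIVER_DECL is the
   .omp_data_i parameter through which the child function reads it;
   FIELD_MAP ties each shared VAR_DECL to its FIELD_DECL in
   RECORD_TYPE.  */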
106
107
fd6481cf 108struct omp_for_data_loop
109{
110 tree v, n1, n2, step;
111 enum tree_code cond_code;
112};
113
773c5ba7 114/* A structure describing the main elements of a parallel loop. */
1e8e9920 115
773c5ba7 116struct omp_for_data
1e8e9920 117{
fd6481cf 118 struct omp_for_data_loop loop;
75a70cf9 119 tree chunk_size;
120 gimple for_stmt;
fd6481cf 121 tree pre, iter_type;
122 int collapse;
1e8e9920 123 bool have_nowait, have_ordered;
124 enum omp_clause_schedule_kind sched_kind;
fd6481cf 125 struct omp_for_data_loop *loops;
1e8e9920 126};
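/* As an illustration (a sketch; the trees are built by
   extract_omp_for_data below), a collapsed nest such as

     #pragma omp for collapse (2)
     for (i = 0; i < M; i++)
       for (j = 0; j < N; j++)

   is described by collapse == 2, with loops[0] and loops[1] holding
   the per-dimension (v, n1, n2, step, cond_code) and the synthesized
   fd->loop running a single logical iterator from 0 to M * N.  */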
127
773c5ba7 128
1e8e9920 129static splay_tree all_contexts;
fd6481cf 130static int taskreg_nesting_level;
61e47ac8 131struct omp_region *root_omp_region;
fd6481cf 132static bitmap task_shared_vars;
1e8e9920 133
75a70cf9 134static void scan_omp (gimple_seq, omp_context *);
135static tree scan_omp_1_op (tree *, int *, void *);
136
137#define WALK_SUBSTMTS \
138 case GIMPLE_BIND: \
139 case GIMPLE_TRY: \
140 case GIMPLE_CATCH: \
141 case GIMPLE_EH_FILTER: \
142 /* The sub-statements for these should be walked. */ \
143 *handled_ops_p = false; \
144 break;
145
146/* Convenience function for calling scan_omp_1_op on tree operands. */
147
148static inline tree
149scan_omp_op (tree *tp, omp_context *ctx)
150{
151 struct walk_stmt_info wi;
152
153 memset (&wi, 0, sizeof (wi));
154 wi.info = ctx;
155 wi.want_locations = true;
156
157 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
158}
159
160static void lower_omp (gimple_seq, omp_context *);
f49d7bb5 161static tree lookup_decl_in_outer_ctx (tree, omp_context *);
162static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
1e8e9920 163
164/* Find an OpenMP clause of type KIND within CLAUSES. */
165
79acaae1 166tree
590c3166 167find_omp_clause (tree clauses, enum omp_clause_code kind)
1e8e9920 168{
169 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
55d6e7cd 170 if (OMP_CLAUSE_CODE (clauses) == kind)
1e8e9920 171 return clauses;
172
173 return NULL_TREE;
174}
175
176/* Return true if CTX is for an omp parallel. */
177
178static inline bool
179is_parallel_ctx (omp_context *ctx)
180{
75a70cf9 181 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
1e8e9920 182}
183
773c5ba7 184
fd6481cf 185/* Return true if CTX is for an omp task. */
186
187static inline bool
188is_task_ctx (omp_context *ctx)
189{
75a70cf9 190 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
fd6481cf 191}
192
193
194/* Return true if CTX is for an omp parallel or omp task. */
195
196static inline bool
197is_taskreg_ctx (omp_context *ctx)
198{
75a70cf9 199 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
200 || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
fd6481cf 201}
202
203
773c5ba7 204/* Return true if REGION is a combined parallel+workshare region. */
1e8e9920 205
206static inline bool
773c5ba7 207is_combined_parallel (struct omp_region *region)
208{
209 return region->is_combined_parallel;
210}
211
212
213/* Extract the header elements of parallel loop FOR_STMT and store
214 them into *FD. */
215
216static void
75a70cf9 217extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
fd6481cf 218 struct omp_for_data_loop *loops)
773c5ba7 219{
fd6481cf 220 tree t, var, *collapse_iter, *collapse_count;
221 tree count = NULL_TREE, iter_type = long_integer_type_node;
222 struct omp_for_data_loop *loop;
223 int i;
224 struct omp_for_data_loop dummy_loop;
389dd41b 225 location_t loc = gimple_location (for_stmt);
773c5ba7 226
227 fd->for_stmt = for_stmt;
228 fd->pre = NULL;
75a70cf9 229 fd->collapse = gimple_omp_for_collapse (for_stmt);
fd6481cf 230 if (fd->collapse > 1)
231 fd->loops = loops;
232 else
233 fd->loops = &fd->loop;
773c5ba7 234
235 fd->have_nowait = fd->have_ordered = false;
236 fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
237 fd->chunk_size = NULL_TREE;
fd6481cf 238 collapse_iter = NULL;
239 collapse_count = NULL;
773c5ba7 240
75a70cf9 241 for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
55d6e7cd 242 switch (OMP_CLAUSE_CODE (t))
773c5ba7 243 {
244 case OMP_CLAUSE_NOWAIT:
245 fd->have_nowait = true;
246 break;
247 case OMP_CLAUSE_ORDERED:
248 fd->have_ordered = true;
249 break;
250 case OMP_CLAUSE_SCHEDULE:
251 fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
252 fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
253 break;
fd6481cf 254 case OMP_CLAUSE_COLLAPSE:
255 if (fd->collapse > 1)
256 {
257 collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
258 collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
259 }
773c5ba7 260 default:
261 break;
262 }
263
fd6481cf 264 /* FIXME: for now map schedule(auto) to schedule(static).
265 There should be analysis to determine whether all iterations
266 are approximately the same amount of work (then schedule(static)
bde357c8 267 is best) or if it varies (then schedule(dynamic,N) is better). */
fd6481cf 268 if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
269 {
270 fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
271 gcc_assert (fd->chunk_size == NULL);
272 }
273 gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
773c5ba7 274 if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
275 gcc_assert (fd->chunk_size == NULL);
276 else if (fd->chunk_size == NULL)
277 {
278 /* We only need to compute a default chunk size for ordered
279 static loops and dynamic loops. */
fd6481cf 280 if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
281 || fd->have_ordered
282 || fd->collapse > 1)
773c5ba7 283 fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
284 ? integer_zero_node : integer_one_node;
285 }
fd6481cf 286
287 for (i = 0; i < fd->collapse; i++)
288 {
289 if (fd->collapse == 1)
290 loop = &fd->loop;
291 else if (loops != NULL)
292 loop = loops + i;
293 else
294 loop = &dummy_loop;
295
48e1416a 296
75a70cf9 297 loop->v = gimple_omp_for_index (for_stmt, i);
fd6481cf 298 gcc_assert (SSA_VAR_P (loop->v));
299 gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
300 || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
301 var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
75a70cf9 302 loop->n1 = gimple_omp_for_initial (for_stmt, i);
fd6481cf 303
75a70cf9 304 loop->cond_code = gimple_omp_for_cond (for_stmt, i);
305 loop->n2 = gimple_omp_for_final (for_stmt, i);
fd6481cf 306 switch (loop->cond_code)
307 {
308 case LT_EXPR:
309 case GT_EXPR:
310 break;
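	  /* Non-strict comparisons are canonicalized to strict ones by
	     adjusting the bound: e.g. (a sketch) i <= n2 becomes
	     i < n2 + 1 and i >= n2 becomes i > n2 - 1, using a sizetype
	     offset when N2 is a pointer.  */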
311 case LE_EXPR:
312 if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
389dd41b 313 loop->n2 = fold_build2_loc (loc,
314 POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
fd6481cf 315 loop->n2, size_one_node);
316 else
389dd41b 317 loop->n2 = fold_build2_loc (loc,
318 PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
fd6481cf 319 build_int_cst (TREE_TYPE (loop->n2), 1));
320 loop->cond_code = LT_EXPR;
321 break;
322 case GE_EXPR:
323 if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
389dd41b 324 loop->n2 = fold_build2_loc (loc,
325 POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
fd6481cf 326 loop->n2, size_int (-1));
327 else
389dd41b 328 loop->n2 = fold_build2_loc (loc,
329 MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
fd6481cf 330 build_int_cst (TREE_TYPE (loop->n2), 1));
331 loop->cond_code = GT_EXPR;
332 break;
333 default:
334 gcc_unreachable ();
335 }
336
75a70cf9 337 t = gimple_omp_for_incr (for_stmt, i);
fd6481cf 338 gcc_assert (TREE_OPERAND (t, 0) == var);
339 switch (TREE_CODE (t))
340 {
341 case PLUS_EXPR:
342 case POINTER_PLUS_EXPR:
343 loop->step = TREE_OPERAND (t, 1);
344 break;
345 case MINUS_EXPR:
346 loop->step = TREE_OPERAND (t, 1);
389dd41b 347 loop->step = fold_build1_loc (loc,
348 NEGATE_EXPR, TREE_TYPE (loop->step),
fd6481cf 349 loop->step);
350 break;
351 default:
352 gcc_unreachable ();
353 }
354
355 if (iter_type != long_long_unsigned_type_node)
356 {
357 if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
358 iter_type = long_long_unsigned_type_node;
359 else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
360 && TYPE_PRECISION (TREE_TYPE (loop->v))
361 >= TYPE_PRECISION (iter_type))
362 {
363 tree n;
364
365 if (loop->cond_code == LT_EXPR)
389dd41b 366 n = fold_build2_loc (loc,
367 PLUS_EXPR, TREE_TYPE (loop->v),
fd6481cf 368 loop->n2, loop->step);
369 else
370 n = loop->n1;
371 if (TREE_CODE (n) != INTEGER_CST
372 || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
373 iter_type = long_long_unsigned_type_node;
374 }
375 else if (TYPE_PRECISION (TREE_TYPE (loop->v))
376 > TYPE_PRECISION (iter_type))
377 {
378 tree n1, n2;
379
380 if (loop->cond_code == LT_EXPR)
381 {
382 n1 = loop->n1;
389dd41b 383 n2 = fold_build2_loc (loc,
384 PLUS_EXPR, TREE_TYPE (loop->v),
fd6481cf 385 loop->n2, loop->step);
386 }
387 else
388 {
389dd41b 389 n1 = fold_build2_loc (loc,
390 MINUS_EXPR, TREE_TYPE (loop->v),
fd6481cf 391 loop->n2, loop->step);
392 n2 = loop->n1;
393 }
394 if (TREE_CODE (n1) != INTEGER_CST
395 || TREE_CODE (n2) != INTEGER_CST
396 || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
397 || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
398 iter_type = long_long_unsigned_type_node;
399 }
400 }
401
402 if (collapse_count && *collapse_count == NULL)
403 {
404 if ((i == 0 || count != NULL_TREE)
405 && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
406 && TREE_CONSTANT (loop->n1)
407 && TREE_CONSTANT (loop->n2)
408 && TREE_CODE (loop->step) == INTEGER_CST)
409 {
410 tree itype = TREE_TYPE (loop->v);
411
412 if (POINTER_TYPE_P (itype))
413 itype
414 = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
415 t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
389dd41b 416 t = fold_build2_loc (loc,
417 PLUS_EXPR, itype,
418 fold_convert_loc (loc, itype, loop->step), t);
419 t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
420 fold_convert_loc (loc, itype, loop->n2));
421 t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
422 fold_convert_loc (loc, itype, loop->n1));
fd6481cf 423 if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
389dd41b 424 t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
425 fold_build1_loc (loc, NEGATE_EXPR, itype, t),
426 fold_build1_loc (loc, NEGATE_EXPR, itype,
427 fold_convert_loc (loc, itype,
428 loop->step)));
fd6481cf 429 else
389dd41b 430 t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
431 fold_convert_loc (loc, itype, loop->step));
432 t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
fd6481cf 433 if (count != NULL_TREE)
389dd41b 434 count = fold_build2_loc (loc,
435 MULT_EXPR, long_long_unsigned_type_node,
fd6481cf 436 count, t);
437 else
438 count = t;
439 if (TREE_CODE (count) != INTEGER_CST)
440 count = NULL_TREE;
441 }
442 else
443 count = NULL_TREE;
444 }
445 }
446
447 if (count)
448 {
449 if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
450 iter_type = long_long_unsigned_type_node;
451 else
452 iter_type = long_integer_type_node;
453 }
454 else if (collapse_iter && *collapse_iter != NULL)
455 iter_type = TREE_TYPE (*collapse_iter);
456 fd->iter_type = iter_type;
457 if (collapse_iter && *collapse_iter == NULL)
458 *collapse_iter = create_tmp_var (iter_type, ".iter");
459 if (collapse_count && *collapse_count == NULL)
460 {
461 if (count)
389dd41b 462 *collapse_count = fold_convert_loc (loc, iter_type, count);
fd6481cf 463 else
464 *collapse_count = create_tmp_var (iter_type, ".count");
465 }
466
467 if (fd->collapse > 1)
468 {
469 fd->loop.v = *collapse_iter;
470 fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
471 fd->loop.n2 = *collapse_count;
472 fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
473 fd->loop.cond_code = LT_EXPR;
474 }
773c5ba7 475}
476
477
478/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
479 is the immediate dominator of PAR_ENTRY_BB, return true if there
480 are no data dependencies that would prevent expanding the parallel
481 directive at PAR_ENTRY_BB as a combined parallel+workshare region.
482
483 When expanding a combined parallel+workshare region, the call to
484 the child function may need additional arguments in the case of
75a70cf9 485 GIMPLE_OMP_FOR regions. In some cases, these arguments are
486 computed out of variables passed in from the parent to the child
487 via 'struct .omp_data_s'. For instance:
773c5ba7 488
489 #pragma omp parallel for schedule (guided, i * 4)
490 for (j ...)
491
492 Is lowered into:
493
494 # BLOCK 2 (PAR_ENTRY_BB)
495 .omp_data_o.i = i;
 496 #pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)]
48e1416a 497
773c5ba7 498 # BLOCK 3 (WS_ENTRY_BB)
499 .omp_data_i = &.omp_data_o;
500 D.1667 = .omp_data_i->i;
501 D.1598 = D.1667 * 4;
502 #pragma omp for schedule (guided, D.1598)
503
504 When we outline the parallel region, the call to the child function
505 'bar.omp_fn.0' will need the value D.1598 in its argument list, but
506 that value is computed *after* the call site. So, in principle we
507 cannot do the transformation.
508
509 To see whether the code in WS_ENTRY_BB blocks the combined
510 parallel+workshare call, we collect all the variables used in the
75a70cf9 511 GIMPLE_OMP_FOR header and check whether they appear on the LHS of any
773c5ba7 512 statement in WS_ENTRY_BB. If so, then we cannot emit the combined
513 call.
514
515 FIXME. If we had the SSA form built at this point, we could merely
516 hoist the code in block 3 into block 2 and be done with it. But at
517 this point we don't have dataflow information and though we could
518 hack something up here, it is really not worth the aggravation. */
519
520static bool
f018d957 521workshare_safe_to_combine_p (basic_block ws_entry_bb)
773c5ba7 522{
523 struct omp_for_data fd;
f018d957 524 gimple ws_stmt = last_stmt (ws_entry_bb);
773c5ba7 525
75a70cf9 526 if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
773c5ba7 527 return true;
528
75a70cf9 529 gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);
773c5ba7 530
fd6481cf 531 extract_omp_for_data (ws_stmt, &fd, NULL);
532
533 if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
534 return false;
535 if (fd.iter_type != long_integer_type_node)
536 return false;
773c5ba7 537
538 /* FIXME. We give up too easily here. If any of these arguments
539 are not constants, they will likely involve variables that have
540 been mapped into fields of .omp_data_s for sharing with the child
541 function. With appropriate data flow, it would be possible to
542 see through this. */
fd6481cf 543 if (!is_gimple_min_invariant (fd.loop.n1)
544 || !is_gimple_min_invariant (fd.loop.n2)
545 || !is_gimple_min_invariant (fd.loop.step)
773c5ba7 546 || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
547 return false;
548
549 return true;
550}
551
552
553/* Collect additional arguments needed to emit a combined
554 parallel+workshare call. WS_STMT is the workshare directive being
555 expanded. */
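/* Since tree_cons prepends, the list below is built back to front;
   for a GIMPLE_OMP_FOR the resulting WS_ARGS is (n1, n2, step
   [, chunk_size]), matching the order in which the
   GOMP_parallel_loop_*_start entry points take their loop bounds.  */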
556
557static tree
75a70cf9 558get_ws_args_for (gimple ws_stmt)
773c5ba7 559{
560 tree t;
389dd41b 561 location_t loc = gimple_location (ws_stmt);
773c5ba7 562
75a70cf9 563 if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
773c5ba7 564 {
565 struct omp_for_data fd;
566 tree ws_args;
567
fd6481cf 568 extract_omp_for_data (ws_stmt, &fd, NULL);
773c5ba7 569
570 ws_args = NULL_TREE;
571 if (fd.chunk_size)
572 {
389dd41b 573 t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
773c5ba7 574 ws_args = tree_cons (NULL, t, ws_args);
575 }
576
389dd41b 577 t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
773c5ba7 578 ws_args = tree_cons (NULL, t, ws_args);
579
389dd41b 580 t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n2);
773c5ba7 581 ws_args = tree_cons (NULL, t, ws_args);
582
389dd41b 583 t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n1);
773c5ba7 584 ws_args = tree_cons (NULL, t, ws_args);
585
586 return ws_args;
587 }
75a70cf9 588 else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
773c5ba7 589 {
ac6e3339 590 /* Number of sections is equal to the number of edges from the
75a70cf9 591 GIMPLE_OMP_SECTIONS_SWITCH statement, not counting the edge to
 592 the exit of the sections region. */
593 basic_block bb = single_succ (gimple_bb (ws_stmt));
ac6e3339 594 t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
773c5ba7 595 t = tree_cons (NULL, t, NULL);
596 return t;
597 }
598
599 gcc_unreachable ();
600}
601
602
603/* Discover whether REGION is a combined parallel+workshare region. */
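/* For example (a sketch), "#pragma omp parallel for schedule (dynamic)"
   with no ordered clause can be emitted as one combined libgomp call
   such as GOMP_parallel_loop_dynamic_start, instead of a parallel
   launch followed by a separate loop-start call in the child.  */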
604
605static void
606determine_parallel_type (struct omp_region *region)
1e8e9920 607{
773c5ba7 608 basic_block par_entry_bb, par_exit_bb;
609 basic_block ws_entry_bb, ws_exit_bb;
610
03ed154b 611 if (region == NULL || region->inner == NULL
ac6e3339 612 || region->exit == NULL || region->inner->exit == NULL
613 || region->inner->cont == NULL)
773c5ba7 614 return;
615
616 /* We only support parallel+for and parallel+sections. */
75a70cf9 617 if (region->type != GIMPLE_OMP_PARALLEL
618 || (region->inner->type != GIMPLE_OMP_FOR
619 && region->inner->type != GIMPLE_OMP_SECTIONS))
773c5ba7 620 return;
621
622 /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
623 WS_EXIT_BB -> PAR_EXIT_BB. */
61e47ac8 624 par_entry_bb = region->entry;
625 par_exit_bb = region->exit;
626 ws_entry_bb = region->inner->entry;
627 ws_exit_bb = region->inner->exit;
773c5ba7 628
629 if (single_succ (par_entry_bb) == ws_entry_bb
630 && single_succ (ws_exit_bb) == par_exit_bb
f018d957 631 && workshare_safe_to_combine_p (ws_entry_bb)
75a70cf9 632 && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
de7ef844 633 || (last_and_only_stmt (ws_entry_bb)
634 && last_and_only_stmt (par_exit_bb))))
773c5ba7 635 {
75a70cf9 636 gimple ws_stmt = last_stmt (ws_entry_bb);
61e47ac8 637
75a70cf9 638 if (region->inner->type == GIMPLE_OMP_FOR)
773c5ba7 639 {
640 /* If this is a combined parallel loop, we need to determine
641 whether or not to use the combined library calls. There
642 are two cases where we do not apply the transformation:
643 static loops and any kind of ordered loop. In the first
644 case, we already open code the loop so there is no need
645 to do anything else. In the latter case, the combined
646 parallel loop call would still need extra synchronization
647 to implement ordered semantics, so there would not be any
648 gain in using the combined call. */
75a70cf9 649 tree clauses = gimple_omp_for_clauses (ws_stmt);
773c5ba7 650 tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
651 if (c == NULL
652 || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
653 || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
654 {
655 region->is_combined_parallel = false;
656 region->inner->is_combined_parallel = false;
657 return;
658 }
659 }
660
661 region->is_combined_parallel = true;
662 region->inner->is_combined_parallel = true;
61e47ac8 663 region->ws_args = get_ws_args_for (ws_stmt);
773c5ba7 664 }
1e8e9920 665}
666
773c5ba7 667
1e8e9920 668/* Return true if EXPR is variable sized. */
669
670static inline bool
1f1872fd 671is_variable_sized (const_tree expr)
1e8e9920 672{
673 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
674}
675
676/* Return true if DECL is a reference type. */
677
678static inline bool
679is_reference (tree decl)
680{
681 return lang_hooks.decls.omp_privatize_by_reference (decl);
682}
683
684/* Lookup variables in the decl or field splay trees. The "maybe" form
 685 allows the variable not to have been entered; otherwise we
 686 assert that the variable has been entered. */
687
688static inline tree
689lookup_decl (tree var, omp_context *ctx)
690{
e3022db7 691 tree *n;
692 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
693 return *n;
1e8e9920 694}
695
696static inline tree
e8a588af 697maybe_lookup_decl (const_tree var, omp_context *ctx)
1e8e9920 698{
e3022db7 699 tree *n;
700 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
701 return n ? *n : NULL_TREE;
1e8e9920 702}
703
704static inline tree
705lookup_field (tree var, omp_context *ctx)
706{
707 splay_tree_node n;
708 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
709 return (tree) n->value;
710}
711
fd6481cf 712static inline tree
713lookup_sfield (tree var, omp_context *ctx)
714{
715 splay_tree_node n;
716 n = splay_tree_lookup (ctx->sfield_map
717 ? ctx->sfield_map : ctx->field_map,
718 (splay_tree_key) var);
719 return (tree) n->value;
720}
721
1e8e9920 722static inline tree
723maybe_lookup_field (tree var, omp_context *ctx)
724{
725 splay_tree_node n;
726 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
727 return n ? (tree) n->value : NULL_TREE;
728}
729
e8a588af 730/* Return true if DECL should be copied by pointer. SHARED_CTX is
731 the parallel context if DECL is to be shared. */
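/* For example (a sketch): a non-addressable local scalar can use
   copy-in/copy-out through the .omp_data_s record, whereas a global,
   an addressable variable, or a variable already shared by an outer
   parallel must be passed by pointer so every thread sees the one
   location.  */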
1e8e9920 732
733static bool
fd6481cf 734use_pointer_for_field (tree decl, omp_context *shared_ctx)
1e8e9920 735{
736 if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
737 return true;
738
554f2707 739 /* We can only use copy-in/copy-out semantics for shared variables
1e8e9920 740 when we know the value is not accessible from an outer scope. */
e8a588af 741 if (shared_ctx)
1e8e9920 742 {
743 /* ??? Trivially accessible from anywhere. But why would we even
744 be passing an address in this case? Should we simply assert
745 this to be false, or should we have a cleanup pass that removes
746 these from the list of mappings? */
747 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
748 return true;
749
750 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
751 without analyzing the expression whether or not its location
752 is accessible to anyone else. In the case of nested parallel
753 regions it certainly may be. */
df2c34fc 754 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
1e8e9920 755 return true;
756
757 /* Do not use copy-in/copy-out for variables that have their
758 address taken. */
759 if (TREE_ADDRESSABLE (decl))
760 return true;
e8a588af 761
762 /* Disallow copy-in/out in nested parallel if
763 decl is shared in outer parallel, otherwise
764 each thread could store the shared variable
765 in its own copy-in location, making the
766 variable no longer really shared. */
767 if (!TREE_READONLY (decl) && shared_ctx->is_nested)
768 {
769 omp_context *up;
770
771 for (up = shared_ctx->outer; up; up = up->outer)
0cb159ec 772 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
e8a588af 773 break;
774
0cb159ec 775 if (up)
e8a588af 776 {
777 tree c;
778
75a70cf9 779 for (c = gimple_omp_taskreg_clauses (up->stmt);
e8a588af 780 c; c = OMP_CLAUSE_CHAIN (c))
781 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
782 && OMP_CLAUSE_DECL (c) == decl)
783 break;
784
785 if (c)
786 return true;
787 }
788 }
fd6481cf 789
790 /* For tasks avoid using copy-in/out, unless they are readonly
791 (in which case just copy-in is used). As tasks can be
 792 deferred or executed in a different thread, when GOMP_task
793 returns, the task hasn't necessarily terminated. */
794 if (!TREE_READONLY (decl) && is_task_ctx (shared_ctx))
795 {
796 tree outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
797 if (is_gimple_reg (outer))
798 {
799 /* Taking address of OUTER in lower_send_shared_vars
800 might need regimplification of everything that uses the
801 variable. */
802 if (!task_shared_vars)
803 task_shared_vars = BITMAP_ALLOC (NULL);
804 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
805 TREE_ADDRESSABLE (outer) = 1;
806 }
807 return true;
808 }
1e8e9920 809 }
810
811 return false;
812}
813
79acaae1 814/* Create a new VAR_DECL and copy information from VAR to it. */
1e8e9920 815
79acaae1 816tree
817copy_var_decl (tree var, tree name, tree type)
1e8e9920 818{
e60a6f7b 819 tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);
1e8e9920 820
821 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
79acaae1 822 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
8ea8de24 823 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
1e8e9920 824 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
825 DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
79acaae1 826 DECL_CONTEXT (copy) = DECL_CONTEXT (var);
1e8e9920 827 TREE_USED (copy) = 1;
1e8e9920 828 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
829
79acaae1 830 return copy;
831}
832
833/* Construct a new automatic decl similar to VAR. */
834
835static tree
836omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
837{
838 tree copy = copy_var_decl (var, name, type);
839
840 DECL_CONTEXT (copy) = current_function_decl;
1e8e9920 841 TREE_CHAIN (copy) = ctx->block_vars;
842 ctx->block_vars = copy;
843
844 return copy;
845}
846
847static tree
848omp_copy_decl_1 (tree var, omp_context *ctx)
849{
850 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
851}
852
853/* Build tree nodes to access the field for VAR on the receiver side. */
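/* The result is .omp_data_i->var, or *.omp_data_i->var when the field
   holds the variable's address (BY_REF).  */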
854
855static tree
856build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
857{
858 tree x, field = lookup_field (var, ctx);
859
860 /* If the receiver record type was remapped in the child function,
861 remap the field into the new record type. */
862 x = maybe_lookup_field (field, ctx);
863 if (x != NULL)
864 field = x;
865
866 x = build_fold_indirect_ref (ctx->receiver_decl);
867 x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL);
868 if (by_ref)
869 x = build_fold_indirect_ref (x);
870
871 return x;
872}
873
874/* Build tree nodes to access VAR in the scope outer to CTX. In the case
875 of a parallel, this is a component reference; for workshare constructs
876 this is some variable. */
877
878static tree
879build_outer_var_ref (tree var, omp_context *ctx)
880{
881 tree x;
882
f49d7bb5 883 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
1e8e9920 884 x = var;
885 else if (is_variable_sized (var))
886 {
887 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
888 x = build_outer_var_ref (x, ctx);
889 x = build_fold_indirect_ref (x);
890 }
fd6481cf 891 else if (is_taskreg_ctx (ctx))
1e8e9920 892 {
e8a588af 893 bool by_ref = use_pointer_for_field (var, NULL);
1e8e9920 894 x = build_receiver_ref (var, by_ref, ctx);
895 }
896 else if (ctx->outer)
897 x = lookup_decl (var, ctx->outer);
9438af57 898 else if (is_reference (var))
899 /* This can happen with orphaned constructs. If var is reference, it is
900 possible it is shared and as such valid. */
901 x = var;
1e8e9920 902 else
903 gcc_unreachable ();
904
905 if (is_reference (var))
906 x = build_fold_indirect_ref (x);
907
908 return x;
909}
910
911/* Build tree nodes to access the field for VAR on the sender side. */
912
913static tree
914build_sender_ref (tree var, omp_context *ctx)
915{
fd6481cf 916 tree field = lookup_sfield (var, ctx);
1e8e9920 917 return build3 (COMPONENT_REF, TREE_TYPE (field),
918 ctx->sender_decl, field, NULL);
919}
920
921/* Add a new field for VAR inside the structure CTX->SENDER_DECL. */
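/* MASK is a bit mask: bit 0 requests a field in the receiver record
   (CTX->RECORD_TYPE, entered into FIELD_MAP), bit 1 a field in the
   sender record used by task constructs (CTX->SRECORD_TYPE, entered
   into SFIELD_MAP); a MASK of 3 requests both.  */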
922
923static void
fd6481cf 924install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
1e8e9920 925{
fd6481cf 926 tree field, type, sfield = NULL_TREE;
1e8e9920 927
fd6481cf 928 gcc_assert ((mask & 1) == 0
929 || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
930 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
931 || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));
1e8e9920 932
933 type = TREE_TYPE (var);
934 if (by_ref)
935 type = build_pointer_type (type);
fd6481cf 936 else if ((mask & 3) == 1 && is_reference (var))
937 type = TREE_TYPE (type);
1e8e9920 938
e60a6f7b 939 field = build_decl (DECL_SOURCE_LOCATION (var),
940 FIELD_DECL, DECL_NAME (var), type);
1e8e9920 941
942 /* Remember what variable this field was created for. This does have a
943 side effect of making dwarf2out ignore this member, so for helpful
944 debugging we clear it later in delete_omp_context. */
945 DECL_ABSTRACT_ORIGIN (field) = var;
fd6481cf 946 if (type == TREE_TYPE (var))
947 {
948 DECL_ALIGN (field) = DECL_ALIGN (var);
949 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
950 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
951 }
952 else
953 DECL_ALIGN (field) = TYPE_ALIGN (type);
1e8e9920 954
fd6481cf 955 if ((mask & 3) == 3)
956 {
957 insert_field_into_struct (ctx->record_type, field);
958 if (ctx->srecord_type)
959 {
e60a6f7b 960 sfield = build_decl (DECL_SOURCE_LOCATION (var),
961 FIELD_DECL, DECL_NAME (var), type);
fd6481cf 962 DECL_ABSTRACT_ORIGIN (sfield) = var;
963 DECL_ALIGN (sfield) = DECL_ALIGN (field);
964 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
965 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
966 insert_field_into_struct (ctx->srecord_type, sfield);
967 }
968 }
969 else
970 {
971 if (ctx->srecord_type == NULL_TREE)
972 {
973 tree t;
974
975 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
976 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
977 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
978 {
e60a6f7b 979 sfield = build_decl (DECL_SOURCE_LOCATION (var),
980 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
fd6481cf 981 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
982 insert_field_into_struct (ctx->srecord_type, sfield);
983 splay_tree_insert (ctx->sfield_map,
984 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
985 (splay_tree_value) sfield);
986 }
987 }
988 sfield = field;
989 insert_field_into_struct ((mask & 1) ? ctx->record_type
990 : ctx->srecord_type, field);
991 }
1e8e9920 992
fd6481cf 993 if (mask & 1)
994 splay_tree_insert (ctx->field_map, (splay_tree_key) var,
995 (splay_tree_value) field);
996 if ((mask & 2) && ctx->sfield_map)
997 splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
998 (splay_tree_value) sfield);
1e8e9920 999}
1000
1001static tree
1002install_var_local (tree var, omp_context *ctx)
1003{
1004 tree new_var = omp_copy_decl_1 (var, ctx);
1005 insert_decl_map (&ctx->cb, var, new_var);
1006 return new_var;
1007}
1008
1009/* Adjust the replacement for DECL in CTX for the new context. This means
1010 copying the DECL_VALUE_EXPR, and fixing up the type. */
1011
1012static void
1013fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
1014{
1015 tree new_decl, size;
1016
1017 new_decl = lookup_decl (decl, ctx);
1018
1019 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
1020
1021 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
1022 && DECL_HAS_VALUE_EXPR_P (decl))
1023 {
1024 tree ve = DECL_VALUE_EXPR (decl);
75a70cf9 1025 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
1e8e9920 1026 SET_DECL_VALUE_EXPR (new_decl, ve);
1027 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1028 }
1029
1030 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
1031 {
1032 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
1033 if (size == error_mark_node)
1034 size = TYPE_SIZE (TREE_TYPE (new_decl));
1035 DECL_SIZE (new_decl) = size;
1036
1037 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
1038 if (size == error_mark_node)
1039 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
1040 DECL_SIZE_UNIT (new_decl) = size;
1041 }
1042}
1043
1044/* The callback for remap_decl. Search all containing contexts for a
1045 mapping of the variable; this avoids having to duplicate the splay
1046 tree ahead of time. We know a mapping doesn't already exist in the
1047 given context. Create new mappings to implement default semantics. */
1048
1049static tree
1050omp_copy_decl (tree var, copy_body_data *cb)
1051{
1052 omp_context *ctx = (omp_context *) cb;
1053 tree new_var;
1054
1e8e9920 1055 if (TREE_CODE (var) == LABEL_DECL)
1056 {
e60a6f7b 1057 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
773c5ba7 1058 DECL_CONTEXT (new_var) = current_function_decl;
1e8e9920 1059 insert_decl_map (&ctx->cb, var, new_var);
1060 return new_var;
1061 }
1062
fd6481cf 1063 while (!is_taskreg_ctx (ctx))
1e8e9920 1064 {
1065 ctx = ctx->outer;
1066 if (ctx == NULL)
1067 return var;
1068 new_var = maybe_lookup_decl (var, ctx);
1069 if (new_var)
1070 return new_var;
1071 }
1072
f49d7bb5 1073 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
1074 return var;
1075
1e8e9920 1076 return error_mark_node;
1077}
1078
773c5ba7 1079
773c5ba7 1082/* Debugging dumps for parallel regions. */
1083void dump_omp_region (FILE *, struct omp_region *, int);
1084void debug_omp_region (struct omp_region *);
1085void debug_all_omp_regions (void);
1086
1087/* Dump the parallel region tree rooted at REGION. */
1088
1089void
1090dump_omp_region (FILE *file, struct omp_region *region, int indent)
1091{
61e47ac8 1092 fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
75a70cf9 1093 gimple_code_name[region->type]);
773c5ba7 1094
1095 if (region->inner)
1096 dump_omp_region (file, region->inner, indent + 4);
1097
61e47ac8 1098 if (region->cont)
1099 {
75a70cf9 1100 fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
61e47ac8 1101 region->cont->index);
1102 }
48e1416a 1103
773c5ba7 1104 if (region->exit)
75a70cf9 1105 fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
61e47ac8 1106 region->exit->index);
773c5ba7 1107 else
61e47ac8 1108 fprintf (file, "%*s[no exit marker]\n", indent, "");
773c5ba7 1109
1110 if (region->next)
61e47ac8 1111 dump_omp_region (file, region->next, indent);
773c5ba7 1112}
1113
1114void
1115debug_omp_region (struct omp_region *region)
1116{
1117 dump_omp_region (stderr, region, 0);
1118}
1119
1120void
1121debug_all_omp_regions (void)
1122{
1123 dump_omp_region (stderr, root_omp_region, 0);
1124}
1125
1126
1127/* Create a new parallel region starting at STMT inside region PARENT. */
1128
61e47ac8 1129struct omp_region *
75a70cf9 1130new_omp_region (basic_block bb, enum gimple_code type,
1131 struct omp_region *parent)
773c5ba7 1132{
4077bf7a 1133 struct omp_region *region = XCNEW (struct omp_region);
773c5ba7 1134
1135 region->outer = parent;
61e47ac8 1136 region->entry = bb;
1137 region->type = type;
773c5ba7 1138
1139 if (parent)
1140 {
1141 /* This is a nested region. Add it to the list of inner
1142 regions in PARENT. */
1143 region->next = parent->inner;
1144 parent->inner = region;
1145 }
61e47ac8 1146 else
773c5ba7 1147 {
1148 /* This is a toplevel region. Add it to the list of toplevel
1149 regions in ROOT_OMP_REGION. */
1150 region->next = root_omp_region;
1151 root_omp_region = region;
1152 }
61e47ac8 1153
1154 return region;
1155}
1156
1157/* Release the memory associated with the region tree rooted at REGION. */
1158
1159static void
1160free_omp_region_1 (struct omp_region *region)
1161{
1162 struct omp_region *i, *n;
1163
1164 for (i = region->inner; i ; i = n)
773c5ba7 1165 {
61e47ac8 1166 n = i->next;
1167 free_omp_region_1 (i);
773c5ba7 1168 }
1169
61e47ac8 1170 free (region);
1171}
773c5ba7 1172
61e47ac8 1173/* Release the memory for the entire omp region tree. */
1174
1175void
1176free_omp_regions (void)
1177{
1178 struct omp_region *r, *n;
1179 for (r = root_omp_region; r ; r = n)
1180 {
1181 n = r->next;
1182 free_omp_region_1 (r);
1183 }
1184 root_omp_region = NULL;
773c5ba7 1185}
1186
1187
1e8e9920 1188/* Create a new context, with OUTER_CTX being the surrounding context. */
1189
1190static omp_context *
75a70cf9 1191new_omp_context (gimple stmt, omp_context *outer_ctx)
1e8e9920 1192{
1193 omp_context *ctx = XCNEW (omp_context);
1194
1195 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
1196 (splay_tree_value) ctx);
1197 ctx->stmt = stmt;
1198
1199 if (outer_ctx)
1200 {
1201 ctx->outer = outer_ctx;
1202 ctx->cb = outer_ctx->cb;
1203 ctx->cb.block = NULL;
1204 ctx->depth = outer_ctx->depth + 1;
1205 }
1206 else
1207 {
1208 ctx->cb.src_fn = current_function_decl;
1209 ctx->cb.dst_fn = current_function_decl;
1210 ctx->cb.src_node = cgraph_node (current_function_decl);
1211 ctx->cb.dst_node = ctx->cb.src_node;
1212 ctx->cb.src_cfun = cfun;
1213 ctx->cb.copy_decl = omp_copy_decl;
e38def9c 1214 ctx->cb.eh_lp_nr = 0;
1e8e9920 1215 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
1216 ctx->depth = 1;
1217 }
1218
e3022db7 1219 ctx->cb.decl_map = pointer_map_create ();
1e8e9920 1220
1221 return ctx;
1222}
1223
75a70cf9 1224static gimple_seq maybe_catch_exception (gimple_seq);
f6430caa 1225
1226/* Finalize task copyfn. */
1227
1228static void
75a70cf9 1229finalize_task_copyfn (gimple task_stmt)
f6430caa 1230{
1231 struct function *child_cfun;
1232 tree child_fn, old_fn;
75a70cf9 1233 gimple_seq seq, new_seq;
1234 gimple bind;
f6430caa 1235
75a70cf9 1236 child_fn = gimple_omp_task_copy_fn (task_stmt);
f6430caa 1237 if (child_fn == NULL_TREE)
1238 return;
1239
1240 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1241
1242 /* Inform the callgraph about the new function. */
1243 DECL_STRUCT_FUNCTION (child_fn)->curr_properties
1244 = cfun->curr_properties;
1245
1246 old_fn = current_function_decl;
1247 push_cfun (child_cfun);
1248 current_function_decl = child_fn;
75a70cf9 1249 bind = gimplify_body (&DECL_SAVED_TREE (child_fn), child_fn, false);
1250 seq = gimple_seq_alloc ();
1251 gimple_seq_add_stmt (&seq, bind);
1252 new_seq = maybe_catch_exception (seq);
1253 if (new_seq != seq)
1254 {
1255 bind = gimple_build_bind (NULL, new_seq, NULL);
1256 seq = gimple_seq_alloc ();
1257 gimple_seq_add_stmt (&seq, bind);
1258 }
1259 gimple_set_body (child_fn, seq);
f6430caa 1260 pop_cfun ();
1261 current_function_decl = old_fn;
1262
1263 cgraph_add_new_function (child_fn, false);
1264}
1265
1e8e9920 1266/* Destroy an omp_context data structure. Called through the splay tree
1267 value delete callback. */
1268
1269static void
1270delete_omp_context (splay_tree_value value)
1271{
1272 omp_context *ctx = (omp_context *) value;
1273
e3022db7 1274 pointer_map_destroy (ctx->cb.decl_map);
1e8e9920 1275
1276 if (ctx->field_map)
1277 splay_tree_delete (ctx->field_map);
fd6481cf 1278 if (ctx->sfield_map)
1279 splay_tree_delete (ctx->sfield_map);
1e8e9920 1280
1281 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1282 it produces corrupt debug information. */
1283 if (ctx->record_type)
1284 {
1285 tree t;
1286 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
1287 DECL_ABSTRACT_ORIGIN (t) = NULL;
1288 }
fd6481cf 1289 if (ctx->srecord_type)
1290 {
1291 tree t;
1292 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = TREE_CHAIN (t))
1293 DECL_ABSTRACT_ORIGIN (t) = NULL;
1294 }
1e8e9920 1295
f6430caa 1296 if (is_task_ctx (ctx))
1297 finalize_task_copyfn (ctx->stmt);
1298
1e8e9920 1299 XDELETE (ctx);
1300}
1301
1302/* Fix up RECEIVER_DECL with a type that has been remapped to the child
1303 context. */
1304
1305static void
1306fixup_child_record_type (omp_context *ctx)
1307{
1308 tree f, type = ctx->record_type;
1309
1310 /* ??? It isn't sufficient to just call remap_type here, because
1311 variably_modified_type_p doesn't work the way we expect for
1312 record types. Testing each field for whether it needs remapping
1313 and creating a new record by hand works, however. */
1314 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1315 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1316 break;
1317 if (f)
1318 {
1319 tree name, new_fields = NULL;
1320
1321 type = lang_hooks.types.make_type (RECORD_TYPE);
1322 name = DECL_NAME (TYPE_NAME (ctx->record_type));
e60a6f7b 1323 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1324 TYPE_DECL, name, type);
1e8e9920 1325 TYPE_NAME (type) = name;
1326
1327 for (f = TYPE_FIELDS (ctx->record_type); f ; f = TREE_CHAIN (f))
1328 {
1329 tree new_f = copy_node (f);
1330 DECL_CONTEXT (new_f) = type;
1331 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1332 TREE_CHAIN (new_f) = new_fields;
75a70cf9 1333 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1334 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1335 &ctx->cb, NULL);
1336 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1337 &ctx->cb, NULL);
1e8e9920 1338 new_fields = new_f;
1339
1340 /* Arrange to be able to look up the receiver field
1341 given the sender field. */
1342 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1343 (splay_tree_value) new_f);
1344 }
1345 TYPE_FIELDS (type) = nreverse (new_fields);
1346 layout_type (type);
1347 }
1348
1349 TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
1350}
1351
1352/* Instantiate decls as necessary in CTX to satisfy the data sharing
1353 specified by CLAUSES. */
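/* For example (a sketch): given "#pragma omp parallel shared (x)"
   where X must live in memory, the first pass below installs a field
   for X in .omp_data_s (install_var_field) and a replacement decl in
   the child (install_var_local); the second pass fixes up sizes and
   DECL_VALUE_EXPRs once all remappings exist.  */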
1354
1355static void
1356scan_sharing_clauses (tree clauses, omp_context *ctx)
1357{
1358 tree c, decl;
1359 bool scan_array_reductions = false;
1360
1361 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1362 {
1363 bool by_ref;
1364
55d6e7cd 1365 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1366 {
1367 case OMP_CLAUSE_PRIVATE:
1368 decl = OMP_CLAUSE_DECL (c);
fd6481cf 1369 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1370 goto do_private;
1371 else if (!is_variable_sized (decl))
1e8e9920 1372 install_var_local (decl, ctx);
1373 break;
1374
1375 case OMP_CLAUSE_SHARED:
fd6481cf 1376 gcc_assert (is_taskreg_ctx (ctx));
1e8e9920 1377 decl = OMP_CLAUSE_DECL (c);
e7327393 1378 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1379 || !is_variable_sized (decl));
f49d7bb5 1380 /* Global variables don't need to be copied,
1381 the receiver side will use them directly. */
1382 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1383 break;
fd6481cf 1384 by_ref = use_pointer_for_field (decl, ctx);
1e8e9920 1385 if (! TREE_READONLY (decl)
1386 || TREE_ADDRESSABLE (decl)
1387 || by_ref
1388 || is_reference (decl))
1389 {
fd6481cf 1390 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1391 install_var_local (decl, ctx);
1392 break;
1393 }
1394 /* We don't need to copy const scalar vars back. */
55d6e7cd 1395 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1e8e9920 1396 goto do_private;
1397
1398 case OMP_CLAUSE_LASTPRIVATE:
1399 /* Let the corresponding firstprivate clause create
1400 the variable. */
1401 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1402 break;
1403 /* FALLTHRU */
1404
1405 case OMP_CLAUSE_FIRSTPRIVATE:
1406 case OMP_CLAUSE_REDUCTION:
1407 decl = OMP_CLAUSE_DECL (c);
1408 do_private:
1409 if (is_variable_sized (decl))
1e8e9920 1410 {
fd6481cf 1411 if (is_task_ctx (ctx))
1412 install_var_field (decl, false, 1, ctx);
1413 break;
1414 }
1415 else if (is_taskreg_ctx (ctx))
1416 {
1417 bool global
1418 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
e8a588af 1419 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1420
1421 if (is_task_ctx (ctx)
1422 && (global || by_ref || is_reference (decl)))
1423 {
1424 install_var_field (decl, false, 1, ctx);
1425 if (!global)
1426 install_var_field (decl, by_ref, 2, ctx);
1427 }
1428 else if (!global)
1429 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1430 }
1431 install_var_local (decl, ctx);
1432 break;
1433
1434 case OMP_CLAUSE_COPYPRIVATE:
1435 if (ctx->outer)
75a70cf9 1436 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1e8e9920 1437 /* FALLTHRU */
1438
1439 case OMP_CLAUSE_COPYIN:
1440 decl = OMP_CLAUSE_DECL (c);
e8a588af 1441 by_ref = use_pointer_for_field (decl, NULL);
fd6481cf 1442 install_var_field (decl, by_ref, 3, ctx);
1e8e9920 1443 break;
1444
1445 case OMP_CLAUSE_DEFAULT:
1446 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1447 break;
1448
1449 case OMP_CLAUSE_IF:
1450 case OMP_CLAUSE_NUM_THREADS:
1451 case OMP_CLAUSE_SCHEDULE:
1452 if (ctx->outer)
75a70cf9 1453 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1e8e9920 1454 break;
1455
1456 case OMP_CLAUSE_NOWAIT:
1457 case OMP_CLAUSE_ORDERED:
fd6481cf 1458 case OMP_CLAUSE_COLLAPSE:
1459 case OMP_CLAUSE_UNTIED:
1e8e9920 1460 break;
1461
1462 default:
1463 gcc_unreachable ();
1464 }
1465 }
1466
1467 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1468 {
55d6e7cd 1469 switch (OMP_CLAUSE_CODE (c))
1e8e9920 1470 {
1471 case OMP_CLAUSE_LASTPRIVATE:
1472 /* Let the corresponding firstprivate clause create
1473 the variable. */
75a70cf9 1474 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
fd6481cf 1475 scan_array_reductions = true;
1e8e9920 1476 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1477 break;
1478 /* FALLTHRU */
1479
1480 case OMP_CLAUSE_PRIVATE:
1481 case OMP_CLAUSE_FIRSTPRIVATE:
1482 case OMP_CLAUSE_REDUCTION:
1483 decl = OMP_CLAUSE_DECL (c);
1484 if (is_variable_sized (decl))
1485 install_var_local (decl, ctx);
1486 fixup_remapped_decl (decl, ctx,
55d6e7cd 1487 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1e8e9920 1488 && OMP_CLAUSE_PRIVATE_DEBUG (c));
55d6e7cd 1489 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1e8e9920 1490 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1491 scan_array_reductions = true;
1492 break;
1493
1494 case OMP_CLAUSE_SHARED:
1495 decl = OMP_CLAUSE_DECL (c);
f49d7bb5 1496 if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1497 fixup_remapped_decl (decl, ctx, false);
1e8e9920 1498 break;
1499
1500 case OMP_CLAUSE_COPYPRIVATE:
1501 case OMP_CLAUSE_COPYIN:
1502 case OMP_CLAUSE_DEFAULT:
1503 case OMP_CLAUSE_IF:
1504 case OMP_CLAUSE_NUM_THREADS:
1505 case OMP_CLAUSE_SCHEDULE:
1506 case OMP_CLAUSE_NOWAIT:
1507 case OMP_CLAUSE_ORDERED:
fd6481cf 1508 case OMP_CLAUSE_COLLAPSE:
1509 case OMP_CLAUSE_UNTIED:
1e8e9920 1510 break;
1511
1512 default:
1513 gcc_unreachable ();
1514 }
1515 }
1516
1517 if (scan_array_reductions)
1518 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
55d6e7cd 1519 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1e8e9920 1520 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1521 {
75a70cf9 1522 scan_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1523 scan_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1e8e9920 1524 }
fd6481cf 1525 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
75a70cf9 1526 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1527 scan_omp (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1e8e9920 1528}
1529
 1530/* Create a new name for the omp child function. Returns an identifier. */
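/* E.g., when compiling function foo, the generated names look like
   foo._omp_fn.0 (or foo._omp_cpyfn.1 for a task copy function) on
   targets that allow '.' in labels, with '$' or '_' substituted
   otherwise.  */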
1531
1532static GTY(()) unsigned int tmp_ompfn_id_num;
1533
1534static tree
fd6481cf 1535create_omp_child_function_name (bool task_copy)
1e8e9920 1536{
1537 tree name = DECL_ASSEMBLER_NAME (current_function_decl);
1538 size_t len = IDENTIFIER_LENGTH (name);
1539 char *tmp_name, *prefix;
fd6481cf 1540 const char *suffix;
1e8e9920 1541
fd6481cf 1542 suffix = task_copy ? "_omp_cpyfn" : "_omp_fn";
4077bf7a 1543 prefix = XALLOCAVEC (char, len + strlen (suffix) + 1);
1e8e9920 1544 memcpy (prefix, IDENTIFIER_POINTER (name), len);
fd6481cf 1545 strcpy (prefix + len, suffix);
1e8e9920 1546#ifndef NO_DOT_IN_LABEL
1547 prefix[len] = '.';
1548#elif !defined NO_DOLLAR_IN_LABEL
1549 prefix[len] = '$';
1550#endif
1551 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, tmp_ompfn_id_num++);
1552 return get_identifier (tmp_name);
1553}
1554
 1555/* Build a decl for the omp child function. It will not contain a body
1556 yet, just the bare decl. */
1557
1558static void
fd6481cf 1559create_omp_child_function (omp_context *ctx, bool task_copy)
1e8e9920 1560{
1561 tree decl, type, name, t;
1562
fd6481cf 1563 name = create_omp_child_function_name (task_copy);
1564 if (task_copy)
1565 type = build_function_type_list (void_type_node, ptr_type_node,
1566 ptr_type_node, NULL_TREE);
1567 else
1568 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1e8e9920 1569
e60a6f7b 1570 decl = build_decl (gimple_location (ctx->stmt),
1571 FUNCTION_DECL, name, type);
1e8e9920 1572
fd6481cf 1573 if (!task_copy)
1574 ctx->cb.dst_fn = decl;
1575 else
75a70cf9 1576 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1e8e9920 1577
1578 TREE_STATIC (decl) = 1;
1579 TREE_USED (decl) = 1;
1580 DECL_ARTIFICIAL (decl) = 1;
1581 DECL_IGNORED_P (decl) = 0;
1582 TREE_PUBLIC (decl) = 0;
1583 DECL_UNINLINABLE (decl) = 1;
1584 DECL_EXTERNAL (decl) = 0;
1585 DECL_CONTEXT (decl) = NULL_TREE;
773c5ba7 1586 DECL_INITIAL (decl) = make_node (BLOCK);
1e8e9920 1587
e60a6f7b 1588 t = build_decl (DECL_SOURCE_LOCATION (decl),
1589 RESULT_DECL, NULL_TREE, void_type_node);
1e8e9920 1590 DECL_ARTIFICIAL (t) = 1;
1591 DECL_IGNORED_P (t) = 1;
8e5b4ed6 1592 DECL_CONTEXT (t) = decl;
1e8e9920 1593 DECL_RESULT (decl) = t;
1594
e60a6f7b 1595 t = build_decl (DECL_SOURCE_LOCATION (decl),
1596 PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
1e8e9920 1597 DECL_ARTIFICIAL (t) = 1;
1598 DECL_ARG_TYPE (t) = ptr_type_node;
773c5ba7 1599 DECL_CONTEXT (t) = current_function_decl;
1e8e9920 1600 TREE_USED (t) = 1;
1601 DECL_ARGUMENTS (decl) = t;
fd6481cf 1602 if (!task_copy)
1603 ctx->receiver_decl = t;
1604 else
1605 {
e60a6f7b 1606 t = build_decl (DECL_SOURCE_LOCATION (decl),
1607 PARM_DECL, get_identifier (".omp_data_o"),
fd6481cf 1608 ptr_type_node);
1609 DECL_ARTIFICIAL (t) = 1;
1610 DECL_ARG_TYPE (t) = ptr_type_node;
1611 DECL_CONTEXT (t) = current_function_decl;
1612 TREE_USED (t) = 1;
86f2ad37 1613 TREE_ADDRESSABLE (t) = 1;
fd6481cf 1614 TREE_CHAIN (t) = DECL_ARGUMENTS (decl);
1615 DECL_ARGUMENTS (decl) = t;
1616 }
1e8e9920 1617
48e1416a 1618 /* Allocate memory for the function structure. The call to
773c5ba7 1619 allocate_struct_function clobbers CFUN, so we need to restore
1e8e9920 1620 it afterward. */
87d4aa85 1621 push_struct_function (decl);
75a70cf9 1622 cfun->function_end_locus = gimple_location (ctx->stmt);
87d4aa85 1623 pop_cfun ();
1e8e9920 1624}
1625
1e8e9920 1626
1627/* Scan an OpenMP parallel directive. */
1628
1629static void
75a70cf9 1630scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1e8e9920 1631{
1632 omp_context *ctx;
1633 tree name;
75a70cf9 1634 gimple stmt = gsi_stmt (*gsi);
1e8e9920 1635
1636 /* Ignore parallel directives with empty bodies, unless there
1637 are copyin clauses. */
1638 if (optimize > 0
75a70cf9 1639 && empty_body_p (gimple_omp_body (stmt))
1640 && find_omp_clause (gimple_omp_parallel_clauses (stmt),
1641 OMP_CLAUSE_COPYIN) == NULL)
1e8e9920 1642 {
75a70cf9 1643 gsi_replace (gsi, gimple_build_nop (), false);
1e8e9920 1644 return;
1645 }
1646
75a70cf9 1647 ctx = new_omp_context (stmt, outer_ctx);
fd6481cf 1648 if (taskreg_nesting_level > 1)
773c5ba7 1649 ctx->is_nested = true;
1e8e9920 1650 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1e8e9920 1651 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1652 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1e8e9920 1653 name = create_tmp_var_name (".omp_data_s");
e60a6f7b 1654 name = build_decl (gimple_location (stmt),
1655 TYPE_DECL, name, ctx->record_type);
1e8e9920 1656 TYPE_NAME (ctx->record_type) = name;
fd6481cf 1657 create_omp_child_function (ctx, false);
75a70cf9 1658 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1e8e9920 1659
75a70cf9 1660 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1661 scan_omp (gimple_omp_body (stmt), ctx);
1e8e9920 1662
1663 if (TYPE_FIELDS (ctx->record_type) == NULL)
1664 ctx->record_type = ctx->receiver_decl = NULL;
1665 else
1666 {
1667 layout_type (ctx->record_type);
1668 fixup_child_record_type (ctx);
1669 }
1670}
1671
fd6481cf 1672/* Scan an OpenMP task directive. */
1673
1674static void
75a70cf9 1675scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
fd6481cf 1676{
1677 omp_context *ctx;
75a70cf9 1678 tree name, t;
1679 gimple stmt = gsi_stmt (*gsi);
389dd41b 1680 location_t loc = gimple_location (stmt);
fd6481cf 1681
1682 /* Ignore task directives with empty bodies. */
1683 if (optimize > 0
75a70cf9 1684 && empty_body_p (gimple_omp_body (stmt)))
fd6481cf 1685 {
75a70cf9 1686 gsi_replace (gsi, gimple_build_nop (), false);
fd6481cf 1687 return;
1688 }
1689
75a70cf9 1690 ctx = new_omp_context (stmt, outer_ctx);
fd6481cf 1691 if (taskreg_nesting_level > 1)
1692 ctx->is_nested = true;
1693 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1694 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1695 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1696 name = create_tmp_var_name (".omp_data_s");
e60a6f7b 1697 name = build_decl (gimple_location (stmt),
1698 TYPE_DECL, name, ctx->record_type);
fd6481cf 1699 TYPE_NAME (ctx->record_type) = name;
1700 create_omp_child_function (ctx, false);
75a70cf9 1701 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
fd6481cf 1702
75a70cf9 1703 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
fd6481cf 1704
1705 if (ctx->srecord_type)
1706 {
1707 name = create_tmp_var_name (".omp_data_a");
e60a6f7b 1708 name = build_decl (gimple_location (stmt),
1709 TYPE_DECL, name, ctx->srecord_type);
fd6481cf 1710 TYPE_NAME (ctx->srecord_type) = name;
1711 create_omp_child_function (ctx, true);
1712 }
1713
75a70cf9 1714 scan_omp (gimple_omp_body (stmt), ctx);
fd6481cf 1715
1716 if (TYPE_FIELDS (ctx->record_type) == NULL)
1717 {
1718 ctx->record_type = ctx->receiver_decl = NULL;
75a70cf9 1719 t = build_int_cst (long_integer_type_node, 0);
1720 gimple_omp_task_set_arg_size (stmt, t);
1721 t = build_int_cst (long_integer_type_node, 1);
1722 gimple_omp_task_set_arg_align (stmt, t);
fd6481cf 1723 }
1724 else
1725 {
1726 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1727 /* Move VLA fields to the end. */
1728 p = &TYPE_FIELDS (ctx->record_type);
1729 while (*p)
1730 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1731 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1732 {
1733 *q = *p;
1734 *p = TREE_CHAIN (*p);
1735 TREE_CHAIN (*q) = NULL_TREE;
1736 q = &TREE_CHAIN (*q);
1737 }
1738 else
1739 p = &TREE_CHAIN (*p);
1740 *p = vla_fields;
1741 layout_type (ctx->record_type);
1742 fixup_child_record_type (ctx);
1743 if (ctx->srecord_type)
1744 layout_type (ctx->srecord_type);
389dd41b 1745 t = fold_convert_loc (loc, long_integer_type_node,
fd6481cf 1746 TYPE_SIZE_UNIT (ctx->record_type));
75a70cf9 1747 gimple_omp_task_set_arg_size (stmt, t);
1748 t = build_int_cst (long_integer_type_node,
fd6481cf 1749 TYPE_ALIGN_UNIT (ctx->record_type));
75a70cf9 1750 gimple_omp_task_set_arg_align (stmt, t);
fd6481cf 1751 }
1752}
1753
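/* A sketch of the bookkeeping above, with illustrative names.  For

	int x;
	#pragma omp task firstprivate (x)
	  foo (x);

   the firstprivate copy becomes a field of the task's .omp_data_s
   record, and the GIMPLE_OMP_TASK statement is annotated with

	arg_size  = sizeof (struct .omp_data_s)
	arg_align = __alignof__ (struct .omp_data_s)

   which GOMP_task later uses to allocate and copy the argument
   block.  If the record ends up with no fields at all, arg_size is 0
   and arg_align is 1.  */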
1e8e9920 1754
773c5ba7 1755/* Scan an OpenMP loop directive. */
1e8e9920 1756
1757static void
75a70cf9 1758scan_omp_for (gimple stmt, omp_context *outer_ctx)
1e8e9920 1759{
773c5ba7 1760 omp_context *ctx;
75a70cf9 1761 size_t i;
1e8e9920 1762
773c5ba7 1763 ctx = new_omp_context (stmt, outer_ctx);
1e8e9920 1764
75a70cf9 1765 scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);
1e8e9920 1766
75a70cf9 1767 scan_omp (gimple_omp_for_pre_body (stmt), ctx);
1768 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
fd6481cf 1769 {
75a70cf9 1770 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
1771 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
1772 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
1773 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
fd6481cf 1774 }
75a70cf9 1775 scan_omp (gimple_omp_body (stmt), ctx);
1e8e9920 1776}
1777
1778/* Scan an OpenMP sections directive. */
1779
1780static void
75a70cf9 1781scan_omp_sections (gimple stmt, omp_context *outer_ctx)
1e8e9920 1782{
1e8e9920 1783 omp_context *ctx;
1784
1785 ctx = new_omp_context (stmt, outer_ctx);
75a70cf9 1786 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
1787 scan_omp (gimple_omp_body (stmt), ctx);
1e8e9920 1788}
1789
1790/* Scan an OpenMP single directive. */
1791
1792static void
75a70cf9 1793scan_omp_single (gimple stmt, omp_context *outer_ctx)
1e8e9920 1794{
1e8e9920 1795 omp_context *ctx;
1796 tree name;
1797
1798 ctx = new_omp_context (stmt, outer_ctx);
1799 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1800 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1801 name = create_tmp_var_name (".omp_copy_s");
e60a6f7b 1802 name = build_decl (gimple_location (stmt),
1803 TYPE_DECL, name, ctx->record_type);
1e8e9920 1804 TYPE_NAME (ctx->record_type) = name;
1805
75a70cf9 1806 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
1807 scan_omp (gimple_omp_body (stmt), ctx);
1e8e9920 1808
1809 if (TYPE_FIELDS (ctx->record_type) == NULL)
1810 ctx->record_type = NULL;
1811 else
1812 layout_type (ctx->record_type);
1813}
1814
1e8e9920 1815
c1d127dd 1816/* Check OpenMP nesting restrictions. */
1817static void
75a70cf9 1818check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
c1d127dd 1819{
75a70cf9 1820 switch (gimple_code (stmt))
c1d127dd 1821 {
75a70cf9 1822 case GIMPLE_OMP_FOR:
1823 case GIMPLE_OMP_SECTIONS:
1824 case GIMPLE_OMP_SINGLE:
1825 case GIMPLE_CALL:
c1d127dd 1826 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 1827 switch (gimple_code (ctx->stmt))
c1d127dd 1828 {
75a70cf9 1829 case GIMPLE_OMP_FOR:
1830 case GIMPLE_OMP_SECTIONS:
1831 case GIMPLE_OMP_SINGLE:
1832 case GIMPLE_OMP_ORDERED:
1833 case GIMPLE_OMP_MASTER:
1834 case GIMPLE_OMP_TASK:
1835 if (is_gimple_call (stmt))
fd6481cf 1836 {
1837 warning (0, "barrier region may not be closely nested inside "
1838 "of work-sharing, critical, ordered, master or "
1839 "explicit task region");
1840 return;
1841 }
c1d127dd 1842 warning (0, "work-sharing region may not be closely nested inside "
fd6481cf 1843 "of work-sharing, critical, ordered, master or explicit "
1844 "task region");
c1d127dd 1845 return;
75a70cf9 1846 case GIMPLE_OMP_PARALLEL:
c1d127dd 1847 return;
1848 default:
1849 break;
1850 }
1851 break;
75a70cf9 1852 case GIMPLE_OMP_MASTER:
c1d127dd 1853 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 1854 switch (gimple_code (ctx->stmt))
c1d127dd 1855 {
75a70cf9 1856 case GIMPLE_OMP_FOR:
1857 case GIMPLE_OMP_SECTIONS:
1858 case GIMPLE_OMP_SINGLE:
1859 case GIMPLE_OMP_TASK:
c1d127dd 1860 warning (0, "master region may not be closely nested inside "
fd6481cf 1861 "of work-sharing or explicit task region");
c1d127dd 1862 return;
75a70cf9 1863 case GIMPLE_OMP_PARALLEL:
c1d127dd 1864 return;
1865 default:
1866 break;
1867 }
1868 break;
75a70cf9 1869 case GIMPLE_OMP_ORDERED:
c1d127dd 1870 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 1871 switch (gimple_code (ctx->stmt))
c1d127dd 1872 {
75a70cf9 1873 case GIMPLE_OMP_CRITICAL:
1874 case GIMPLE_OMP_TASK:
c1d127dd 1875 warning (0, "ordered region may not be closely nested inside "
fd6481cf 1876 "of critical or explicit task region");
c1d127dd 1877 return;
75a70cf9 1878 case GIMPLE_OMP_FOR:
1879 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
c1d127dd 1880 OMP_CLAUSE_ORDERED) == NULL)
1881 warning (0, "ordered region must be closely nested inside "
1882 "a loop region with an ordered clause");
1883 return;
75a70cf9 1884 case GIMPLE_OMP_PARALLEL:
c1d127dd 1885 return;
1886 default:
1887 break;
1888 }
1889 break;
75a70cf9 1890 case GIMPLE_OMP_CRITICAL:
c1d127dd 1891 for (; ctx != NULL; ctx = ctx->outer)
75a70cf9 1892 if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
1893 && (gimple_omp_critical_name (stmt)
1894 == gimple_omp_critical_name (ctx->stmt)))
c1d127dd 1895 {
1896 warning (0, "critical region may not be nested inside a critical "
1897 "region with the same name");
1898 return;
1899 }
1900 break;
1901 default:
1902 break;
1903 }
1904}
1905
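/* For instance, the checks above diagnose a work-sharing region
   closely nested in another work-sharing region (an illustrative
   example; making it valid requires an intervening parallel):

	#pragma omp for
	for (i = 0; i < n; i++)
	  {
	    #pragma omp single	    (work-sharing nested in work-sharing)
	    f (i);
	  }

   Explicit GOMP_barrier calls in such positions are diagnosed the
   same way via the GIMPLE_CALL case.  */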
1906
75a70cf9 1907/* Helper function scan_omp.
1908
1909 Callback for walk_tree or operators in walk_gimple_stmt used to
1910 scan for OpenMP directives in TP. */
1e8e9920 1911
1912static tree
75a70cf9 1913scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
1e8e9920 1914{
4077bf7a 1915 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
1916 omp_context *ctx = (omp_context *) wi->info;
1e8e9920 1917 tree t = *tp;
1918
75a70cf9 1919 switch (TREE_CODE (t))
1920 {
1921 case VAR_DECL:
1922 case PARM_DECL:
1923 case LABEL_DECL:
1924 case RESULT_DECL:
1925 if (ctx)
1926 *tp = remap_decl (t, &ctx->cb);
1927 break;
1928
1929 default:
1930 if (ctx && TYPE_P (t))
1931 *tp = remap_type (t, &ctx->cb);
1932 else if (!DECL_P (t))
7cf869dd 1933 {
1934 *walk_subtrees = 1;
1935 if (ctx)
1936 TREE_TYPE (t) = remap_type (TREE_TYPE (t), &ctx->cb);
1937 }
75a70cf9 1938 break;
1939 }
1940
1941 return NULL_TREE;
1942}
1943
1944
1945/* Helper function for scan_omp.
1946
1947 Callback for walk_gimple_stmt used to scan for OpenMP directives in
1948 the current statement in GSI. */
1949
1950static tree
1951scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
1952 struct walk_stmt_info *wi)
1953{
1954 gimple stmt = gsi_stmt (*gsi);
1955 omp_context *ctx = (omp_context *) wi->info;
1956
1957 if (gimple_has_location (stmt))
1958 input_location = gimple_location (stmt);
1e8e9920 1959
c1d127dd 1960 /* Check the OpenMP nesting restrictions. */
fd6481cf 1961 if (ctx != NULL)
1962 {
75a70cf9 1963 if (is_gimple_omp (stmt))
1964 check_omp_nesting_restrictions (stmt, ctx);
1965 else if (is_gimple_call (stmt))
fd6481cf 1966 {
75a70cf9 1967 tree fndecl = gimple_call_fndecl (stmt);
fd6481cf 1968 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
1969 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
75a70cf9 1970 check_omp_nesting_restrictions (stmt, ctx);
fd6481cf 1971 }
1972 }
c1d127dd 1973
75a70cf9 1974 *handled_ops_p = true;
1975
1976 switch (gimple_code (stmt))
1e8e9920 1977 {
75a70cf9 1978 case GIMPLE_OMP_PARALLEL:
fd6481cf 1979 taskreg_nesting_level++;
75a70cf9 1980 scan_omp_parallel (gsi, ctx);
fd6481cf 1981 taskreg_nesting_level--;
1982 break;
1983
75a70cf9 1984 case GIMPLE_OMP_TASK:
fd6481cf 1985 taskreg_nesting_level++;
75a70cf9 1986 scan_omp_task (gsi, ctx);
fd6481cf 1987 taskreg_nesting_level--;
1e8e9920 1988 break;
1989
75a70cf9 1990 case GIMPLE_OMP_FOR:
1991 scan_omp_for (stmt, ctx);
1e8e9920 1992 break;
1993
75a70cf9 1994 case GIMPLE_OMP_SECTIONS:
1995 scan_omp_sections (stmt, ctx);
1e8e9920 1996 break;
1997
75a70cf9 1998 case GIMPLE_OMP_SINGLE:
1999 scan_omp_single (stmt, ctx);
1e8e9920 2000 break;
2001
75a70cf9 2002 case GIMPLE_OMP_SECTION:
2003 case GIMPLE_OMP_MASTER:
2004 case GIMPLE_OMP_ORDERED:
2005 case GIMPLE_OMP_CRITICAL:
2006 ctx = new_omp_context (stmt, ctx);
2007 scan_omp (gimple_omp_body (stmt), ctx);
1e8e9920 2008 break;
2009
75a70cf9 2010 case GIMPLE_BIND:
1e8e9920 2011 {
2012 tree var;
1e8e9920 2013
75a70cf9 2014 *handled_ops_p = false;
2015 if (ctx)
2016 for (var = gimple_bind_vars (stmt); var ; var = TREE_CHAIN (var))
2017 insert_decl_map (&ctx->cb, var, var);
1e8e9920 2018 }
2019 break;
1e8e9920 2020 default:
75a70cf9 2021 *handled_ops_p = false;
1e8e9920 2022 break;
2023 }
2024
2025 return NULL_TREE;
2026}
2027
2028
75a70cf9 2029/* Scan all the statements starting at the current statement. CTX
2030 contains context information about the OpenMP directives and
2031 clauses found during the scan. */
1e8e9920 2032
2033static void
75a70cf9 2034scan_omp (gimple_seq body, omp_context *ctx)
1e8e9920 2035{
2036 location_t saved_location;
2037 struct walk_stmt_info wi;
2038
2039 memset (&wi, 0, sizeof (wi));
1e8e9920 2040 wi.info = ctx;
1e8e9920 2041 wi.want_locations = true;
2042
2043 saved_location = input_location;
75a70cf9 2044 walk_gimple_seq (body, scan_omp_1_stmt, scan_omp_1_op, &wi);
1e8e9920 2045 input_location = saved_location;
2046}
2047\f
2048/* Re-gimplification and code generation routines. */
2049
2050/* Build a call to GOMP_barrier. */
2051
79acaae1 2052static tree
2053build_omp_barrier (void)
1e8e9920 2054{
79acaae1 2055 return build_call_expr (built_in_decls[BUILT_IN_GOMP_BARRIER], 0);
1e8e9920 2056}
2057
2058/* If a context was created for STMT when it was scanned, return it. */
2059
2060static omp_context *
75a70cf9 2061maybe_lookup_ctx (gimple stmt)
1e8e9920 2062{
2063 splay_tree_node n;
2064 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
2065 return n ? (omp_context *) n->value : NULL;
2066}
2067
773c5ba7 2068
2069/* Find the mapping for DECL in CTX or the immediately enclosing
2070 context that has a mapping for DECL.
2071
2072 If CTX is a nested parallel directive, we may have to use the decl
2073 mappings created in CTX's parent context. Suppose that we have the
 2074 following parallel nesting (variable UIDs shown for clarity):
2075
2076 iD.1562 = 0;
2077 #omp parallel shared(iD.1562) -> outer parallel
2078 iD.1562 = iD.1562 + 1;
2079
2080 #omp parallel shared (iD.1562) -> inner parallel
2081 iD.1562 = iD.1562 - 1;
2082
2083 Each parallel structure will create a distinct .omp_data_s structure
2084 for copying iD.1562 in/out of the directive:
2085
2086 outer parallel .omp_data_s.1.i -> iD.1562
2087 inner parallel .omp_data_s.2.i -> iD.1562
2088
2089 A shared variable mapping will produce a copy-out operation before
2090 the parallel directive and a copy-in operation after it. So, in
2091 this case we would have:
2092
2093 iD.1562 = 0;
2094 .omp_data_o.1.i = iD.1562;
2095 #omp parallel shared(iD.1562) -> outer parallel
2096 .omp_data_i.1 = &.omp_data_o.1
2097 .omp_data_i.1->i = .omp_data_i.1->i + 1;
2098
2099 .omp_data_o.2.i = iD.1562; -> **
2100 #omp parallel shared(iD.1562) -> inner parallel
2101 .omp_data_i.2 = &.omp_data_o.2
2102 .omp_data_i.2->i = .omp_data_i.2->i - 1;
2103
2104
2105 ** This is a problem. The symbol iD.1562 cannot be referenced
2106 inside the body of the outer parallel region. But since we are
2107 emitting this copy operation while expanding the inner parallel
2108 directive, we need to access the CTX structure of the outer
2109 parallel directive to get the correct mapping:
2110
2111 .omp_data_o.2.i = .omp_data_i.1->i
2112
2113 Since there may be other workshare or parallel directives enclosing
2114 the parallel directive, it may be necessary to walk up the context
2115 parent chain. This is not a problem in general because nested
2116 parallelism happens only rarely. */
2117
2118static tree
2119lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2120{
2121 tree t;
2122 omp_context *up;
2123
773c5ba7 2124 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2125 t = maybe_lookup_decl (decl, up);
2126
87b31375 2127 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
773c5ba7 2128
c37594c7 2129 return t ? t : decl;
773c5ba7 2130}
2131
2132
f49d7bb5 2133/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
2134 in outer contexts. */
2135
2136static tree
2137maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2138{
2139 tree t = NULL;
2140 omp_context *up;
2141
87b31375 2142 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2143 t = maybe_lookup_decl (decl, up);
f49d7bb5 2144
2145 return t ? t : decl;
2146}
2147
2148
1e8e9920 2149/* Construct the initialization value for reduction CLAUSE. */
2150
2151tree
2152omp_reduction_init (tree clause, tree type)
2153{
389dd41b 2154 location_t loc = OMP_CLAUSE_LOCATION (clause);
1e8e9920 2155 switch (OMP_CLAUSE_REDUCTION_CODE (clause))
2156 {
2157 case PLUS_EXPR:
2158 case MINUS_EXPR:
2159 case BIT_IOR_EXPR:
2160 case BIT_XOR_EXPR:
2161 case TRUTH_OR_EXPR:
2162 case TRUTH_ORIF_EXPR:
2163 case TRUTH_XOR_EXPR:
2164 case NE_EXPR:
389dd41b 2165 return fold_convert_loc (loc, type, integer_zero_node);
1e8e9920 2166
2167 case MULT_EXPR:
2168 case TRUTH_AND_EXPR:
2169 case TRUTH_ANDIF_EXPR:
2170 case EQ_EXPR:
389dd41b 2171 return fold_convert_loc (loc, type, integer_one_node);
1e8e9920 2172
2173 case BIT_AND_EXPR:
389dd41b 2174 return fold_convert_loc (loc, type, integer_minus_one_node);
1e8e9920 2175
2176 case MAX_EXPR:
2177 if (SCALAR_FLOAT_TYPE_P (type))
2178 {
2179 REAL_VALUE_TYPE max, min;
2180 if (HONOR_INFINITIES (TYPE_MODE (type)))
2181 {
2182 real_inf (&max);
2183 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
2184 }
2185 else
2186 real_maxval (&min, 1, TYPE_MODE (type));
2187 return build_real (type, min);
2188 }
2189 else
2190 {
2191 gcc_assert (INTEGRAL_TYPE_P (type));
2192 return TYPE_MIN_VALUE (type);
2193 }
2194
2195 case MIN_EXPR:
2196 if (SCALAR_FLOAT_TYPE_P (type))
2197 {
2198 REAL_VALUE_TYPE max;
2199 if (HONOR_INFINITIES (TYPE_MODE (type)))
2200 real_inf (&max);
2201 else
2202 real_maxval (&max, 0, TYPE_MODE (type));
2203 return build_real (type, max);
2204 }
2205 else
2206 {
2207 gcc_assert (INTEGRAL_TYPE_P (type));
2208 return TYPE_MAX_VALUE (type);
2209 }
2210
2211 default:
2212 gcc_unreachable ();
2213 }
2214}
2215
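/* In table form, the initializers chosen above for integral types
   (floating-point MAX/MIN use -Inf/+Inf, or the most negative/most
   positive representable value when infinities are not honored):

	+  -  |  ^  ||  !=	-> 0
	*  &&  ==		-> 1
	&			-> ~0 (all bits set)
	max			-> TYPE_MIN_VALUE
	min			-> TYPE_MAX_VALUE  */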
2216/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
2217 from the receiver (aka child) side and initializers for REFERENCE_TYPE
2218 private variables. Initialization statements go in ILIST, while calls
2219 to destructors go in DLIST. */
2220
2221static void
75a70cf9 2222lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
1e4afe3c 2223 omp_context *ctx)
1e8e9920 2224{
75a70cf9 2225 gimple_stmt_iterator diter;
c2f47e15 2226 tree c, dtor, copyin_seq, x, ptr;
1e8e9920 2227 bool copyin_by_ref = false;
f49d7bb5 2228 bool lastprivate_firstprivate = false;
1e8e9920 2229 int pass;
2230
75a70cf9 2231 *dlist = gimple_seq_alloc ();
2232 diter = gsi_start (*dlist);
1e8e9920 2233 copyin_seq = NULL;
2234
2235 /* Do all the fixed sized types in the first pass, and the variable sized
2236 types in the second pass. This makes sure that the scalar arguments to
48e1416a 2237 the variable sized types are processed before we use them in the
1e8e9920 2238 variable sized operations. */
2239 for (pass = 0; pass < 2; ++pass)
2240 {
2241 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2242 {
55d6e7cd 2243 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
1e8e9920 2244 tree var, new_var;
2245 bool by_ref;
389dd41b 2246 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 2247
2248 switch (c_kind)
2249 {
2250 case OMP_CLAUSE_PRIVATE:
2251 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
2252 continue;
2253 break;
2254 case OMP_CLAUSE_SHARED:
f49d7bb5 2255 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
2256 {
2257 gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
2258 continue;
2259 }
1e8e9920 2260 case OMP_CLAUSE_FIRSTPRIVATE:
1e8e9920 2261 case OMP_CLAUSE_COPYIN:
2262 case OMP_CLAUSE_REDUCTION:
2263 break;
df2c34fc 2264 case OMP_CLAUSE_LASTPRIVATE:
f49d7bb5 2265 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
2266 {
2267 lastprivate_firstprivate = true;
2268 if (pass != 0)
2269 continue;
2270 }
df2c34fc 2271 break;
1e8e9920 2272 default:
2273 continue;
2274 }
2275
2276 new_var = var = OMP_CLAUSE_DECL (c);
2277 if (c_kind != OMP_CLAUSE_COPYIN)
2278 new_var = lookup_decl (var, ctx);
2279
2280 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
2281 {
2282 if (pass != 0)
2283 continue;
2284 }
1e8e9920 2285 else if (is_variable_sized (var))
2286 {
773c5ba7 2287 /* For variable sized types, we need to allocate the
2288 actual storage here. Call alloca and store the
2289 result in the pointer decl that we created elsewhere. */
1e8e9920 2290 if (pass == 0)
2291 continue;
2292
fd6481cf 2293 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
2294 {
75a70cf9 2295 gimple stmt;
2296 tree tmp;
2297
fd6481cf 2298 ptr = DECL_VALUE_EXPR (new_var);
2299 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
2300 ptr = TREE_OPERAND (ptr, 0);
2301 gcc_assert (DECL_P (ptr));
2302 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
75a70cf9 2303
2304 /* void *tmp = __builtin_alloca */
2305 stmt
2306 = gimple_build_call (built_in_decls[BUILT_IN_ALLOCA], 1, x);
2307 tmp = create_tmp_var_raw (ptr_type_node, NULL);
2308 gimple_add_tmp_var (tmp);
2309 gimple_call_set_lhs (stmt, tmp);
2310
2311 gimple_seq_add_stmt (ilist, stmt);
2312
389dd41b 2313 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
75a70cf9 2314 gimplify_assign (ptr, x, ilist);
fd6481cf 2315 }
1e8e9920 2316 }
1e8e9920 2317 else if (is_reference (var))
2318 {
773c5ba7 2319 /* For references that are being privatized for Fortran,
2320 allocate new backing storage for the new pointer
2321 variable. This allows us to avoid changing all the
2322 code that expects a pointer to something that expects
2323 a direct variable. Note that this doesn't apply to
2324 C++, since reference types are disallowed in data
df2c34fc 2325 sharing clauses there, except for NRV optimized
2326 return values. */
1e8e9920 2327 if (pass == 0)
2328 continue;
2329
2330 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
fd6481cf 2331 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
2332 {
2333 x = build_receiver_ref (var, false, ctx);
389dd41b 2334 x = build_fold_addr_expr_loc (clause_loc, x);
fd6481cf 2335 }
2336 else if (TREE_CONSTANT (x))
1e8e9920 2337 {
2338 const char *name = NULL;
2339 if (DECL_NAME (var))
2340 name = IDENTIFIER_POINTER (DECL_NAME (new_var));
2341
df2c34fc 2342 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
2343 name);
2344 gimple_add_tmp_var (x);
86f2ad37 2345 TREE_ADDRESSABLE (x) = 1;
389dd41b 2346 x = build_fold_addr_expr_loc (clause_loc, x);
1e8e9920 2347 }
2348 else
2349 {
389dd41b 2350 x = build_call_expr_loc (clause_loc,
2351 built_in_decls[BUILT_IN_ALLOCA], 1, x);
1e8e9920 2352 }
2353
389dd41b 2354 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
75a70cf9 2355 gimplify_assign (new_var, x, ilist);
1e8e9920 2356
389dd41b 2357 new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
1e8e9920 2358 }
2359 else if (c_kind == OMP_CLAUSE_REDUCTION
2360 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2361 {
2362 if (pass == 0)
2363 continue;
2364 }
2365 else if (pass != 0)
2366 continue;
2367
55d6e7cd 2368 switch (OMP_CLAUSE_CODE (c))
1e8e9920 2369 {
2370 case OMP_CLAUSE_SHARED:
f49d7bb5 2371 /* Shared global vars are just accessed directly. */
2372 if (is_global_var (new_var))
2373 break;
1e8e9920 2374 /* Set up the DECL_VALUE_EXPR for shared variables now. This
2375 needs to be delayed until after fixup_child_record_type so
2376 that we get the correct type during the dereference. */
e8a588af 2377 by_ref = use_pointer_for_field (var, ctx);
1e8e9920 2378 x = build_receiver_ref (var, by_ref, ctx);
2379 SET_DECL_VALUE_EXPR (new_var, x);
2380 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2381
2382 /* ??? If VAR is not passed by reference, and the variable
2383 hasn't been initialized yet, then we'll get a warning for
2384 the store into the omp_data_s structure. Ideally, we'd be
48e1416a 2385 able to notice this and not store anything at all, but
1e8e9920 2386 we're generating code too early. Suppress the warning. */
2387 if (!by_ref)
2388 TREE_NO_WARNING (var) = 1;
2389 break;
2390
2391 case OMP_CLAUSE_LASTPRIVATE:
2392 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
2393 break;
2394 /* FALLTHRU */
2395
2396 case OMP_CLAUSE_PRIVATE:
fd6481cf 2397 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
2398 x = build_outer_var_ref (var, ctx);
2399 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
2400 {
2401 if (is_task_ctx (ctx))
2402 x = build_receiver_ref (var, false, ctx);
2403 else
2404 x = build_outer_var_ref (var, ctx);
2405 }
2406 else
2407 x = NULL;
2408 x = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
1e8e9920 2409 if (x)
2410 gimplify_and_add (x, ilist);
2411 /* FALLTHRU */
2412
2413 do_dtor:
2414 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
2415 if (x)
2416 {
75a70cf9 2417 gimple_seq tseq = NULL;
2418
1e8e9920 2419 dtor = x;
75a70cf9 2420 gimplify_stmt (&dtor, &tseq);
2421 gsi_insert_seq_before (&diter, tseq, GSI_SAME_STMT);
1e8e9920 2422 }
2423 break;
2424
2425 case OMP_CLAUSE_FIRSTPRIVATE:
fd6481cf 2426 if (is_task_ctx (ctx))
2427 {
2428 if (is_reference (var) || is_variable_sized (var))
2429 goto do_dtor;
2430 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
2431 ctx))
2432 || use_pointer_for_field (var, NULL))
2433 {
2434 x = build_receiver_ref (var, false, ctx);
2435 SET_DECL_VALUE_EXPR (new_var, x);
2436 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2437 goto do_dtor;
2438 }
2439 }
1e8e9920 2440 x = build_outer_var_ref (var, ctx);
2441 x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
2442 gimplify_and_add (x, ilist);
2443 goto do_dtor;
2444 break;
2445
2446 case OMP_CLAUSE_COPYIN:
e8a588af 2447 by_ref = use_pointer_for_field (var, NULL);
1e8e9920 2448 x = build_receiver_ref (var, by_ref, ctx);
2449 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
2450 append_to_statement_list (x, &copyin_seq);
2451 copyin_by_ref |= by_ref;
2452 break;
2453
2454 case OMP_CLAUSE_REDUCTION:
2455 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2456 {
fd6481cf 2457 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
2458 x = build_outer_var_ref (var, ctx);
2459
2460 if (is_reference (var))
389dd41b 2461 x = build_fold_addr_expr_loc (clause_loc, x);
fd6481cf 2462 SET_DECL_VALUE_EXPR (placeholder, x);
2463 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
75a70cf9 2464 lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
2465 gimple_seq_add_seq (ilist,
2466 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
2467 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
fd6481cf 2468 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
1e8e9920 2469 }
2470 else
2471 {
2472 x = omp_reduction_init (c, TREE_TYPE (new_var));
2473 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
75a70cf9 2474 gimplify_assign (new_var, x, ilist);
1e8e9920 2475 }
2476 break;
2477
2478 default:
2479 gcc_unreachable ();
2480 }
2481 }
2482 }
2483
2484 /* The copyin sequence is not to be executed by the main thread, since
 2485 that would result in self-copies. For scalars a self-copy may have
 2486 no visible effect, but for C++ operator= it certainly can. */
2487 if (copyin_seq)
2488 {
c2f47e15 2489 x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
1e8e9920 2490 x = build2 (NE_EXPR, boolean_type_node, x,
2491 build_int_cst (TREE_TYPE (x), 0));
2492 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
2493 gimplify_and_add (x, ilist);
2494 }
2495
2496 /* If any copyin variable is passed by reference, we must ensure the
2497 master thread doesn't modify it before it is copied over in all
f49d7bb5 2498 threads. Similarly for variables in both firstprivate and
2499 lastprivate clauses we need to ensure the lastprivate copying
2500 happens after firstprivate copying in all threads. */
2501 if (copyin_by_ref || lastprivate_firstprivate)
79acaae1 2502 gimplify_and_add (build_omp_barrier (), ilist);
1e8e9920 2503}
2504
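/* A sketch of the receiver-side code this builds, with illustrative
   names.  For

	#pragma omp parallel firstprivate (a) copyin (t)

   ILIST ends up with roughly

	a.1 = .omp_data_i->a;			(firstprivate copy)
	if (__builtin_omp_get_thread_num () != 0)
	  t = *.omp_data_i->t;			(copyin; master skips it)

   and if a copyin variable is passed by reference, or some variable
   is both firstprivate and lastprivate, a GOMP_barrier call follows
   so the copies are complete in every thread before they are used.  */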
773c5ba7 2505
1e8e9920 2506/* Generate code to implement the LASTPRIVATE clauses. This is used for
2507 both parallel and workshare constructs. PREDICATE may be NULL if it's
2508 always true. */
2509
2510static void
75a70cf9 2511lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
2512 omp_context *ctx)
1e8e9920 2513{
75a70cf9 2514 tree x, c, label = NULL;
fd6481cf 2515 bool par_clauses = false;
1e8e9920 2516
2517 /* Early exit if there are no lastprivate clauses. */
2518 clauses = find_omp_clause (clauses, OMP_CLAUSE_LASTPRIVATE);
2519 if (clauses == NULL)
2520 {
 2521 /* If this was a workshare directive, see if it had been combined
2522 with its parallel. In that case, look for the clauses on the
2523 parallel statement itself. */
2524 if (is_parallel_ctx (ctx))
2525 return;
2526
2527 ctx = ctx->outer;
2528 if (ctx == NULL || !is_parallel_ctx (ctx))
2529 return;
2530
75a70cf9 2531 clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
1e8e9920 2532 OMP_CLAUSE_LASTPRIVATE);
2533 if (clauses == NULL)
2534 return;
fd6481cf 2535 par_clauses = true;
1e8e9920 2536 }
2537
75a70cf9 2538 if (predicate)
2539 {
2540 gimple stmt;
2541 tree label_true, arm1, arm2;
2542
e60a6f7b 2543 label = create_artificial_label (UNKNOWN_LOCATION);
2544 label_true = create_artificial_label (UNKNOWN_LOCATION);
75a70cf9 2545 arm1 = TREE_OPERAND (predicate, 0);
2546 arm2 = TREE_OPERAND (predicate, 1);
2547 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
2548 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
2549 stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
2550 label_true, label);
2551 gimple_seq_add_stmt (stmt_list, stmt);
2552 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
2553 }
1e8e9920 2554
fd6481cf 2555 for (c = clauses; c ;)
1e8e9920 2556 {
2557 tree var, new_var;
389dd41b 2558 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 2559
fd6481cf 2560 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
2561 {
2562 var = OMP_CLAUSE_DECL (c);
2563 new_var = lookup_decl (var, ctx);
1e8e9920 2564
75a70cf9 2565 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
2566 {
2567 lower_omp (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
2568 gimple_seq_add_seq (stmt_list,
2569 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
2570 }
2571 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
1e8e9920 2572
fd6481cf 2573 x = build_outer_var_ref (var, ctx);
2574 if (is_reference (var))
389dd41b 2575 new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
fd6481cf 2576 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
75a70cf9 2577 gimplify_and_add (x, stmt_list);
fd6481cf 2578 }
2579 c = OMP_CLAUSE_CHAIN (c);
2580 if (c == NULL && !par_clauses)
2581 {
 2582 /* If this was a workshare directive, see if it had been combined
2583 with its parallel. In that case, continue looking for the
2584 clauses also on the parallel statement itself. */
2585 if (is_parallel_ctx (ctx))
2586 break;
2587
2588 ctx = ctx->outer;
2589 if (ctx == NULL || !is_parallel_ctx (ctx))
2590 break;
2591
75a70cf9 2592 c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
fd6481cf 2593 OMP_CLAUSE_LASTPRIVATE);
2594 par_clauses = true;
2595 }
1e8e9920 2596 }
2597
75a70cf9 2598 if (label)
2599 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
1e8e9920 2600}
2601
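/* A sketch of the result, with illustrative names.  For

	#pragma omp for lastprivate (x)
	for (i = 0; i < n; i++) ...

   the loop expansion supplies a predicate testing whether this
   thread ran the final iteration, and STMT_LIST gets roughly

	if (i == n) goto label_true; else goto label;
	label_true:
	  x = x.1;		(copy the private value back out)
	label:
 */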
773c5ba7 2602
1e8e9920 2603/* Generate code to implement the REDUCTION clauses. */
2604
2605static void
75a70cf9 2606lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
1e8e9920 2607{
75a70cf9 2608 gimple_seq sub_seq = NULL;
2609 gimple stmt;
2610 tree x, c;
1e8e9920 2611 int count = 0;
2612
2613 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
2614 update in that case, otherwise use a lock. */
2615 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
55d6e7cd 2616 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
1e8e9920 2617 {
2618 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2619 {
2620 /* Never use OMP_ATOMIC for array reductions. */
2621 count = -1;
2622 break;
2623 }
2624 count++;
2625 }
2626
2627 if (count == 0)
2628 return;
2629
2630 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2631 {
2632 tree var, ref, new_var;
2633 enum tree_code code;
389dd41b 2634 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 2635
55d6e7cd 2636 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
1e8e9920 2637 continue;
2638
2639 var = OMP_CLAUSE_DECL (c);
2640 new_var = lookup_decl (var, ctx);
2641 if (is_reference (var))
389dd41b 2642 new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
1e8e9920 2643 ref = build_outer_var_ref (var, ctx);
2644 code = OMP_CLAUSE_REDUCTION_CODE (c);
773c5ba7 2645
2646 /* reduction(-:var) sums up the partial results, so it acts
2647 identically to reduction(+:var). */
1e8e9920 2648 if (code == MINUS_EXPR)
2649 code = PLUS_EXPR;
2650
2651 if (count == 1)
2652 {
389dd41b 2653 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 2654
2655 addr = save_expr (addr);
2656 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
389dd41b 2657 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
1e8e9920 2658 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
75a70cf9 2659 gimplify_and_add (x, stmt_seqp);
1e8e9920 2660 return;
2661 }
2662
2663 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
2664 {
2665 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
2666
2667 if (is_reference (var))
389dd41b 2668 ref = build_fold_addr_expr_loc (clause_loc, ref);
1e8e9920 2669 SET_DECL_VALUE_EXPR (placeholder, ref);
2670 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
75a70cf9 2671 lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
2672 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
2673 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
1e8e9920 2674 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
2675 }
2676 else
2677 {
2678 x = build2 (code, TREE_TYPE (ref), ref, new_var);
2679 ref = build_outer_var_ref (var, ctx);
75a70cf9 2680 gimplify_assign (ref, x, &sub_seq);
1e8e9920 2681 }
2682 }
2683
75a70cf9 2684 stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ATOMIC_START], 0);
2685 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 2686
75a70cf9 2687 gimple_seq_add_seq (stmt_seqp, sub_seq);
1e8e9920 2688
75a70cf9 2689 stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ATOMIC_END], 0);
2690 gimple_seq_add_stmt (stmt_seqp, stmt);
1e8e9920 2691}
2692
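/* The two strategies above, sketched for illustrative scalars.  A
   single scalar reduction (+:s) becomes one atomic update:

	#pragma omp atomic
	*&s = *&s + s.1;

   while multiple (or array) reductions are serialized between the
   runtime's atomic-region entry points:

	__builtin_GOMP_atomic_start ();
	s = s + s.1;
	t = t * t.1;
	__builtin_GOMP_atomic_end ();
 */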
773c5ba7 2693
1e8e9920 2694/* Generate code to implement the COPYPRIVATE clauses. */
2695
2696static void
75a70cf9 2697lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
1e8e9920 2698 omp_context *ctx)
2699{
2700 tree c;
2701
2702 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2703 {
2704 tree var, ref, x;
2705 bool by_ref;
389dd41b 2706 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 2707
55d6e7cd 2708 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
1e8e9920 2709 continue;
2710
2711 var = OMP_CLAUSE_DECL (c);
e8a588af 2712 by_ref = use_pointer_for_field (var, NULL);
1e8e9920 2713
2714 ref = build_sender_ref (var, ctx);
87b31375 2715 x = lookup_decl_in_outer_ctx (var, ctx);
389dd41b 2716 x = by_ref ? build_fold_addr_expr_loc (clause_loc, x) : x;
75a70cf9 2717 gimplify_assign (ref, x, slist);
1e8e9920 2718
2719 ref = build_receiver_ref (var, by_ref, ctx);
2720 if (is_reference (var))
2721 {
389dd41b 2722 ref = build_fold_indirect_ref_loc (clause_loc, ref);
2723 var = build_fold_indirect_ref_loc (clause_loc, var);
1e8e9920 2724 }
2725 x = lang_hooks.decls.omp_clause_assign_op (c, var, ref);
2726 gimplify_and_add (x, rlist);
2727 }
2728}
2729
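/* A sketch for "#pragma omp single copyprivate (x)", with
   illustrative names.  The thread that executed the single body
   fills in the copy structure (SLIST),

	.omp_copy_o.x = x;	(or the address of x, when by_ref)

   and every other thread reads the value back out through the
   structure pointer handed to it by the single lowering (RLIST):

	x = .omp_copy_i->x;
 */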
773c5ba7 2730
1e8e9920 2731/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
2732 and REDUCTION from the sender (aka parent) side. */
2733
2734static void
75a70cf9 2735lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
2736 omp_context *ctx)
1e8e9920 2737{
2738 tree c;
2739
2740 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2741 {
773c5ba7 2742 tree val, ref, x, var;
1e8e9920 2743 bool by_ref, do_in = false, do_out = false;
389dd41b 2744 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
1e8e9920 2745
55d6e7cd 2746 switch (OMP_CLAUSE_CODE (c))
1e8e9920 2747 {
fd6481cf 2748 case OMP_CLAUSE_PRIVATE:
2749 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
2750 break;
2751 continue;
1e8e9920 2752 case OMP_CLAUSE_FIRSTPRIVATE:
2753 case OMP_CLAUSE_COPYIN:
2754 case OMP_CLAUSE_LASTPRIVATE:
2755 case OMP_CLAUSE_REDUCTION:
2756 break;
2757 default:
2758 continue;
2759 }
2760
87b31375 2761 val = OMP_CLAUSE_DECL (c);
2762 var = lookup_decl_in_outer_ctx (val, ctx);
773c5ba7 2763
f49d7bb5 2764 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
2765 && is_global_var (var))
2766 continue;
1e8e9920 2767 if (is_variable_sized (val))
2768 continue;
e8a588af 2769 by_ref = use_pointer_for_field (val, NULL);
1e8e9920 2770
55d6e7cd 2771 switch (OMP_CLAUSE_CODE (c))
1e8e9920 2772 {
fd6481cf 2773 case OMP_CLAUSE_PRIVATE:
1e8e9920 2774 case OMP_CLAUSE_FIRSTPRIVATE:
2775 case OMP_CLAUSE_COPYIN:
2776 do_in = true;
2777 break;
2778
2779 case OMP_CLAUSE_LASTPRIVATE:
2780 if (by_ref || is_reference (val))
2781 {
2782 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
2783 continue;
2784 do_in = true;
2785 }
2786 else
fd6481cf 2787 {
2788 do_out = true;
2789 if (lang_hooks.decls.omp_private_outer_ref (val))
2790 do_in = true;
2791 }
1e8e9920 2792 break;
2793
2794 case OMP_CLAUSE_REDUCTION:
2795 do_in = true;
2796 do_out = !(by_ref || is_reference (val));
2797 break;
2798
2799 default:
2800 gcc_unreachable ();
2801 }
2802
2803 if (do_in)
2804 {
2805 ref = build_sender_ref (val, ctx);
389dd41b 2806 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
75a70cf9 2807 gimplify_assign (ref, x, ilist);
fd6481cf 2808 if (is_task_ctx (ctx))
2809 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
1e8e9920 2810 }
773c5ba7 2811
1e8e9920 2812 if (do_out)
2813 {
2814 ref = build_sender_ref (val, ctx);
75a70cf9 2815 gimplify_assign (var, ref, olist);
1e8e9920 2816 }
2817 }
2818}
2819
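/* The matching sender-side sketch, with illustrative names.  For
   firstprivate (a) the parent stores the value into the outgoing
   structure before the library call (ILIST),

	.omp_data_o.a = a;

   and for lastprivate (b) it reads the result back afterwards
   (OLIST):

	b = .omp_data_o.b;
 */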
75a70cf9 2820/* Generate code to implement SHARED from the sender (aka parent)
2821 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
2822 list things that got automatically shared. */
1e8e9920 2823
2824static void
75a70cf9 2825lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
1e8e9920 2826{
fd6481cf 2827 tree var, ovar, nvar, f, x, record_type;
1e8e9920 2828
2829 if (ctx->record_type == NULL)
2830 return;
773c5ba7 2831
fd6481cf 2832 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
2833 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
1e8e9920 2834 {
2835 ovar = DECL_ABSTRACT_ORIGIN (f);
2836 nvar = maybe_lookup_decl (ovar, ctx);
2837 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
2838 continue;
2839
773c5ba7 2840 /* If CTX is a nested parallel directive, find the immediately
2841 enclosing parallel or workshare construct that contains a
2842 mapping for OVAR. */
87b31375 2843 var = lookup_decl_in_outer_ctx (ovar, ctx);
773c5ba7 2844
e8a588af 2845 if (use_pointer_for_field (ovar, ctx))
1e8e9920 2846 {
2847 x = build_sender_ref (ovar, ctx);
773c5ba7 2848 var = build_fold_addr_expr (var);
75a70cf9 2849 gimplify_assign (x, var, ilist);
1e8e9920 2850 }
2851 else
2852 {
2853 x = build_sender_ref (ovar, ctx);
75a70cf9 2854 gimplify_assign (x, var, ilist);
1e8e9920 2855
d2263ebb 2856 if (!TREE_READONLY (var)
2857 /* We don't need to receive a new reference to a result
2858 or parm decl. In fact we may not store to it as we will
2859 invalidate any pending RSO and generate wrong gimple
2860 during inlining. */
2861 && !((TREE_CODE (var) == RESULT_DECL
2862 || TREE_CODE (var) == PARM_DECL)
2863 && DECL_BY_REFERENCE (var)))
fd6481cf 2864 {
2865 x = build_sender_ref (ovar, ctx);
75a70cf9 2866 gimplify_assign (var, x, olist);
fd6481cf 2867 }
1e8e9920 2868 }
2869 }
2870}
2871
75a70cf9 2872
2873/* A convenience function to build an empty GIMPLE_COND with just the
2874 condition. */
2875
2876static gimple
2877gimple_build_cond_empty (tree cond)
2878{
2879 enum tree_code pred_code;
2880 tree lhs, rhs;
2881
2882 gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
2883 return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
2884}
2885
2886
48e1416a 2887/* Build the function calls to GOMP_parallel_start etc to actually
773c5ba7 2888 generate the parallel operation. REGION is the parallel region
 2889 being expanded. BB is the block where the code is to be inserted.
 2890 WS_ARGS will be set if this is a call to a combined parallel+workshare
 2891 construct; it contains the list of additional arguments needed by
 2892 the workshare construct. */
1e8e9920 2893
2894static void
61e47ac8 2895expand_parallel_call (struct omp_region *region, basic_block bb,
75a70cf9 2896 gimple entry_stmt, tree ws_args)
1e8e9920 2897{
79acaae1 2898 tree t, t1, t2, val, cond, c, clauses;
75a70cf9 2899 gimple_stmt_iterator gsi;
2900 gimple stmt;
773c5ba7 2901 int start_ix;
389dd41b 2902 location_t clause_loc;
773c5ba7 2903
75a70cf9 2904 clauses = gimple_omp_parallel_clauses (entry_stmt);
773c5ba7 2905
334ec2d8 2906 /* Determine what flavor of GOMP_parallel_start we will be
773c5ba7 2907 emitting. */
2908 start_ix = BUILT_IN_GOMP_PARALLEL_START;
2909 if (is_combined_parallel (region))
2910 {
61e47ac8 2911 switch (region->inner->type)
773c5ba7 2912 {
75a70cf9 2913 case GIMPLE_OMP_FOR:
fd6481cf 2914 gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
f77459c5 2915 start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
fd6481cf 2916 + (region->inner->sched_kind
2917 == OMP_CLAUSE_SCHEDULE_RUNTIME
2918 ? 3 : region->inner->sched_kind);
61e47ac8 2919 break;
75a70cf9 2920 case GIMPLE_OMP_SECTIONS:
61e47ac8 2921 start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
2922 break;
2923 default:
2924 gcc_unreachable ();
773c5ba7 2925 }
773c5ba7 2926 }
1e8e9920 2927
2928 /* By default, the value of NUM_THREADS is zero (selected at run time)
2929 and there is no conditional. */
2930 cond = NULL_TREE;
2931 val = build_int_cst (unsigned_type_node, 0);
2932
2933 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
2934 if (c)
2935 cond = OMP_CLAUSE_IF_EXPR (c);
2936
2937 c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
2938 if (c)
389dd41b 2939 {
2940 val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
2941 clause_loc = OMP_CLAUSE_LOCATION (c);
2942 }
2943 else
2944 clause_loc = gimple_location (entry_stmt);
1e8e9920 2945
2946 /* Ensure 'val' is of the correct type. */
389dd41b 2947 val = fold_convert_loc (clause_loc, unsigned_type_node, val);
1e8e9920 2948
2949 /* If we found the clause 'if (cond)', build either
2950 (cond != 0) or (cond ? val : 1u). */
2951 if (cond)
2952 {
75a70cf9 2953 gimple_stmt_iterator gsi;
773c5ba7 2954
2955 cond = gimple_boolify (cond);
2956
1e8e9920 2957 if (integer_zerop (val))
389dd41b 2958 val = fold_build2_loc (clause_loc,
2959 EQ_EXPR, unsigned_type_node, cond,
79acaae1 2960 build_int_cst (TREE_TYPE (cond), 0));
1e8e9920 2961 else
773c5ba7 2962 {
2963 basic_block cond_bb, then_bb, else_bb;
79acaae1 2964 edge e, e_then, e_else;
75a70cf9 2965 tree tmp_then, tmp_else, tmp_join, tmp_var;
79acaae1 2966
2967 tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
2968 if (gimple_in_ssa_p (cfun))
2969 {
75a70cf9 2970 tmp_then = make_ssa_name (tmp_var, NULL);
2971 tmp_else = make_ssa_name (tmp_var, NULL);
2972 tmp_join = make_ssa_name (tmp_var, NULL);
79acaae1 2973 }
2974 else
2975 {
2976 tmp_then = tmp_var;
2977 tmp_else = tmp_var;
2978 tmp_join = tmp_var;
2979 }
773c5ba7 2980
773c5ba7 2981 e = split_block (bb, NULL);
2982 cond_bb = e->src;
2983 bb = e->dest;
2984 remove_edge (e);
2985
2986 then_bb = create_empty_bb (cond_bb);
2987 else_bb = create_empty_bb (then_bb);
79acaae1 2988 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
2989 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
773c5ba7 2990
75a70cf9 2991 stmt = gimple_build_cond_empty (cond);
2992 gsi = gsi_start_bb (cond_bb);
2993 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
773c5ba7 2994
75a70cf9 2995 gsi = gsi_start_bb (then_bb);
2996 stmt = gimple_build_assign (tmp_then, val);
2997 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
773c5ba7 2998
75a70cf9 2999 gsi = gsi_start_bb (else_bb);
3000 stmt = gimple_build_assign
3001 (tmp_else, build_int_cst (unsigned_type_node, 1));
3002 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
773c5ba7 3003
3004 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
3005 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
79acaae1 3006 e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
3007 e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);
773c5ba7 3008
79acaae1 3009 if (gimple_in_ssa_p (cfun))
3010 {
75a70cf9 3011 gimple phi = create_phi_node (tmp_join, bb);
79acaae1 3012 SSA_NAME_DEF_STMT (tmp_join) = phi;
efbcb6de 3013 add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
3014 add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
79acaae1 3015 }
3016
3017 val = tmp_join;
773c5ba7 3018 }
3019
75a70cf9 3020 gsi = gsi_start_bb (bb);
3021 val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
3022 false, GSI_CONTINUE_LINKING);
1e8e9920 3023 }
3024
75a70cf9 3025 gsi = gsi_last_bb (bb);
3026 t = gimple_omp_parallel_data_arg (entry_stmt);
1e8e9920 3027 if (t == NULL)
c2f47e15 3028 t1 = null_pointer_node;
1e8e9920 3029 else
c2f47e15 3030 t1 = build_fold_addr_expr (t);
75a70cf9 3031 t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));
773c5ba7 3032
3033 if (ws_args)
c2f47e15 3034 {
3035 tree args = tree_cons (NULL, t2,
3036 tree_cons (NULL, t1,
3037 tree_cons (NULL, val, ws_args)));
389dd41b 3038 t = build_function_call_expr (UNKNOWN_LOCATION,
3039 built_in_decls[start_ix], args);
c2f47e15 3040 }
3041 else
3042 t = build_call_expr (built_in_decls[start_ix], 3, t2, t1, val);
773c5ba7 3043
75a70cf9 3044 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
3045 false, GSI_CONTINUE_LINKING);
1e8e9920 3046
75a70cf9 3047 t = gimple_omp_parallel_data_arg (entry_stmt);
1e8e9920 3048 if (t == NULL)
3049 t = null_pointer_node;
3050 else
3051 t = build_fold_addr_expr (t);
389dd41b 3052 t = build_call_expr_loc (gimple_location (entry_stmt),
3053 gimple_omp_parallel_child_fn (entry_stmt), 1, t);
75a70cf9 3054 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
3055 false, GSI_CONTINUE_LINKING);
1e8e9920 3056
389dd41b 3057 t = build_call_expr_loc (gimple_location (entry_stmt),
3058 built_in_decls[BUILT_IN_GOMP_PARALLEL_END], 0);
75a70cf9 3059 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
3060 false, GSI_CONTINUE_LINKING);
1e8e9920 3061}
3062
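/* The emitted sequence for the common (non-combined, unconditional)
   case; foo._omp_fn.0 stands for the child function:

	__builtin_GOMP_parallel_start (foo._omp_fn.0, &.omp_data_o, 0);
	foo._omp_fn.0 (&.omp_data_o);
	__builtin_GOMP_parallel_end ();

   An "if" clause turns the last argument into (cond ? val : 1), with
   val == 0 meaning "let the runtime choose", and a combined
   parallel+workshare region calls one of the GOMP_parallel_*_start
   entry points with WS_ARGS appended.  */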
773c5ba7 3063
fd6481cf 3064/* Build the function call to GOMP_task to actually
 3065 generate the task operation. BB is the block where the code is to be inserted. */
3066
3067static void
75a70cf9 3068expand_task_call (basic_block bb, gimple entry_stmt)
fd6481cf 3069{
3070 tree t, t1, t2, t3, flags, cond, c, clauses;
75a70cf9 3071 gimple_stmt_iterator gsi;
389dd41b 3072 location_t loc = gimple_location (entry_stmt);
fd6481cf 3073
75a70cf9 3074 clauses = gimple_omp_task_clauses (entry_stmt);
fd6481cf 3075
fd6481cf 3076 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
3077 if (c)
3078 cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
3079 else
3080 cond = boolean_true_node;
3081
3082 c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
3083 flags = build_int_cst (unsigned_type_node, (c ? 1 : 0));
3084
75a70cf9 3085 gsi = gsi_last_bb (bb);
3086 t = gimple_omp_task_data_arg (entry_stmt);
fd6481cf 3087 if (t == NULL)
3088 t2 = null_pointer_node;
3089 else
389dd41b 3090 t2 = build_fold_addr_expr_loc (loc, t);
3091 t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
75a70cf9 3092 t = gimple_omp_task_copy_fn (entry_stmt);
fd6481cf 3093 if (t == NULL)
3094 t3 = null_pointer_node;
3095 else
389dd41b 3096 t3 = build_fold_addr_expr_loc (loc, t);
fd6481cf 3097
3098 t = build_call_expr (built_in_decls[BUILT_IN_GOMP_TASK], 7, t1, t2, t3,
75a70cf9 3099 gimple_omp_task_arg_size (entry_stmt),
3100 gimple_omp_task_arg_align (entry_stmt), cond, flags);
fd6481cf 3101
75a70cf9 3102 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
3103 false, GSI_CONTINUE_LINKING);
fd6481cf 3104}
3105
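/* The resulting call, with the seven arguments in the order built
   above (illustrative names; data and copy-fn may be NULL):

	__builtin_GOMP_task (foo._omp_fn.1,	  (task body)
			     &.omp_data_o,	  (data block)
			     foo._omp_cpyfn.2,	  (firstprivate copy fn)
			     arg_size, arg_align,
			     cond,		  (from the "if" clause)
			     flags);		  (bit 0 set for untied)
 */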
3106
75a70cf9 3107/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
3108 catch handler and return it. This prevents programs from violating the
3109 structured block semantics with throws. */
1e8e9920 3110
75a70cf9 3111static gimple_seq
3112maybe_catch_exception (gimple_seq body)
1e8e9920 3113{
e38def9c 3114 gimple g;
3115 tree decl;
1e8e9920 3116
3117 if (!flag_exceptions)
75a70cf9 3118 return body;
1e8e9920 3119
3120 if (lang_protect_cleanup_actions)
e38def9c 3121 decl = lang_protect_cleanup_actions ();
1e8e9920 3122 else
e38def9c 3123 decl = built_in_decls[BUILT_IN_TRAP];
75a70cf9 3124
e38def9c 3125 g = gimple_build_eh_must_not_throw (decl);
3126 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
75a70cf9 3127 GIMPLE_TRY_CATCH);
1e8e9920 3128
e38def9c 3129 return gimple_seq_alloc_with_stmt (g);
1e8e9920 3130}
3131
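/* In effect the body is wrapped as (a sketch; the handler is the
   language's cleanup action, e.g. terminate () for C++, or else
   __builtin_trap):

	try
	  {
	    body;
	  }
	catch (...)
	  {
	    terminate ();	(EH_MUST_NOT_THROW)
	  }
 */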
773c5ba7 3132/* Chain all the DECLs in LIST by their TREE_CHAIN fields. */
1e8e9920 3133
773c5ba7 3134static tree
3135list2chain (tree list)
1e8e9920 3136{
773c5ba7 3137 tree t;
1e8e9920 3138
773c5ba7 3139 for (t = list; t; t = TREE_CHAIN (t))
3140 {
3141 tree var = TREE_VALUE (t);
3142 if (TREE_CHAIN (t))
3143 TREE_CHAIN (var) = TREE_VALUE (TREE_CHAIN (t));
3144 else
3145 TREE_CHAIN (var) = NULL_TREE;
3146 }
1e8e9920 3147
773c5ba7 3148 return list ? TREE_VALUE (list) : NULL_TREE;
3149}
1e8e9920 3150
1e8e9920 3151
773c5ba7 3152/* Remove barriers in REGION->EXIT's block. Note that this is only
75a70cf9 3153 valid for GIMPLE_OMP_PARALLEL regions. Since the end of a parallel region
3154 is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
3155 left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
773c5ba7 3156 removed. */
1e8e9920 3157
773c5ba7 3158static void
3159remove_exit_barrier (struct omp_region *region)
3160{
75a70cf9 3161 gimple_stmt_iterator gsi;
773c5ba7 3162 basic_block exit_bb;
61e47ac8 3163 edge_iterator ei;
3164 edge e;
75a70cf9 3165 gimple stmt;
4a04f4b4 3166 int any_addressable_vars = -1;
1e8e9920 3167
61e47ac8 3168 exit_bb = region->exit;
1e8e9920 3169
5056ba1a 3170 /* If the parallel region doesn't return, we don't have a REGION->EXIT
3171 block at all. */
3172 if (! exit_bb)
3173 return;
3174
75a70cf9 3175 /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN. The
3176 workshare's GIMPLE_OMP_RETURN will be in a preceding block. The kinds of
61e47ac8 3177 statements that can appear in between are extremely limited -- no
3178 memory operations at all. Here, we allow nothing at all, so the
75a70cf9 3179 only thing we allow to precede this GIMPLE_OMP_RETURN is a label. */
3180 gsi = gsi_last_bb (exit_bb);
3181 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
3182 gsi_prev (&gsi);
3183 if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
773c5ba7 3184 return;
1e8e9920 3185
61e47ac8 3186 FOR_EACH_EDGE (e, ei, exit_bb->preds)
3187 {
75a70cf9 3188 gsi = gsi_last_bb (e->src);
3189 if (gsi_end_p (gsi))
61e47ac8 3190 continue;
75a70cf9 3191 stmt = gsi_stmt (gsi);
4a04f4b4 3192 if (gimple_code (stmt) == GIMPLE_OMP_RETURN
3193 && !gimple_omp_return_nowait_p (stmt))
3194 {
3195 /* OpenMP 3.0 tasks unfortunately prevent this optimization
3196 in many cases. If there could be tasks queued, the barrier
3197 might be needed to let the tasks run before some local
3198 variable of the parallel that the task uses as shared
3199 runs out of scope. The task can be spawned either
 3200 from within the current function (this would be easy to check)
3201 or from some function it calls and gets passed an address
3202 of such a variable. */
3203 if (any_addressable_vars < 0)
3204 {
3205 gimple parallel_stmt = last_stmt (region->entry);
3206 tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
3207 tree local_decls = DECL_STRUCT_FUNCTION (child_fun)->local_decls;
3208 tree block;
3209
3210 any_addressable_vars = 0;
3211 for (; local_decls; local_decls = TREE_CHAIN (local_decls))
3212 if (TREE_ADDRESSABLE (TREE_VALUE (local_decls)))
3213 {
3214 any_addressable_vars = 1;
3215 break;
3216 }
3217 for (block = gimple_block (stmt);
3218 !any_addressable_vars
3219 && block
3220 && TREE_CODE (block) == BLOCK;
3221 block = BLOCK_SUPERCONTEXT (block))
3222 {
3223 for (local_decls = BLOCK_VARS (block);
3224 local_decls;
3225 local_decls = TREE_CHAIN (local_decls))
3226 if (TREE_ADDRESSABLE (local_decls))
3227 {
3228 any_addressable_vars = 1;
3229 break;
3230 }
3231 if (block == gimple_block (parallel_stmt))
3232 break;
3233 }
3234 }
3235 if (!any_addressable_vars)
3236 gimple_omp_return_set_nowait (stmt);
3237 }
61e47ac8 3238 }
1e8e9920 3239}
3240
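/* The typical case this catches, sketched:

	#pragma omp parallel
	  {
	    #pragma omp for
	    for (i = 0; i < n; i++)
	      body;
	  }

   The implicit barrier at the end of the "for" is immediately
   followed by the parallel's own closing barrier, so when no
   addressable locals could still be referenced by queued tasks the
   workshare's GIMPLE_OMP_RETURN is marked nowait.  */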
61e47ac8 3241static void
3242remove_exit_barriers (struct omp_region *region)
3243{
75a70cf9 3244 if (region->type == GIMPLE_OMP_PARALLEL)
61e47ac8 3245 remove_exit_barrier (region);
3246
3247 if (region->inner)
3248 {
3249 region = region->inner;
3250 remove_exit_barriers (region);
3251 while (region->next)
3252 {
3253 region = region->next;
3254 remove_exit_barriers (region);
3255 }
3256 }
3257}
773c5ba7 3258
658b4427 3259/* Optimize omp_get_thread_num () and omp_get_num_threads ()
3260 calls. These can't be declared as const functions, but
3261 within one parallel body they are constant, so they can be
3262 transformed there into __builtin_omp_get_{thread_num,num_threads} ()
fd6481cf 3263 which are declared const. Similarly for a task body, except
 3264 that in an untied task omp_get_thread_num () can change at any task
3265 scheduling point. */
658b4427 3266
3267static void
75a70cf9 3268optimize_omp_library_calls (gimple entry_stmt)
658b4427 3269{
3270 basic_block bb;
75a70cf9 3271 gimple_stmt_iterator gsi;
658b4427 3272 tree thr_num_id
3273 = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM]);
3274 tree num_thr_id
3275 = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS]);
75a70cf9 3276 bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
3277 && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
fd6481cf 3278 OMP_CLAUSE_UNTIED) != NULL);
658b4427 3279
3280 FOR_EACH_BB (bb)
75a70cf9 3281 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
658b4427 3282 {
75a70cf9 3283 gimple call = gsi_stmt (gsi);
658b4427 3284 tree decl;
3285
75a70cf9 3286 if (is_gimple_call (call)
3287 && (decl = gimple_call_fndecl (call))
658b4427 3288 && DECL_EXTERNAL (decl)
3289 && TREE_PUBLIC (decl)
3290 && DECL_INITIAL (decl) == NULL)
3291 {
3292 tree built_in;
3293
3294 if (DECL_NAME (decl) == thr_num_id)
fd6481cf 3295 {
3296 /* In #pragma omp task untied omp_get_thread_num () can change
3297 during the execution of the task region. */
3298 if (untied_task)
3299 continue;
3300 built_in = built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM];
3301 }
658b4427 3302 else if (DECL_NAME (decl) == num_thr_id)
3303 built_in = built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS];
3304 else
3305 continue;
3306
3307 if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
75a70cf9 3308 || gimple_call_num_args (call) != 0)
658b4427 3309 continue;
3310
3311 if (flag_exceptions && !TREE_NOTHROW (decl))
3312 continue;
3313
3314 if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
1ea6a73c 3315 || !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
3316 TREE_TYPE (TREE_TYPE (built_in))))
658b4427 3317 continue;
3318
0acacf9e 3319 gimple_call_set_fndecl (call, built_in);
658b4427 3320 }
3321 }
3322}
3323
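/* For example (a sketch), inside an outlined parallel body a call

	i = omp_get_thread_num ();

   is redirected to the const builtin

	i = __builtin_omp_get_thread_num ();

   so the value can be CSEd; the replacement is skipped for
   omp_get_thread_num in untied tasks, where the executing thread may
   change at any task scheduling point.  */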
fd6481cf 3324/* Expand the OpenMP parallel or task directive starting at REGION. */
1e8e9920 3325
3326static void
fd6481cf 3327expand_omp_taskreg (struct omp_region *region)
1e8e9920 3328{
773c5ba7 3329 basic_block entry_bb, exit_bb, new_bb;
87d4aa85 3330 struct function *child_cfun;
1d22f541 3331 tree child_fn, block, t, ws_args, *tp;
ba3a7ba0 3332 tree save_current;
75a70cf9 3333 gimple_stmt_iterator gsi;
3334 gimple entry_stmt, stmt;
773c5ba7 3335 edge e;
3336
61e47ac8 3337 entry_stmt = last_stmt (region->entry);
75a70cf9 3338 child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
773c5ba7 3339 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
b3a3ddec 3340 /* If this function has already been instrumented, make sure
3341 the child function isn't instrumented again. */
3342 child_cfun->after_tree_profile = cfun->after_tree_profile;
773c5ba7 3343
61e47ac8 3344 entry_bb = region->entry;
3345 exit_bb = region->exit;
773c5ba7 3346
773c5ba7 3347 if (is_combined_parallel (region))
61e47ac8 3348 ws_args = region->ws_args;
773c5ba7 3349 else
3350 ws_args = NULL_TREE;
1e8e9920 3351
61e47ac8 3352 if (child_cfun->cfg)
1e8e9920 3353 {
773c5ba7 3354 /* Due to inlining, it may happen that we have already outlined
3355 the region, in which case all we need to do is make the
3356 sub-graph unreachable and emit the parallel call. */
3357 edge entry_succ_e, exit_succ_e;
75a70cf9 3358 gimple_stmt_iterator gsi;
773c5ba7 3359
3360 entry_succ_e = single_succ_edge (entry_bb);
773c5ba7 3361
75a70cf9 3362 gsi = gsi_last_bb (entry_bb);
3363 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
3364 || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
3365 gsi_remove (&gsi, true);
773c5ba7 3366
3367 new_bb = entry_bb;
03ed154b 3368 if (exit_bb)
3369 {
3370 exit_succ_e = single_succ_edge (exit_bb);
3371 make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
3372 }
79acaae1 3373 remove_edge_and_dominated_blocks (entry_succ_e);
1e8e9920 3374 }
773c5ba7 3375 else
3376 {
3377 /* If the parallel region needs data sent from the parent
3480139d 3378 function, then the very first statement (except possible
3379 tree profile counter updates) of the parallel body
773c5ba7 3380 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
3381 &.OMP_DATA_O is passed as an argument to the child function,
3382 we need to replace it with the argument as seen by the child
3383 function.
3384
3385 In most cases, this will end up being the identity assignment
3386 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
3387 a function call that has been inlined, the original PARM_DECL
3388 .OMP_DATA_I may have been converted into a different local
 3389 variable, in which case we need to keep the assignment. */
75a70cf9 3390 if (gimple_omp_taskreg_data_arg (entry_stmt))
773c5ba7 3391 {
3392 basic_block entry_succ_bb = single_succ (entry_bb);
75a70cf9 3393 gimple_stmt_iterator gsi;
3394 tree arg, narg;
3395 gimple parcopy_stmt = NULL;
1e8e9920 3396
75a70cf9 3397 for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
3480139d 3398 {
75a70cf9 3399 gimple stmt;
3480139d 3400
75a70cf9 3401 gcc_assert (!gsi_end_p (gsi));
3402 stmt = gsi_stmt (gsi);
3403 if (gimple_code (stmt) != GIMPLE_ASSIGN)
cc6b725b 3404 continue;
3405
75a70cf9 3406 if (gimple_num_ops (stmt) == 2)
3480139d 3407 {
75a70cf9 3408 tree arg = gimple_assign_rhs1 (stmt);
3409
 3410 /* We ignore the subcode because we're
3411 effectively doing a STRIP_NOPS. */
3412
3413 if (TREE_CODE (arg) == ADDR_EXPR
3414 && TREE_OPERAND (arg, 0)
3415 == gimple_omp_taskreg_data_arg (entry_stmt))
3416 {
3417 parcopy_stmt = stmt;
3418 break;
3419 }
3480139d 3420 }
3421 }
79acaae1 3422
75a70cf9 3423 gcc_assert (parcopy_stmt != NULL);
79acaae1 3424 arg = DECL_ARGUMENTS (child_fn);
3425
3426 if (!gimple_in_ssa_p (cfun))
3427 {
75a70cf9 3428 if (gimple_assign_lhs (parcopy_stmt) == arg)
3429 gsi_remove (&gsi, true);
79acaae1 3430 else
75a70cf9 3431 {
3432 /* ?? Is setting the subcode really necessary ?? */
3433 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
3434 gimple_assign_set_rhs1 (parcopy_stmt, arg);
3435 }
79acaae1 3436 }
3437 else
3438 {
3439 /* If we are in ssa form, we must load the value from the default
 3440 definition of the argument. No default definition should exist yet,
 3441 since the argument is never used uninitialized. */
3442 gcc_assert (gimple_default_def (cfun, arg) == NULL);
75a70cf9 3443 narg = make_ssa_name (arg, gimple_build_nop ());
79acaae1 3444 set_default_def (arg, narg);
75a70cf9 3445 /* ?? Is setting the subcode really necessary ?? */
3446 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
3447 gimple_assign_set_rhs1 (parcopy_stmt, narg);
79acaae1 3448 update_stmt (parcopy_stmt);
3449 }
773c5ba7 3450 }
3451
3452 /* Declare local variables needed in CHILD_CFUN. */
3453 block = DECL_INITIAL (child_fn);
edb7afe8 3454 BLOCK_VARS (block) = list2chain (child_cfun->local_decls);
e1a7ccb9 3455  /* The gimplifier could have recorded temporaries in the parallel/task
3456     block rather than in the containing function's local_decls chain,
3457     which would mean cgraph missed finalizing them.  Do it now.  */
3458 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3459 if (TREE_CODE (t) == VAR_DECL
3460 && TREE_STATIC (t)
3461 && !DECL_EXTERNAL (t))
3462 varpool_finalize_decl (t);
75a70cf9 3463 DECL_SAVED_TREE (child_fn) = NULL;
3464 gimple_set_body (child_fn, bb_seq (single_succ (entry_bb)));
1d22f541 3465 TREE_USED (block) = 1;
773c5ba7 3466
79acaae1 3467 /* Reset DECL_CONTEXT on function arguments. */
773c5ba7 3468 for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
3469 DECL_CONTEXT (t) = child_fn;
3470
75a70cf9 3471 /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
3472 so that it can be moved to the child function. */
3473 gsi = gsi_last_bb (entry_bb);
3474 stmt = gsi_stmt (gsi);
3475 gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
3476 || gimple_code (stmt) == GIMPLE_OMP_TASK));
3477 gsi_remove (&gsi, true);
3478 e = split_block (entry_bb, stmt);
773c5ba7 3479 entry_bb = e->dest;
3480 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
3481
75a70cf9 3482 /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR. */
5056ba1a 3483 if (exit_bb)
3484 {
75a70cf9 3485 gsi = gsi_last_bb (exit_bb);
3486 gcc_assert (!gsi_end_p (gsi)
3487 && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
3488 stmt = gimple_build_return (NULL);
3489 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
3490 gsi_remove (&gsi, true);
5056ba1a 3491 }
79acaae1 3492
3493 /* Move the parallel region into CHILD_CFUN. */
48e1416a 3494
79acaae1 3495 if (gimple_in_ssa_p (cfun))
3496 {
3497 push_cfun (child_cfun);
bcaa2770 3498 init_tree_ssa (child_cfun);
79acaae1 3499 init_ssa_operands ();
3500 cfun->gimple_df->in_ssa_p = true;
3501 pop_cfun ();
1d22f541 3502 block = NULL_TREE;
79acaae1 3503 }
1d22f541 3504 else
75a70cf9 3505 block = gimple_block (entry_stmt);
1d22f541 3506
3507 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
79acaae1 3508 if (exit_bb)
3509 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
3510
1d22f541 3511 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
3512 for (tp = &child_cfun->local_decls; *tp; )
3513 if (DECL_CONTEXT (TREE_VALUE (*tp)) != cfun->decl)
3514 tp = &TREE_CHAIN (*tp);
3515 else
3516 *tp = TREE_CHAIN (*tp);
3517
79acaae1 3518 /* Inform the callgraph about the new function. */
3519 DECL_STRUCT_FUNCTION (child_fn)->curr_properties
3520 = cfun->curr_properties;
3521 cgraph_add_new_function (child_fn, true);
3522
3523 /* Fix the callgraph edges for child_cfun. Those for cfun will be
3524 fixed in a following pass. */
3525 push_cfun (child_cfun);
ba3a7ba0 3526 save_current = current_function_decl;
3527 current_function_decl = child_fn;
658b4427 3528 if (optimize)
fd6481cf 3529 optimize_omp_library_calls (entry_stmt);
79acaae1 3530 rebuild_cgraph_edges ();
fbe86b1b 3531
3532      /* Some EH regions might become dead; see PR34608.  If
3533 pass_cleanup_cfg isn't the first pass to happen with the
3534 new child, these dead EH edges might cause problems.
3535 Clean them up now. */
3536 if (flag_exceptions)
3537 {
3538 basic_block bb;
fbe86b1b 3539 bool changed = false;
3540
fbe86b1b 3541 FOR_EACH_BB (bb)
75a70cf9 3542 changed |= gimple_purge_dead_eh_edges (bb);
fbe86b1b 3543 if (changed)
3544 cleanup_tree_cfg ();
fbe86b1b 3545 }
dd277d48 3546 if (gimple_in_ssa_p (cfun))
3547 update_ssa (TODO_update_ssa);
ba3a7ba0 3548 current_function_decl = save_current;
79acaae1 3549 pop_cfun ();
773c5ba7 3550 }
48e1416a 3551
773c5ba7 3552 /* Emit a library call to launch the children threads. */
75a70cf9 3553 if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
fd6481cf 3554 expand_parallel_call (region, new_bb, entry_stmt, ws_args);
3555 else
3556 expand_task_call (new_bb, entry_stmt);
28c92cbb 3557 update_ssa (TODO_update_ssa_only_virtuals);
1e8e9920 3558}
3559
773c5ba7 3560
3561/* A subroutine of expand_omp_for. Generate code for a parallel
1e8e9920 3562 loop with any schedule. Given parameters:
3563
3564 for (V = N1; V cond N2; V += STEP) BODY;
3565
3566 where COND is "<" or ">", we generate pseudocode
3567
3568 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
773c5ba7 3569 if (more) goto L0; else goto L3;
1e8e9920 3570 L0:
3571 V = istart0;
3572 iend = iend0;
3573 L1:
3574 BODY;
3575 V += STEP;
773c5ba7 3576 if (V cond iend) goto L1; else goto L2;
1e8e9920 3577 L2:
773c5ba7 3578 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
3579 L3:
1e8e9920 3580
773c5ba7 3581 If this is a combined omp parallel loop, instead of the call to
fd6481cf 3582 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
3583
3584 For collapsed loops, given parameters:
3585 collapse(3)
3586 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
3587 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
3588 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
3589 BODY;
3590
3591 we generate pseudocode
3592
3593 if (cond3 is <)
3594 adj = STEP3 - 1;
3595 else
3596 adj = STEP3 + 1;
3597 count3 = (adj + N32 - N31) / STEP3;
3598 if (cond2 is <)
3599 adj = STEP2 - 1;
3600 else
3601 adj = STEP2 + 1;
3602 count2 = (adj + N22 - N21) / STEP2;
3603 if (cond1 is <)
3604 adj = STEP1 - 1;
3605 else
3606 adj = STEP1 + 1;
3607 count1 = (adj + N12 - N11) / STEP1;
3608 count = count1 * count2 * count3;
3609 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
3610 if (more) goto L0; else goto L3;
3611 L0:
3612 V = istart0;
3613 T = V;
3614 V3 = N31 + (T % count3) * STEP3;
3615 T = T / count3;
3616 V2 = N21 + (T % count2) * STEP2;
3617 T = T / count2;
3618 V1 = N11 + T * STEP1;
3619 iend = iend0;
3620 L1:
3621 BODY;
3622 V += 1;
3623 if (V < iend) goto L10; else goto L2;
3624 L10:
3625 V3 += STEP3;
3626 if (V3 cond3 N32) goto L1; else goto L11;
3627 L11:
3628 V3 = N31;
3629 V2 += STEP2;
3630 if (V2 cond2 N22) goto L1; else goto L12;
3631 L12:
3632 V2 = N21;
3633 V1 += STEP1;
3634 goto L1;
3635 L2:
3636 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
3637 L3:
3638
3639 */
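/* As a worked illustration (assumed user source, not compiler output):
   for

       #pragma omp for collapse(2)
       for (i = 0; i < 4; i++)
	 for (j = 0; j < 3; j++)
	   body (i, j);

   count2 is 3 and count is 12; decoding a logical iteration number T
   in [0, 12) as j = T % 3 and i = T / 3 matches the V2/V1 computation
   in the pseudocode above.  */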
1e8e9920 3640
61e47ac8 3641static void
773c5ba7 3642expand_omp_for_generic (struct omp_region *region,
3643 struct omp_for_data *fd,
1e8e9920 3644 enum built_in_function start_fn,
3645 enum built_in_function next_fn)
3646{
75a70cf9 3647 tree type, istart0, iend0, iend;
fd6481cf 3648 tree t, vmain, vback, bias = NULL_TREE;
3649 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
03ed154b 3650 basic_block l2_bb = NULL, l3_bb = NULL;
75a70cf9 3651 gimple_stmt_iterator gsi;
3652 gimple stmt;
773c5ba7 3653 bool in_combined_parallel = is_combined_parallel (region);
ac6e3339 3654 bool broken_loop = region->cont == NULL;
79acaae1 3655 edge e, ne;
fd6481cf 3656 tree *counts = NULL;
3657 int i;
ac6e3339 3658
3659 gcc_assert (!broken_loop || !in_combined_parallel);
fd6481cf 3660 gcc_assert (fd->iter_type == long_integer_type_node
3661 || !in_combined_parallel);
1e8e9920 3662
fd6481cf 3663 type = TREE_TYPE (fd->loop.v);
3664 istart0 = create_tmp_var (fd->iter_type, ".istart0");
3665 iend0 = create_tmp_var (fd->iter_type, ".iend0");
6d63fc03 3666 TREE_ADDRESSABLE (istart0) = 1;
3667 TREE_ADDRESSABLE (iend0) = 1;
79acaae1 3668 if (gimple_in_ssa_p (cfun))
3669 {
3670 add_referenced_var (istart0);
3671 add_referenced_var (iend0);
3672 }
1e8e9920 3673
fd6481cf 3674 /* See if we need to bias by LLONG_MIN. */
3675 if (fd->iter_type == long_long_unsigned_type_node
3676 && TREE_CODE (type) == INTEGER_TYPE
3677 && !TYPE_UNSIGNED (type))
3678 {
3679 tree n1, n2;
3680
3681 if (fd->loop.cond_code == LT_EXPR)
3682 {
3683 n1 = fd->loop.n1;
3684 n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
3685 }
3686 else
3687 {
3688 n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
3689 n2 = fd->loop.n1;
3690 }
3691 if (TREE_CODE (n1) != INTEGER_CST
3692 || TREE_CODE (n2) != INTEGER_CST
3693 || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
3694 bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
3695 }
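  /* Why the bias is safe: adding TYPE_MIN_VALUE in the unsigned
     iter_type flips the sign bit, mapping the signed range
     [LLONG_MIN, LLONG_MAX] monotonically onto [0, ULLONG_MAX]
     (e.g. -1 becomes 0x7fff...f and 0 becomes 0x8000...0), so the
     runtime's unsigned comparisons preserve the original signed
     iteration order.  The bias is subtracted again below when
     istart0/iend0 are read back.  */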
3696
61e47ac8 3697 entry_bb = region->entry;
03ed154b 3698 cont_bb = region->cont;
fd6481cf 3699 collapse_bb = NULL;
ac6e3339 3700 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
3701 gcc_assert (broken_loop
3702 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
3703 l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
3704 l1_bb = single_succ (l0_bb);
3705 if (!broken_loop)
03ed154b 3706 {
3707 l2_bb = create_empty_bb (cont_bb);
ac6e3339 3708 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
3709 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
03ed154b 3710 }
ac6e3339 3711 else
3712 l2_bb = NULL;
3713 l3_bb = BRANCH_EDGE (entry_bb)->dest;
3714 exit_bb = region->exit;
773c5ba7 3715
75a70cf9 3716 gsi = gsi_last_bb (entry_bb);
fd6481cf 3717
75a70cf9 3718 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
fd6481cf 3719 if (fd->collapse > 1)
3720 {
3721	  /* Collapsed loops need work for expansion in SSA form.  */
3722 gcc_assert (!gimple_in_ssa_p (cfun));
3723 counts = (tree *) alloca (fd->collapse * sizeof (tree));
3724 for (i = 0; i < fd->collapse; i++)
3725 {
3726 tree itype = TREE_TYPE (fd->loops[i].v);
3727
3728 if (POINTER_TYPE_P (itype))
3729 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
3730 t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
3731 ? -1 : 1));
3732 t = fold_build2 (PLUS_EXPR, itype,
3733 fold_convert (itype, fd->loops[i].step), t);
3734 t = fold_build2 (PLUS_EXPR, itype, t,
3735 fold_convert (itype, fd->loops[i].n2));
3736 t = fold_build2 (MINUS_EXPR, itype, t,
3737 fold_convert (itype, fd->loops[i].n1));
3738 if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
3739 t = fold_build2 (TRUNC_DIV_EXPR, itype,
3740 fold_build1 (NEGATE_EXPR, itype, t),
3741 fold_build1 (NEGATE_EXPR, itype,
3742 fold_convert (itype,
3743 fd->loops[i].step)));
3744 else
3745 t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
3746 fold_convert (itype, fd->loops[i].step));
3747 t = fold_convert (type, t);
3748 if (TREE_CODE (t) == INTEGER_CST)
3749 counts[i] = t;
3750 else
3751 {
3752 counts[i] = create_tmp_var (type, ".count");
75a70cf9 3753 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
3754 true, GSI_SAME_STMT);
3755 stmt = gimple_build_assign (counts[i], t);
3756 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
fd6481cf 3757 }
3758 if (SSA_VAR_P (fd->loop.n2))
3759 {
3760 if (i == 0)
75a70cf9 3761 t = counts[0];
fd6481cf 3762 else
3763 {
3764 t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
75a70cf9 3765 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
3766 true, GSI_SAME_STMT);
fd6481cf 3767 }
75a70cf9 3768 stmt = gimple_build_assign (fd->loop.n2, t);
3769 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
fd6481cf 3770 }
3771 }
3772 }
79acaae1 3773 if (in_combined_parallel)
3774 {
3775 /* In a combined parallel loop, emit a call to
3776 GOMP_loop_foo_next. */
3777 t = build_call_expr (built_in_decls[next_fn], 2,
3778 build_fold_addr_expr (istart0),
3779 build_fold_addr_expr (iend0));
3780 }
3781 else
1e8e9920 3782 {
c2f47e15 3783 tree t0, t1, t2, t3, t4;
773c5ba7 3784 /* If this is not a combined parallel loop, emit a call to
3785 GOMP_loop_foo_start in ENTRY_BB. */
c2f47e15 3786 t4 = build_fold_addr_expr (iend0);
3787 t3 = build_fold_addr_expr (istart0);
fd6481cf 3788 t2 = fold_convert (fd->iter_type, fd->loop.step);
c799f233 3789 if (POINTER_TYPE_P (type)
3790 && TYPE_PRECISION (type) != TYPE_PRECISION (fd->iter_type))
3791 {
3792 /* Avoid casting pointers to integer of a different size. */
3793 tree itype
3794 = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
3795 t1 = fold_convert (fd->iter_type, fold_convert (itype, fd->loop.n2));
3796 t0 = fold_convert (fd->iter_type, fold_convert (itype, fd->loop.n1));
3797 }
3798 else
3799 {
3800 t1 = fold_convert (fd->iter_type, fd->loop.n2);
3801 t0 = fold_convert (fd->iter_type, fd->loop.n1);
3802 }
fd6481cf 3803 if (bias)
1e8e9920 3804 {
fd6481cf 3805 t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
3806 t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
3807 }
3808 if (fd->iter_type == long_integer_type_node)
3809 {
3810 if (fd->chunk_size)
3811 {
3812 t = fold_convert (fd->iter_type, fd->chunk_size);
3813 t = build_call_expr (built_in_decls[start_fn], 6,
3814 t0, t1, t2, t, t3, t4);
3815 }
3816 else
3817 t = build_call_expr (built_in_decls[start_fn], 5,
3818 t0, t1, t2, t3, t4);
1e8e9920 3819 }
c2f47e15 3820 else
fd6481cf 3821 {
3822 tree t5;
3823 tree c_bool_type;
3824
3825	  /* The GOMP_loop_ull_*start functions have an additional boolean
3826	     argument, true for < loops and false for > loops.
3827 In Fortran, the C bool type can be different from
3828 boolean_type_node. */
3829 c_bool_type = TREE_TYPE (TREE_TYPE (built_in_decls[start_fn]));
3830 t5 = build_int_cst (c_bool_type,
3831 fd->loop.cond_code == LT_EXPR ? 1 : 0);
3832 if (fd->chunk_size)
3833 {
3834 t = fold_convert (fd->iter_type, fd->chunk_size);
3835 t = build_call_expr (built_in_decls[start_fn], 7,
3836 t5, t0, t1, t2, t, t3, t4);
3837 }
3838 else
3839 t = build_call_expr (built_in_decls[start_fn], 6,
3840 t5, t0, t1, t2, t3, t4);
3841 }
1e8e9920 3842 }
fd6481cf 3843 if (TREE_TYPE (t) != boolean_type_node)
3844 t = fold_build2 (NE_EXPR, boolean_type_node,
3845 t, build_int_cst (TREE_TYPE (t), 0));
75a70cf9 3846 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
3847 true, GSI_SAME_STMT);
3848 gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
79acaae1 3849
75a70cf9 3850 /* Remove the GIMPLE_OMP_FOR statement. */
3851 gsi_remove (&gsi, true);
1e8e9920 3852
773c5ba7 3853 /* Iteration setup for sequential loop goes in L0_BB. */
75a70cf9 3854 gsi = gsi_start_bb (l0_bb);
1efcacec 3855 t = istart0;
fd6481cf 3856 if (bias)
1efcacec 3857 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
3858 if (POINTER_TYPE_P (type))
3859 t = fold_convert (lang_hooks.types.type_for_size (TYPE_PRECISION (type),
3860 0), t);
3861 t = fold_convert (type, t);
75a70cf9 3862 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
3863 false, GSI_CONTINUE_LINKING);
3864 stmt = gimple_build_assign (fd->loop.v, t);
3865 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
1e8e9920 3866
1efcacec 3867 t = iend0;
fd6481cf 3868 if (bias)
1efcacec 3869 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
3870 if (POINTER_TYPE_P (type))
3871 t = fold_convert (lang_hooks.types.type_for_size (TYPE_PRECISION (type),
3872 0), t);
3873 t = fold_convert (type, t);
75a70cf9 3874 iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
3875 false, GSI_CONTINUE_LINKING);
fd6481cf 3876 if (fd->collapse > 1)
3877 {
3878 tree tem = create_tmp_var (type, ".tem");
3879
75a70cf9 3880 stmt = gimple_build_assign (tem, fd->loop.v);
3881 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
fd6481cf 3882 for (i = fd->collapse - 1; i >= 0; i--)
3883 {
3884 tree vtype = TREE_TYPE (fd->loops[i].v), itype;
3885 itype = vtype;
3886 if (POINTER_TYPE_P (vtype))
3887 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (vtype), 0);
3888 t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
3889 t = fold_convert (itype, t);
c821ef7d 3890 t = fold_build2 (MULT_EXPR, itype, t,
3891 fold_convert (itype, fd->loops[i].step));
fd6481cf 3892 if (POINTER_TYPE_P (vtype))
3893 t = fold_build2 (POINTER_PLUS_EXPR, vtype,
3894 fd->loops[i].n1, fold_convert (sizetype, t));
3895 else
3896 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
75a70cf9 3897 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
3898 false, GSI_CONTINUE_LINKING);
3899 stmt = gimple_build_assign (fd->loops[i].v, t);
3900 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
fd6481cf 3901 if (i != 0)
3902 {
3903 t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
75a70cf9 3904 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
3905 false, GSI_CONTINUE_LINKING);
3906 stmt = gimple_build_assign (tem, t);
3907 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
fd6481cf 3908 }
3909 }
3910 }
773c5ba7 3911
ac6e3339 3912 if (!broken_loop)
03ed154b 3913 {
ac6e3339 3914 /* Code to control the increment and predicate for the sequential
3915 loop goes in the CONT_BB. */
75a70cf9 3916 gsi = gsi_last_bb (cont_bb);
3917 stmt = gsi_stmt (gsi);
3918 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
3919 vmain = gimple_omp_continue_control_use (stmt);
3920 vback = gimple_omp_continue_control_def (stmt);
79acaae1 3921
fd6481cf 3922 if (POINTER_TYPE_P (type))
3923 t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
3924 fold_convert (sizetype, fd->loop.step));
3925 else
3926 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
75a70cf9 3927 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
3928 true, GSI_SAME_STMT);
3929 stmt = gimple_build_assign (vback, t);
3930 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
3931
fd6481cf 3932 t = build2 (fd->loop.cond_code, boolean_type_node, vback, iend);
75a70cf9 3933 stmt = gimple_build_cond_empty (t);
3934 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
773c5ba7 3935
75a70cf9 3936 /* Remove GIMPLE_OMP_CONTINUE. */
3937 gsi_remove (&gsi, true);
773c5ba7 3938
fd6481cf 3939 if (fd->collapse > 1)
3940 {
3941 basic_block last_bb, bb;
3942
3943 last_bb = cont_bb;
3944 for (i = fd->collapse - 1; i >= 0; i--)
3945 {
3946 tree vtype = TREE_TYPE (fd->loops[i].v);
3947
3948 bb = create_empty_bb (last_bb);
75a70cf9 3949 gsi = gsi_start_bb (bb);
fd6481cf 3950
3951 if (i < fd->collapse - 1)
3952 {
3953 e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
3954 e->probability = REG_BR_PROB_BASE / 8;
3955
75a70cf9 3956 t = fd->loops[i + 1].n1;
3957 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
3958 false, GSI_CONTINUE_LINKING);
3959 stmt = gimple_build_assign (fd->loops[i + 1].v, t);
3960 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
fd6481cf 3961 }
3962 else
3963 collapse_bb = bb;
3964
3965 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
3966
3967 if (POINTER_TYPE_P (vtype))
3968 t = fold_build2 (POINTER_PLUS_EXPR, vtype,
3969 fd->loops[i].v,
3970 fold_convert (sizetype, fd->loops[i].step));
3971 else
3972 t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v,
3973 fd->loops[i].step);
75a70cf9 3974 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
3975 false, GSI_CONTINUE_LINKING);
3976 stmt = gimple_build_assign (fd->loops[i].v, t);
3977 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
fd6481cf 3978
3979 if (i > 0)
3980 {
75a70cf9 3981 t = fd->loops[i].n2;
3982 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
3983 false, GSI_CONTINUE_LINKING);
fd6481cf 3984 t = fold_build2 (fd->loops[i].cond_code, boolean_type_node,
75a70cf9 3985 fd->loops[i].v, t);
3986 stmt = gimple_build_cond_empty (t);
3987 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
fd6481cf 3988 e = make_edge (bb, l1_bb, EDGE_TRUE_VALUE);
3989 e->probability = REG_BR_PROB_BASE * 7 / 8;
3990 }
3991 else
3992 make_edge (bb, l1_bb, EDGE_FALLTHRU);
3993 last_bb = bb;
3994 }
3995 }
3996
ac6e3339 3997 /* Emit code to get the next parallel iteration in L2_BB. */
75a70cf9 3998 gsi = gsi_start_bb (l2_bb);
773c5ba7 3999
ac6e3339 4000 t = build_call_expr (built_in_decls[next_fn], 2,
4001 build_fold_addr_expr (istart0),
4002 build_fold_addr_expr (iend0));
75a70cf9 4003 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4004 false, GSI_CONTINUE_LINKING);
fd6481cf 4005 if (TREE_TYPE (t) != boolean_type_node)
4006 t = fold_build2 (NE_EXPR, boolean_type_node,
4007 t, build_int_cst (TREE_TYPE (t), 0));
75a70cf9 4008 stmt = gimple_build_cond_empty (t);
4009 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
ac6e3339 4010 }
1e8e9920 4011
61e47ac8 4012 /* Add the loop cleanup function. */
75a70cf9 4013 gsi = gsi_last_bb (exit_bb);
4014 if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
61e47ac8 4015 t = built_in_decls[BUILT_IN_GOMP_LOOP_END_NOWAIT];
4016 else
4017 t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
75a70cf9 4018 stmt = gimple_build_call (t, 0);
4019 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
4020 gsi_remove (&gsi, true);
773c5ba7 4021
4022 /* Connect the new blocks. */
79acaae1 4023 find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
4024 find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
1e8e9920 4025
ac6e3339 4026 if (!broken_loop)
4027 {
75a70cf9 4028 gimple_seq phis;
4029
79acaae1 4030 e = find_edge (cont_bb, l3_bb);
4031 ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
4032
75a70cf9 4033 phis = phi_nodes (l3_bb);
4034 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
4035 {
4036 gimple phi = gsi_stmt (gsi);
4037 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
4038 PHI_ARG_DEF_FROM_EDGE (phi, e));
4039 }
79acaae1 4040 remove_edge (e);
4041
ac6e3339 4042 make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
fd6481cf 4043 if (fd->collapse > 1)
4044 {
4045 e = find_edge (cont_bb, l1_bb);
4046 remove_edge (e);
4047 e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
4048 }
4049 else
4050 {
4051 e = find_edge (cont_bb, l1_bb);
4052 e->flags = EDGE_TRUE_VALUE;
4053 }
4054 e->probability = REG_BR_PROB_BASE * 7 / 8;
4055 find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
ac6e3339 4056 make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
79acaae1 4057
4058 set_immediate_dominator (CDI_DOMINATORS, l2_bb,
4059 recompute_dominator (CDI_DOMINATORS, l2_bb));
4060 set_immediate_dominator (CDI_DOMINATORS, l3_bb,
4061 recompute_dominator (CDI_DOMINATORS, l3_bb));
4062 set_immediate_dominator (CDI_DOMINATORS, l0_bb,
4063 recompute_dominator (CDI_DOMINATORS, l0_bb));
4064 set_immediate_dominator (CDI_DOMINATORS, l1_bb,
4065 recompute_dominator (CDI_DOMINATORS, l1_bb));
ac6e3339 4066 }
1e8e9920 4067}
4068
4069
773c5ba7 4070/* A subroutine of expand_omp_for. Generate code for a parallel
4071 loop with static schedule and no specified chunk size. Given
4072 parameters:
1e8e9920 4073
4074 for (V = N1; V cond N2; V += STEP) BODY;
4075
4076 where COND is "<" or ">", we generate pseudocode
4077
4078 if (cond is <)
4079 adj = STEP - 1;
4080 else
4081 adj = STEP + 1;
fd6481cf 4082 if ((__typeof (V)) -1 > 0 && cond is >)
4083 n = -(adj + N2 - N1) / -STEP;
4084 else
4085 n = (adj + N2 - N1) / STEP;
1e8e9920 4086 q = n / nthreads;
4087 q += (q * nthreads != n);
4088 s0 = q * threadid;
4089 e0 = min(s0 + q, n);
79acaae1 4090 V = s0 * STEP + N1;
1e8e9920 4091 if (s0 >= e0) goto L2; else goto L0;
4092 L0:
1e8e9920 4093 e = e0 * STEP + N1;
4094 L1:
4095 BODY;
4096 V += STEP;
4097 if (V cond e) goto L1;
1e8e9920 4098 L2:
4099*/
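/* A worked example with illustrative numbers: for n = 9 and
   nthreads = 4, q = 3 (9/4 rounded up), so threads 0..3 are assigned
   the half-open ranges [0,3), [3,6), [6,9) and [9,9); the s0 >= e0
   test sends thread 3, whose range is empty, straight to L2.  */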
4100
61e47ac8 4101static void
773c5ba7 4102expand_omp_for_static_nochunk (struct omp_region *region,
4103 struct omp_for_data *fd)
1e8e9920 4104{
63f88450 4105 tree n, q, s0, e0, e, t, nthreads, threadid;
fd6481cf 4106 tree type, itype, vmain, vback;
61e47ac8 4107 basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
4108 basic_block fin_bb;
75a70cf9 4109 gimple_stmt_iterator gsi;
4110 gimple stmt;
1e8e9920 4111
fd6481cf 4112 itype = type = TREE_TYPE (fd->loop.v);
4113 if (POINTER_TYPE_P (type))
4114 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
1e8e9920 4115
61e47ac8 4116 entry_bb = region->entry;
61e47ac8 4117 cont_bb = region->cont;
ac6e3339 4118 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
4119 gcc_assert (BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
4120 seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
4121 body_bb = single_succ (seq_start_bb);
4122 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
4123 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
4124 fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
61e47ac8 4125 exit_bb = region->exit;
4126
773c5ba7 4127 /* Iteration space partitioning goes in ENTRY_BB. */
75a70cf9 4128 gsi = gsi_last_bb (entry_bb);
4129 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
61e47ac8 4130
c2f47e15 4131 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
fd6481cf 4132 t = fold_convert (itype, t);
75a70cf9 4133 nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4134 true, GSI_SAME_STMT);
48e1416a 4135
c2f47e15 4136 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
fd6481cf 4137 t = fold_convert (itype, t);
75a70cf9 4138 threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4139 true, GSI_SAME_STMT);
1e8e9920 4140
fd6481cf 4141 fd->loop.n1
75a70cf9 4142 = force_gimple_operand_gsi (&gsi, fold_convert (type, fd->loop.n1),
4143 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 4144 fd->loop.n2
75a70cf9 4145 = force_gimple_operand_gsi (&gsi, fold_convert (itype, fd->loop.n2),
4146 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 4147 fd->loop.step
75a70cf9 4148 = force_gimple_operand_gsi (&gsi, fold_convert (itype, fd->loop.step),
4149 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 4150
4151 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
4152 t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
4153 t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
4154 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
4155 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
4156 t = fold_build2 (TRUNC_DIV_EXPR, itype,
4157 fold_build1 (NEGATE_EXPR, itype, t),
4158 fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
4159 else
4160 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
4161 t = fold_convert (itype, t);
75a70cf9 4162 n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
1e8e9920 4163
fd6481cf 4164 t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
75a70cf9 4165 q = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
1e8e9920 4166
fd6481cf 4167 t = fold_build2 (MULT_EXPR, itype, q, nthreads);
4168 t = fold_build2 (NE_EXPR, itype, t, n);
4169 t = fold_build2 (PLUS_EXPR, itype, q, t);
75a70cf9 4170 q = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
1e8e9920 4171
fd6481cf 4172 t = build2 (MULT_EXPR, itype, q, threadid);
75a70cf9 4173 s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
1e8e9920 4174
fd6481cf 4175 t = fold_build2 (PLUS_EXPR, itype, s0, q);
4176 t = fold_build2 (MIN_EXPR, itype, t, n);
75a70cf9 4177 e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
79acaae1 4178
1e8e9920 4179 t = build2 (GE_EXPR, boolean_type_node, s0, e0);
75a70cf9 4180 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
773c5ba7 4181
75a70cf9 4182 /* Remove the GIMPLE_OMP_FOR statement. */
4183 gsi_remove (&gsi, true);
773c5ba7 4184
4185 /* Setup code for sequential iteration goes in SEQ_START_BB. */
75a70cf9 4186 gsi = gsi_start_bb (seq_start_bb);
1e8e9920 4187
fd6481cf 4188 t = fold_convert (itype, s0);
4189 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4190 if (POINTER_TYPE_P (type))
4191 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4192 fold_convert (sizetype, t));
4193 else
4194 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
75a70cf9 4195 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
4196 false, GSI_CONTINUE_LINKING);
4197 stmt = gimple_build_assign (fd->loop.v, t);
4198 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
48e1416a 4199
fd6481cf 4200 t = fold_convert (itype, e0);
4201 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4202 if (POINTER_TYPE_P (type))
4203 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4204 fold_convert (sizetype, t));
4205 else
4206 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
75a70cf9 4207 e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4208 false, GSI_CONTINUE_LINKING);
1e8e9920 4209
75a70cf9 4210 /* The code controlling the sequential loop replaces the
4211 GIMPLE_OMP_CONTINUE. */
4212 gsi = gsi_last_bb (cont_bb);
4213 stmt = gsi_stmt (gsi);
4214 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
4215 vmain = gimple_omp_continue_control_use (stmt);
4216 vback = gimple_omp_continue_control_def (stmt);
79acaae1 4217
fd6481cf 4218 if (POINTER_TYPE_P (type))
4219 t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
4220 fold_convert (sizetype, fd->loop.step));
4221 else
4222 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
75a70cf9 4223 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
4224 true, GSI_SAME_STMT);
4225 stmt = gimple_build_assign (vback, t);
4226 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
79acaae1 4227
fd6481cf 4228 t = build2 (fd->loop.cond_code, boolean_type_node, vback, e);
75a70cf9 4229 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
1e8e9920 4230
75a70cf9 4231 /* Remove the GIMPLE_OMP_CONTINUE statement. */
4232 gsi_remove (&gsi, true);
773c5ba7 4233
75a70cf9 4234 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
4235 gsi = gsi_last_bb (exit_bb);
4236 if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
4237 force_gimple_operand_gsi (&gsi, build_omp_barrier (), false, NULL_TREE,
4238 false, GSI_SAME_STMT);
4239 gsi_remove (&gsi, true);
773c5ba7 4240
4241 /* Connect all the blocks. */
ac6e3339 4242 find_edge (entry_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
4243 find_edge (entry_bb, fin_bb)->flags = EDGE_TRUE_VALUE;
79acaae1 4244
ac6e3339 4245 find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
61e47ac8 4246 find_edge (cont_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
48e1416a 4247
79acaae1 4248 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, entry_bb);
4249 set_immediate_dominator (CDI_DOMINATORS, body_bb,
4250 recompute_dominator (CDI_DOMINATORS, body_bb));
4251 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
4252 recompute_dominator (CDI_DOMINATORS, fin_bb));
1e8e9920 4253}
4254
773c5ba7 4255
4256/* A subroutine of expand_omp_for. Generate code for a parallel
4257 loop with static schedule and a specified chunk size. Given
4258 parameters:
1e8e9920 4259
4260 for (V = N1; V cond N2; V += STEP) BODY;
4261
4262 where COND is "<" or ">", we generate pseudocode
4263
4264 if (cond is <)
4265 adj = STEP - 1;
4266 else
4267 adj = STEP + 1;
fd6481cf 4268 if ((__typeof (V)) -1 > 0 && cond is >)
4269 n = -(adj + N2 - N1) / -STEP;
4270 else
4271 n = (adj + N2 - N1) / STEP;
1e8e9920 4272 trip = 0;
79acaae1 4273 V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
4274 here so that V is defined
4275 if the loop is not entered
1e8e9920 4276 L0:
4277 s0 = (trip * nthreads + threadid) * CHUNK;
4278 e0 = min(s0 + CHUNK, n);
4279 if (s0 < n) goto L1; else goto L4;
4280 L1:
4281 V = s0 * STEP + N1;
4282 e = e0 * STEP + N1;
4283 L2:
4284 BODY;
4285 V += STEP;
4286 if (V cond e) goto L2; else goto L3;
4287 L3:
4288 trip += 1;
4289 goto L0;
4290 L4:
1e8e9920 4291*/
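/* A worked example with illustrative numbers: for CHUNK = 2,
   nthreads = 2 and n = 7, thread 0 computes s0 = 0 then 4 and
   thread 1 computes s0 = 2 then 6 on successive trips, i.e.
   iterations [0,2) and [4,6) versus [2,4) and [6,7); each trip
   reevaluates s0 = (trip * nthreads + threadid) * CHUNK until
   s0 >= n.  */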
4292
61e47ac8 4293static void
75a70cf9 4294expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
1e8e9920 4295{
75a70cf9 4296 tree n, s0, e0, e, t;
79acaae1 4297 tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
75a70cf9 4298 tree type, itype, v_main, v_back, v_extra;
773c5ba7 4299 basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
61e47ac8 4300 basic_block trip_update_bb, cont_bb, fin_bb;
75a70cf9 4301 gimple_stmt_iterator si;
4302 gimple stmt;
4303 edge se;
1e8e9920 4304
fd6481cf 4305 itype = type = TREE_TYPE (fd->loop.v);
4306 if (POINTER_TYPE_P (type))
4307 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
1e8e9920 4308
61e47ac8 4309 entry_bb = region->entry;
ac6e3339 4310 se = split_block (entry_bb, last_stmt (entry_bb));
4311 entry_bb = se->src;
4312 iter_part_bb = se->dest;
61e47ac8 4313 cont_bb = region->cont;
ac6e3339 4314 gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
4315 gcc_assert (BRANCH_EDGE (iter_part_bb)->dest
4316 == FALLTHRU_EDGE (cont_bb)->dest);
4317 seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
4318 body_bb = single_succ (seq_start_bb);
4319 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
4320 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
4321 fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
4322 trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
61e47ac8 4323 exit_bb = region->exit;
773c5ba7 4324
773c5ba7 4325 /* Trip and adjustment setup goes in ENTRY_BB. */
75a70cf9 4326 si = gsi_last_bb (entry_bb);
4327 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);
773c5ba7 4328
c2f47e15 4329 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
fd6481cf 4330 t = fold_convert (itype, t);
75a70cf9 4331 nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
4332 true, GSI_SAME_STMT);
48e1416a 4333
c2f47e15 4334 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
fd6481cf 4335 t = fold_convert (itype, t);
75a70cf9 4336 threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
4337 true, GSI_SAME_STMT);
79acaae1 4338
fd6481cf 4339 fd->loop.n1
75a70cf9 4340 = force_gimple_operand_gsi (&si, fold_convert (type, fd->loop.n1),
4341 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 4342 fd->loop.n2
75a70cf9 4343 = force_gimple_operand_gsi (&si, fold_convert (itype, fd->loop.n2),
4344 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 4345 fd->loop.step
75a70cf9 4346 = force_gimple_operand_gsi (&si, fold_convert (itype, fd->loop.step),
4347 true, NULL_TREE, true, GSI_SAME_STMT);
79acaae1 4348 fd->chunk_size
75a70cf9 4349 = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
4350 true, NULL_TREE, true, GSI_SAME_STMT);
fd6481cf 4351
4352 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
4353 t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
4354 t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
4355 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
4356 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
4357 t = fold_build2 (TRUNC_DIV_EXPR, itype,
4358 fold_build1 (NEGATE_EXPR, itype, t),
4359 fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
4360 else
4361 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
4362 t = fold_convert (itype, t);
75a70cf9 4363 n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
4364 true, GSI_SAME_STMT);
79acaae1 4365
fd6481cf 4366 trip_var = create_tmp_var (itype, ".trip");
79acaae1 4367 if (gimple_in_ssa_p (cfun))
4368 {
4369 add_referenced_var (trip_var);
75a70cf9 4370 trip_init = make_ssa_name (trip_var, NULL);
4371 trip_main = make_ssa_name (trip_var, NULL);
4372 trip_back = make_ssa_name (trip_var, NULL);
79acaae1 4373 }
1e8e9920 4374 else
79acaae1 4375 {
4376 trip_init = trip_var;
4377 trip_main = trip_var;
4378 trip_back = trip_var;
4379 }
1e8e9920 4380
75a70cf9 4381 stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
4382 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
773c5ba7 4383
fd6481cf 4384 t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
4385 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4386 if (POINTER_TYPE_P (type))
4387 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4388 fold_convert (sizetype, t));
4389 else
4390 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
75a70cf9 4391 v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
4392 true, GSI_SAME_STMT);
79acaae1 4393
75a70cf9 4394 /* Remove the GIMPLE_OMP_FOR. */
4395 gsi_remove (&si, true);
773c5ba7 4396
4397 /* Iteration space partitioning goes in ITER_PART_BB. */
75a70cf9 4398 si = gsi_last_bb (iter_part_bb);
1e8e9920 4399
fd6481cf 4400 t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
4401 t = fold_build2 (PLUS_EXPR, itype, t, threadid);
4402 t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
75a70cf9 4403 s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
4404 false, GSI_CONTINUE_LINKING);
1e8e9920 4405
fd6481cf 4406 t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
4407 t = fold_build2 (MIN_EXPR, itype, t, n);
75a70cf9 4408 e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
4409 false, GSI_CONTINUE_LINKING);
1e8e9920 4410
4411 t = build2 (LT_EXPR, boolean_type_node, s0, n);
75a70cf9 4412 gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);
773c5ba7 4413
4414 /* Setup code for sequential iteration goes in SEQ_START_BB. */
75a70cf9 4415 si = gsi_start_bb (seq_start_bb);
1e8e9920 4416
fd6481cf 4417 t = fold_convert (itype, s0);
4418 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4419 if (POINTER_TYPE_P (type))
4420 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4421 fold_convert (sizetype, t));
4422 else
4423 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
75a70cf9 4424 t = force_gimple_operand_gsi (&si, t, false, NULL_TREE,
4425 false, GSI_CONTINUE_LINKING);
4426 stmt = gimple_build_assign (fd->loop.v, t);
4427 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
1e8e9920 4428
fd6481cf 4429 t = fold_convert (itype, e0);
4430 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4431 if (POINTER_TYPE_P (type))
4432 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4433 fold_convert (sizetype, t));
4434 else
4435 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
75a70cf9 4436 e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
4437 false, GSI_CONTINUE_LINKING);
1e8e9920 4438
61e47ac8 4439 /* The code controlling the sequential loop goes in CONT_BB,
75a70cf9 4440 replacing the GIMPLE_OMP_CONTINUE. */
4441 si = gsi_last_bb (cont_bb);
4442 stmt = gsi_stmt (si);
4443 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
4444 v_main = gimple_omp_continue_control_use (stmt);
4445 v_back = gimple_omp_continue_control_def (stmt);
79acaae1 4446
fd6481cf 4447 if (POINTER_TYPE_P (type))
4448 t = fold_build2 (POINTER_PLUS_EXPR, type, v_main,
4449 fold_convert (sizetype, fd->loop.step));
4450 else
75a70cf9 4451 t = fold_build2 (PLUS_EXPR, type, v_main, fd->loop.step);
4452 stmt = gimple_build_assign (v_back, t);
4453 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
79acaae1 4454
fd6481cf 4455 t = build2 (fd->loop.cond_code, boolean_type_node, v_back, e);
75a70cf9 4456 gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);
48e1416a 4457
75a70cf9 4458 /* Remove GIMPLE_OMP_CONTINUE. */
4459 gsi_remove (&si, true);
773c5ba7 4460
4461 /* Trip update code goes into TRIP_UPDATE_BB. */
75a70cf9 4462 si = gsi_start_bb (trip_update_bb);
1e8e9920 4463
fd6481cf 4464 t = build_int_cst (itype, 1);
4465 t = build2 (PLUS_EXPR, itype, trip_main, t);
75a70cf9 4466 stmt = gimple_build_assign (trip_back, t);
4467 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
1e8e9920 4468
75a70cf9 4469 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
4470 si = gsi_last_bb (exit_bb);
4471 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
4472 force_gimple_operand_gsi (&si, build_omp_barrier (), false, NULL_TREE,
4473 false, GSI_SAME_STMT);
4474 gsi_remove (&si, true);
1e8e9920 4475
773c5ba7 4476 /* Connect the new blocks. */
ac6e3339 4477 find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
4478 find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
79acaae1 4479
ac6e3339 4480 find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
4481 find_edge (cont_bb, trip_update_bb)->flags = EDGE_FALSE_VALUE;
79acaae1 4482
ac6e3339 4483 redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
79acaae1 4484
4485 if (gimple_in_ssa_p (cfun))
4486 {
75a70cf9 4487 gimple_stmt_iterator psi;
4488 gimple phi;
4489 edge re, ene;
4490 edge_var_map_vector head;
4491 edge_var_map *vm;
4492 size_t i;
4493
79acaae1 4494 /* When we redirect the edge from trip_update_bb to iter_part_bb, we
4495 remove arguments of the phi nodes in fin_bb. We need to create
4496 appropriate phi nodes in iter_part_bb instead. */
4497 se = single_pred_edge (fin_bb);
4498 re = single_succ_edge (trip_update_bb);
75a70cf9 4499 head = redirect_edge_var_map_vector (re);
79acaae1 4500 ene = single_succ_edge (entry_bb);
4501
75a70cf9 4502 psi = gsi_start_phis (fin_bb);
4503 for (i = 0; !gsi_end_p (psi) && VEC_iterate (edge_var_map, head, i, vm);
4504 gsi_next (&psi), ++i)
79acaae1 4505 {
75a70cf9 4506 gimple nphi;
efbcb6de 4507 source_location locus;
75a70cf9 4508
4509 phi = gsi_stmt (psi);
4510 t = gimple_phi_result (phi);
4511 gcc_assert (t == redirect_edge_var_map_result (vm));
79acaae1 4512 nphi = create_phi_node (t, iter_part_bb);
4513 SSA_NAME_DEF_STMT (t) = nphi;
4514
4515 t = PHI_ARG_DEF_FROM_EDGE (phi, se);
efbcb6de 4516 locus = gimple_phi_arg_location_from_edge (phi, se);
4517
fd6481cf 4518 /* A special case -- fd->loop.v is not yet computed in
4519 iter_part_bb, we need to use v_extra instead. */
4520 if (t == fd->loop.v)
79acaae1 4521 t = v_extra;
efbcb6de 4522 add_phi_arg (nphi, t, ene, locus);
4523 locus = redirect_edge_var_map_location (vm);
4524 add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
75a70cf9 4525 }
4526 gcc_assert (!gsi_end_p (psi) && i == VEC_length (edge_var_map, head));
4527 redirect_edge_var_map_clear (re);
4528 while (1)
4529 {
4530 psi = gsi_start_phis (fin_bb);
4531 if (gsi_end_p (psi))
4532 break;
4533 remove_phi_node (&psi, false);
79acaae1 4534 }
79acaae1 4535
4536 /* Make phi node for trip. */
4537 phi = create_phi_node (trip_main, iter_part_bb);
4538 SSA_NAME_DEF_STMT (trip_main) = phi;
efbcb6de 4539 add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb),
4540 UNKNOWN_LOCATION);
4541 add_phi_arg (phi, trip_init, single_succ_edge (entry_bb),
4542 UNKNOWN_LOCATION);
79acaae1 4543 }
4544
4545 set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
4546 set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
4547 recompute_dominator (CDI_DOMINATORS, iter_part_bb));
4548 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
4549 recompute_dominator (CDI_DOMINATORS, fin_bb));
4550 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
4551 recompute_dominator (CDI_DOMINATORS, seq_start_bb));
4552 set_immediate_dominator (CDI_DOMINATORS, body_bb,
4553 recompute_dominator (CDI_DOMINATORS, body_bb));
1e8e9920 4554}
4555
1e8e9920 4556
773c5ba7 4557/* Expand the OpenMP loop defined by REGION. */
1e8e9920 4558
773c5ba7 4559static void
4560expand_omp_for (struct omp_region *region)
4561{
4562 struct omp_for_data fd;
fd6481cf 4563 struct omp_for_data_loop *loops;
1e8e9920 4564
fd6481cf 4565 loops
4566 = (struct omp_for_data_loop *)
75a70cf9 4567 alloca (gimple_omp_for_collapse (last_stmt (region->entry))
fd6481cf 4568 * sizeof (struct omp_for_data_loop));
fd6481cf 4569 extract_omp_for_data (last_stmt (region->entry), &fd, loops);
f77459c5 4570 region->sched_kind = fd.sched_kind;
1e8e9920 4571
b3a3ddec 4572 gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
4573 BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
4574 FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
4575 if (region->cont)
4576 {
4577 gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
4578 BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
4579 FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
4580 }
4581
03ed154b 4582 if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
4583 && !fd.have_ordered
fd6481cf 4584 && fd.collapse == 1
ac6e3339 4585 && region->cont != NULL)
1e8e9920 4586 {
4587 if (fd.chunk_size == NULL)
61e47ac8 4588 expand_omp_for_static_nochunk (region, &fd);
1e8e9920 4589 else
61e47ac8 4590 expand_omp_for_static_chunk (region, &fd);
1e8e9920 4591 }
4592 else
4593 {
fd6481cf 4594 int fn_index, start_ix, next_ix;
4595
4596 gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
4597 fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
75a70cf9 4598 ? 3 : fd.sched_kind;
fd6481cf 4599 fn_index += fd.have_ordered * 4;
4600 start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
4601 next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
4602 if (fd.iter_type == long_long_unsigned_type_node)
4603 {
4604 start_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_START
4605 - BUILT_IN_GOMP_LOOP_STATIC_START;
4606 next_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
4607 - BUILT_IN_GOMP_LOOP_STATIC_NEXT;
4608 }
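      /* For example (illustrative): schedule(dynamic) yields
	 fn_index = 1, and adding an ordered clause gives fn_index = 5,
	 selecting the GOMP_loop_ordered_dynamic_start/_next pair,
	 assuming the builtin enumeration order this arithmetic
	 relies on.  */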
b9c74b4d 4609 expand_omp_for_generic (region, &fd, (enum built_in_function) start_ix,
4610 (enum built_in_function) next_ix);
1e8e9920 4611 }
28c92cbb 4612
4613 update_ssa (TODO_update_ssa_only_virtuals);
1e8e9920 4614}
4615
1e8e9920 4616
4617/* Expand code for an OpenMP sections directive. In pseudo code, we generate
4618
1e8e9920 4619 v = GOMP_sections_start (n);
4620 L0:
4621 switch (v)
4622 {
4623 case 0:
4624 goto L2;
4625 case 1:
4626 section 1;
4627 goto L1;
4628 case 2:
4629 ...
4630 case n:
4631 ...
1e8e9920 4632 default:
4633 abort ();
4634 }
4635 L1:
4636 v = GOMP_sections_next ();
4637 goto L0;
4638 L2:
4639 reduction;
4640
773c5ba7 4641   If this is a combined parallel sections construct, replace the
79acaae1 4642   call to GOMP_sections_start with a call to GOMP_sections_next.  */
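/* An illustrative sketch (assumed user source, not compiler output):
   for

       #pragma omp sections
	 {
	 #pragma omp section
	   foo ();
	 #pragma omp section
	   bar ();
	 }

   GOMP_sections_start (2) hands each thread a section number: the
   switch dispatches v == 1 to foo, v == 2 to bar, and v == 0 jumps
   past the dispatch loop to L2.  */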
1e8e9920 4643
4644static void
773c5ba7 4645expand_omp_sections (struct omp_region *region)
1e8e9920 4646{
f018d957 4647 tree t, u, vin = NULL, vmain, vnext, l2;
75a70cf9 4648 VEC (tree,heap) *label_vec;
4649 unsigned len;
ac6e3339 4650 basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
75a70cf9 4651 gimple_stmt_iterator si, switch_si;
4652 gimple sections_stmt, stmt, cont;
9884aaf8 4653 edge_iterator ei;
4654 edge e;
61e47ac8 4655 struct omp_region *inner;
75a70cf9 4656 unsigned i, casei;
ac6e3339 4657 bool exit_reachable = region->cont != NULL;
1e8e9920 4658
ac6e3339 4659 gcc_assert (exit_reachable == (region->exit != NULL));
61e47ac8 4660 entry_bb = region->entry;
ac6e3339 4661 l0_bb = single_succ (entry_bb);
61e47ac8 4662 l1_bb = region->cont;
ac6e3339 4663 l2_bb = region->exit;
4664 if (exit_reachable)
03ed154b 4665 {
9884aaf8 4666 if (single_pred (l2_bb) == l0_bb)
75a70cf9 4667 l2 = gimple_block_label (l2_bb);
9884aaf8 4668 else
4669 {
4670 /* This can happen if there are reductions. */
4671 len = EDGE_COUNT (l0_bb->succs);
4672 gcc_assert (len > 0);
4673 e = EDGE_SUCC (l0_bb, len - 1);
75a70cf9 4674 si = gsi_last_bb (e->dest);
6d5a0fbe 4675 l2 = NULL_TREE;
75a70cf9 4676 if (gsi_end_p (si)
4677 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
4678 l2 = gimple_block_label (e->dest);
9884aaf8 4679 else
4680 FOR_EACH_EDGE (e, ei, l0_bb->succs)
4681 {
75a70cf9 4682 si = gsi_last_bb (e->dest);
4683 if (gsi_end_p (si)
4684 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
9884aaf8 4685 {
75a70cf9 4686 l2 = gimple_block_label (e->dest);
9884aaf8 4687 break;
4688 }
4689 }
4690 }
03ed154b 4691 default_bb = create_empty_bb (l1_bb->prev_bb);
03ed154b 4692 }
4693 else
4694 {
ac6e3339 4695 default_bb = create_empty_bb (l0_bb);
75a70cf9 4696 l2 = gimple_block_label (default_bb);
03ed154b 4697 }
773c5ba7 4698
4699  /* We will build a switch() with enough cases for all the
75a70cf9 4700     GIMPLE_OMP_SECTION regions, a '0' case taken when no more work
773c5ba7 4701     remains, and a default case to abort if something goes wrong.  */
ac6e3339 4702 len = EDGE_COUNT (l0_bb->succs);
75a70cf9 4703
4704 /* Use VEC_quick_push on label_vec throughout, since we know the size
4705 in advance. */
4706 label_vec = VEC_alloc (tree, heap, len);
1e8e9920 4707
61e47ac8 4708 /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
75a70cf9 4709 GIMPLE_OMP_SECTIONS statement. */
4710 si = gsi_last_bb (entry_bb);
4711 sections_stmt = gsi_stmt (si);
4712 gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
4713 vin = gimple_omp_sections_control (sections_stmt);
773c5ba7 4714 if (!is_combined_parallel (region))
1e8e9920 4715 {
773c5ba7 4716 /* If we are not inside a combined parallel+sections region,
4717 call GOMP_sections_start. */
ac6e3339 4718 t = build_int_cst (unsigned_type_node,
4719 exit_reachable ? len - 1 : len);
1e8e9920 4720 u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
75a70cf9 4721 stmt = gimple_build_call (u, 1, t);
1e8e9920 4722 }
79acaae1 4723 else
4724 {
4725 /* Otherwise, call GOMP_sections_next. */
4726 u = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
75a70cf9 4727 stmt = gimple_build_call (u, 0);
79acaae1 4728 }
75a70cf9 4729 gimple_call_set_lhs (stmt, vin);
4730 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
4731 gsi_remove (&si, true);
4732
4733 /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
4734 L0_BB. */
4735 switch_si = gsi_last_bb (l0_bb);
4736 gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
79acaae1 4737 if (exit_reachable)
4738 {
4739 cont = last_stmt (l1_bb);
75a70cf9 4740 gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
4741 vmain = gimple_omp_continue_control_use (cont);
4742 vnext = gimple_omp_continue_control_def (cont);
79acaae1 4743 }
4744 else
4745 {
4746 vmain = vin;
4747 vnext = NULL_TREE;
4748 }
1e8e9920 4749
ac6e3339 4750 i = 0;
4751 if (exit_reachable)
4752 {
4753 t = build3 (CASE_LABEL_EXPR, void_type_node,
4754 build_int_cst (unsigned_type_node, 0), NULL, l2);
75a70cf9 4755 VEC_quick_push (tree, label_vec, t);
ac6e3339 4756 i++;
4757 }
03ed154b 4758
75a70cf9 4759 /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR. */
ac6e3339 4760 for (inner = region->inner, casei = 1;
4761 inner;
4762 inner = inner->next, i++, casei++)
1e8e9920 4763 {
773c5ba7 4764 basic_block s_entry_bb, s_exit_bb;
4765
9884aaf8 4766 /* Skip optional reduction region. */
75a70cf9 4767 if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
9884aaf8 4768 {
4769 --i;
4770 --casei;
4771 continue;
4772 }
4773
61e47ac8 4774 s_entry_bb = inner->entry;
4775 s_exit_bb = inner->exit;
1e8e9920 4776
75a70cf9 4777 t = gimple_block_label (s_entry_bb);
ac6e3339 4778 u = build_int_cst (unsigned_type_node, casei);
1e8e9920 4779 u = build3 (CASE_LABEL_EXPR, void_type_node, u, NULL, t);
75a70cf9 4780 VEC_quick_push (tree, label_vec, u);
61e47ac8 4781
75a70cf9 4782 si = gsi_last_bb (s_entry_bb);
4783 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
4784 gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
4785 gsi_remove (&si, true);
61e47ac8 4786 single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
03ed154b 4787
4788 if (s_exit_bb == NULL)
4789 continue;
4790
75a70cf9 4791 si = gsi_last_bb (s_exit_bb);
4792 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
4793 gsi_remove (&si, true);
03ed154b 4794
773c5ba7 4795 single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
1e8e9920 4796 }
4797
773c5ba7 4798 /* Error handling code goes in DEFAULT_BB. */
75a70cf9 4799 t = gimple_block_label (default_bb);
1e8e9920 4800 u = build3 (CASE_LABEL_EXPR, void_type_node, NULL, NULL, t);
61e47ac8 4801 make_edge (l0_bb, default_bb, 0);
1e8e9920 4802
75a70cf9 4803 stmt = gimple_build_switch_vec (vmain, u, label_vec);
4804 gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
4805 gsi_remove (&switch_si, true);
4806 VEC_free (tree, heap, label_vec);
4807
4808 si = gsi_start_bb (default_bb);
4809 stmt = gimple_build_call (built_in_decls[BUILT_IN_TRAP], 0);
4810 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
773c5ba7 4811
ac6e3339 4812 if (exit_reachable)
03ed154b 4813 {
ac6e3339 4814 /* Code to get the next section goes in L1_BB. */
75a70cf9 4815 si = gsi_last_bb (l1_bb);
4816 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);
1e8e9920 4817
75a70cf9 4818 stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT], 0);
4819 gimple_call_set_lhs (stmt, vnext);
4820 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
4821 gsi_remove (&si, true);
773c5ba7 4822
ac6e3339 4823 single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
4824
75a70cf9 4825 /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB. */
4826 si = gsi_last_bb (l2_bb);
4827 if (gimple_omp_return_nowait_p (gsi_stmt (si)))
03ed154b 4828 t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
4829 else
4830 t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
75a70cf9 4831 stmt = gimple_build_call (t, 0);
4832 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
4833 gsi_remove (&si, true);
03ed154b 4834 }
773c5ba7 4835
79acaae1 4836 set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
773c5ba7 4837}
1e8e9920 4838
1e8e9920 4839
61e47ac8 4840/* Expand code for an OpenMP single directive. We've already expanded
4841   much of the code; here we simply place the GOMP_barrier call.  */
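/* For instance (illustrative), when lowering

       #pragma omp single copyprivate (x)
	 x = compute ();

   the GOMP_single_copy sequence already emitted must keep its
   terminal barrier, since the other threads copy the broadcast
   value of x out before the executing thread may leave.  */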
4842
4843static void
4844expand_omp_single (struct omp_region *region)
4845{
4846 basic_block entry_bb, exit_bb;
75a70cf9 4847 gimple_stmt_iterator si;
61e47ac8 4848 bool need_barrier = false;
4849
4850 entry_bb = region->entry;
4851 exit_bb = region->exit;
4852
75a70cf9 4853 si = gsi_last_bb (entry_bb);
61e47ac8 4854 /* The terminal barrier at the end of a GOMP_single_copy sequence cannot
4855 be removed. We need to ensure that the thread that entered the single
4856 does not exit before the data is copied out by the other threads. */
75a70cf9 4857 if (find_omp_clause (gimple_omp_single_clauses (gsi_stmt (si)),
61e47ac8 4858 OMP_CLAUSE_COPYPRIVATE))
4859 need_barrier = true;
75a70cf9 4860 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
4861 gsi_remove (&si, true);
61e47ac8 4862 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4863
75a70cf9 4864 si = gsi_last_bb (exit_bb);
4865 if (!gimple_omp_return_nowait_p (gsi_stmt (si)) || need_barrier)
4866 force_gimple_operand_gsi (&si, build_omp_barrier (), false, NULL_TREE,
4867 false, GSI_SAME_STMT);
4868 gsi_remove (&si, true);
61e47ac8 4869 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
4870}
4871
4872
4873/* Generic expansion for OpenMP synchronization directives: master,
4874 ordered and critical. All we need to do here is remove the entry
4875 and exit markers for REGION. */
773c5ba7 4876
4877static void
4878expand_omp_synch (struct omp_region *region)
4879{
4880 basic_block entry_bb, exit_bb;
75a70cf9 4881 gimple_stmt_iterator si;
773c5ba7 4882
61e47ac8 4883 entry_bb = region->entry;
4884 exit_bb = region->exit;
773c5ba7 4885
75a70cf9 4886 si = gsi_last_bb (entry_bb);
4887 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
4888 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
4889 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
4890 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL);
4891 gsi_remove (&si, true);
773c5ba7 4892 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4893
03ed154b 4894 if (exit_bb)
4895 {
75a70cf9 4896 si = gsi_last_bb (exit_bb);
4897 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
4898 gsi_remove (&si, true);
03ed154b 4899 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
4900 }
773c5ba7 4901}
1e8e9920 4902
cb7f680b 4903/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
4904 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
4905 size of the data type, and thus usable to find the index of the builtin
4906 decl. Returns false if the expression is not of the proper form. */
4907
4908static bool
4909expand_omp_atomic_fetch_op (basic_block load_bb,
4910 tree addr, tree loaded_val,
4911 tree stored_val, int index)
4912{
4913 enum built_in_function base;
4914 tree decl, itype, call;
4915 enum insn_code *optab;
4916 tree rhs;
4917 basic_block store_bb = single_succ (load_bb);
75a70cf9 4918 gimple_stmt_iterator gsi;
4919 gimple stmt;
389dd41b 4920 location_t loc;
cb7f680b 4921
4922 /* We expect to find the following sequences:
48e1416a 4923
cb7f680b 4924 load_bb:
75a70cf9 4925 GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)
cb7f680b 4926
4927 store_bb:
4928 val = tmp OP something; (or: something OP tmp)
48e1416a 4929	 GIMPLE_OMP_ATOMIC_STORE (val)
cb7f680b 4930
48e1416a 4931	 ??? FIXME: Allow a more flexible sequence.
cb7f680b 4932 Perhaps use data flow to pick the statements.
48e1416a 4933
cb7f680b 4934 */
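  /* An illustrative mapping (assumed user source): for

	 #pragma omp atomic
	   x += 3;

     with int x, INDEX is 2 (log2 of the 4-byte size), so the call
     built below is __sync_fetch_and_add_4 (&x, 3), assuming the usual
     builtin ordering and that the target's sync_add_optab supports
     the mode.  */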
4935
75a70cf9 4936 gsi = gsi_after_labels (store_bb);
4937 stmt = gsi_stmt (gsi);
389dd41b 4938 loc = gimple_location (stmt);
75a70cf9 4939 if (!is_gimple_assign (stmt))
cb7f680b 4940 return false;
75a70cf9 4941 gsi_next (&gsi);
4942 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
cb7f680b 4943 return false;
4944
75a70cf9 4945 if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
cb7f680b 4946 return false;
4947
cb7f680b 4948 /* Check for one of the supported fetch-op operations. */
75a70cf9 4949 switch (gimple_assign_rhs_code (stmt))
cb7f680b 4950 {
4951 case PLUS_EXPR:
4952 case POINTER_PLUS_EXPR:
4953 base = BUILT_IN_FETCH_AND_ADD_N;
4954 optab = sync_add_optab;
4955 break;
4956 case MINUS_EXPR:
4957 base = BUILT_IN_FETCH_AND_SUB_N;
4958 optab = sync_add_optab;
4959 break;
4960 case BIT_AND_EXPR:
4961 base = BUILT_IN_FETCH_AND_AND_N;
4962 optab = sync_and_optab;
4963 break;
4964 case BIT_IOR_EXPR:
4965 base = BUILT_IN_FETCH_AND_OR_N;
4966 optab = sync_ior_optab;
4967 break;
4968 case BIT_XOR_EXPR:
4969 base = BUILT_IN_FETCH_AND_XOR_N;
4970 optab = sync_xor_optab;
4971 break;
4972 default:
4973 return false;
4974 }
4975 /* Make sure the expression is of the proper form. */
75a70cf9 4976 if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
4977 rhs = gimple_assign_rhs2 (stmt);
4978 else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
4979 && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
4980 rhs = gimple_assign_rhs1 (stmt);
cb7f680b 4981 else
4982 return false;
4983
4984 decl = built_in_decls[base + index + 1];
4985 itype = TREE_TYPE (TREE_TYPE (decl));
4986
4987 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
4988 return false;
4989
75a70cf9 4990 gsi = gsi_last_bb (load_bb);
4991 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
389dd41b 4992 call = build_call_expr_loc (loc,
4993 decl, 2, addr,
4994 fold_convert_loc (loc, itype, rhs));
4995 call = fold_convert_loc (loc, void_type_node, call);
75a70cf9 4996 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
4997 gsi_remove (&gsi, true);
cb7f680b 4998
75a70cf9 4999 gsi = gsi_last_bb (store_bb);
5000 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
5001 gsi_remove (&gsi, true);
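  /* The last remaining statement in STORE_BB is now the assignment that
     computed STORED_VAL; it is dead as well, so remove it too. */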
5002 gsi = gsi_last_bb (store_bb);
5003 gsi_remove (&gsi, true);
cb7f680b 5004
5005 if (gimple_in_ssa_p (cfun))
5006 update_ssa (TODO_update_ssa_no_phi);
5007
5008 return true;
5009}
5010
5011/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
5012
5013 oldval = *addr;
5014 repeat:
5015 newval = rhs; // with oldval replacing *addr in rhs
5016 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5017 if (oldval != newval)
5018 goto repeat;
5019
5020 INDEX is log2 of the size of the data type, and thus usable to find the
5021 index of the builtin decl. */
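/* For example, a 4-byte float update cannot use the fetch-op path (the
   type is neither integral nor pointer), so the value is view-converted
   to a same-sized integer below and the loop is implemented with
   __sync_val_compare_and_swap_4. */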
5022
5023static bool
5024expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
5025 tree addr, tree loaded_val, tree stored_val,
5026 int index)
5027{
790368c5 5028 tree loadedi, storedi, initial, new_storedi, old_vali;
cb7f680b 5029 tree type, itype, cmpxchg, iaddr;
75a70cf9 5030 gimple_stmt_iterator si;
cb7f680b 5031 basic_block loop_header = single_succ (load_bb);
75a70cf9 5032 gimple phi, stmt;
cb7f680b 5033 edge e;
5034
5035 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5036 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5037 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5038
5039 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5040 return false;
5041
75a70cf9 5042 /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD. */
5043 si = gsi_last_bb (load_bb);
5044 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
5045
790368c5 5046 /* For floating-point values, we'll need to view-convert them to integers
5047 so that we can perform the atomic compare and swap. Simplify the
5048 following code by always setting up the "i"ntegral variables. */
5049 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5050 {
75a70cf9 5051 tree iaddr_val;
5052
8115f0af 5053 iaddr = create_tmp_var (build_pointer_type_for_mode (itype, ptr_mode,
5054 true), NULL);
75a70cf9 5055 iaddr_val
5056 = force_gimple_operand_gsi (&si,
5057 fold_convert (TREE_TYPE (iaddr), addr),
5058 false, NULL_TREE, true, GSI_SAME_STMT);
5059 stmt = gimple_build_assign (iaddr, iaddr_val);
5060 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
790368c5 5061 loadedi = create_tmp_var (itype, NULL);
5062 if (gimple_in_ssa_p (cfun))
5063 {
5064 add_referenced_var (iaddr);
5065 add_referenced_var (loadedi);
5066 loadedi = make_ssa_name (loadedi, NULL);
5067 }
5068 }
5069 else
5070 {
5071 iaddr = addr;
5072 loadedi = loaded_val;
5073 }
75a70cf9 5074
5075 initial = force_gimple_operand_gsi (&si, build_fold_indirect_ref (iaddr),
5076 true, NULL_TREE, true, GSI_SAME_STMT);
790368c5 5077
5078 /* Move the value to the LOADEDI temporary. */
cb7f680b 5079 if (gimple_in_ssa_p (cfun))
5080 {
75a70cf9 5081 gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
790368c5 5082 phi = create_phi_node (loadedi, loop_header);
5083 SSA_NAME_DEF_STMT (loadedi) = phi;
cb7f680b 5084 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
5085 initial);
5086 }
5087 else
75a70cf9 5088 gsi_insert_before (&si,
5089 gimple_build_assign (loadedi, initial),
5090 GSI_SAME_STMT);
790368c5 5091 if (loadedi != loaded_val)
5092 {
75a70cf9 5093 gimple_stmt_iterator gsi2;
5094 tree x;
790368c5 5095
5096 x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
75a70cf9 5097 gsi2 = gsi_start_bb (loop_header);
790368c5 5098 if (gimple_in_ssa_p (cfun))
5099 {
75a70cf9 5100 gimple stmt;
5101 x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
5102 true, GSI_SAME_STMT);
5103 stmt = gimple_build_assign (loaded_val, x);
5104 gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
790368c5 5105 }
5106 else
5107 {
75a70cf9 5108 x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
5109 force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
5110 true, GSI_SAME_STMT);
790368c5 5111 }
5112 }
75a70cf9 5113 gsi_remove (&si, true);
cb7f680b 5114
75a70cf9 5115 si = gsi_last_bb (store_bb);
5116 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
cb7f680b 5117
790368c5 5118 if (iaddr == addr)
5119 storedi = stored_val;
cb7f680b 5120 else
790368c5 5121 storedi =
75a70cf9 5122 force_gimple_operand_gsi (&si,
790368c5 5123 build1 (VIEW_CONVERT_EXPR, itype,
5124 stored_val), true, NULL_TREE, true,
75a70cf9 5125 GSI_SAME_STMT);
cb7f680b 5126
5127 /* Build the compare&swap statement. */
5128 new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
75a70cf9 5129 new_storedi = force_gimple_operand_gsi (&si,
87f9ffa4 5130 fold_convert (TREE_TYPE (loadedi),
5131 new_storedi),
cb7f680b 5132 true, NULL_TREE,
75a70cf9 5133 true, GSI_SAME_STMT);
cb7f680b 5134
5135 if (gimple_in_ssa_p (cfun))
5136 old_vali = loadedi;
5137 else
5138 {
87f9ffa4 5139 old_vali = create_tmp_var (TREE_TYPE (loadedi), NULL);
790368c5 5140 if (gimple_in_ssa_p (cfun))
5141 add_referenced_var (old_vali);
75a70cf9 5142 stmt = gimple_build_assign (old_vali, loadedi);
5143 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
cb7f680b 5144
75a70cf9 5145 stmt = gimple_build_assign (loadedi, new_storedi);
5146 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
cb7f680b 5147 }
5148
5149 /* Note that we always perform the comparison as an integer, even for
48e1416a 5150 floating point. This allows the atomic operation to properly
cb7f680b 5151 succeed even with NaNs and -0.0. */
75a70cf9 5152 stmt = gimple_build_cond_empty
5153 (build2 (NE_EXPR, boolean_type_node,
5154 new_storedi, old_vali));
5155 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
cb7f680b 5156
5157 /* Update cfg. */
5158 e = single_succ_edge (store_bb);
5159 e->flags &= ~EDGE_FALLTHRU;
5160 e->flags |= EDGE_FALSE_VALUE;
5161
5162 e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);
5163
790368c5 5164 /* Copy the new value to loadedi (we already did that before the condition
cb7f680b 5165 if we are not in SSA). */
5166 if (gimple_in_ssa_p (cfun))
5167 {
75a70cf9 5168 phi = gimple_seq_first_stmt (phi_nodes (loop_header));
790368c5 5169 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
cb7f680b 5170 }
5171
75a70cf9 5172 /* Remove GIMPLE_OMP_ATOMIC_STORE. */
5173 gsi_remove (&si, true);
cb7f680b 5174
5175 if (gimple_in_ssa_p (cfun))
5176 update_ssa (TODO_update_ssa_no_phi);
5177
5178 return true;
5179}
5180
5181/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
5182
5183 GOMP_atomic_start ();
5184 *addr = rhs;
5185 GOMP_atomic_end ();
5186
5187 The result is not globally atomic, but works so long as all parallel
5188 references are within #pragma omp atomic directives. According to
5189 responses received from omp@openmp.org, this appears to be within
5190 spec, which makes sense, since that's how several other compilers
48e1416a 5191 handle this situation as well.
75a70cf9 5192 LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
5193 expanding. STORED_VAL is the operand of the matching
5194 GIMPLE_OMP_ATOMIC_STORE.
cb7f680b 5195
48e1416a 5196 We replace
5197 GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with
cb7f680b 5198 loaded_val = *addr;
5199
5200 and replace
75a70cf9 5201 GIMPLE_OMP_ATOMIC_STORE (stored_val) with
48e1416a 5202 *addr = stored_val;
cb7f680b 5203*/
5204
5205static bool
5206expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
5207 tree addr, tree loaded_val, tree stored_val)
5208{
75a70cf9 5209 gimple_stmt_iterator si;
5210 gimple stmt;
cb7f680b 5211 tree t;
5212
75a70cf9 5213 si = gsi_last_bb (load_bb);
5214 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
cb7f680b 5215
5216 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
389dd41b 5217 t = build_function_call_expr (UNKNOWN_LOCATION, t, 0);
75a70cf9 5218 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
cb7f680b 5219
75a70cf9 5220 stmt = gimple_build_assign (loaded_val, build_fold_indirect_ref (addr));
5221 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
5222 gsi_remove (&si, true);
cb7f680b 5223
75a70cf9 5224 si = gsi_last_bb (store_bb);
5225 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
cb7f680b 5226
75a70cf9 5227 stmt = gimple_build_assign (build_fold_indirect_ref (unshare_expr (addr)),
cb7f680b 5228 stored_val);
75a70cf9 5229 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
cb7f680b 5230
5231 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
389dd41b 5232 t = build_function_call_expr (UNKNOWN_LOCATION, t, 0);
75a70cf9 5233 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
5234 gsi_remove (&si, true);
cb7f680b 5235
5236 if (gimple_in_ssa_p (cfun))
5237 update_ssa (TODO_update_ssa_no_phi);
5238 return true;
5239}
5240
48e1416a 5241/* Expand a GIMPLE_OMP_ATOMIC statement. We first try to expand it
5242 using expand_omp_atomic_fetch_op. If that fails, we try to
cb7f680b 5243 call expand_omp_atomic_pipeline, and if that fails too, the
5244 ultimate fallback is wrapping the operation in a mutex
48e1416a 5245 (expand_omp_atomic_mutex). REGION is the atomic region built
5246 by build_omp_regions_1(). */
cb7f680b 5247
5248static void
5249expand_omp_atomic (struct omp_region *region)
5250{
5251 basic_block load_bb = region->entry, store_bb = region->exit;
75a70cf9 5252 gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
5253 tree loaded_val = gimple_omp_atomic_load_lhs (load);
5254 tree addr = gimple_omp_atomic_load_rhs (load);
5255 tree stored_val = gimple_omp_atomic_store_val (store);
cb7f680b 5256 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5257 HOST_WIDE_INT index;
5258
5259 /* Make sure the type is one of the supported sizes. */
5260 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5261 index = exact_log2 (index);
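  /* INDEX 0..4 corresponds to operand sizes of 1, 2, 4, 8 and 16 bytes;
     e.g. a 4-byte type gives INDEX == 2, and built_in_decls[BASE + INDEX + 1]
     then selects the "_4" variant of the __sync builtins. */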
5262 if (index >= 0 && index <= 4)
5263 {
5264 unsigned int align = TYPE_ALIGN_UNIT (type);
5265
5266 /* __sync builtins require strict data alignment. */
5267 if (exact_log2 (align) >= index)
5268 {
5269 /* When possible, use specialized atomic update functions. */
5270 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5271 && store_bb == single_succ (load_bb))
5272 {
5273 if (expand_omp_atomic_fetch_op (load_bb, addr,
5274 loaded_val, stored_val, index))
5275 return;
5276 }
5277
5278 /* If we don't have specialized __sync builtins, try and implement
5279 as a compare and swap loop. */
5280 if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
5281 loaded_val, stored_val, index))
5282 return;
5283 }
5284 }
5285
5286 /* The ultimate fallback is wrapping the operation in a mutex. */
5287 expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
5288}
5289
1e8e9920 5290
773c5ba7 5291/* Expand the parallel region tree rooted at REGION. Expansion
5292 proceeds in depth-first order. Innermost regions are expanded
5293 first. This way, parallel regions that require a new function to
75a70cf9 5294 be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
773c5ba7 5295 internal dependencies in their body. */
5296
5297static void
5298expand_omp (struct omp_region *region)
5299{
5300 while (region)
5301 {
1d22f541 5302 location_t saved_location;
5303
d1d5b012 5304 /* First, determine whether this is a combined parallel+workshare
5305 region. */
75a70cf9 5306 if (region->type == GIMPLE_OMP_PARALLEL)
d1d5b012 5307 determine_parallel_type (region);
5308
773c5ba7 5309 if (region->inner)
5310 expand_omp (region->inner);
5311
1d22f541 5312 saved_location = input_location;
75a70cf9 5313 if (gimple_has_location (last_stmt (region->entry)))
5314 input_location = gimple_location (last_stmt (region->entry));
1d22f541 5315
61e47ac8 5316 switch (region->type)
773c5ba7 5317 {
75a70cf9 5318 case GIMPLE_OMP_PARALLEL:
5319 case GIMPLE_OMP_TASK:
fd6481cf 5320 expand_omp_taskreg (region);
5321 break;
5322
75a70cf9 5323 case GIMPLE_OMP_FOR:
61e47ac8 5324 expand_omp_for (region);
5325 break;
773c5ba7 5326
75a70cf9 5327 case GIMPLE_OMP_SECTIONS:
61e47ac8 5328 expand_omp_sections (region);
5329 break;
773c5ba7 5330
75a70cf9 5331 case GIMPLE_OMP_SECTION:
61e47ac8 5332 /* Individual omp sections are handled together with their
75a70cf9 5333 parent GIMPLE_OMP_SECTIONS region. */
61e47ac8 5334 break;
773c5ba7 5335
75a70cf9 5336 case GIMPLE_OMP_SINGLE:
61e47ac8 5337 expand_omp_single (region);
5338 break;
773c5ba7 5339
75a70cf9 5340 case GIMPLE_OMP_MASTER:
5341 case GIMPLE_OMP_ORDERED:
5342 case GIMPLE_OMP_CRITICAL:
61e47ac8 5343 expand_omp_synch (region);
5344 break;
773c5ba7 5345
75a70cf9 5346 case GIMPLE_OMP_ATOMIC_LOAD:
cb7f680b 5347 expand_omp_atomic (region);
5348 break;
5349
61e47ac8 5350 default:
5351 gcc_unreachable ();
5352 }
cc5982dc 5353
1d22f541 5354 input_location = saved_location;
773c5ba7 5355 region = region->next;
5356 }
5357}
5358
5359
5360/* Helper for build_omp_regions. Scan the dominator tree starting at
28c92cbb 5361 block BB. PARENT is the region that contains BB. If SINGLE_TREE is
5362 true, the function ends once a single tree is built (otherwise, whole
5363 forest of OMP constructs may be built). */
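/* For example, a "#pragma omp parallel" whose body contains a single
   "#pragma omp for" produces a GIMPLE_OMP_PARALLEL region with the
   GIMPLE_OMP_FOR region as its only inner child; each region's exit
   block is the one holding the matching GIMPLE_OMP_RETURN. */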
773c5ba7 5364
5365static void
28c92cbb 5366build_omp_regions_1 (basic_block bb, struct omp_region *parent,
5367 bool single_tree)
773c5ba7 5368{
75a70cf9 5369 gimple_stmt_iterator gsi;
5370 gimple stmt;
773c5ba7 5371 basic_block son;
5372
75a70cf9 5373 gsi = gsi_last_bb (bb);
5374 if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
773c5ba7 5375 {
5376 struct omp_region *region;
75a70cf9 5377 enum gimple_code code;
773c5ba7 5378
75a70cf9 5379 stmt = gsi_stmt (gsi);
5380 code = gimple_code (stmt);
5381 if (code == GIMPLE_OMP_RETURN)
773c5ba7 5382 {
5383 /* STMT is the return point out of region PARENT. Mark it
5384 as the exit point and make PARENT the immediately
5385 enclosing region. */
5386 gcc_assert (parent);
5387 region = parent;
61e47ac8 5388 region->exit = bb;
773c5ba7 5389 parent = parent->outer;
773c5ba7 5390 }
75a70cf9 5391 else if (code == GIMPLE_OMP_ATOMIC_STORE)
cb7f680b 5392 {
75a70cf9 5393 /* GIMPLE_OMP_ATOMIC_STORE is analogous to
5394 GIMPLE_OMP_RETURN, but matches with
5395 GIMPLE_OMP_ATOMIC_LOAD. */
cb7f680b 5396 gcc_assert (parent);
75a70cf9 5397 gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
cb7f680b 5398 region = parent;
5399 region->exit = bb;
5400 parent = parent->outer;
5401 }
5402
75a70cf9 5403 else if (code == GIMPLE_OMP_CONTINUE)
61e47ac8 5404 {
5405 gcc_assert (parent);
5406 parent->cont = bb;
5407 }
75a70cf9 5408 else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
ac6e3339 5409 {
75a70cf9 5410 /* GIMPLE_OMP_SECTIONS_SWITCH is part of
5411 GIMPLE_OMP_SECTIONS, and we do nothing for it. */
5412 ;
ac6e3339 5413 }
773c5ba7 5414 else
5415 {
5416 /* Otherwise, this directive becomes the parent for a new
5417 region. */
61e47ac8 5418 region = new_omp_region (bb, code, parent);
773c5ba7 5419 parent = region;
5420 }
773c5ba7 5421 }
5422
28c92cbb 5423 if (single_tree && !parent)
5424 return;
5425
773c5ba7 5426 for (son = first_dom_son (CDI_DOMINATORS, bb);
5427 son;
5428 son = next_dom_son (CDI_DOMINATORS, son))
28c92cbb 5429 build_omp_regions_1 (son, parent, single_tree);
5430}
5431
5432/* Builds the tree of OMP regions rooted at ROOT, storing it to
5433 root_omp_region. */
5434
5435static void
5436build_omp_regions_root (basic_block root)
5437{
5438 gcc_assert (root_omp_region == NULL);
5439 build_omp_regions_1 (root, NULL, true);
5440 gcc_assert (root_omp_region != NULL);
773c5ba7 5441}
5442
28c92cbb 5443/* Expand the OMP construct (and its subconstructs) starting in HEAD. */
5444
5445void
5446omp_expand_local (basic_block head)
5447{
5448 build_omp_regions_root (head);
5449 if (dump_file && (dump_flags & TDF_DETAILS))
5450 {
5451 fprintf (dump_file, "\nOMP region tree\n\n");
5452 dump_omp_region (dump_file, root_omp_region, 0);
5453 fprintf (dump_file, "\n");
5454 }
5455
5456 remove_exit_barriers (root_omp_region);
5457 expand_omp (root_omp_region);
5458
5459 free_omp_regions ();
5460}
773c5ba7 5461
5462/* Scan the CFG and build a tree of OMP regions, storing it in
5463 root_omp_region. */
5464
5465static void
5466build_omp_regions (void)
5467{
61e47ac8 5468 gcc_assert (root_omp_region == NULL);
773c5ba7 5469 calculate_dominance_info (CDI_DOMINATORS);
28c92cbb 5470 build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
773c5ba7 5471}
5472
773c5ba7 5473/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
5474
2a1990e9 5475static unsigned int
773c5ba7 5476execute_expand_omp (void)
5477{
5478 build_omp_regions ();
5479
61e47ac8 5480 if (!root_omp_region)
5481 return 0;
773c5ba7 5482
61e47ac8 5483 if (dump_file)
5484 {
5485 fprintf (dump_file, "\nOMP region tree\n\n");
5486 dump_omp_region (dump_file, root_omp_region, 0);
5487 fprintf (dump_file, "\n");
773c5ba7 5488 }
61e47ac8 5489
5490 remove_exit_barriers (root_omp_region);
5491
5492 expand_omp (root_omp_region);
5493
61e47ac8 5494 cleanup_tree_cfg ();
5495
5496 free_omp_regions ();
5497
2a1990e9 5498 return 0;
773c5ba7 5499}
5500
79acaae1 5501/* OMP expansion -- the default pass, run before creation of SSA form. */
5502
773c5ba7 5503static bool
5504gate_expand_omp (void)
5505{
945e498c 5506 return (flag_openmp != 0 && errorcount == 0);
773c5ba7 5507}
5508
48e1416a 5509struct gimple_opt_pass pass_expand_omp =
773c5ba7 5510{
20099e35 5511 {
5512 GIMPLE_PASS,
773c5ba7 5513 "ompexp", /* name */
5514 gate_expand_omp, /* gate */
5515 execute_expand_omp, /* execute */
5516 NULL, /* sub */
5517 NULL, /* next */
5518 0, /* static_pass_number */
0b1615c1 5519 TV_NONE, /* tv_id */
773c5ba7 5520 PROP_gimple_any, /* properties_required */
41709826 5521 0, /* properties_provided */
773c5ba7 5522 0, /* properties_destroyed */
5523 0, /* todo_flags_start */
20099e35 5524 TODO_dump_func /* todo_flags_finish */
5525 }
773c5ba7 5526};
5527\f
5528/* Routines to lower OpenMP directives into OMP-GIMPLE. */
5529
75a70cf9 5530/* Lower the OpenMP sections directive in the current statement in GSI_P.
5531 CTX is the enclosing OMP context for the current statement. */
773c5ba7 5532
5533static void
75a70cf9 5534lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
773c5ba7 5535{
75a70cf9 5536 tree block, control;
5537 gimple_stmt_iterator tgsi;
773c5ba7 5538 unsigned i, len;
75a70cf9 5539 gimple stmt, new_stmt, bind, t;
5540 gimple_seq ilist, dlist, olist, new_body, body;
dac18d1a 5541 struct gimplify_ctx gctx;
773c5ba7 5542
75a70cf9 5543 stmt = gsi_stmt (*gsi_p);
773c5ba7 5544
dac18d1a 5545 push_gimplify_context (&gctx);
773c5ba7 5546
5547 dlist = NULL;
5548 ilist = NULL;
75a70cf9 5549 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5550 &ilist, &dlist, ctx);
773c5ba7 5551
75a70cf9 5552 tgsi = gsi_start (gimple_omp_body (stmt));
5553 for (len = 0; !gsi_end_p (tgsi); len++, gsi_next (&tgsi))
773c5ba7 5554 continue;
5555
75a70cf9 5556 tgsi = gsi_start (gimple_omp_body (stmt));
5557 body = NULL;
5558 for (i = 0; i < len; i++, gsi_next (&tgsi))
773c5ba7 5559 {
5560 omp_context *sctx;
75a70cf9 5561 gimple sec_start;
773c5ba7 5562
75a70cf9 5563 sec_start = gsi_stmt (tgsi);
773c5ba7 5564 sctx = maybe_lookup_ctx (sec_start);
5565 gcc_assert (sctx);
5566
75a70cf9 5567 gimple_seq_add_stmt (&body, sec_start);
61e47ac8 5568
75a70cf9 5569 lower_omp (gimple_omp_body (sec_start), sctx);
5570 gimple_seq_add_seq (&body, gimple_omp_body (sec_start));
5571 gimple_omp_set_body (sec_start, NULL);
773c5ba7 5572
5573 if (i == len - 1)
5574 {
75a70cf9 5575 gimple_seq l = NULL;
5576 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
773c5ba7 5577 &l, ctx);
75a70cf9 5578 gimple_seq_add_seq (&body, l);
5579 gimple_omp_section_set_last (sec_start);
773c5ba7 5580 }
48e1416a 5581
75a70cf9 5582 gimple_seq_add_stmt (&body, gimple_build_omp_return (false));
773c5ba7 5583 }
1e8e9920 5584
5585 block = make_node (BLOCK);
75a70cf9 5586 bind = gimple_build_bind (NULL, body, block);
1e8e9920 5587
75a70cf9 5588 olist = NULL;
5589 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
773c5ba7 5590
1d22f541 5591 block = make_node (BLOCK);
75a70cf9 5592 new_stmt = gimple_build_bind (NULL, NULL, block);
773c5ba7 5593
1d22f541 5594 pop_gimplify_context (new_stmt);
75a70cf9 5595 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5596 BLOCK_VARS (block) = gimple_bind_vars (bind);
1d22f541 5597 if (BLOCK_VARS (block))
5598 TREE_USED (block) = 1;
5599
75a70cf9 5600 new_body = NULL;
5601 gimple_seq_add_seq (&new_body, ilist);
5602 gimple_seq_add_stmt (&new_body, stmt);
5603 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5604 gimple_seq_add_stmt (&new_body, bind);
61e47ac8 5605
ac6e3339 5606 control = create_tmp_var (unsigned_type_node, ".section");
75a70cf9 5607 t = gimple_build_omp_continue (control, control);
5608 gimple_omp_sections_set_control (stmt, control);
5609 gimple_seq_add_stmt (&new_body, t);
61e47ac8 5610
75a70cf9 5611 gimple_seq_add_seq (&new_body, olist);
5612 gimple_seq_add_seq (&new_body, dlist);
773c5ba7 5613
75a70cf9 5614 new_body = maybe_catch_exception (new_body);
aade31a0 5615
75a70cf9 5616 t = gimple_build_omp_return
5617 (!!find_omp_clause (gimple_omp_sections_clauses (stmt),
5618 OMP_CLAUSE_NOWAIT));
5619 gimple_seq_add_stmt (&new_body, t);
61e47ac8 5620
75a70cf9 5621 gimple_bind_set_body (new_stmt, new_body);
5622 gimple_omp_set_body (stmt, NULL);
773c5ba7 5623
75a70cf9 5624 gsi_replace (gsi_p, new_stmt, true);
1e8e9920 5625}
5626
5627
773c5ba7 5628/* A subroutine of lower_omp_single. Expand the simple form of
75a70cf9 5629 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
1e8e9920 5630
5631 if (GOMP_single_start ())
5632 BODY;
5633 [ GOMP_barrier (); ] -> unless 'nowait' is present.
773c5ba7 5634
5635 FIXME. It may be better to delay expanding the logic of this until
5636 pass_expand_omp. The expanded logic may make the job more difficult
5637 for a synchronization analysis pass. */
1e8e9920 5638
5639static void
75a70cf9 5640lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
1e8e9920 5641{
e60a6f7b 5642 location_t loc = gimple_location (single_stmt);
5643 tree tlabel = create_artificial_label (loc);
5644 tree flabel = create_artificial_label (loc);
75a70cf9 5645 gimple call, cond;
5646 tree lhs, decl;
5647
5648 decl = built_in_decls[BUILT_IN_GOMP_SINGLE_START];
5649 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
5650 call = gimple_build_call (decl, 0);
5651 gimple_call_set_lhs (call, lhs);
5652 gimple_seq_add_stmt (pre_p, call);
5653
5654 cond = gimple_build_cond (EQ_EXPR, lhs,
389dd41b 5655 fold_convert_loc (loc, TREE_TYPE (lhs),
5656 boolean_true_node),
75a70cf9 5657 tlabel, flabel);
5658 gimple_seq_add_stmt (pre_p, cond);
5659 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5660 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5661 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
1e8e9920 5662}
5663
773c5ba7 5664
5665/* A subroutine of lower_omp_single. Expand the copy form of
75a70cf9 5666 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
1e8e9920 5667
5668 #pragma omp single copyprivate (a, b, c)
5669
5670 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5671
5672 {
5673 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5674 {
5675 BODY;
5676 copyout.a = a;
5677 copyout.b = b;
5678 copyout.c = c;
5679 GOMP_single_copy_end (&copyout);
5680 }
5681 else
5682 {
5683 a = copyout_p->a;
5684 b = copyout_p->b;
5685 c = copyout_p->c;
5686 }
5687 GOMP_barrier ();
5688 }
773c5ba7 5689
5690 FIXME. It may be better to delay expanding the logic of this until
5691 pass_expand_omp. The expanded logic may make the job more difficult
5692 for a synchronization analysis pass. */
1e8e9920 5693
5694static void
75a70cf9 5695lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
1e8e9920 5696{
75a70cf9 5697 tree ptr_type, t, l0, l1, l2;
5698 gimple_seq copyin_seq;
e60a6f7b 5699 location_t loc = gimple_location (single_stmt);
1e8e9920 5700
5701 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5702
5703 ptr_type = build_pointer_type (ctx->record_type);
5704 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5705
e60a6f7b 5706 l0 = create_artificial_label (loc);
5707 l1 = create_artificial_label (loc);
5708 l2 = create_artificial_label (loc);
1e8e9920 5709
389dd41b 5710 t = build_call_expr_loc (loc, built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START], 0);
5711 t = fold_convert_loc (loc, ptr_type, t);
75a70cf9 5712 gimplify_assign (ctx->receiver_decl, t, pre_p);
1e8e9920 5713
5714 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
5715 build_int_cst (ptr_type, 0));
5716 t = build3 (COND_EXPR, void_type_node, t,
5717 build_and_jump (&l0), build_and_jump (&l1));
5718 gimplify_and_add (t, pre_p);
5719
75a70cf9 5720 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
1e8e9920 5721
75a70cf9 5722 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
1e8e9920 5723
5724 copyin_seq = NULL;
75a70cf9 5725 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
1e8e9920 5726 &copyin_seq, ctx);
5727
389dd41b 5728 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
5729 t = build_call_expr_loc (loc, built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END],
5730 1, t);
1e8e9920 5731 gimplify_and_add (t, pre_p);
5732
5733 t = build_and_jump (&l2);
5734 gimplify_and_add (t, pre_p);
5735
75a70cf9 5736 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
1e8e9920 5737
75a70cf9 5738 gimple_seq_add_seq (pre_p, copyin_seq);
1e8e9920 5739
75a70cf9 5740 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
1e8e9920 5741}
5742
773c5ba7 5743
1e8e9920 5744/* Lower code for an OpenMP single directive. */
5745
5746static void
75a70cf9 5747lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 5748{
75a70cf9 5749 tree block;
5750 gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
5751 gimple_seq bind_body, dlist;
dac18d1a 5752 struct gimplify_ctx gctx;
1e8e9920 5753
dac18d1a 5754 push_gimplify_context (&gctx);
1e8e9920 5755
75a70cf9 5756 bind_body = NULL;
5757 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
5758 &bind_body, &dlist, ctx);
5759 lower_omp (gimple_omp_body (single_stmt), ctx);
1e8e9920 5760
75a70cf9 5761 gimple_seq_add_stmt (&bind_body, single_stmt);
1e8e9920 5762
5763 if (ctx->record_type)
75a70cf9 5764 lower_omp_single_copy (single_stmt, &bind_body, ctx);
1e8e9920 5765 else
75a70cf9 5766 lower_omp_single_simple (single_stmt, &bind_body);
5767
5768 gimple_omp_set_body (single_stmt, NULL);
1e8e9920 5769
75a70cf9 5770 gimple_seq_add_seq (&bind_body, dlist);
61e47ac8 5771
75a70cf9 5772 bind_body = maybe_catch_exception (bind_body);
61e47ac8 5773
48e1416a 5774 t = gimple_build_omp_return
75a70cf9 5775 (!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
5776 OMP_CLAUSE_NOWAIT));
5777 gimple_seq_add_stmt (&bind_body, t);
aade31a0 5778
75a70cf9 5779 block = make_node (BLOCK);
5780 bind = gimple_build_bind (NULL, bind_body, block);
61e47ac8 5781
1e8e9920 5782 pop_gimplify_context (bind);
773c5ba7 5783
75a70cf9 5784 gimple_bind_append_vars (bind, ctx->block_vars);
5785 BLOCK_VARS (block) = ctx->block_vars;
5786 gsi_replace (gsi_p, bind, true);
1d22f541 5787 if (BLOCK_VARS (block))
5788 TREE_USED (block) = 1;
1e8e9920 5789}
5790
773c5ba7 5791
1e8e9920 5792/* Lower code for an OpenMP master directive. */
5793
5794static void
75a70cf9 5795lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 5796{
75a70cf9 5797 tree block, lab = NULL, x;
5798 gimple stmt = gsi_stmt (*gsi_p), bind;
389dd41b 5799 location_t loc = gimple_location (stmt);
75a70cf9 5800 gimple_seq tseq;
dac18d1a 5801 struct gimplify_ctx gctx;
1e8e9920 5802
dac18d1a 5803 push_gimplify_context (&gctx);
1e8e9920 5804
5805 block = make_node (BLOCK);
75a70cf9 5806 bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
5807 block);
61e47ac8 5808
389dd41b 5809 x = build_call_expr_loc (loc, built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
1e8e9920 5810 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
5811 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
75a70cf9 5812 tseq = NULL;
5813 gimplify_and_add (x, &tseq);
5814 gimple_bind_add_seq (bind, tseq);
1e8e9920 5815
75a70cf9 5816 lower_omp (gimple_omp_body (stmt), ctx);
5817 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
5818 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
5819 gimple_omp_set_body (stmt, NULL);
1e8e9920 5820
75a70cf9 5821 gimple_bind_add_stmt (bind, gimple_build_label (lab));
61e47ac8 5822
75a70cf9 5823 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
61e47ac8 5824
1e8e9920 5825 pop_gimplify_context (bind);
773c5ba7 5826
75a70cf9 5827 gimple_bind_append_vars (bind, ctx->block_vars);
5828 BLOCK_VARS (block) = ctx->block_vars;
5829 gsi_replace (gsi_p, bind, true);
1e8e9920 5830}
5831
773c5ba7 5832
1e8e9920 5833/* Lower code for an OpenMP ordered directive. */
5834
5835static void
75a70cf9 5836lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 5837{
75a70cf9 5838 tree block;
5839 gimple stmt = gsi_stmt (*gsi_p), bind, x;
dac18d1a 5840 struct gimplify_ctx gctx;
1e8e9920 5841
dac18d1a 5842 push_gimplify_context (&gctx);
1e8e9920 5843
5844 block = make_node (BLOCK);
75a70cf9 5845 bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
5846 block);
61e47ac8 5847
75a70cf9 5848 x = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ORDERED_START], 0);
5849 gimple_bind_add_stmt (bind, x);
1e8e9920 5850
75a70cf9 5851 lower_omp (gimple_omp_body (stmt), ctx);
5852 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
5853 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
5854 gimple_omp_set_body (stmt, NULL);
1e8e9920 5855
75a70cf9 5856 x = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ORDERED_END], 0);
5857 gimple_bind_add_stmt (bind, x);
61e47ac8 5858
75a70cf9 5859 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
61e47ac8 5860
1e8e9920 5861 pop_gimplify_context (bind);
773c5ba7 5862
75a70cf9 5863 gimple_bind_append_vars (bind, ctx->block_vars);
5864 BLOCK_VARS (block) = gimple_bind_vars (bind);
5865 gsi_replace (gsi_p, bind, true);
1e8e9920 5866}
5867
1e8e9920 5868
75a70cf9 5869/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
1e8e9920 5870 substitution of a couple of function calls. The NAMED case, however,
5871 requires that the languages coordinate a symbol name. It is therefore
5872 best put here in common code. */
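/* For example, "#pragma omp critical (foo)" creates a common symbol
   named ".gomp_critical_user_foo" below; because the decl is
   TREE_PUBLIC and DECL_COMMON, every translation unit that uses the
   same name ends up sharing one mutex. */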
5873
5874static GTY((param1_is (tree), param2_is (tree)))
5875 splay_tree critical_name_mutexes;
5876
5877static void
75a70cf9 5878lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 5879{
75a70cf9 5880 tree block;
5881 tree name, lock, unlock;
5882 gimple stmt = gsi_stmt (*gsi_p), bind;
389dd41b 5883 location_t loc = gimple_location (stmt);
75a70cf9 5884 gimple_seq tbody;
dac18d1a 5885 struct gimplify_ctx gctx;
1e8e9920 5886
75a70cf9 5887 name = gimple_omp_critical_name (stmt);
1e8e9920 5888 if (name)
5889 {
c2f47e15 5890 tree decl;
1e8e9920 5891 splay_tree_node n;
5892
5893 if (!critical_name_mutexes)
5894 critical_name_mutexes
5895 = splay_tree_new_ggc (splay_tree_compare_pointers);
5896
5897 n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
5898 if (n == NULL)
5899 {
5900 char *new_str;
5901
5902 decl = create_tmp_var_raw (ptr_type_node, NULL);
5903
5904 new_str = ACONCAT ((".gomp_critical_user_",
5905 IDENTIFIER_POINTER (name), NULL));
5906 DECL_NAME (decl) = get_identifier (new_str);
5907 TREE_PUBLIC (decl) = 1;
5908 TREE_STATIC (decl) = 1;
5909 DECL_COMMON (decl) = 1;
5910 DECL_ARTIFICIAL (decl) = 1;
5911 DECL_IGNORED_P (decl) = 1;
1d416bd7 5912 varpool_finalize_decl (decl);
1e8e9920 5913
5914 splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
5915 (splay_tree_value) decl);
5916 }
5917 else
5918 decl = (tree) n->value;
5919
1e8e9920 5920 lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_START];
389dd41b 5921 lock = build_call_expr_loc (loc, lock, 1, build_fold_addr_expr_loc (loc, decl));
1e8e9920 5922
1e8e9920 5923 unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_END];
389dd41b 5924 unlock = build_call_expr_loc (loc, unlock, 1,
5925 build_fold_addr_expr_loc (loc, decl));
1e8e9920 5926 }
5927 else
5928 {
5929 lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_START];
389dd41b 5930 lock = build_call_expr_loc (loc, lock, 0);
1e8e9920 5931
5932 unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_END];
389dd41b 5933 unlock = build_call_expr_loc (loc, unlock, 0);
1e8e9920 5934 }
5935
dac18d1a 5936 push_gimplify_context (&gctx);
1e8e9920 5937
5938 block = make_node (BLOCK);
75a70cf9 5939 bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt), block);
61e47ac8 5940
75a70cf9 5941 tbody = gimple_bind_body (bind);
5942 gimplify_and_add (lock, &tbody);
5943 gimple_bind_set_body (bind, tbody);
1e8e9920 5944
75a70cf9 5945 lower_omp (gimple_omp_body (stmt), ctx);
5946 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
5947 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
5948 gimple_omp_set_body (stmt, NULL);
1e8e9920 5949
75a70cf9 5950 tbody = gimple_bind_body (bind);
5951 gimplify_and_add (unlock, &tbody);
5952 gimple_bind_set_body (bind, tbody);
61e47ac8 5953
75a70cf9 5954 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
1e8e9920 5955
5956 pop_gimplify_context (bind);
75a70cf9 5957 gimple_bind_append_vars (bind, ctx->block_vars);
5958 BLOCK_VARS (block) = gimple_bind_vars (bind);
5959 gsi_replace (gsi_p, bind, true);
773c5ba7 5960}
5961
5962
5963/* A subroutine of lower_omp_for. Generate code to emit the predicate
5964 for a lastprivate clause. Given a loop control predicate of (V
5965 cond N2), we gate the clause on (!(V cond N2)). The lowered form
1e4afe3c 5966 is appended to *DLIST, and iterator initialization is appended to
5967 *BODY_P. */
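/* For example, for "for (v = 0; v < n2; v++)" the lastprivate
   assignments are guarded by (v >= n2); when the step is a constant 1
   or -1 the guard is strengthened below to (v == n2), which VRP can
   often fold away. */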
773c5ba7 5968
5969static void
75a70cf9 5970lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
5971 gimple_seq *dlist, struct omp_context *ctx)
773c5ba7 5972{
75a70cf9 5973 tree clauses, cond, vinit;
773c5ba7 5974 enum tree_code cond_code;
75a70cf9 5975 gimple_seq stmts;
48e1416a 5976
fd6481cf 5977 cond_code = fd->loop.cond_code;
773c5ba7 5978 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
5979
5980 /* When possible, use a strict equality expression. This can let VRP
5981 type optimizations deduce the value and remove a copy. */
fd6481cf 5982 if (host_integerp (fd->loop.step, 0))
773c5ba7 5983 {
fd6481cf 5984 HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
773c5ba7 5985 if (step == 1 || step == -1)
5986 cond_code = EQ_EXPR;
5987 }
5988
fd6481cf 5989 cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
773c5ba7 5990
75a70cf9 5991 clauses = gimple_omp_for_clauses (fd->for_stmt);
1e4afe3c 5992 stmts = NULL;
5993 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
75a70cf9 5994 if (!gimple_seq_empty_p (stmts))
1e4afe3c 5995 {
75a70cf9 5996 gimple_seq_add_seq (&stmts, *dlist);
fd6481cf 5997 *dlist = stmts;
1e4afe3c 5998
5999 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
fd6481cf 6000 vinit = fd->loop.n1;
1e4afe3c 6001 if (cond_code == EQ_EXPR
fd6481cf 6002 && host_integerp (fd->loop.n2, 0)
6003 && ! integer_zerop (fd->loop.n2))
6004 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
1e4afe3c 6005
6006 /* Initialize the iterator variable, so that threads that don't execute
6007 any iterations don't execute the lastprivate clauses by accident. */
75a70cf9 6008 gimplify_assign (fd->loop.v, vinit, body_p);
1e4afe3c 6009 }
773c5ba7 6010}
6011
6012
6013/* Lower code for an OpenMP loop directive. */
6014
6015static void
75a70cf9 6016lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
773c5ba7 6017{
75a70cf9 6018 tree *rhs_p, block;
773c5ba7 6019 struct omp_for_data fd;
75a70cf9 6020 gimple stmt = gsi_stmt (*gsi_p), new_stmt;
f018d957 6021 gimple_seq omp_for_body, body, dlist;
75a70cf9 6022 size_t i;
dac18d1a 6023 struct gimplify_ctx gctx;
773c5ba7 6024
dac18d1a 6025 push_gimplify_context (&gctx);
773c5ba7 6026
75a70cf9 6027 lower_omp (gimple_omp_for_pre_body (stmt), ctx);
6028 lower_omp (gimple_omp_body (stmt), ctx);
773c5ba7 6029
1d22f541 6030 block = make_node (BLOCK);
75a70cf9 6031 new_stmt = gimple_build_bind (NULL, NULL, block);
1d22f541 6032
773c5ba7 6033 /* Move declaration of temporaries in the loop body before we make
6034 it go away. */
75a70cf9 6035 omp_for_body = gimple_omp_body (stmt);
6036 if (!gimple_seq_empty_p (omp_for_body)
6037 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6038 {
6039 tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
6040 gimple_bind_append_vars (new_stmt, vars);
6041 }
773c5ba7 6042
75a70cf9 6043 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
773c5ba7 6044 dlist = NULL;
75a70cf9 6045 body = NULL;
6046 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx);
6047 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
773c5ba7 6048
6049 /* Lower the header expressions. At this point, we can assume that
6050 the header is of the form:
6051
6052 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6053
6054 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6055 using the .omp_data_s mapping, if needed. */
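  /* A sketch: if VAL2 is "n * 4" with N mapped through .omp_data_s,
     the expression is not gimple-invariant, so get_formal_tmp_var
     below evaluates it into a temporary that the GIMPLE_OMP_FOR
     header can use directly. */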
75a70cf9 6056 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
fd6481cf 6057 {
75a70cf9 6058 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
fd6481cf 6059 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 6060 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 6061
75a70cf9 6062 rhs_p = gimple_omp_for_final_ptr (stmt, i);
fd6481cf 6063 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 6064 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 6065
75a70cf9 6066 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
fd6481cf 6067 if (!is_gimple_min_invariant (*rhs_p))
75a70cf9 6068 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
fd6481cf 6069 }
773c5ba7 6070
6071 /* Once lowered, extract the bounds and clauses. */
fd6481cf 6072 extract_omp_for_data (stmt, &fd, NULL);
773c5ba7 6073
75a70cf9 6074 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
773c5ba7 6075
75a70cf9 6076 gimple_seq_add_stmt (&body, stmt);
6077 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
61e47ac8 6078
75a70cf9 6079 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6080 fd.loop.v));
61e47ac8 6081
773c5ba7 6082 /* After the loop, add exit clauses. */
75a70cf9 6083 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6084 gimple_seq_add_seq (&body, dlist);
773c5ba7 6085
75a70cf9 6086 body = maybe_catch_exception (body);
aade31a0 6087
61e47ac8 6088 /* Region exit marker goes at the end of the loop body. */
75a70cf9 6089 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
773c5ba7 6090
1d22f541 6091 pop_gimplify_context (new_stmt);
75a70cf9 6092
6093 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6094 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
1d22f541 6095 if (BLOCK_VARS (block))
6096 TREE_USED (block) = 1;
773c5ba7 6097
75a70cf9 6098 gimple_bind_set_body (new_stmt, body);
6099 gimple_omp_set_body (stmt, NULL);
6100 gimple_omp_for_set_pre_body (stmt, NULL);
6101 gsi_replace (gsi_p, new_stmt, true);
1e8e9920 6102}
6103
48e1416a 6104/* Callback for walk_stmts. Check if the current statement only contains
75a70cf9 6105 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
de7ef844 6106
6107static tree
75a70cf9 6108check_combined_parallel (gimple_stmt_iterator *gsi_p,
6109 bool *handled_ops_p,
6110 struct walk_stmt_info *wi)
de7ef844 6111{
4077bf7a 6112 int *info = (int *) wi->info;
75a70cf9 6113 gimple stmt = gsi_stmt (*gsi_p);
de7ef844 6114
75a70cf9 6115 *handled_ops_p = true;
6116 switch (gimple_code (stmt))
de7ef844 6117 {
75a70cf9 6118 WALK_SUBSTMTS;
6119
6120 case GIMPLE_OMP_FOR:
6121 case GIMPLE_OMP_SECTIONS:
de7ef844 6122 *info = *info == 0 ? 1 : -1;
6123 break;
6124 default:
6125 *info = -1;
6126 break;
6127 }
6128 return NULL;
6129}
773c5ba7 6130
fd6481cf 6131struct omp_taskcopy_context
6132{
6133 /* This field must be at the beginning, as we do "inheritance": Some
6134 callback functions for tree-inline.c (e.g., omp_copy_decl)
6135 receive a copy_body_data pointer that is up-casted to an
6136 omp_context pointer. */
6137 copy_body_data cb;
6138 omp_context *ctx;
6139};
6140
6141static tree
6142task_copyfn_copy_decl (tree var, copy_body_data *cb)
6143{
6144 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6145
6146 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6147 return create_tmp_var (TREE_TYPE (var), NULL);
6148
6149 return var;
6150}
6151
6152static tree
6153task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6154{
6155 tree name, new_fields = NULL, type, f;
6156
6157 type = lang_hooks.types.make_type (RECORD_TYPE);
6158 name = DECL_NAME (TYPE_NAME (orig_type));
e60a6f7b 6159 name = build_decl (gimple_location (tcctx->ctx->stmt),
6160 TYPE_DECL, name, type);
fd6481cf 6161 TYPE_NAME (type) = name;
6162
6163 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
6164 {
6165 tree new_f = copy_node (f);
6166 DECL_CONTEXT (new_f) = type;
6167 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
6168 TREE_CHAIN (new_f) = new_fields;
75a70cf9 6169 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6170 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6171 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
6172 &tcctx->cb, NULL);
fd6481cf 6173 new_fields = new_f;
6174 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
6175 }
6176 TYPE_FIELDS (type) = nreverse (new_fields);
6177 layout_type (type);
6178 return type;
6179}
6180
6181/* Create the task copyfn, i.e. the helper that GOMP_task invokes to
   copy shared variable pointers and firstprivate data from the sender
   record (srecord_type) into the task's own record (record_type). */
6182
6183static void
75a70cf9 6184create_task_copyfn (gimple task_stmt, omp_context *ctx)
fd6481cf 6185{
6186 struct function *child_cfun;
6187 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
6188 tree record_type, srecord_type, bind, list;
6189 bool record_needs_remap = false, srecord_needs_remap = false;
6190 splay_tree_node n;
6191 struct omp_taskcopy_context tcctx;
dac18d1a 6192 struct gimplify_ctx gctx;
389dd41b 6193 location_t loc = gimple_location (task_stmt);
fd6481cf 6194
75a70cf9 6195 child_fn = gimple_omp_task_copy_fn (task_stmt);
fd6481cf 6196 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
6197 gcc_assert (child_cfun->cfg == NULL);
6198 child_cfun->dont_save_pending_sizes_p = 1;
6199 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
6200
6201 /* Reset DECL_CONTEXT on function arguments. */
6202 for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
6203 DECL_CONTEXT (t) = child_fn;
6204
6205 /* Populate the function. */
dac18d1a 6206 push_gimplify_context (&gctx);
fd6481cf 6207 current_function_decl = child_fn;
6208
6209 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6210 TREE_SIDE_EFFECTS (bind) = 1;
6211 list = NULL;
6212 DECL_SAVED_TREE (child_fn) = bind;
75a70cf9 6213 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
fd6481cf 6214
6215 /* Remap src and dst argument types if needed. */
6216 record_type = ctx->record_type;
6217 srecord_type = ctx->srecord_type;
6218 for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
6219 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6220 {
6221 record_needs_remap = true;
6222 break;
6223 }
6224 for (f = TYPE_FIELDS (srecord_type); f ; f = TREE_CHAIN (f))
6225 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6226 {
6227 srecord_needs_remap = true;
6228 break;
6229 }
6230
6231 if (record_needs_remap || srecord_needs_remap)
6232 {
6233 memset (&tcctx, '\0', sizeof (tcctx));
6234 tcctx.cb.src_fn = ctx->cb.src_fn;
6235 tcctx.cb.dst_fn = child_fn;
6236 tcctx.cb.src_node = cgraph_node (tcctx.cb.src_fn);
6237 tcctx.cb.dst_node = tcctx.cb.src_node;
6238 tcctx.cb.src_cfun = ctx->cb.src_cfun;
6239 tcctx.cb.copy_decl = task_copyfn_copy_decl;
e38def9c 6240 tcctx.cb.eh_lp_nr = 0;
fd6481cf 6241 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
6242 tcctx.cb.decl_map = pointer_map_create ();
6243 tcctx.ctx = ctx;
6244
6245 if (record_needs_remap)
6246 record_type = task_copyfn_remap_type (&tcctx, record_type);
6247 if (srecord_needs_remap)
6248 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
6249 }
6250 else
6251 tcctx.cb.decl_map = NULL;
6252
6253 push_cfun (child_cfun);
6254
6255 arg = DECL_ARGUMENTS (child_fn);
6256 TREE_TYPE (arg) = build_pointer_type (record_type);
6257 sarg = TREE_CHAIN (arg);
6258 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
6259
6260 /* First pass: initialize temporaries used in record_type and srecord_type
6261 sizes and field offsets. */
6262 if (tcctx.cb.decl_map)
75a70cf9 6263 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 6264 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
6265 {
6266 tree *p;
6267
6268 decl = OMP_CLAUSE_DECL (c);
6269 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
6270 if (p == NULL)
6271 continue;
6272 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6273 sf = (tree) n->value;
6274 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
389dd41b 6275 src = build_fold_indirect_ref_loc (loc, sarg);
fd6481cf 6276 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
75a70cf9 6277 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
fd6481cf 6278 append_to_statement_list (t, &list);
6279 }
6280
6281 /* Second pass: copy shared var pointers and copy construct non-VLA
6282 firstprivate vars. */
75a70cf9 6283 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 6284 switch (OMP_CLAUSE_CODE (c))
6285 {
6286 case OMP_CLAUSE_SHARED:
6287 decl = OMP_CLAUSE_DECL (c);
6288 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6289 if (n == NULL)
6290 break;
6291 f = (tree) n->value;
6292 if (tcctx.cb.decl_map)
6293 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6294 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6295 sf = (tree) n->value;
6296 if (tcctx.cb.decl_map)
6297 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
389dd41b 6298 src = build_fold_indirect_ref_loc (loc, sarg);
fd6481cf 6299 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
389dd41b 6300 dst = build_fold_indirect_ref_loc (loc, arg);
fd6481cf 6301 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
75a70cf9 6302 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
fd6481cf 6303 append_to_statement_list (t, &list);
6304 break;
6305 case OMP_CLAUSE_FIRSTPRIVATE:
6306 decl = OMP_CLAUSE_DECL (c);
6307 if (is_variable_sized (decl))
6308 break;
6309 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6310 if (n == NULL)
6311 break;
6312 f = (tree) n->value;
6313 if (tcctx.cb.decl_map)
6314 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6315 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6316 if (n != NULL)
6317 {
6318 sf = (tree) n->value;
6319 if (tcctx.cb.decl_map)
6320 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
389dd41b 6321 src = build_fold_indirect_ref_loc (loc, sarg);
fd6481cf 6322 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6323 if (use_pointer_for_field (decl, NULL) || is_reference (decl))
389dd41b 6324 src = build_fold_indirect_ref_loc (loc, src);
fd6481cf 6325 }
6326 else
6327 src = decl;
389dd41b 6328 dst = build_fold_indirect_ref_loc (loc, arg);
fd6481cf 6329 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
6330 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
6331 append_to_statement_list (t, &list);
6332 break;
6333 case OMP_CLAUSE_PRIVATE:
6334 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6335 break;
6336 decl = OMP_CLAUSE_DECL (c);
6337 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6338 f = (tree) n->value;
6339 if (tcctx.cb.decl_map)
6340 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6341 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6342 if (n != NULL)
6343 {
6344 sf = (tree) n->value;
6345 if (tcctx.cb.decl_map)
6346 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
389dd41b 6347 src = build_fold_indirect_ref_loc (loc, sarg);
fd6481cf 6348 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6349 if (use_pointer_for_field (decl, NULL))
389dd41b 6350 src = build_fold_indirect_ref_loc (loc, src);
fd6481cf 6351 }
6352 else
6353 src = decl;
389dd41b 6354 dst = build_fold_indirect_ref_loc (loc, arg);
fd6481cf 6355 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
75a70cf9 6356 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
fd6481cf 6357 append_to_statement_list (t, &list);
6358 break;
6359 default:
6360 break;
6361 }
6362
6363 /* Last pass: handle VLA firstprivates. */
6364 if (tcctx.cb.decl_map)
75a70cf9 6365 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
fd6481cf 6366 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
6367 {
6368 tree ind, ptr, df;
6369
6370 decl = OMP_CLAUSE_DECL (c);
6371 if (!is_variable_sized (decl))
6372 continue;
6373 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6374 if (n == NULL)
6375 continue;
6376 f = (tree) n->value;
6377 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6378 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
6379 ind = DECL_VALUE_EXPR (decl);
6380 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
6381 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
6382 n = splay_tree_lookup (ctx->sfield_map,
6383 (splay_tree_key) TREE_OPERAND (ind, 0));
6384 sf = (tree) n->value;
6385 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
389dd41b 6386 src = build_fold_indirect_ref_loc (loc, sarg);
fd6481cf 6387 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
389dd41b 6388 src = build_fold_indirect_ref_loc (loc, src);
6389 dst = build_fold_indirect_ref_loc (loc, arg);
fd6481cf 6390 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
6391 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
6392 append_to_statement_list (t, &list);
6393 n = splay_tree_lookup (ctx->field_map,
6394 (splay_tree_key) TREE_OPERAND (ind, 0));
6395 df = (tree) n->value;
6396 df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
389dd41b 6397 ptr = build_fold_indirect_ref_loc (loc, arg);
fd6481cf 6398 ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
75a70cf9 6399 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
389dd41b 6400 build_fold_addr_expr_loc (loc, dst));
fd6481cf 6401 append_to_statement_list (t, &list);
6402 }
6403
6404 t = build1 (RETURN_EXPR, void_type_node, NULL);
6405 append_to_statement_list (t, &list);
6406
6407 if (tcctx.cb.decl_map)
6408 pointer_map_destroy (tcctx.cb.decl_map);
6409 pop_gimplify_context (NULL);
6410 BIND_EXPR_BODY (bind) = list;
6411 pop_cfun ();
6412 current_function_decl = ctx->cb.src_fn;
6413}
6414
75a70cf9 6415/* Lower the OpenMP parallel or task directive in the current statement
6416 in GSI_P. CTX holds context information for the directive. */
773c5ba7 6417
6418static void
75a70cf9 6419lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
773c5ba7 6420{
75a70cf9 6421 tree clauses;
6422 tree child_fn, t;
6423 gimple stmt = gsi_stmt (*gsi_p);
6424 gimple par_bind, bind;
6425 gimple_seq par_body, olist, ilist, par_olist, par_ilist, new_body;
dac18d1a 6426 struct gimplify_ctx gctx;
389dd41b 6427 location_t loc = gimple_location (stmt);
773c5ba7 6428
75a70cf9 6429 clauses = gimple_omp_taskreg_clauses (stmt);
6430 par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
6431 par_body = gimple_bind_body (par_bind);
773c5ba7 6432 child_fn = ctx->cb.dst_fn;
75a70cf9 6433 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
6434 && !gimple_omp_parallel_combined_p (stmt))
de7ef844 6435 {
6436 struct walk_stmt_info wi;
6437 int ws_num = 0;
6438
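      /* A parallel whose body consists of exactly one workshare
         (e.g. "#pragma omp parallel" directly containing a single
         "#pragma omp for") is marked combined, which later lets
         determine_parallel_type pair it with the workshare and use the
         combined GOMP_parallel_loop_* or GOMP_parallel_sections_start
         entry points. */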
6439 memset (&wi, 0, sizeof (wi));
de7ef844 6440 wi.info = &ws_num;
6441 wi.val_only = true;
75a70cf9 6442 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
de7ef844 6443 if (ws_num == 1)
75a70cf9 6444 gimple_omp_parallel_set_combined_p (stmt, true);
de7ef844 6445 }
fd6481cf 6446 if (ctx->srecord_type)
6447 create_task_copyfn (stmt, ctx);
773c5ba7 6448
dac18d1a 6449 push_gimplify_context (&gctx);
773c5ba7 6450
75a70cf9 6451 par_olist = NULL;
6452 par_ilist = NULL;
773c5ba7 6453 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
75a70cf9 6454 lower_omp (par_body, ctx);
6455 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
fd6481cf 6456 lower_reduction_clauses (clauses, &par_olist, ctx);
773c5ba7 6457
6458 /* Declare all the variables created by mapping and the variables
6459 declared in the scope of the parallel body. */
6460 record_vars_into (ctx->block_vars, child_fn);
75a70cf9 6461 record_vars_into (gimple_bind_vars (par_bind), child_fn);
773c5ba7 6462
6463 if (ctx->record_type)
6464 {
fd6481cf 6465 ctx->sender_decl
6466 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
6467 : ctx->record_type, ".omp_data_o");
86f2ad37 6468 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
75a70cf9 6469 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
773c5ba7 6470 }
6471
75a70cf9 6472 olist = NULL;
6473 ilist = NULL;
773c5ba7 6474 lower_send_clauses (clauses, &ilist, &olist, ctx);
6475 lower_send_shared_vars (&ilist, &olist, ctx);
6476
6477 /* Once all the expansions are done, sequence all the different
75a70cf9 6478 fragments inside gimple_omp_body. */
773c5ba7 6479
75a70cf9 6480 new_body = NULL;
773c5ba7 6481
6482 if (ctx->record_type)
6483 {
389dd41b 6484 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
cc6b725b 6485 /* fixup_child_record_type might have changed receiver_decl's type. */
389dd41b 6486 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
75a70cf9 6487 gimple_seq_add_stmt (&new_body,
6488 gimple_build_assign (ctx->receiver_decl, t));
773c5ba7 6489 }
6490
75a70cf9 6491 gimple_seq_add_seq (&new_body, par_ilist);
6492 gimple_seq_add_seq (&new_body, par_body);
6493 gimple_seq_add_seq (&new_body, par_olist);
6494 new_body = maybe_catch_exception (new_body);
6495 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
6496 gimple_omp_set_body (stmt, new_body);
773c5ba7 6497
75a70cf9 6498 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
6499 gimple_bind_add_stmt (bind, stmt);
1d22f541 6500 if (ilist || olist)
6501 {
75a70cf9 6502 gimple_seq_add_stmt (&ilist, bind);
6503 gimple_seq_add_seq (&ilist, olist);
6504 bind = gimple_build_bind (NULL, ilist, NULL);
1d22f541 6505 }
773c5ba7 6506
75a70cf9 6507 gsi_replace (gsi_p, bind, true);
773c5ba7 6508
75a70cf9 6509 pop_gimplify_context (NULL);
773c5ba7 6510}
6511
a4890dc9 6512/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
75a70cf9 6513 regimplified. If DATA is non-NULL, lower_omp_1 is outside
6514 of an OpenMP context, but with task_shared_vars set. */
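/* For example, a VAR_DECL that acquired a DECL_VALUE_EXPR during
   lowering, or any decl recorded in task_shared_vars, is reported here
   so that the statement referencing it gets regimplified. */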
46515c92 6515
6516static tree
75a70cf9 6517lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
6518 void *data)
46515c92 6519{
a4890dc9 6520 tree t = *tp;
46515c92 6521
a4890dc9 6522 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
75a70cf9 6523 if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
9f49e155 6524 return t;
6525
6526 if (task_shared_vars
6527 && DECL_P (t)
6528 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
a4890dc9 6529 return t;
46515c92 6530
a4890dc9 6531 /* If a global variable has been privatized, TREE_CONSTANT on
6532 ADDR_EXPR might be wrong. */
75a70cf9 6533 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
a4890dc9 6534 recompute_tree_invariant_for_addr_expr (t);
46515c92 6535
a4890dc9 6536 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
6537 return NULL_TREE;
46515c92 6538}
773c5ba7 6539
a4890dc9 6540static void
75a70cf9 6541lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
1e8e9920 6542{
75a70cf9 6543 gimple stmt = gsi_stmt (*gsi_p);
6544 struct walk_stmt_info wi;
1e8e9920 6545
75a70cf9 6546 if (gimple_has_location (stmt))
6547 input_location = gimple_location (stmt);
a4890dc9 6548
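  /* WI is consulted only when CTX is NULL, and every such use below is
     guarded by task_shared_vars being set, so clearing it just in that
     case is sufficient.  */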
75a70cf9 6549 if (task_shared_vars)
6550 memset (&wi, '\0', sizeof (wi));
a4890dc9 6551
773c5ba7 6552 /* If we have issued syntax errors, avoid doing any heavy lifting.
6553 Just replace the OpenMP directives with a NOP to avoid
6554 confusing RTL expansion. */
75a70cf9 6555 if (errorcount && is_gimple_omp (stmt))
773c5ba7 6556 {
75a70cf9 6557 gsi_replace (gsi_p, gimple_build_nop (), true);
a4890dc9 6558 return;
773c5ba7 6559 }
6560
75a70cf9 6561 switch (gimple_code (stmt))
1e8e9920 6562 {
75a70cf9 6563 case GIMPLE_COND:
fd6481cf 6564 if ((ctx || task_shared_vars)
75a70cf9 6565 && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
6566 ctx ? NULL : &wi, NULL)
6567 || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
6568 ctx ? NULL : &wi, NULL)))
6569 gimple_regimplify_operands (stmt, gsi_p);
a4890dc9 6570 break;
75a70cf9 6571 case GIMPLE_CATCH:
6572 lower_omp (gimple_catch_handler (stmt), ctx);
a4890dc9 6573 break;
75a70cf9 6574 case GIMPLE_EH_FILTER:
6575 lower_omp (gimple_eh_filter_failure (stmt), ctx);
a4890dc9 6576 break;
75a70cf9 6577 case GIMPLE_TRY:
6578 lower_omp (gimple_try_eval (stmt), ctx);
6579 lower_omp (gimple_try_cleanup (stmt), ctx);
a4890dc9 6580 break;
75a70cf9 6581 case GIMPLE_BIND:
6582 lower_omp (gimple_bind_body (stmt), ctx);
a4890dc9 6583 break;
75a70cf9 6584 case GIMPLE_OMP_PARALLEL:
6585 case GIMPLE_OMP_TASK:
6586 ctx = maybe_lookup_ctx (stmt);
6587 lower_omp_taskreg (gsi_p, ctx);
a4890dc9 6588 break;
75a70cf9 6589 case GIMPLE_OMP_FOR:
6590 ctx = maybe_lookup_ctx (stmt);
1e8e9920 6591 gcc_assert (ctx);
75a70cf9 6592 lower_omp_for (gsi_p, ctx);
1e8e9920 6593 break;
75a70cf9 6594 case GIMPLE_OMP_SECTIONS:
6595 ctx = maybe_lookup_ctx (stmt);
1e8e9920 6596 gcc_assert (ctx);
75a70cf9 6597 lower_omp_sections (gsi_p, ctx);
1e8e9920 6598 break;
75a70cf9 6599 case GIMPLE_OMP_SINGLE:
6600 ctx = maybe_lookup_ctx (stmt);
1e8e9920 6601 gcc_assert (ctx);
75a70cf9 6602 lower_omp_single (gsi_p, ctx);
1e8e9920 6603 break;
75a70cf9 6604 case GIMPLE_OMP_MASTER:
6605 ctx = maybe_lookup_ctx (stmt);
1e8e9920 6606 gcc_assert (ctx);
75a70cf9 6607 lower_omp_master (gsi_p, ctx);
1e8e9920 6608 break;
75a70cf9 6609 case GIMPLE_OMP_ORDERED:
6610 ctx = maybe_lookup_ctx (stmt);
1e8e9920 6611 gcc_assert (ctx);
75a70cf9 6612 lower_omp_ordered (gsi_p, ctx);
1e8e9920 6613 break;
75a70cf9 6614 case GIMPLE_OMP_CRITICAL:
6615 ctx = maybe_lookup_ctx (stmt);
1e8e9920 6616 gcc_assert (ctx);
75a70cf9 6617 lower_omp_critical (gsi_p, ctx);
6618 break;
6619 case GIMPLE_OMP_ATOMIC_LOAD:
6620 if ((ctx || task_shared_vars)
6621 && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
6622 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
6623 gimple_regimplify_operands (stmt, gsi_p);
1e8e9920 6624 break;
a4890dc9 6625 default:
fd6481cf 6626 if ((ctx || task_shared_vars)
75a70cf9 6627 && walk_gimple_op (stmt, lower_omp_regimplify_p,
6628 ctx ? NULL : &wi))
6629 gimple_regimplify_operands (stmt, gsi_p);
1e8e9920 6630 break;
1e8e9920 6631 }
1e8e9920 6632}
6633
6634static void
75a70cf9 6635lower_omp (gimple_seq body, omp_context *ctx)
1e8e9920 6636{
1d22f541 6637 location_t saved_location = input_location;
75a70cf9 6638  gimple_stmt_iterator gsi;
6639  for (gsi = gsi_start (body); !gsi_end_p (gsi); gsi_next (&gsi))
6640 lower_omp_1 (&gsi, ctx);
1d22f541 6641 input_location = saved_location;
1e8e9920 6642}
6643\f
6644/* Main entry point. */
6645
2a1990e9 6646static unsigned int
1e8e9920 6647execute_lower_omp (void)
6648{
75a70cf9 6649 gimple_seq body;
6650
41709826 6651 /* This pass always runs, to provide PROP_gimple_lomp.
6652 But there is nothing to do unless -fopenmp is given. */
6653 if (flag_openmp == 0)
6654 return 0;
6655
1e8e9920 6656 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
6657 delete_omp_context);
6658
75a70cf9 6659 body = gimple_body (current_function_decl);
6660 scan_omp (body, NULL);
fd6481cf 6661 gcc_assert (taskreg_nesting_level == 0);
1e8e9920 6662
6663 if (all_contexts->root)
fd6481cf 6664 {
dac18d1a 6665 struct gimplify_ctx gctx;
6666
fd6481cf 6667 if (task_shared_vars)
dac18d1a 6668 push_gimplify_context (&gctx);
75a70cf9 6669 lower_omp (body, NULL);
fd6481cf 6670 if (task_shared_vars)
6671 pop_gimplify_context (NULL);
6672 }
1e8e9920 6673
773c5ba7 6674 if (all_contexts)
6675 {
6676 splay_tree_delete (all_contexts);
6677 all_contexts = NULL;
6678 }
fd6481cf 6679 BITMAP_FREE (task_shared_vars);
2a1990e9 6680 return 0;
1e8e9920 6681}
6682
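/* As a rough sketch of the eventual result (the exact calls depend on
   the clauses used), once the later expansion pass has also run, a
   region such as

	#pragma omp parallel
	  body;

   has BODY outlined into an artificial child function
   (e.g. foo._omp_fn.0), with the parent invoking libgomp roughly as

	__builtin_GOMP_parallel_start (foo._omp_fn.0, &.omp_data_o, 0);
	foo._omp_fn.0 (&.omp_data_o);
	__builtin_GOMP_parallel_end ();  */
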
48e1416a 6683struct gimple_opt_pass pass_lower_omp =
1e8e9920 6684{
20099e35 6685 {
6686 GIMPLE_PASS,
1e8e9920 6687 "omplower", /* name */
41709826 6688 NULL, /* gate */
1e8e9920 6689 execute_lower_omp, /* execute */
6690 NULL, /* sub */
6691 NULL, /* next */
6692 0, /* static_pass_number */
0b1615c1 6693 TV_NONE, /* tv_id */
1e8e9920 6694 PROP_gimple_any, /* properties_required */
6695 PROP_gimple_lomp, /* properties_provided */
6696 0, /* properties_destroyed */
6697 0, /* todo_flags_start */
20099e35 6698 TODO_dump_func /* todo_flags_finish */
6699 }
1e8e9920 6700};
1e8e9920 6701\f
6702/* The following is a utility to diagnose OpenMP structured block violations.
61e47ac8 6703 It is not part of the "omplower" pass, as that's invoked too late. It
6704 should be invoked by the respective front ends after gimplification. */
1e8e9920 6705
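/* For instance, a hypothetical test case such as

	goto inside;
	#pragma omp parallel
	  {
	  inside:;
	    ...
	  }

   is rejected with "invalid entry to OpenMP structured block": the
   goto's recorded context is NULL, while the label's is the enclosing
   GIMPLE_OMP_PARALLEL.  */
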
6706static splay_tree all_labels;
6707
6708/* Check for mismatched contexts and generate an error if needed. Return
6709 true if an error is detected. */
6710
6711static bool
75a70cf9 6712diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
6713 gimple branch_ctx, gimple label_ctx)
1e8e9920 6714{
75a70cf9 6715 if (label_ctx == branch_ctx)
1e8e9920 6716 return false;
6717
48e1416a 6718
75a70cf9 6719  /* Previously we kept track of the label's entire context in
6720     diagnose_sb_[12] so we could traverse it and issue a correct "exit"
6721     or "enter" error message upon a structured block violation.
6722
6723     We built the context by building a list with tree_cons'ing, but
6724     there is no easy counterpart in gimple tuples.  It seems like far
6725     too much work for issuing exit/enter error messages.  If someone
6726     really misses the distinct error message... patches welcome.  */
48e1416a 6729
75a70cf9 6730#if 0
1e8e9920 6731  /* Try to avoid confusing the user by producing an error message
f0b5f617 6732 with correct "exit" or "enter" verbiage. We prefer "exit"
1e8e9920 6733 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
6734  bool exit_p = true;
  if (branch_ctx == NULL)
6735 exit_p = false;
6736 else
6737 {
6738 while (label_ctx)
6739 {
6740 if (TREE_VALUE (label_ctx) == branch_ctx)
6741 {
6742 exit_p = false;
6743 break;
6744 }
6745 label_ctx = TREE_CHAIN (label_ctx);
6746 }
6747 }
6748
6749 if (exit_p)
6750 error ("invalid exit from OpenMP structured block");
6751 else
6752 error ("invalid entry to OpenMP structured block");
75a70cf9 6753#endif
1e8e9920 6754
75a70cf9 6755 /* If it's obvious we have an invalid entry, be specific about the error. */
6756 if (branch_ctx == NULL)
6757 error ("invalid entry to OpenMP structured block");
6758 else
6759 /* Otherwise, be vague and lazy, but efficient. */
6760 error ("invalid branch to/from an OpenMP structured block");
6761
6762 gsi_replace (gsi_p, gimple_build_nop (), false);
1e8e9920 6763 return true;
6764}
6765
6766/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
75a70cf9 6767 where each label is found. */
1e8e9920 6768
6769static tree
75a70cf9 6770diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6771 struct walk_stmt_info *wi)
1e8e9920 6772{
75a70cf9 6773 gimple context = (gimple) wi->info;
6774 gimple inner_context;
6775 gimple stmt = gsi_stmt (*gsi_p);
1e8e9920 6776
75a70cf9 6777 *handled_ops_p = true;
6778
6779 switch (gimple_code (stmt))
1e8e9920 6780 {
75a70cf9 6781 WALK_SUBSTMTS;
48e1416a 6782
75a70cf9 6783 case GIMPLE_OMP_PARALLEL:
6784 case GIMPLE_OMP_TASK:
6785 case GIMPLE_OMP_SECTIONS:
6786 case GIMPLE_OMP_SINGLE:
6787 case GIMPLE_OMP_SECTION:
6788 case GIMPLE_OMP_MASTER:
6789 case GIMPLE_OMP_ORDERED:
6790 case GIMPLE_OMP_CRITICAL:
6791 /* The minimal context here is just the current OMP construct. */
6792 inner_context = stmt;
1e8e9920 6793 wi->info = inner_context;
75a70cf9 6794 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
1e8e9920 6795 wi->info = context;
6796 break;
6797
75a70cf9 6798 case GIMPLE_OMP_FOR:
6799 inner_context = stmt;
1e8e9920 6800 wi->info = inner_context;
75a70cf9 6801 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
6802 walk them. */
6803 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
6804 diagnose_sb_1, NULL, wi);
6805 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
1e8e9920 6806 wi->info = context;
6807 break;
6808
75a70cf9 6809 case GIMPLE_LABEL:
6810 splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
1e8e9920 6811 (splay_tree_value) context);
6812 break;
6813
6814 default:
6815 break;
6816 }
6817
6818 return NULL_TREE;
6819}
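
/* After pass 1, then, a label defined inside e.g. a "#pragma omp single"
   body is mapped by ALL_LABELS to the enclosing GIMPLE_OMP_SINGLE
   statement (or to NULL when it lies outside any OpenMP construct), and
   pass 2 below compares that against the context of each branch.  */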
6820
6821/* Pass 2: Check each branch and see if its context differs from that of
6822 the destination label's context. */
6823
6824static tree
75a70cf9 6825diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6826 struct walk_stmt_info *wi)
1e8e9920 6827{
75a70cf9 6828 gimple context = (gimple) wi->info;
1e8e9920 6829 splay_tree_node n;
75a70cf9 6830 gimple stmt = gsi_stmt (*gsi_p);
1e8e9920 6831
75a70cf9 6832 *handled_ops_p = true;
6833
6834 switch (gimple_code (stmt))
1e8e9920 6835 {
75a70cf9 6836 WALK_SUBSTMTS;
6837
6838 case GIMPLE_OMP_PARALLEL:
6839 case GIMPLE_OMP_TASK:
6840 case GIMPLE_OMP_SECTIONS:
6841 case GIMPLE_OMP_SINGLE:
6842 case GIMPLE_OMP_SECTION:
6843 case GIMPLE_OMP_MASTER:
6844 case GIMPLE_OMP_ORDERED:
6845 case GIMPLE_OMP_CRITICAL:
6846 wi->info = stmt;
6847 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_2, NULL, wi);
1e8e9920 6848 wi->info = context;
6849 break;
6850
75a70cf9 6851 case GIMPLE_OMP_FOR:
6852 wi->info = stmt;
6853 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
6854 walk them. */
6855 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
6856 diagnose_sb_2, NULL, wi);
6857 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_2, NULL, wi);
1e8e9920 6858 wi->info = context;
6859 break;
6860
75a70cf9 6861 case GIMPLE_GOTO:
1e8e9920 6862 {
75a70cf9 6863 tree lab = gimple_goto_dest (stmt);
1e8e9920 6864 if (TREE_CODE (lab) != LABEL_DECL)
6865 break;
6866
6867 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
75a70cf9 6868 diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
1e8e9920 6869 }
6870 break;
6871
75a70cf9 6872 case GIMPLE_SWITCH:
1e8e9920 6873 {
75a70cf9 6874 unsigned int i;
6875 for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
1e8e9920 6876 {
75a70cf9 6877 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
1e8e9920 6878 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
75a70cf9 6879 if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
1e8e9920 6880 break;
6881 }
6882 }
6883 break;
6884
75a70cf9 6885 case GIMPLE_RETURN:
6886 diagnose_sb_0 (gsi_p, context, NULL);
1e8e9920 6887 break;
6888
6889 default:
6890 break;
6891 }
6892
6893 return NULL_TREE;
6894}
6895
bfec3452 6896static unsigned int
6897diagnose_omp_structured_block_errors (void)
1e8e9920 6898{
1e8e9920 6899 struct walk_stmt_info wi;
bfec3452 6900 gimple_seq body = gimple_body (current_function_decl);
1e8e9920 6901
6902 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
6903
6904 memset (&wi, 0, sizeof (wi));
75a70cf9 6905 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
1e8e9920 6906
6907 memset (&wi, 0, sizeof (wi));
1e8e9920 6908 wi.want_locations = true;
75a70cf9 6909 walk_gimple_seq (body, diagnose_sb_2, NULL, &wi);
1e8e9920 6910
6911 splay_tree_delete (all_labels);
6912 all_labels = NULL;
6913
bfec3452 6914 return 0;
1e8e9920 6915}
6916
bfec3452 6917static bool
6918gate_diagnose_omp_blocks (void)
6919{
6920 return flag_openmp != 0;
6921}
6922
6923struct gimple_opt_pass pass_diagnose_omp_blocks =
6924{
6925 {
6926 GIMPLE_PASS,
53b5ae07 6927 "*diagnose_omp_blocks", /* name */
bfec3452 6928 gate_diagnose_omp_blocks, /* gate */
6929 diagnose_omp_structured_block_errors, /* execute */
6930 NULL, /* sub */
6931 NULL, /* next */
6932 0, /* static_pass_number */
6933 TV_NONE, /* tv_id */
6934 PROP_gimple_any, /* properties_required */
6935 0, /* properties_provided */
6936 0, /* properties_destroyed */
6937 0, /* todo_flags_start */
6938 0, /* todo_flags_finish */
6939 }
6940};
6941
1e8e9920 6942#include "gt-omp-low.h"