/* Lowering pass for OpenMP directives.  Converts OpenMP directives
   into explicit calls to the runtime library (libgomp) and data
   marshalling to implement data sharing and copying clauses.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-gimple.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "tree-pass.h"
#include "ggc.h"
#include "except.h"
#include "splay-tree.h"
#include "optabs.h"
#include "cfgloop.h"

/* Lowering of OpenMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for parallel regions which are then moved to a new
   function, to be invoked by the thread library.  */
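
/* For instance (a sketch; the exact temporaries and call sequence
   vary), a directive such as

	#pragma omp parallel shared (a)
	  body;

   in a function foo is outlined into a child function foo.omp_fn.0
   that receives the shared data block, and the directive itself is
   ultimately expanded along the lines of

	.omp_data_o.a = a;
	__builtin_GOMP_parallel_start (foo.omp_fn.0, &.omp_data_o, 0);
	foo.omp_fn.0 (&.omp_data_o);
	__builtin_GOMP_parallel_end ();  */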

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

typedef struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  tree stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;
} omp_context;


struct omp_for_data_loop
{
  tree v, n1, n2, step;
  enum tree_code cond_code;
};

/* A structure describing the main elements of a parallel loop.  */

struct omp_for_data
{
  struct omp_for_data_loop loop;
  tree chunk_size, for_stmt;
  tree pre, iter_type;
  int collapse;
  bool have_nowait, have_ordered;
  enum omp_clause_schedule_kind sched_kind;
  struct omp_for_data_loop *loops;
};


static splay_tree all_contexts;
static int taskreg_nesting_level;
struct omp_region *root_omp_region;
static bitmap task_shared_vars;

static void scan_omp (tree *, omp_context *);
static void lower_omp (tree *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Find an OpenMP clause of type KIND within CLAUSES.  */

tree
find_omp_clause (tree clauses, enum tree_code kind)
{
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == kind)
      return clauses;

  return NULL_TREE;
}
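
/* For example, find_omp_clause (OMP_FOR_CLAUSES (stmt),
   OMP_CLAUSE_SCHEDULE) returns the schedule clause of an omp for
   statement, or NULL_TREE when none was given.  */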

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return TREE_CODE (ctx->stmt) == OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return TREE_CODE (ctx->stmt) == OMP_TASK;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return TREE_CODE (ctx->stmt) == OMP_PARALLEL
	 || TREE_CODE (ctx->stmt) == OMP_TASK;
}


/* Return true if REGION is a combined parallel+workshare region.  */

static inline bool
is_combined_parallel (struct omp_region *region)
{
  return region->is_combined_parallel;
}


/* Extract the header elements of parallel loop FOR_STMT and store
   them into *FD.  */
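
/* As an illustration (a sketch; N stands for an arbitrary bound), for

     #pragma omp for schedule (dynamic, 4)
     for (i = 0; i <= N; i++)

   this records loop.v = i, loop.n1 = 0, loop.n2 = N + 1 (the LE
   condition is canonicalized to LT below), loop.cond_code = LT_EXPR,
   loop.step = 1, sched_kind = OMP_CLAUSE_SCHEDULE_DYNAMIC and
   chunk_size = 4.  */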

static void
extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
                      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  fd->collapse = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
  if (fd->collapse > 1)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  fd->have_nowait = fd->have_ordered = false;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->chunk_size = NULL_TREE;
  collapse_iter = NULL;
  collapse_count = NULL;

  for (t = OMP_FOR_CLAUSES (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
        fd->have_nowait = true;
        break;
      case OMP_CLAUSE_ORDERED:
        fd->have_ordered = true;
        break;
      case OMP_CLAUSE_SCHEDULE:
        fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
        fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
        break;
      case OMP_CLAUSE_COLLAPSE:
        if (fd->collapse > 1)
          {
            collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
            collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
          }
      default:
        break;
      }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
         static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
          || fd->have_ordered
          || fd->collapse > 1)
        fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
                         ? integer_zero_node : integer_one_node;
    }

  for (i = 0; i < fd->collapse; i++)
    {
      if (fd->collapse == 1)
        loop = &fd->loop;
      else if (loops != NULL)
        loop = loops + i;
      else
        loop = &dummy_loop;

      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
      loop->v = GIMPLE_STMT_OPERAND (t, 0);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
                  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = GIMPLE_STMT_OPERAND (t, 1);

      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
      loop->cond_code = TREE_CODE (t);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      loop->n2 = TREE_OPERAND (t, 1);
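      /* Canonicalize <= and >= to < and > by bumping the bound, so the
         rest of the pass only has to deal with strict comparisons;
         e.g. a source-level "i <= N" becomes "i < N + 1".  */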
      switch (loop->cond_code)
        {
        case LT_EXPR:
        case GT_EXPR:
          break;
        case LE_EXPR:
          if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
            loop->n2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
                                    loop->n2, size_one_node);
          else
            loop->n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
                                    build_int_cst (TREE_TYPE (loop->n2), 1));
          loop->cond_code = LT_EXPR;
          break;
        case GE_EXPR:
          if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
            loop->n2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
                                    loop->n2, size_int (-1));
          else
            loop->n2 = fold_build2 (MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
                                    build_int_cst (TREE_TYPE (loop->n2), 1));
          loop->cond_code = GT_EXPR;
          break;
        default:
          gcc_unreachable ();
        }

      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
      gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == var);
      t = GIMPLE_STMT_OPERAND (t, 1);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      switch (TREE_CODE (t))
        {
        case PLUS_EXPR:
        case POINTER_PLUS_EXPR:
          loop->step = TREE_OPERAND (t, 1);
          break;
        case MINUS_EXPR:
          loop->step = TREE_OPERAND (t, 1);
          loop->step = fold_build1 (NEGATE_EXPR, TREE_TYPE (loop->step),
                                    loop->step);
          break;
        default:
          gcc_unreachable ();
        }

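      /* Choose an iteration type wide enough for the iteration count:
         stay with "long" when the bounds are known to fit, and fall
         back to "unsigned long long" for pointer iterators or when an
         unsigned or wider iteration variable might overflow "long".  */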
      if (iter_type != long_long_unsigned_type_node)
        {
          if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
            iter_type = long_long_unsigned_type_node;
          else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
                   && TYPE_PRECISION (TREE_TYPE (loop->v))
                      >= TYPE_PRECISION (iter_type))
            {
              tree n;

              if (loop->cond_code == LT_EXPR)
                n = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->v),
                                 loop->n2, loop->step);
              else
                n = loop->n1;
              if (TREE_CODE (n) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
                iter_type = long_long_unsigned_type_node;
            }
          else if (TYPE_PRECISION (TREE_TYPE (loop->v))
                   > TYPE_PRECISION (iter_type))
            {
              tree n1, n2;

              if (loop->cond_code == LT_EXPR)
                {
                  n1 = loop->n1;
                  n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (loop->v),
                                    loop->n2, loop->step);
                }
              else
                {
                  n1 = fold_build2 (MINUS_EXPR, TREE_TYPE (loop->v),
                                    loop->n2, loop->step);
                  n2 = loop->n1;
                }
              if (TREE_CODE (n1) != INTEGER_CST
                  || TREE_CODE (n2) != INTEGER_CST
                  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
                  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
                iter_type = long_long_unsigned_type_node;
            }
        }

      if (collapse_count && *collapse_count == NULL)
        {
          if ((i == 0 || count != NULL_TREE)
              && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
              && TREE_CONSTANT (loop->n1)
              && TREE_CONSTANT (loop->n2)
              && TREE_CODE (loop->step) == INTEGER_CST)
            {
              tree itype = TREE_TYPE (loop->v);

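              /* Compute this loop's iteration count as
                 (step - 1 + n2 - n1) / step for LT (mirrored for GT);
                 e.g. "for (i = 0; i < 10; i += 3)" yields
                 (3 - 1 + 10 - 0) / 3 = 4 iterations.  The per-loop
                 counts are then multiplied into the collapsed total.  */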
              if (POINTER_TYPE_P (itype))
                itype
                  = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
              t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
              t = fold_build2 (PLUS_EXPR, itype,
                               fold_convert (itype, loop->step), t);
              t = fold_build2 (PLUS_EXPR, itype, t,
                               fold_convert (itype, loop->n2));
              t = fold_build2 (MINUS_EXPR, itype, t,
                               fold_convert (itype, loop->n1));
              if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
                t = fold_build2 (TRUNC_DIV_EXPR, itype,
                                 fold_build1 (NEGATE_EXPR, itype, t),
                                 fold_build1 (NEGATE_EXPR, itype,
                                              fold_convert (itype,
                                                            loop->step)));
              else
                t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
                                 fold_convert (itype, loop->step));
              t = fold_convert (long_long_unsigned_type_node, t);
              if (count != NULL_TREE)
                count = fold_build2 (MULT_EXPR, long_long_unsigned_type_node,
                                     count, t);
              else
                count = t;
              if (TREE_CODE (count) != INTEGER_CST)
                count = NULL_TREE;
            }
          else
            count = NULL_TREE;
        }
    }

  if (count)
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
        iter_type = long_long_unsigned_type_node;
      else
        iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
        *collapse_count = fold_convert (iter_type, count);
      else
        *collapse_count = create_tmp_var (iter_type, ".count");
    }

  if (fd->collapse > 1)
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
}


/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
   is the immediate dominator of PAR_ENTRY_BB, return true if there
   are no data dependencies that would prevent expanding the parallel
   directive at PAR_ENTRY_BB as a combined parallel+workshare region.

   When expanding a combined parallel+workshare region, the call to
   the child function may need additional arguments in the case of
   OMP_FOR regions.  In some cases, these arguments are computed out
   of variables passed in from the parent to the child via 'struct
   .omp_data_s'.  For instance:

	#pragma omp parallel for schedule (guided, i * 4)
	for (j ...)

   is lowered into:

	# BLOCK 2 (PAR_ENTRY_BB)
	.omp_data_o.i = i;
	#pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)]

	# BLOCK 3 (WS_ENTRY_BB)
	.omp_data_i = &.omp_data_o;
	D.1667 = .omp_data_i->i;
	D.1598 = D.1667 * 4;
	#pragma omp for schedule (guided, D.1598)

   When we outline the parallel region, the call to the child function
   'bar.omp_fn.0' will need the value D.1598 in its argument list, but
   that value is computed *after* the call site.  So, in principle we
   cannot do the transformation.

   To see whether the code in WS_ENTRY_BB blocks the combined
   parallel+workshare call, we collect all the variables used in the
   OMP_FOR header and check whether they appear on the LHS of any
   statement in WS_ENTRY_BB.  If so, then we cannot emit the combined
   call.

   FIXME.  If we had the SSA form built at this point, we could merely
   hoist the code in block 3 into block 2 and be done with it.  But at
   this point we don't have dataflow information and though we could
   hack something up here, it is really not worth the aggravation.  */

static bool
workshare_safe_to_combine_p (basic_block par_entry_bb, basic_block ws_entry_bb)
{
  struct omp_for_data fd;
  tree par_stmt, ws_stmt;

  par_stmt = last_stmt (par_entry_bb);
  ws_stmt = last_stmt (ws_entry_bb);

  if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
    return true;

  gcc_assert (TREE_CODE (ws_stmt) == OMP_FOR);

  extract_omp_for_data (ws_stmt, &fd, NULL);

  if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
    return false;
  if (fd.iter_type != long_integer_type_node)
    return false;

  /* FIXME.  We give up too easily here.  If any of these arguments
     are not constants, they will likely involve variables that have
     been mapped into fields of .omp_data_s for sharing with the child
     function.  With appropriate data flow, it would be possible to
     see through this.  */
  if (!is_gimple_min_invariant (fd.loop.n1)
      || !is_gimple_min_invariant (fd.loop.n2)
      || !is_gimple_min_invariant (fd.loop.step)
      || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
    return false;

  return true;
}


/* Collect additional arguments needed to emit a combined
   parallel+workshare call.  WS_STMT is the workshare directive being
   expanded.  */

static tree
get_ws_args_for (tree ws_stmt)
{
  tree t;

  if (TREE_CODE (ws_stmt) == OMP_FOR)
    {
      struct omp_for_data fd;
      tree ws_args;

      extract_omp_for_data (ws_stmt, &fd, NULL);

      ws_args = NULL_TREE;
      if (fd.chunk_size)
        {
          t = fold_convert (long_integer_type_node, fd.chunk_size);
          ws_args = tree_cons (NULL, t, ws_args);
        }

      t = fold_convert (long_integer_type_node, fd.loop.step);
      ws_args = tree_cons (NULL, t, ws_args);

      t = fold_convert (long_integer_type_node, fd.loop.n2);
      ws_args = tree_cons (NULL, t, ws_args);

      t = fold_convert (long_integer_type_node, fd.loop.n1);
      ws_args = tree_cons (NULL, t, ws_args);

      return ws_args;
    }
  else if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
    {
      /* Number of sections is equal to the number of edges from the
         OMP_SECTIONS_SWITCH statement, except for the one to the exit
         of the sections region.  */
      basic_block bb = single_succ (bb_for_stmt (ws_stmt));
      t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
      t = tree_cons (NULL, t, NULL);
      return t;
    }

  gcc_unreachable ();
}
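
/* For instance (a sketch), for

     #pragma omp parallel for schedule (dynamic, 8)
     for (i = 0; i < n; i++) ...

   the list built above is (0, n, 1, 8) -- n1, n2, step, chunk -- which
   is appended to the outlined function's arguments when the combined
   GOMP_parallel_loop_dynamic_start call is emitted.  */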


/* Discover whether REGION is a combined parallel+workshare region.  */

static void
determine_parallel_type (struct omp_region *region)
{
  basic_block par_entry_bb, par_exit_bb;
  basic_block ws_entry_bb, ws_exit_bb;

  if (region == NULL || region->inner == NULL
      || region->exit == NULL || region->inner->exit == NULL
      || region->inner->cont == NULL)
    return;

  /* We only support parallel+for and parallel+sections.  */
  if (region->type != OMP_PARALLEL
      || (region->inner->type != OMP_FOR
          && region->inner->type != OMP_SECTIONS))
    return;

  /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
     WS_EXIT_BB -> PAR_EXIT_BB.  */
  par_entry_bb = region->entry;
  par_exit_bb = region->exit;
  ws_entry_bb = region->inner->entry;
  ws_exit_bb = region->inner->exit;

  if (single_succ (par_entry_bb) == ws_entry_bb
      && single_succ (ws_exit_bb) == par_exit_bb
      && workshare_safe_to_combine_p (par_entry_bb, ws_entry_bb)
      && (OMP_PARALLEL_COMBINED (last_stmt (par_entry_bb))
          || (last_and_only_stmt (ws_entry_bb)
              && last_and_only_stmt (par_exit_bb))))
    {
      tree ws_stmt = last_stmt (ws_entry_bb);

      if (region->inner->type == OMP_FOR)
        {
          /* If this is a combined parallel loop, we need to determine
             whether or not to use the combined library calls.  There
             are two cases where we do not apply the transformation:
             static loops and any kind of ordered loop.  In the first
             case, we already open code the loop so there is no need
             to do anything else.  In the latter case, the combined
             parallel loop call would still need extra synchronization
             to implement ordered semantics, so there would not be any
             gain in using the combined call.  */
          tree clauses = OMP_FOR_CLAUSES (ws_stmt);
          tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
          if (c == NULL
              || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
              || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
            {
              region->is_combined_parallel = false;
              region->inner->is_combined_parallel = false;
              return;
            }
        }

      region->is_combined_parallel = true;
      region->inner->is_combined_parallel = true;
      region->ws_args = get_ws_args_for (ws_stmt);
    }
}


/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Return true if DECL is a reference type.  */

static inline bool
is_reference (tree decl)
{
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}

/* Lookup variables in the decl or field splay trees.  The "maybe" form
   allows the variable not to have been entered; otherwise we assert
   that the variable has been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
                         ? ctx->sfield_map : ctx->field_map,
                         (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return n ? (tree) n->value : NULL_TREE;
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */
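
/* As an illustration: a plain non-addressable int shared across a
   parallel can use copy-in/copy-out (a by-value field in .omp_data_s),
   whereas an aggregate, an addressable variable, or anything shared
   with a task must be passed by pointer so every thread sees the one
   location.  */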

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      /* ??? Trivially accessible from anywhere.  But why would we even
         be passing an address in this case?  Should we simply assert
         this to be false, or should we have a cleanup pass that removes
         these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
        return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
         without analyzing the expression whether or not its location
         is accessible to anyone else.  In the case of nested parallel
         regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
        return true;

      /* Do not use copy-in/copy-out for variables that have their
         address taken.  */
      if (TREE_ADDRESSABLE (decl))
        return true;

      /* Disallow copy-in/out in nested parallel if
         decl is shared in outer parallel, otherwise
         each thread could store the shared variable
         in its own copy-in location, making the
         variable no longer really shared.  */
      if (!TREE_READONLY (decl) && shared_ctx->is_nested)
        {
          omp_context *up;

          for (up = shared_ctx->outer; up; up = up->outer)
            if (maybe_lookup_decl (decl, up))
              break;

          if (up && is_taskreg_ctx (up))
            {
              tree c;

              for (c = OMP_TASKREG_CLAUSES (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
                  break;

              if (c)
                return true;
            }
        }

      /* For tasks avoid using copy-in/out, unless they are readonly
         (in which case just copy-in is used).  As tasks can be
         deferred or executed in a different thread, when GOMP_task
         returns, the task hasn't necessarily terminated.  */
      if (!TREE_READONLY (decl) && is_task_ctx (shared_ctx))
        {
          tree outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
          if (is_gimple_reg (outer))
            {
              /* Taking address of OUTER in lower_send_shared_vars
                 might need regimplification of everything that uses the
                 variable.  */
              if (!task_shared_vars)
                task_shared_vars = BITMAP_ALLOC (NULL);
              bitmap_set_bit (task_shared_vars, DECL_UID (outer));
              TREE_ADDRESSABLE (outer) = 1;
            }
          return true;
        }
    }

  return false;
}

/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  DECL_SOURCE_LOCATION (copy) = DECL_SOURCE_LOCATION (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;

  return copy;
}

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  TREE_CHAIN (copy) = ctx->block_vars;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_fold_indirect_ref (ctx->receiver_decl);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL);
  if (by_ref)
    x = build_fold_indirect_ref (x);

  return x;
}
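
/* E.g. (a sketch) for a variable X this yields .omp_data_i->x when the
   field holds the value itself, or *.omp_data_i->x when BY_REF made
   the field a pointer to the original location.  */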

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx);
      x = build_fold_indirect_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if (ctx->outer)
    x = lookup_decl (var, ctx->outer);
  else if (is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else
    gcc_unreachable ();

  if (is_reference (var))
    x = build_fold_indirect_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  tree field = lookup_sfield (var, ctx);
  return build3 (COMPONENT_REF, TREE_TYPE (field),
                 ctx->sender_decl, field, NULL);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
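
/* MASK selects where the field goes: bit 0 installs it in
   CTX->RECORD_TYPE/FIELD_MAP (the block read by the receiver), bit 1
   in CTX->SRECORD_TYPE/SFIELD_MAP (the sender-side block used by task
   constructs); a MASK of 3 installs it in both.  */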

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;

  gcc_assert ((mask & 1) == 0
              || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
              || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));

  type = TREE_TYPE (var);
  if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      DECL_ALIGN (field) = DECL_ALIGN (var);
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    DECL_ALIGN (field) = TYPE_ALIGN (type);

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
        {
          sfield = build_decl (FIELD_DECL, DECL_NAME (var), type);
          DECL_ABSTRACT_ORIGIN (sfield) = var;
          DECL_ALIGN (sfield) = DECL_ALIGN (field);
          DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
          TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
          insert_field_into_struct (ctx->srecord_type, sfield);
        }
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
        {
          tree t;

          ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
          ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
          for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
            {
              sfield = build_decl (FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
              DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
              insert_field_into_struct (ctx->srecord_type, sfield);
              splay_tree_insert (ctx->sfield_map,
                                 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
                                 (splay_tree_value) sfield);
            }
        }
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
                                : ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, (splay_tree_key) var,
                       (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
                       (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
        size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label ();
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
        return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
        return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}


/* Debugging dumps for parallel regions.  */
void dump_omp_region (FILE *, struct omp_region *, int);
void debug_omp_region (struct omp_region *);
void debug_all_omp_regions (void);

/* Dump the parallel region tree rooted at REGION.  */

void
dump_omp_region (FILE *file, struct omp_region *region, int indent)
{
  fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
           tree_code_name[region->type]);

  if (region->inner)
    dump_omp_region (file, region->inner, indent + 4);

  if (region->cont)
    {
      fprintf (file, "%*sbb %d: OMP_CONTINUE\n", indent, "",
               region->cont->index);
    }

  if (region->exit)
    fprintf (file, "%*sbb %d: OMP_RETURN\n", indent, "",
             region->exit->index);
  else
    fprintf (file, "%*s[no exit marker]\n", indent, "");

  if (region->next)
    dump_omp_region (file, region->next, indent);
}

void
debug_omp_region (struct omp_region *region)
{
  dump_omp_region (stderr, region, 0);
}

void
debug_all_omp_regions (void)
{
  dump_omp_region (stderr, root_omp_region, 0);
}


/* Create a new parallel region starting at STMT inside region PARENT.  */

struct omp_region *
new_omp_region (basic_block bb, enum tree_code type, struct omp_region *parent)
{
  struct omp_region *region = xcalloc (1, sizeof (*region));

  region->outer = parent;
  region->entry = bb;
  region->type = type;

  if (parent)
    {
      /* This is a nested region.  Add it to the list of inner
         regions in PARENT.  */
      region->next = parent->inner;
      parent->inner = region;
    }
  else
    {
      /* This is a toplevel region.  Add it to the list of toplevel
         regions in ROOT_OMP_REGION.  */
      region->next = root_omp_region;
      root_omp_region = region;
    }

  return region;
}

/* Release the memory associated with the region tree rooted at REGION.  */

static void
free_omp_region_1 (struct omp_region *region)
{
  struct omp_region *i, *n;

  for (i = region->inner; i ; i = n)
    {
      n = i->next;
      free_omp_region_1 (i);
    }

  free (region);
}

/* Release the memory for the entire omp region tree.  */

void
free_omp_regions (void)
{
  struct omp_region *r, *n;
  for (r = root_omp_region; r ; r = n)
    {
      n = r->next;
      free_omp_region_1 (r);
    }
  root_omp_region = NULL;
}


/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (tree stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
                     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node (current_function_decl);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_region = -1;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = pointer_map_create ();

  return ctx;
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  pointer_map_destroy (ctx->cb.decl_map);

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = TREE_CHAIN (t))
        DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = TREE_CHAIN (f))
        {
          tree new_f = copy_node (f);
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          TREE_CHAIN (new_f) = new_fields;
          walk_tree (&DECL_SIZE (new_f), copy_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_SIZE_UNIT (new_f), copy_body_r, &ctx->cb, NULL);
          walk_tree (&DECL_FIELD_OFFSET (new_f), copy_body_r, &ctx->cb, NULL);
          new_fields = new_f;

          /* Arrange to be able to look up the receiver field
             given the sender field.  */
          splay_tree_insert (ctx->field_map, (splay_tree_key) f,
                             (splay_tree_value) new_f);
        }
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */
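
/* For instance (a sketch), given

     #pragma omp parallel shared (x) private (y)

   X is installed both as a field of the communication record
   .omp_data_s and as a context-local copy, while Y only gets a
   context-local replacement decl.  */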

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          decl = OMP_CLAUSE_DECL (c);
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            goto do_private;
          else if (!is_variable_sized (decl))
            install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_SHARED:
          gcc_assert (is_taskreg_ctx (ctx));
          decl = OMP_CLAUSE_DECL (c);
          gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
                      || !is_variable_sized (decl));
          /* Global variables don't need to be copied,
             the receiver side will use them directly.  */
          if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            break;
          by_ref = use_pointer_for_field (decl, ctx);
          if (! TREE_READONLY (decl)
              || TREE_ADDRESSABLE (decl)
              || by_ref
              || is_reference (decl))
            {
              install_var_field (decl, by_ref, 3, ctx);
              install_var_local (decl, ctx);
              break;
            }
          /* We don't need to copy const scalar vars back.  */
          OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
          goto do_private;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
        do_private:
          if (is_variable_sized (decl))
            {
              if (is_task_ctx (ctx))
                install_var_field (decl, false, 1, ctx);
              break;
            }
          else if (is_taskreg_ctx (ctx))
            {
              bool global
                = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
              by_ref = use_pointer_for_field (decl, NULL);

              if (is_task_ctx (ctx)
                  && (global || by_ref || is_reference (decl)))
                {
                  install_var_field (decl, false, 1, ctx);
                  if (!global)
                    install_var_field (decl, by_ref, 2, ctx);
                }
              else if (!global)
                install_var_field (decl, by_ref, 3, ctx);
            }
          install_var_local (decl, ctx);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
          if (ctx->outer)
            scan_omp (&OMP_CLAUSE_DECL (c), ctx->outer);
          /* FALLTHRU */

        case OMP_CLAUSE_COPYIN:
          decl = OMP_CLAUSE_DECL (c);
          by_ref = use_pointer_for_field (decl, NULL);
          install_var_field (decl, by_ref, 3, ctx);
          break;

        case OMP_CLAUSE_DEFAULT:
          ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
          break;

        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_SCHEDULE:
          if (ctx->outer)
            scan_omp (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;

        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
          break;

        default:
          gcc_unreachable ();
        }
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
          if (OMP_CLAUSE_LASTPRIVATE_STMT (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
          /* FALLTHRU */

        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          if (is_variable_sized (decl))
            install_var_local (decl, ctx);
          fixup_remapped_decl (decl, ctx,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                               && OMP_CLAUSE_PRIVATE_DEBUG (c));
          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
              && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            scan_array_reductions = true;
          break;

        case OMP_CLAUSE_SHARED:
          decl = OMP_CLAUSE_DECL (c);
          if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
            fixup_remapped_decl (decl, ctx, false);
          break;

        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_DEFAULT:
        case OMP_CLAUSE_IF:
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_NOWAIT:
        case OMP_CLAUSE_ORDERED:
        case OMP_CLAUSE_COLLAPSE:
        case OMP_CLAUSE_UNTIED:
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (scan_array_reductions)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
          && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
        {
          scan_omp (&OMP_CLAUSE_REDUCTION_INIT (c), ctx);
          scan_omp (&OMP_CLAUSE_REDUCTION_MERGE (c), ctx);
        }
      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
               && OMP_CLAUSE_LASTPRIVATE_STMT (c))
        scan_omp (&OMP_CLAUSE_LASTPRIVATE_STMT (c), ctx);
}

/* Create a new name for omp child function.  Returns an identifier.  */
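
/* For a containing function foo this produces names like foo.omp_fn.0
   (matching the bar.omp_fn.0 seen in the combined-parallel example
   above); the exact separator depends on ASM_FORMAT_PRIVATE_NAME and
   the NO_DOT_IN_LABEL/NO_DOLLAR_IN_LABEL target macros.  */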

static GTY(()) unsigned int tmp_ompfn_id_num;

static tree
create_omp_child_function_name (bool task_copy)
{
  tree name = DECL_ASSEMBLER_NAME (current_function_decl);
  size_t len = IDENTIFIER_LENGTH (name);
  char *tmp_name, *prefix;
  const char *suffix;

  suffix = task_copy ? "_omp_cpyfn" : "_omp_fn";
  prefix = alloca (len + strlen (suffix) + 1);
  memcpy (prefix, IDENTIFIER_POINTER (name), len);
  strcpy (prefix + len, suffix);
#ifndef NO_DOT_IN_LABEL
  prefix[len] = '.';
#elif !defined NO_DOLLAR_IN_LABEL
  prefix[len] = '$';
#endif
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, tmp_ompfn_id_num++);
  return get_identifier (tmp_name);
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
                                     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (FUNCTION_DECL, name, type);
  decl = lang_hooks.decls.pushdecl (decl);

  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    OMP_TASK_COPYFN (ctx->stmt) = decl;

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);

  t = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_RESULT (decl) = t;

  t = build_decl (PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (PARM_DECL, get_identifier (".omp_data_o"),
                      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  DECL_SOURCE_LOCATION (decl) = EXPR_LOCATION (ctx->stmt);
  cfun->function_end_locus = EXPR_LOCATION (ctx->stmt);
  pop_cfun ();
}


/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (OMP_PARALLEL_BODY (*stmt_p))
      && find_omp_clause (OMP_CLAUSES (*stmt_p), OMP_CLAUSE_COPYIN) == NULL)
    {
      *stmt_p = build_empty_stmt ();
      return;
    }

  ctx = new_omp_context (*stmt_p, outer_ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;
  create_omp_child_function (ctx, false);
  OMP_PARALLEL_FN (*stmt_p) = ctx->cb.dst_fn;

  scan_sharing_clauses (OMP_PARALLEL_CLAUSES (*stmt_p), ctx);
  scan_omp (&OMP_PARALLEL_BODY (*stmt_p), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (tree *stmt_p, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  /* Ignore task directives with empty bodies.  */
  if (optimize > 0
      && empty_body_p (OMP_TASK_BODY (*stmt_p)))
    {
      *stmt_p = build_empty_stmt ();
      return;
    }

  ctx = new_omp_context (*stmt_p, outer_ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;
  create_omp_child_function (ctx, false);
  OMP_TASK_FN (*stmt_p) = ctx->cb.dst_fn;

  scan_sharing_clauses (OMP_TASK_CLAUSES (*stmt_p), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (TYPE_DECL, name, ctx->srecord_type);
      TYPE_NAME (ctx->srecord_type) = name;
      create_omp_child_function (ctx, true);
    }

  scan_omp (&OMP_TASK_BODY (*stmt_p), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      OMP_TASK_ARG_SIZE (*stmt_p)
        = build_int_cst (long_integer_type_node, 0);
      OMP_TASK_ARG_ALIGN (*stmt_p)
        = build_int_cst (long_integer_type_node, 1);
    }
  else
    {
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
        if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
            || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
          {
            *q = *p;
            *p = TREE_CHAIN (*p);
            TREE_CHAIN (*q) = NULL_TREE;
            q = &TREE_CHAIN (*q);
          }
        else
          p = &TREE_CHAIN (*p);
      *p = vla_fields;
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
        layout_type (ctx->srecord_type);
      OMP_TASK_ARG_SIZE (*stmt_p)
        = fold_convert (long_integer_type_node,
                        TYPE_SIZE_UNIT (ctx->record_type));
      OMP_TASK_ARG_ALIGN (*stmt_p)
        = build_int_cst (long_integer_type_node,
                         TYPE_ALIGN_UNIT (ctx->record_type));
    }
}


/* Scan an OpenMP loop directive.  */

static void
scan_omp_for (tree *stmt_p, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree stmt;
  int i;

  stmt = *stmt_p;
  ctx = new_omp_context (stmt, outer_ctx);

  scan_sharing_clauses (OMP_FOR_CLAUSES (stmt), ctx);

  scan_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
    {
      scan_omp (&TREE_VEC_ELT (OMP_FOR_INIT (stmt), i), ctx);
      scan_omp (&TREE_VEC_ELT (OMP_FOR_COND (stmt), i), ctx);
      scan_omp (&TREE_VEC_ELT (OMP_FOR_INCR (stmt), i), ctx);
    }
  scan_omp (&OMP_FOR_BODY (stmt), ctx);
}

/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (tree *stmt_p, omp_context *outer_ctx)
{
  tree stmt;
  omp_context *ctx;

  stmt = *stmt_p;
  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (OMP_SECTIONS_CLAUSES (stmt), ctx);
  scan_omp (&OMP_SECTIONS_BODY (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (tree *stmt_p, omp_context *outer_ctx)
{
  tree stmt = *stmt_p;
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (OMP_SINGLE_CLAUSES (stmt), ctx);
  scan_omp (&OMP_SINGLE_BODY (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}


/* Check OpenMP nesting restrictions.  */
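
/* For example, the work-sharing warning below fires on

     #pragma omp single
     {
       #pragma omp for
       for (i = 0; i < n; i++)
         ...
     }

   because the loop is closely nested inside another work-sharing
   region instead of directly inside a parallel.  */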
1728static void
1729check_omp_nesting_restrictions (tree t, omp_context *ctx)
1730{
1731 switch (TREE_CODE (t))
1732 {
1733 case OMP_FOR:
1734 case OMP_SECTIONS:
1735 case OMP_SINGLE:
a68ab351 1736 case CALL_EXPR:
a6fc8e21
JJ
1737 for (; ctx != NULL; ctx = ctx->outer)
1738 switch (TREE_CODE (ctx->stmt))
1739 {
1740 case OMP_FOR:
1741 case OMP_SECTIONS:
1742 case OMP_SINGLE:
1743 case OMP_ORDERED:
1744 case OMP_MASTER:
a68ab351
JJ
1745 case OMP_TASK:
1746 if (TREE_CODE (t) == CALL_EXPR)
1747 {
1748 warning (0, "barrier region may not be closely nested inside "
1749 "of work-sharing, critical, ordered, master or "
1750 "explicit task region");
1751 return;
1752 }
a6fc8e21 1753 warning (0, "work-sharing region may not be closely nested inside "
a68ab351
JJ
1754 "of work-sharing, critical, ordered, master or explicit "
1755 "task region");
a6fc8e21
JJ
1756 return;
1757 case OMP_PARALLEL:
1758 return;
1759 default:
1760 break;
1761 }
1762 break;
1763 case OMP_MASTER:
1764 for (; ctx != NULL; ctx = ctx->outer)
1765 switch (TREE_CODE (ctx->stmt))
1766 {
1767 case OMP_FOR:
1768 case OMP_SECTIONS:
1769 case OMP_SINGLE:
a68ab351 1770 case OMP_TASK:
a6fc8e21 1771 warning (0, "master region may not be closely nested inside "
a68ab351 1772 "of work-sharing or explicit task region");
a6fc8e21
JJ
1773 return;
1774 case OMP_PARALLEL:
1775 return;
1776 default:
1777 break;
1778 }
1779 break;
1780 case OMP_ORDERED:
1781 for (; ctx != NULL; ctx = ctx->outer)
1782 switch (TREE_CODE (ctx->stmt))
1783 {
1784 case OMP_CRITICAL:
a68ab351 1785 case OMP_TASK:
a6fc8e21 1786 warning (0, "ordered region may not be closely nested inside "
a68ab351 1787 "of critical or explicit task region");
a6fc8e21
JJ
1788 return;
1789 case OMP_FOR:
1790 if (find_omp_clause (OMP_CLAUSES (ctx->stmt),
1791 OMP_CLAUSE_ORDERED) == NULL)
1792 warning (0, "ordered region must be closely nested inside "
1793 "a loop region with an ordered clause");
1794 return;
1795 case OMP_PARALLEL:
1796 return;
1797 default:
1798 break;
1799 }
1800 break;
1801 case OMP_CRITICAL:
1802 for (; ctx != NULL; ctx = ctx->outer)
1803 if (TREE_CODE (ctx->stmt) == OMP_CRITICAL
1804 && OMP_CRITICAL_NAME (t) == OMP_CRITICAL_NAME (ctx->stmt))
1805 {
1806 warning (0, "critical region may not be nested inside a critical "
1807 "region with the same name");
1808 return;
1809 }
1810 break;
1811 default:
1812 break;
1813 }
1814}
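
/* A minimal, hypothetical user program (not part of this file) showing
   the kind of nesting these checks diagnose: the "single" below is a
   work-sharing region closely nested inside another work-sharing
   region, so the work-sharing warning above fires:

	#pragma omp parallel
	#pragma omp for
	for (i = 0; i < n; i++)
	  {
	  #pragma omp single	// invalid: work-sharing in work-sharing
	    f (i);
	  }

   The walk stops at the first enclosing OMP_PARALLEL, which is why a
   construct immediately inside its own parallel region is accepted.  */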


/* Callback for walk_stmts used to scan for OpenMP directives at TP.  */

static tree
scan_omp_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  omp_context *ctx = wi->info;
  tree t = *tp;

  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* Check the OpenMP nesting restrictions.  */
  if (ctx != NULL)
    {
      if (OMP_DIRECTIVE_P (t))
	check_omp_nesting_restrictions (t, ctx);
      else if (TREE_CODE (t) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (t);
	  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	    check_omp_nesting_restrictions (t, ctx);
	}
    }

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (tp, ctx);
      taskreg_nesting_level--;
      break;

    case OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (tp, ctx);
      taskreg_nesting_level--;
      break;

    case OMP_FOR:
      scan_omp_for (tp, ctx);
      break;

    case OMP_SECTIONS:
      scan_omp_sections (tp, ctx);
      break;

    case OMP_SINGLE:
      scan_omp_single (tp, ctx);
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      ctx = new_omp_context (*tp, ctx);
      scan_omp (&OMP_BODY (*tp), ctx);
      break;

    case BIND_EXPR:
      {
	tree var;
	*walk_subtrees = 1;

	for (var = BIND_EXPR_VARS (t); var ; var = TREE_CHAIN (var))
	  insert_decl_map (&ctx->cb, var, var);
      }
      break;

    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	*tp = remap_decl (t, &ctx->cb);
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}


/* Scan all the statements starting at STMT_P.  CTX contains context
   information about the OpenMP directives and clauses found during
   the scan.  */

static void
scan_omp (tree *stmt_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.callback = scan_omp_1;
  wi.info = ctx;
  wi.want_bind_expr = (ctx != NULL);
  wi.want_locations = true;

  saved_location = input_location;
  walk_stmts (&wi, stmt_p);
  input_location = saved_location;
}

/* Re-gimplification and code generation routines.  */

/* Build a call to GOMP_barrier.  */

static tree
build_omp_barrier (void)
{
  return build_call_expr (built_in_decls[BUILT_IN_GOMP_BARRIER], 0);
}

/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (tree stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}


/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs shown for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	     iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;


    ** This is a problem.  The symbol iD.1562 cannot be referenced
       inside the body of the outer parallel region.  But since we are
       emitting this copy operation while expanding the inner parallel
       directive, we need to access the CTX structure of the outer
       parallel directive to get the correct mapping:

	  .omp_data_o.2.i = .omp_data_i.1->i

    Since there may be other workshare or parallel directives enclosing
    the parallel directive, it may be necessary to walk up the context
    parent chain.  This is not a problem in general because nested
    parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}


/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}


/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  switch (OMP_CLAUSE_REDUCTION_CODE (clause))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return fold_convert (type, integer_zero_node);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert (type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert (type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (TYPE_MODE (type)))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (TYPE_MODE (type)))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
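
/* Worked example (hypothetical user code, not from this file): with

	s = 5;
	#pragma omp parallel for reduction(+:s)
	for (i = 0; i < n; i++)
	  s += a[i];

   each thread's private S starts at the PLUS_EXPR identity 0 chosen
   above; a MAX reduction would likewise start at -INF (or
   TYPE_MIN_VALUE for integral types) and BIT_AND_EXPR at ~0, so
   combining the identity-initialized private copies with the original
   value 5 leaves the final result correct.  */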

/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
			 omp_context *ctx)
{
  tree_stmt_iterator diter;
  tree c, dtor, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  int pass;

  *dlist = alloc_stmt_list ();
  diter = tsi_start (*dlist);
  copyin_seq = NULL;

  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  */
  for (pass = 0; pass < 2; ++pass)
    {
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;

	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
		{
		  gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
		  continue;
		}
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	    case OMP_CLAUSE_REDUCTION:
	      break;
	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		{
		  lastprivate_firstprivate = true;
		  if (pass != 0)
		    continue;
		}
	      break;
	    default:
	      continue;
	    }

	  new_var = var = OMP_CLAUSE_DECL (c);
	  if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
		{
		  ptr = DECL_VALUE_EXPR (new_var);
		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
		  ptr = TREE_OPERAND (ptr, 0);
		  gcc_assert (DECL_P (ptr));
		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		  x = build_call_expr (built_in_decls[BUILT_IN_ALLOCA], 1, x);
		  x = fold_convert (TREE_TYPE (ptr), x);
		  x = build_gimple_modify_stmt (ptr, x);
		  gimplify_and_add (x, ilist);
		}
	    }
	  else if (is_reference (var))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  Note that this doesn't apply to
		 C++, since reference types are disallowed in data
		 sharing clauses there, except for NRV optimized
		 return values.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
		{
		  x = build_receiver_ref (var, false, ctx);
		  x = build_fold_addr_expr (x);
		}
	      else if (TREE_CONSTANT (x))
		{
		  const char *name = NULL;
		  if (DECL_NAME (var))
		    name = IDENTIFIER_POINTER (DECL_NAME (new_var));

		  x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					  name);
		  gimple_add_tmp_var (x);
		  x = build_fold_addr_expr_with_type (x, TREE_TYPE (new_var));
		}
	      else
		{
		  x = build_call_expr (built_in_decls[BUILT_IN_ALLOCA], 1, x);
		  x = fold_convert (TREE_TYPE (new_var), x);
		}

	      x = build_gimple_modify_stmt (new_var, x);
	      gimplify_and_add (x, ilist);

	      new_var = build_fold_indirect_ref (new_var);
	    }
	  else if (c_kind == OMP_CLAUSE_REDUCTION
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;

	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Shared global vars are just accessed directly.  */
	      if (is_global_var (new_var))
		break;
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, ctx);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		TREE_NO_WARNING (var) = 1;
	      break;

	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
		x = build_outer_var_ref (var, ctx);
	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
		{
		  if (is_task_ctx (ctx))
		    x = build_receiver_ref (var, false, ctx);
		  else
		    x = build_outer_var_ref (var, ctx);
		}
	      else
		x = NULL;
	      x = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
	      if (x)
		gimplify_and_add (x, ilist);
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  dtor = x;
		  gimplify_stmt (&dtor);
		  tsi_link_before (&diter, dtor, TSI_SAME_STMT);
		}
	      break;

	    case OMP_CLAUSE_FIRSTPRIVATE:
	      if (is_task_ctx (ctx))
		{
		  if (is_reference (var) || is_variable_sized (var))
		    goto do_dtor;
		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
									  ctx))
			   || use_pointer_for_field (var, NULL))
		    {
		      x = build_receiver_ref (var, false, ctx);
		      SET_DECL_VALUE_EXPR (new_var, x);
		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
		      goto do_dtor;
		    }
		}
	      x = build_outer_var_ref (var, ctx);
	      x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
	      gimplify_and_add (x, ilist);
	      goto do_dtor;
	      break;

	    case OMP_CLAUSE_COPYIN:
	      by_ref = use_pointer_for_field (var, NULL);
	      x = build_receiver_ref (var, by_ref, ctx);
	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
	      append_to_statement_list (x, &copyin_seq);
	      copyin_by_ref |= by_ref;
	      break;

	    case OMP_CLAUSE_REDUCTION:
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  x = build_outer_var_ref (var, ctx);

		  if (is_reference (var))
		    x = build_fold_addr_expr (x);
		  SET_DECL_VALUE_EXPR (placeholder, x);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), ilist);
		  OMP_CLAUSE_REDUCTION_INIT (c) = NULL;
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (new_var));
		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
		  x = build_gimple_modify_stmt (new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  */
  if (copyin_by_ref || lastprivate_firstprivate)
    gimplify_and_add (build_omp_barrier (), ilist);
}
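
/* A sketch of the receiver-side prologue this produces (hypothetical,
   not literal GIMPLE): for "firstprivate(a) copyin(t)" the child body
   is prefixed roughly with

	a.1 = .omp_data_i->a;			// copy-construct firstprivate
	if (__builtin_omp_get_thread_num () != 0)
	  t = .omp_data_i->t;			// copyin skips the master

   followed by a barrier when, e.g., a copyin variable was passed by
   reference, per the comment above.  */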

/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
			   omp_context *ctx)
{
  tree sub_list, x, c;
  bool par_clauses = false;

  /* Early exit if there are no lastprivate clauses.  */
  clauses = find_omp_clause (clauses, OMP_CLAUSE_LASTPRIVATE);
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  sub_list = alloc_stmt_list ();

  for (c = clauses; c ;)
    {
      tree var, new_var;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
	{
	  var = OMP_CLAUSE_DECL (c);
	  new_var = lookup_decl (var, ctx);

	  if (OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c), &sub_list);
	  OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL;

	  x = build_outer_var_ref (var, ctx);
	  if (is_reference (var))
	    new_var = build_fold_indirect_ref (new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  append_to_statement_list (x, &sub_list);
	}
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (predicate)
    x = build3 (COND_EXPR, void_type_node, predicate, sub_list, NULL);
  else
    x = sub_list;

  gimplify_and_add (x, stmt_list);
}
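
/* Schematically (a sketch, assuming a loop with "lastprivate(x)"): the
   copy-back sequence built above is guarded by PREDICATE, which the
   caller arranges to be true only in the thread that ran the
   sequentially last iteration, roughly

	if (<this thread ran the last iteration>)
	  x = x.1;				// private copy flows back out

   so exactly one thread writes the original variable.  */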

/* Generate code to implement the REDUCTION clauses.  */

static void
lower_reduction_clauses (tree clauses, tree *stmt_list, omp_context *ctx)
{
  tree sub_list = NULL, x, c;
  int count = 0;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* Never use OMP_ATOMIC for array reductions.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var;
      enum tree_code code;

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
	continue;

      var = OMP_CLAUSE_DECL (c);
      new_var = lookup_decl (var, ctx);
      if (is_reference (var))
	new_var = build_fold_indirect_ref (new_var);
      ref = build_outer_var_ref (var, ctx);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  tree addr = build_fold_addr_expr (ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2 (code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  gimplify_and_add (x, stmt_list);
	  return;
	}

      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (is_reference (var))
	    ref = build_fold_addr_expr (ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), &sub_list);
	  OMP_CLAUSE_REDUCTION_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  x = build_gimple_modify_stmt (ref, x);
	  append_to_statement_list (x, &sub_list);
	}
    }

  x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ATOMIC_START], 0);
  gimplify_and_add (x, stmt_list);

  gimplify_and_add (sub_list, stmt_list);

  x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ATOMIC_END], 0);
  gimplify_and_add (x, stmt_list);
}
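
/* The two strategies, sketched (not literal dump output): with a single
   "reduction(+:s)" clause the merge is one OMP_ATOMIC update of the
   outer variable,

	*&s = *&s + s.1;			// atomic

   while with several clauses (or an array reduction) all merges are
   serialized inside one lock, using the calls built above:

	GOMP_atomic_start ();
	s = s + s.1;
	p = p * p.1;
	GOMP_atomic_end ();  */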

/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, tree *slist, tree *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, x;
      bool by_ref;

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = lookup_decl_in_outer_ctx (var, ctx);
      x = by_ref ? build_fold_addr_expr (x) : x;
      x = build_gimple_modify_stmt (ref, x);
      gimplify_and_add (x, slist);

      ref = build_receiver_ref (var, by_ref, ctx);
      if (is_reference (var))
	{
	  ref = build_fold_indirect_ref (ref);
	  var = build_fold_indirect_ref (var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, var, ref);
      gimplify_and_add (x, rlist);
    }
}
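
/* In outline (a sketch of intent, not literal output): the thread that
   executed the "single" body stores each copyprivate variable, or its
   address, into the broadcast record via SLIST,

	.omp_copy_o.x = x;			// or &x when by reference

   and every other thread assigns it back out of that record via RLIST,

	x = .omp_copy_i->x;

   so all threads leave the construct with the same value.  */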

/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      var = lookup_decl_in_outer_ctx (val, ctx);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var))
	continue;
      if (is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	  do_in = true;
	  do_out = !(by_ref || is_reference (val));
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr (var) : var;
	  x = build_gimple_modify_stmt (ref, x);
	  gimplify_and_add (x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  x = build_gimple_modify_stmt (var, ref);
	  gimplify_and_add (x, olist);
	}
    }
}

/* Generate code to implement SHARED from the sender (aka parent) side.
   This is trickier, since OMP_PARALLEL_CLAUSES doesn't list things that
   got automatically shared.  */

static void
lower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
{
  tree var, ovar, nvar, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive, find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  x = build_gimple_modify_stmt (x, var);
	  gimplify_and_add (x, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  x = build_gimple_modify_stmt (x, var);
	  gimplify_and_add (x, ilist);

	  if (!TREE_READONLY (var))
	    {
	      x = build_sender_ref (ovar, ctx);
	      x = build_gimple_modify_stmt (var, x);
	      gimplify_and_add (x, olist);
	    }
	}
    }
}

/* Build the function calls to GOMP_parallel_start etc. to actually
   generate the parallel operation.  REGION is the parallel region
   being expanded.  BB is the block where to insert the code.  WS_ARGS
   will be set if this is a call to a combined parallel+workshare
   construct; it contains the list of additional arguments needed by
   the workshare construct.  */

static void
expand_parallel_call (struct omp_region *region, basic_block bb,
		      tree entry_stmt, tree ws_args)
{
  tree t, t1, t2, val, cond, c, clauses;
  block_stmt_iterator si;
  int start_ix;

  clauses = OMP_PARALLEL_CLAUSES (entry_stmt);

  /* Determine what flavor of GOMP_parallel_start we will be
     emitting.  */
  start_ix = BUILT_IN_GOMP_PARALLEL_START;
  if (is_combined_parallel (region))
    {
      switch (region->inner->type)
	{
	case OMP_FOR:
	  gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
	  start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
		     + (region->inner->sched_kind
			== OMP_CLAUSE_SCHEDULE_RUNTIME
			? 3 : region->inner->sched_kind);
	  break;
	case OMP_SECTIONS:
	  start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  /* By default, the value of NUM_THREADS is zero (selected at run time)
     and there is no conditional.  */
  cond = NULL_TREE;
  val = build_int_cst (unsigned_type_node, 0);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = OMP_CLAUSE_IF_EXPR (c);

  c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
  if (c)
    val = OMP_CLAUSE_NUM_THREADS_EXPR (c);

  /* Ensure 'val' is of the correct type.  */
  val = fold_convert (unsigned_type_node, val);

  /* If we found the clause 'if (cond)', build either
     (cond != 0) or (cond ? val : 1u).  */
  if (cond)
    {
      block_stmt_iterator si;

      cond = gimple_boolify (cond);

      if (integer_zerop (val))
	val = fold_build2 (EQ_EXPR, unsigned_type_node, cond,
			   build_int_cst (TREE_TYPE (cond), 0));
      else
	{
	  basic_block cond_bb, then_bb, else_bb;
	  edge e, e_then, e_else;
	  tree t, tmp_then, tmp_else, tmp_join, tmp_var;

	  tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      tmp_then = make_ssa_name (tmp_var, NULL_TREE);
	      tmp_else = make_ssa_name (tmp_var, NULL_TREE);
	      tmp_join = make_ssa_name (tmp_var, NULL_TREE);
	    }
	  else
	    {
	      tmp_then = tmp_var;
	      tmp_else = tmp_var;
	      tmp_join = tmp_var;
	    }

	  e = split_block (bb, NULL);
	  cond_bb = e->src;
	  bb = e->dest;
	  remove_edge (e);

	  then_bb = create_empty_bb (cond_bb);
	  else_bb = create_empty_bb (then_bb);
	  set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
	  set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);

	  t = build3 (COND_EXPR, void_type_node,
		      cond, NULL_TREE, NULL_TREE);

	  si = bsi_start (cond_bb);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  si = bsi_start (then_bb);
	  t = build_gimple_modify_stmt (tmp_then, val);
	  if (gimple_in_ssa_p (cfun))
	    SSA_NAME_DEF_STMT (tmp_then) = t;
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  si = bsi_start (else_bb);
	  t = build_gimple_modify_stmt (tmp_else,
					build_int_cst (unsigned_type_node, 1));
	  if (gimple_in_ssa_p (cfun))
	    SSA_NAME_DEF_STMT (tmp_else) = t;
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
	  make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
	  e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
	  e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);

	  if (gimple_in_ssa_p (cfun))
	    {
	      tree phi = create_phi_node (tmp_join, bb);
	      SSA_NAME_DEF_STMT (tmp_join) = phi;
	      add_phi_arg (phi, tmp_then, e_then);
	      add_phi_arg (phi, tmp_else, e_else);
	    }

	  val = tmp_join;
	}

      si = bsi_start (bb);
      val = force_gimple_operand_bsi (&si, val, true, NULL_TREE,
				      false, BSI_CONTINUE_LINKING);
    }

  si = bsi_last (bb);
  t = OMP_PARALLEL_DATA_ARG (entry_stmt);
  if (t == NULL)
    t1 = null_pointer_node;
  else
    t1 = build_fold_addr_expr (t);
  t2 = build_fold_addr_expr (OMP_PARALLEL_FN (entry_stmt));

  if (ws_args)
    {
      tree args = tree_cons (NULL, t2,
			     tree_cons (NULL, t1,
					tree_cons (NULL, val, ws_args)));
      t = build_function_call_expr (built_in_decls[start_ix], args);
    }
  else
    t = build_call_expr (built_in_decls[start_ix], 3, t2, t1, val);

  force_gimple_operand_bsi (&si, t, true, NULL_TREE,
			    false, BSI_CONTINUE_LINKING);

  t = OMP_PARALLEL_DATA_ARG (entry_stmt);
  if (t == NULL)
    t = null_pointer_node;
  else
    t = build_fold_addr_expr (t);
  t = build_call_expr (OMP_PARALLEL_FN (entry_stmt), 1, t);
  force_gimple_operand_bsi (&si, t, true, NULL_TREE,
			    false, BSI_CONTINUE_LINKING);

  t = build_call_expr (built_in_decls[BUILT_IN_GOMP_PARALLEL_END], 0);
  force_gimple_operand_bsi (&si, t, true, NULL_TREE,
			    false, BSI_CONTINUE_LINKING);
}
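
/* The emitted sequence in the parent, schematically (a sketch; the real
   statements are the GIMPLE calls built above, not source):

	GOMP_parallel_start (child_fn, &.omp_data_o, nthreads);
	child_fn (&.omp_data_o);		// the master joins the team
	GOMP_parallel_end ();

   where NTHREADS is 0 ("choose at run time") unless a num_threads or
   if clause said otherwise, and the _start entry point is swapped for
   a GOMP_parallel_loop_*_start or GOMP_parallel_sections_start variant
   when the region is a combined parallel+workshare.  */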

static void maybe_catch_exception (tree *stmt_p);


/* Finalize task copyfn.  */

static void
expand_task_copyfn (tree task_stmt)
{
  struct function *child_cfun;
  tree child_fn, old_fn;

  child_fn = OMP_TASK_COPYFN (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* Inform the callgraph about the new function.  */
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties
    = cfun->curr_properties;

  old_fn = current_function_decl;
  push_cfun (child_cfun);
  current_function_decl = child_fn;
  gimplify_body (&DECL_SAVED_TREE (child_fn), child_fn, false);
  maybe_catch_exception (&BIND_EXPR_BODY (DECL_SAVED_TREE (child_fn)));
  pop_cfun ();
  current_function_decl = old_fn;

  cgraph_add_new_function (child_fn, false);
}

/* Build the function call to GOMP_task to actually
   generate the task operation.  BB is the block where to insert the code.  */

static void
expand_task_call (basic_block bb, tree entry_stmt)
{
  tree t, t1, t2, t3, flags, cond, c, clauses;
  block_stmt_iterator si;

  clauses = OMP_TASK_CLAUSES (entry_stmt);

  if (OMP_TASK_COPYFN (entry_stmt))
    expand_task_copyfn (entry_stmt);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
  else
    cond = boolean_true_node;

  c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
  flags = build_int_cst (unsigned_type_node, (c ? 1 : 0));

  si = bsi_last (bb);
  t = OMP_TASK_DATA_ARG (entry_stmt);
  if (t == NULL)
    t2 = null_pointer_node;
  else
    t2 = build_fold_addr_expr (t);
  t1 = build_fold_addr_expr (OMP_TASK_FN (entry_stmt));
  t = OMP_TASK_COPYFN (entry_stmt);
  if (t == NULL)
    t3 = null_pointer_node;
  else
    t3 = build_fold_addr_expr (t);

  t = build_call_expr (built_in_decls[BUILT_IN_GOMP_TASK], 7, t1, t2, t3,
		       OMP_TASK_ARG_SIZE (entry_stmt),
		       OMP_TASK_ARG_ALIGN (entry_stmt), cond, flags);

  force_gimple_operand_bsi (&si, t, true, NULL_TREE,
			    false, BSI_CONTINUE_LINKING);
}
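
/* Laid out as source, the call built above looks roughly like
   (a sketch, not literal output):

	GOMP_task (task_fn, &.omp_data_o, copy_fn,
		   arg_size, arg_align, if_cond, flags);

   with COPY_FN null when no task-specific firstprivate copy
   constructor is needed, IF_COND defaulting to true, and bit 0 of
   FLAGS recording the untied clause.  */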


/* If exceptions are enabled, wrap *STMT_P in a MUST_NOT_THROW catch
   handler.  This prevents programs from violating the structured
   block semantics with throws.  */

static void
maybe_catch_exception (tree *stmt_p)
{
  tree f, t;

  if (!flag_exceptions)
    return;

  if (lang_protect_cleanup_actions)
    t = lang_protect_cleanup_actions ();
  else
    t = build_call_expr (built_in_decls[BUILT_IN_TRAP], 0);
  f = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
  EH_FILTER_MUST_NOT_THROW (f) = 1;
  gimplify_and_add (t, &EH_FILTER_FAILURE (f));

  t = build2 (TRY_CATCH_EXPR, void_type_node, *stmt_p, NULL);
  append_to_statement_list (f, &TREE_OPERAND (t, 1));

  *stmt_p = NULL;
  append_to_statement_list (t, stmt_p);
}

/* Chain all the DECLs in LIST by their TREE_CHAIN fields.  */

static tree
list2chain (tree list)
{
  tree t;

  for (t = list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      if (TREE_CHAIN (t))
	TREE_CHAIN (var) = TREE_VALUE (TREE_CHAIN (t));
      else
	TREE_CHAIN (var) = NULL_TREE;
    }

  return list ? TREE_VALUE (list) : NULL_TREE;
}


/* Remove barriers in REGION->EXIT's block.  Note that this is only
   valid for OMP_PARALLEL regions.  Since the end of a parallel region
   is an implicit barrier, any workshare inside the OMP_PARALLEL that
   left a barrier at the end of the OMP_PARALLEL region can now be
   removed.  */

static void
remove_exit_barrier (struct omp_region *region)
{
  block_stmt_iterator si;
  basic_block exit_bb;
  edge_iterator ei;
  edge e;
  tree t;

  exit_bb = region->exit;

  /* If the parallel region doesn't return, we don't have REGION->EXIT
     block at all.  */
  if (! exit_bb)
    return;

  /* The last insn in the block will be the parallel's OMP_RETURN.  The
     workshare's OMP_RETURN will be in a preceding block.  The kinds of
     statements that can appear in between are extremely limited -- no
     memory operations at all.  Here, we allow nothing at all, so the
     only thing we allow to precede this OMP_RETURN is a label.  */
  si = bsi_last (exit_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
  bsi_prev (&si);
  if (!bsi_end_p (si) && TREE_CODE (bsi_stmt (si)) != LABEL_EXPR)
    return;

  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      si = bsi_last (e->src);
      if (bsi_end_p (si))
	continue;
      t = bsi_stmt (si);
      if (TREE_CODE (t) == OMP_RETURN)
	OMP_RETURN_NOWAIT (t) = 1;
    }
}
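
/* Concretely (a hypothetical example): in

	#pragma omp parallel
	{
	  #pragma omp for
	  for (...) ...
	}

   the for loop's implicit barrier would be immediately followed by the
   parallel region's own implicit end-of-region barrier, so the
   workshare's OMP_RETURN is marked nowait here and only one barrier
   survives.  */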

static void
remove_exit_barriers (struct omp_region *region)
{
  if (region->type == OMP_PARALLEL)
    remove_exit_barrier (region);

  if (region->inner)
    {
      region = region->inner;
      remove_exit_barriers (region);
      while (region->next)
	{
	  region = region->next;
	  remove_exit_barriers (region);
	}
    }
}

/* Optimize omp_get_thread_num () and omp_get_num_threads ()
   calls.  These can't be declared as const functions, but
   within one parallel body they are constant, so they can be
   transformed there into __builtin_omp_get_{thread_num,num_threads} ()
   which are declared const.  Similarly for a task body, except
   that in an untied task omp_get_thread_num () can change at any task
   scheduling point.  */

static void
optimize_omp_library_calls (tree entry_stmt)
{
  basic_block bb;
  block_stmt_iterator bsi;
  tree thr_num_id
    = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM]);
  tree num_thr_id
    = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS]);
  bool untied_task = (TREE_CODE (entry_stmt) == OMP_TASK
		      && find_omp_clause (OMP_TASK_CLAUSES (entry_stmt),
					  OMP_CLAUSE_UNTIED) != NULL);

  FOR_EACH_BB (bb)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	tree stmt = bsi_stmt (bsi);
	tree call = get_call_expr_in (stmt);
	tree decl;

	if (call
	    && (decl = get_callee_fndecl (call))
	    && DECL_EXTERNAL (decl)
	    && TREE_PUBLIC (decl)
	    && DECL_INITIAL (decl) == NULL)
	  {
	    tree built_in;

	    if (DECL_NAME (decl) == thr_num_id)
	      {
		/* In #pragma omp task untied omp_get_thread_num () can change
		   during the execution of the task region.  */
		if (untied_task)
		  continue;
		built_in = built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM];
	      }
	    else if (DECL_NAME (decl) == num_thr_id)
	      built_in = built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS];
	    else
	      continue;

	    if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
		|| call_expr_nargs (call) != 0)
	      continue;

	    if (flag_exceptions && !TREE_NOTHROW (decl))
	      continue;

	    if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
		|| TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (decl)))
		   != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (built_in))))
	      continue;

	    CALL_EXPR_FN (call) = build_fold_addr_expr (built_in);
	  }
      }
}
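
/* Net effect, sketched on hypothetical user code inside an outlined
   parallel body:

	tid = omp_get_thread_num ();		// opaque library call

   becomes

	tid = __builtin_omp_get_thread_num ();	// const, CSE-able

   so repeated queries can be commoned by later optimizers; the rewrite
   is skipped in untied tasks, where the answer may change at a task
   scheduling point.  */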

/* Expand the OpenMP parallel or task directive starting at REGION.  */

static void
expand_omp_taskreg (struct omp_region *region)
{
  basic_block entry_bb, exit_bb, new_bb;
  struct function *child_cfun;
  tree child_fn, block, t, ws_args;
  block_stmt_iterator si;
  tree entry_stmt;
  edge e;

  entry_stmt = last_stmt (region->entry);
  child_fn = OMP_TASKREG_FN (entry_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  /* If this function has already been instrumented, make sure
     the child function isn't instrumented again.  */
  child_cfun->after_tree_profile = cfun->after_tree_profile;

  entry_bb = region->entry;
  exit_bb = region->exit;

  if (is_combined_parallel (region))
    ws_args = region->ws_args;
  else
    ws_args = NULL_TREE;

  if (child_cfun->cfg)
    {
      /* Due to inlining, it may happen that we have already outlined
	 the region, in which case all we need to do is make the
	 sub-graph unreachable and emit the parallel call.  */
      edge entry_succ_e, exit_succ_e;
      block_stmt_iterator si;

      entry_succ_e = single_succ_edge (entry_bb);

      si = bsi_last (entry_bb);
      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_PARALLEL
		  || TREE_CODE (bsi_stmt (si)) == OMP_TASK);
      bsi_remove (&si, true);

      new_bb = entry_bb;
      if (exit_bb)
	{
	  exit_succ_e = single_succ_edge (exit_bb);
	  make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
	}
      remove_edge_and_dominated_blocks (entry_succ_e);
    }
  else
    {
      /* If the parallel region needs data sent from the parent
	 function, then the very first statement (except possible
	 tree profile counter updates) of the parallel body
	 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
	 &.OMP_DATA_O is passed as an argument to the child function,
	 we need to replace it with the argument as seen by the child
	 function.

	 In most cases, this will end up being the identity assignment
	 .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
	 a function call that has been inlined, the original PARM_DECL
	 .OMP_DATA_I may have been converted into a different local
	 variable.  In which case, we need to keep the assignment.  */
      if (OMP_TASKREG_DATA_ARG (entry_stmt))
	{
	  basic_block entry_succ_bb = single_succ (entry_bb);
	  block_stmt_iterator si;
	  tree parcopy_stmt = NULL_TREE, arg, narg;

	  for (si = bsi_start (entry_succ_bb); ; bsi_next (&si))
	    {
	      tree stmt, arg;

	      gcc_assert (!bsi_end_p (si));
	      stmt = bsi_stmt (si);
	      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
		continue;

	      arg = GIMPLE_STMT_OPERAND (stmt, 1);
	      STRIP_NOPS (arg);
	      if (TREE_CODE (arg) == ADDR_EXPR
		  && TREE_OPERAND (arg, 0)
		     == OMP_TASKREG_DATA_ARG (entry_stmt))
		{
		  parcopy_stmt = stmt;
		  break;
		}
	    }

	  gcc_assert (parcopy_stmt != NULL_TREE);
	  arg = DECL_ARGUMENTS (child_fn);

	  if (!gimple_in_ssa_p (cfun))
	    {
	      if (GIMPLE_STMT_OPERAND (parcopy_stmt, 0) == arg)
		bsi_remove (&si, true);
	      else
		GIMPLE_STMT_OPERAND (parcopy_stmt, 1) = arg;
	    }
	  else
	    {
	      /* If we are in ssa form, we must load the value from the default
		 definition of the argument.  That should not be defined now,
		 since the argument is not used uninitialized.  */
	      gcc_assert (gimple_default_def (cfun, arg) == NULL);
	      narg = make_ssa_name (arg, build_empty_stmt ());
	      set_default_def (arg, narg);
	      GIMPLE_STMT_OPERAND (parcopy_stmt, 1) = narg;
	      update_stmt (parcopy_stmt);
	    }
	}

      /* Declare local variables needed in CHILD_CFUN.  */
      block = DECL_INITIAL (child_fn);
      BLOCK_VARS (block) = list2chain (child_cfun->local_decls);
      DECL_SAVED_TREE (child_fn) = bb_stmt_list (single_succ (entry_bb));

      /* Reset DECL_CONTEXT on function arguments.  */
      for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      /* Split ENTRY_BB at OMP_PARALLEL or OMP_TASK, so that it can be
	 moved to the child function.  */
      si = bsi_last (entry_bb);
      t = bsi_stmt (si);
      gcc_assert (t && (TREE_CODE (t) == OMP_PARALLEL
			|| TREE_CODE (t) == OMP_TASK));
      bsi_remove (&si, true);
      e = split_block (entry_bb, t);
      entry_bb = e->dest;
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      /* Convert OMP_RETURN into a RETURN_EXPR.  */
      if (exit_bb)
	{
	  si = bsi_last (exit_bb);
	  gcc_assert (!bsi_end_p (si)
		      && TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
	  t = build1 (RETURN_EXPR, void_type_node, NULL);
	  bsi_insert_after (&si, t, BSI_SAME_STMT);
	  bsi_remove (&si, true);
	}

      /* Move the parallel region into CHILD_CFUN.  */

      if (gimple_in_ssa_p (cfun))
	{
	  push_cfun (child_cfun);
	  init_tree_ssa (child_cfun);
	  init_ssa_operands ();
	  cfun->gimple_df->in_ssa_p = true;
	  pop_cfun ();
	}
      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
      if (exit_bb)
	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;

      /* Inform the callgraph about the new function.  */
      DECL_STRUCT_FUNCTION (child_fn)->curr_properties
	= cfun->curr_properties;
      cgraph_add_new_function (child_fn, true);

      /* Fix the callgraph edges for child_cfun.  Those for cfun will be
	 fixed in a following pass.  */
      push_cfun (child_cfun);
      if (optimize)
	optimize_omp_library_calls (entry_stmt);
      rebuild_cgraph_edges ();

      /* Some EH regions might become dead, see PR34608.  If
	 pass_cleanup_cfg isn't the first pass to happen with the
	 new child, these dead EH edges might cause problems.
	 Clean them up now.  */
      if (flag_exceptions)
	{
	  basic_block bb;
	  tree save_current = current_function_decl;
	  bool changed = false;

	  current_function_decl = child_fn;
	  FOR_EACH_BB (bb)
	    changed |= tree_purge_dead_eh_edges (bb);
	  if (changed)
	    cleanup_tree_cfg ();
	  current_function_decl = save_current;
	}
      pop_cfun ();
    }

  /* Emit a library call to launch the children threads.  */
  if (TREE_CODE (entry_stmt) == OMP_PARALLEL)
    expand_parallel_call (region, new_bb, entry_stmt, ws_args);
  else
    expand_task_call (new_bb, entry_stmt);
  update_ssa (TODO_update_ssa_only_virtuals);
}
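
/* End-to-end, the outlining performed here turns (hypothetical user
   code and generated names, shown only to orient the reader)

	#pragma omp parallel shared(n)
	  body (n);

   into roughly

	main.omp_fn.0 (struct .omp_data_s *data)   // new child function
	  { body (data->n); }
	...
	.omp_data_o.n = n;
	GOMP_parallel_start (main.omp_fn.0, &.omp_data_o, 0);
	main.omp_fn.0 (&.omp_data_o);
	GOMP_parallel_end ();

   with the region's basic blocks physically moved into the child by
   move_sese_region_to_fn above.  */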
3332
50674e96
DN
3333
3334/* A subroutine of expand_omp_for. Generate code for a parallel
953ff289
DN
3335 loop with any schedule. Given parameters:
3336
3337 for (V = N1; V cond N2; V += STEP) BODY;
3338
3339 where COND is "<" or ">", we generate pseudocode
3340
3341 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
50674e96 3342 if (more) goto L0; else goto L3;
953ff289
DN
3343 L0:
3344 V = istart0;
3345 iend = iend0;
3346 L1:
3347 BODY;
3348 V += STEP;
50674e96 3349 if (V cond iend) goto L1; else goto L2;
953ff289 3350 L2:
50674e96
DN
3351 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
3352 L3:
953ff289 3353
50674e96 3354 If this is a combined omp parallel loop, instead of the call to
a68ab351
JJ
3355 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
3356
3357 For collapsed loops, given parameters:
3358 collapse(3)
3359 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
3360 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
3361 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
3362 BODY;
3363
3364 we generate pseudocode
3365
3366 if (cond3 is <)
3367 adj = STEP3 - 1;
3368 else
3369 adj = STEP3 + 1;
3370 count3 = (adj + N32 - N31) / STEP3;
3371 if (cond2 is <)
3372 adj = STEP2 - 1;
3373 else
3374 adj = STEP2 + 1;
3375 count2 = (adj + N22 - N21) / STEP2;
3376 if (cond1 is <)
3377 adj = STEP1 - 1;
3378 else
3379 adj = STEP1 + 1;
3380 count1 = (adj + N12 - N11) / STEP1;
3381 count = count1 * count2 * count3;
3382 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
3383 if (more) goto L0; else goto L3;
3384 L0:
3385 V = istart0;
3386 T = V;
3387 V3 = N31 + (T % count3) * STEP3;
3388 T = T / count3;
3389 V2 = N21 + (T % count2) * STEP2;
3390 T = T / count2;
3391 V1 = N11 + T * STEP1;
3392 iend = iend0;
3393 L1:
3394 BODY;
3395 V += 1;
3396 if (V < iend) goto L10; else goto L2;
3397 L10:
3398 V3 += STEP3;
3399 if (V3 cond3 N32) goto L1; else goto L11;
3400 L11:
3401 V3 = N31;
3402 V2 += STEP2;
3403 if (V2 cond2 N22) goto L1; else goto L12;
3404 L12:
3405 V2 = N21;
3406 V1 += STEP1;
3407 goto L1;
3408 L2:
3409 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
3410 L3:
3411
3412 */
953ff289 3413
777f7f9a 3414static void
50674e96
DN
3415expand_omp_for_generic (struct omp_region *region,
3416 struct omp_for_data *fd,
953ff289
DN
3417 enum built_in_function start_fn,
3418 enum built_in_function next_fn)
3419{
917948d3 3420 tree type, istart0, iend0, iend, phi;
a68ab351
JJ
3421 tree t, vmain, vback, bias = NULL_TREE;
3422 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
d3c673c7 3423 basic_block l2_bb = NULL, l3_bb = NULL;
50674e96
DN
3424 block_stmt_iterator si;
3425 bool in_combined_parallel = is_combined_parallel (region);
e5c95afe 3426 bool broken_loop = region->cont == NULL;
917948d3 3427 edge e, ne;
a68ab351
JJ
3428 tree *counts = NULL;
3429 int i;
e5c95afe
ZD
3430
3431 gcc_assert (!broken_loop || !in_combined_parallel);
a68ab351
JJ
3432 gcc_assert (fd->iter_type == long_integer_type_node
3433 || !in_combined_parallel);
953ff289 3434
a68ab351
JJ
3435 type = TREE_TYPE (fd->loop.v);
3436 istart0 = create_tmp_var (fd->iter_type, ".istart0");
3437 iend0 = create_tmp_var (fd->iter_type, ".iend0");
5b4fc8fb
JJ
3438 TREE_ADDRESSABLE (istart0) = 1;
3439 TREE_ADDRESSABLE (iend0) = 1;
917948d3
ZD
3440 if (gimple_in_ssa_p (cfun))
3441 {
3442 add_referenced_var (istart0);
3443 add_referenced_var (iend0);
3444 }
953ff289 3445
a68ab351
JJ
3446 /* See if we need to bias by LLONG_MIN. */
3447 if (fd->iter_type == long_long_unsigned_type_node
3448 && TREE_CODE (type) == INTEGER_TYPE
3449 && !TYPE_UNSIGNED (type))
3450 {
3451 tree n1, n2;
3452
3453 if (fd->loop.cond_code == LT_EXPR)
3454 {
3455 n1 = fd->loop.n1;
3456 n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
3457 }
3458 else
3459 {
3460 n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
3461 n2 = fd->loop.n1;
3462 }
3463 if (TREE_CODE (n1) != INTEGER_CST
3464 || TREE_CODE (n2) != INTEGER_CST
3465 || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
3466 bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
3467 }
3468
777f7f9a 3469 entry_bb = region->entry;
d3c673c7 3470 cont_bb = region->cont;
a68ab351 3471 collapse_bb = NULL;
e5c95afe
ZD
3472 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
3473 gcc_assert (broken_loop
3474 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
3475 l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
3476 l1_bb = single_succ (l0_bb);
3477 if (!broken_loop)
d3c673c7
JJ
3478 {
3479 l2_bb = create_empty_bb (cont_bb);
e5c95afe
ZD
3480 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
3481 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
d3c673c7 3482 }
e5c95afe
ZD
3483 else
3484 l2_bb = NULL;
3485 l3_bb = BRANCH_EDGE (entry_bb)->dest;
3486 exit_bb = region->exit;
50674e96 3487
777f7f9a 3488 si = bsi_last (entry_bb);
a68ab351 3489
777f7f9a 3490 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
a68ab351
JJ
3491 if (fd->collapse > 1)
3492 {
3493 /* collapsed loops need work for expansion in SSA form. */
3494 gcc_assert (!gimple_in_ssa_p (cfun));
3495 counts = (tree *) alloca (fd->collapse * sizeof (tree));
3496 for (i = 0; i < fd->collapse; i++)
3497 {
3498 tree itype = TREE_TYPE (fd->loops[i].v);
3499
3500 if (POINTER_TYPE_P (itype))
3501 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
3502 t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
3503 ? -1 : 1));
3504 t = fold_build2 (PLUS_EXPR, itype,
3505 fold_convert (itype, fd->loops[i].step), t);
3506 t = fold_build2 (PLUS_EXPR, itype, t,
3507 fold_convert (itype, fd->loops[i].n2));
3508 t = fold_build2 (MINUS_EXPR, itype, t,
3509 fold_convert (itype, fd->loops[i].n1));
3510 if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
3511 t = fold_build2 (TRUNC_DIV_EXPR, itype,
3512 fold_build1 (NEGATE_EXPR, itype, t),
3513 fold_build1 (NEGATE_EXPR, itype,
3514 fold_convert (itype,
3515 fd->loops[i].step)));
3516 else
3517 t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
3518 fold_convert (itype, fd->loops[i].step));
3519 t = fold_convert (type, t);
3520 if (TREE_CODE (t) == INTEGER_CST)
3521 counts[i] = t;
3522 else
3523 {
3524 counts[i] = create_tmp_var (type, ".count");
3525 t = build_gimple_modify_stmt (counts[i], t);
3526 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3527 true, BSI_SAME_STMT);
3528 }
3529 if (SSA_VAR_P (fd->loop.n2))
3530 {
3531 if (i == 0)
3532 t = build_gimple_modify_stmt (fd->loop.n2, counts[0]);
3533 else
3534 {
3535 t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
3536 t = build_gimple_modify_stmt (fd->loop.n2, t);
3537 }
3538 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3539 true, BSI_SAME_STMT);
3540 }
3541 }
3542 }
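 /* Added note: at this point, for collapse > 1, counts[i] holds the
    iteration count of the i-th nested loop and fd->loop.n2 their
    product, i.e. the single collapsed iteration space handed to the
    runtime; e.g. (values assumed) two nested 10-iteration loops
    yield fd->loop.n2 == 100.  */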
3543 if (in_combined_parallel)
3544 {
3545 /* In a combined parallel loop, emit a call to
3546 GOMP_loop_foo_next. */
3547 t = build_call_expr (built_in_decls[next_fn], 2,
3548 build_fold_addr_expr (istart0),
3549 build_fold_addr_expr (iend0));
3550 }
3551 else
953ff289 3552 {
5039610b 3553 tree t0, t1, t2, t3, t4;
3554 /* If this is not a combined parallel loop, emit a call to
3555 GOMP_loop_foo_start in ENTRY_BB. */
3556 t4 = build_fold_addr_expr (iend0);
3557 t3 = build_fold_addr_expr (istart0);
3558 t2 = fold_convert (fd->iter_type, fd->loop.step);
3559 t1 = fold_convert (fd->iter_type, fd->loop.n2);
3560 t0 = fold_convert (fd->iter_type, fd->loop.n1);
3561 if (bias)
953ff289 3562 {
3563 t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
3564 t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
3565 }
3566 if (fd->iter_type == long_integer_type_node)
3567 {
3568 if (fd->chunk_size)
3569 {
3570 t = fold_convert (fd->iter_type, fd->chunk_size);
3571 t = build_call_expr (built_in_decls[start_fn], 6,
3572 t0, t1, t2, t, t3, t4);
3573 }
3574 else
3575 t = build_call_expr (built_in_decls[start_fn], 5,
3576 t0, t1, t2, t3, t4);
953ff289 3577 }
5039610b 3578 else
3579 {
3580 tree t5;
3581 tree c_bool_type;
3582
3583 /* The GOMP_loop_ull_*start functions have an additional boolean
3584 argument, true for < loops and false for > loops.
3585 In Fortran, the C bool type can be different from
3586 boolean_type_node. */
3587 c_bool_type = TREE_TYPE (TREE_TYPE (built_in_decls[start_fn]));
3588 t5 = build_int_cst (c_bool_type,
3589 fd->loop.cond_code == LT_EXPR ? 1 : 0);
3590 if (fd->chunk_size)
3591 {
3592 t = fold_convert (fd->iter_type, fd->chunk_size);
3593 t = build_call_expr (built_in_decls[start_fn], 7,
3594 t5, t0, t1, t2, t, t3, t4);
3595 }
3596 else
3597 t = build_call_expr (built_in_decls[start_fn], 6,
3598 t5, t0, t1, t2, t3, t4);
3599 }
953ff289 3600 }
3601 if (TREE_TYPE (t) != boolean_type_node)
3602 t = fold_build2 (NE_EXPR, boolean_type_node,
3603 t, build_int_cst (TREE_TYPE (t), 0));
3604 t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3605 true, BSI_SAME_STMT);
3606 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
3607 bsi_insert_after (&si, t, BSI_SAME_STMT);
3608
917948d3 3609 /* Remove the OMP_FOR statement. */
777f7f9a 3610 bsi_remove (&si, true);
953ff289 3611
50674e96 3612 /* Iteration setup for sequential loop goes in L0_BB. */
917948d3 3613 si = bsi_start (l0_bb);
3614 if (bias)
3615 t = fold_convert (type, fold_build2 (MINUS_EXPR, fd->iter_type,
3616 istart0, bias));
3617 else
3618 t = fold_convert (type, istart0);
3619 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3620 false, BSI_CONTINUE_LINKING);
a68ab351 3621 t = build_gimple_modify_stmt (fd->loop.v, t);
3622 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3623 if (gimple_in_ssa_p (cfun))
a68ab351 3624 SSA_NAME_DEF_STMT (fd->loop.v) = t;
953ff289 3625
3626 if (bias)
3627 t = fold_convert (type, fold_build2 (MINUS_EXPR, fd->iter_type,
3628 iend0, bias));
3629 else
3630 t = fold_convert (type, iend0);
3631 iend = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3632 false, BSI_CONTINUE_LINKING);
3633 if (fd->collapse > 1)
3634 {
3635 tree tem = create_tmp_var (type, ".tem");
3636
3637 t = build_gimple_modify_stmt (tem, fd->loop.v);
3638 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3639 for (i = fd->collapse - 1; i >= 0; i--)
3640 {
3641 tree vtype = TREE_TYPE (fd->loops[i].v), itype;
3642 itype = vtype;
3643 if (POINTER_TYPE_P (vtype))
3644 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (vtype), 0);
3645 t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
3646 t = fold_convert (itype, t);
3647 t = fold_build2 (MULT_EXPR, itype, t, fd->loops[i].step);
3648 if (POINTER_TYPE_P (vtype))
3649 t = fold_build2 (POINTER_PLUS_EXPR, vtype,
3650 fd->loops[i].n1, fold_convert (sizetype, t));
3651 else
3652 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
3653 t = build_gimple_modify_stmt (fd->loops[i].v, t);
3654 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3655 false, BSI_CONTINUE_LINKING);
3656 if (i != 0)
3657 {
3658 t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
3659 t = build_gimple_modify_stmt (tem, t);
3660 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3661 false, BSI_CONTINUE_LINKING);
3662 }
3663 }
3664 }
50674e96 3665
e5c95afe 3666 if (!broken_loop)
d3c673c7 3667 {
3668 /* Code to control the increment and predicate for the sequential
3669 loop goes in the CONT_BB. */
3670 si = bsi_last (cont_bb);
3671 t = bsi_stmt (si);
3672 gcc_assert (TREE_CODE (t) == OMP_CONTINUE);
3673 vmain = TREE_OPERAND (t, 1);
3674 vback = TREE_OPERAND (t, 0);
3675
3676 if (POINTER_TYPE_P (type))
3677 t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
3678 fold_convert (sizetype, fd->loop.step));
3679 else
3680 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
3681 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3682 true, BSI_SAME_STMT);
3683 t = build_gimple_modify_stmt (vback, t);
3684 bsi_insert_before (&si, t, BSI_SAME_STMT);
3685 if (gimple_in_ssa_p (cfun))
3686 SSA_NAME_DEF_STMT (vback) = t;
953ff289 3687
a68ab351 3688 t = build2 (fd->loop.cond_code, boolean_type_node, vback, iend);
e5c95afe 3689 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
917948d3 3690 bsi_insert_before (&si, t, BSI_SAME_STMT);
50674e96 3691
917948d3 3692 /* Remove OMP_CONTINUE. */
e5c95afe 3693 bsi_remove (&si, true);
50674e96 3694
3695 if (fd->collapse > 1)
3696 {
3697 basic_block last_bb, bb;
3698
3699 last_bb = cont_bb;
3700 for (i = fd->collapse - 1; i >= 0; i--)
3701 {
3702 tree vtype = TREE_TYPE (fd->loops[i].v);
3703
3704 bb = create_empty_bb (last_bb);
3705 si = bsi_start (bb);
3706
3707 if (i < fd->collapse - 1)
3708 {
3709 e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
3710 e->probability = REG_BR_PROB_BASE / 8;
3711
3712 t = build_gimple_modify_stmt (fd->loops[i + 1].v,
3713 fd->loops[i + 1].n1);
3714 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3715 false, BSI_CONTINUE_LINKING);
3716 }
3717 else
3718 collapse_bb = bb;
3719
3720 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
3721
3722 if (POINTER_TYPE_P (vtype))
3723 t = fold_build2 (POINTER_PLUS_EXPR, vtype,
3724 fd->loops[i].v,
3725 fold_convert (sizetype, fd->loops[i].step));
3726 else
3727 t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v,
3728 fd->loops[i].step);
3729 t = build_gimple_modify_stmt (fd->loops[i].v, t);
3730 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3731 false, BSI_CONTINUE_LINKING);
3732
3733 if (i > 0)
3734 {
3735 t = fold_build2 (fd->loops[i].cond_code, boolean_type_node,
3736 fd->loops[i].v, fd->loops[i].n2);
3737 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3738 false, BSI_CONTINUE_LINKING);
3739 t = build3 (COND_EXPR, void_type_node, t,
3740 NULL_TREE, NULL_TREE);
3741 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3742 e = make_edge (bb, l1_bb, EDGE_TRUE_VALUE);
3743 e->probability = REG_BR_PROB_BASE * 7 / 8;
3744 }
3745 else
3746 make_edge (bb, l1_bb, EDGE_FALLTHRU);
3747 last_bb = bb;
3748 }
3749 }
3750
e5c95afe 3751 /* Emit code to get the next parallel iteration in L2_BB. */
917948d3 3752 si = bsi_start (l2_bb);
50674e96 3753
3754 t = build_call_expr (built_in_decls[next_fn], 2,
3755 build_fold_addr_expr (istart0),
3756 build_fold_addr_expr (iend0));
3757 if (TREE_TYPE (t) != boolean_type_node)
3758 t = fold_build2 (NE_EXPR, boolean_type_node,
3759 t, build_int_cst (TREE_TYPE (t), 0));
3760 t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3761 false, BSI_CONTINUE_LINKING);
e5c95afe 3762 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
917948d3 3763 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
e5c95afe 3764 }
953ff289 3765
3766 /* Add the loop cleanup function. */
3767 si = bsi_last (exit_bb);
3768 if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
3769 t = built_in_decls[BUILT_IN_GOMP_LOOP_END_NOWAIT];
3770 else
3771 t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
5039610b 3772 t = build_call_expr (t, 0);
3773 bsi_insert_after (&si, t, BSI_SAME_STMT);
3774 bsi_remove (&si, true);
3775
3776 /* Connect the new blocks. */
3777 find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
3778 find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
953ff289 3779
3780 if (!broken_loop)
3781 {
3782 e = find_edge (cont_bb, l3_bb);
3783 ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
3784
3785 for (phi = phi_nodes (l3_bb); phi; phi = PHI_CHAIN (phi))
3786 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
3787 PHI_ARG_DEF_FROM_EDGE (phi, e));
3788 remove_edge (e);
3789
e5c95afe 3790 make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
3791 if (fd->collapse > 1)
3792 {
3793 e = find_edge (cont_bb, l1_bb);
3794 remove_edge (e);
3795 e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
3796 }
3797 else
3798 {
3799 e = find_edge (cont_bb, l1_bb);
3800 e->flags = EDGE_TRUE_VALUE;
3801 }
3802 e->probability = REG_BR_PROB_BASE * 7 / 8;
3803 find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
e5c95afe 3804 make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
3805
3806 set_immediate_dominator (CDI_DOMINATORS, l2_bb,
3807 recompute_dominator (CDI_DOMINATORS, l2_bb));
3808 set_immediate_dominator (CDI_DOMINATORS, l3_bb,
3809 recompute_dominator (CDI_DOMINATORS, l3_bb));
3810 set_immediate_dominator (CDI_DOMINATORS, l0_bb,
3811 recompute_dominator (CDI_DOMINATORS, l0_bb));
3812 set_immediate_dominator (CDI_DOMINATORS, l1_bb,
3813 recompute_dominator (CDI_DOMINATORS, l1_bb));
e5c95afe 3814 }
3815}
3816
3817
3818/* A subroutine of expand_omp_for. Generate code for a parallel
3819 loop with static schedule and no specified chunk size. Given
3820 parameters:
3821
3822 for (V = N1; V cond N2; V += STEP) BODY;
3823
3824 where COND is "<" or ">", we generate pseudocode
3825
3826 if (cond is <)
3827 adj = STEP - 1;
3828 else
3829 adj = STEP + 1;
3830 if ((__typeof (V)) -1 > 0 && cond is >)
3831 n = -(adj + N2 - N1) / -STEP;
3832 else
3833 n = (adj + N2 - N1) / STEP;
3834 q = n / nthreads;
3835 q += (q * nthreads != n);
3836 s0 = q * threadid;
3837 e0 = min(s0 + q, n);
917948d3 3838 V = s0 * STEP + N1;
3839 if (s0 >= e0) goto L2; else goto L0;
3840 L0:
3841 e = e0 * STEP + N1;
3842 L1:
3843 BODY;
3844 V += STEP;
3845 if (V cond e) goto L1;
3846 L2:
3847*/
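
 /* A worked example of the partitioning above (added illustration,
    values assumed): with N1 = 0, N2 = 10, STEP = 1 and nthreads = 4,
    n = 10 and q = 3 (10 / 4 rounded up), so threads 0..3 receive
    [0,3), [3,6), [6,9) and [9,10).  With n = 2 instead, thread 2
    computes s0 = 2 >= e0 = 2 and branches straight to L2.  */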
3848
777f7f9a 3849static void
3850expand_omp_for_static_nochunk (struct omp_region *region,
3851 struct omp_for_data *fd)
953ff289 3852{
a9b77cd1 3853 tree n, q, s0, e0, e, t, nthreads, threadid;
a68ab351 3854 tree type, itype, vmain, vback;
3855 basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
3856 basic_block fin_bb;
50674e96 3857 block_stmt_iterator si;
953ff289 3858
3859 itype = type = TREE_TYPE (fd->loop.v);
3860 if (POINTER_TYPE_P (type))
3861 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
953ff289 3862
777f7f9a 3863 entry_bb = region->entry;
777f7f9a 3864 cont_bb = region->cont;
3865 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
3866 gcc_assert (BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
3867 seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
3868 body_bb = single_succ (seq_start_bb);
3869 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
3870 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
3871 fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
3872 exit_bb = region->exit;
3873
50674e96 3874 /* Iteration space partitioning goes in ENTRY_BB. */
3875 si = bsi_last (entry_bb);
3876 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
777f7f9a 3877
5039610b 3878 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
a68ab351 3879 t = fold_convert (itype, t);
3880 nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3881 true, BSI_SAME_STMT);
953ff289 3882
5039610b 3883 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
a68ab351 3884 t = fold_convert (itype, t);
3885 threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3886 true, BSI_SAME_STMT);
953ff289 3887
3888 fd->loop.n1
3889 = force_gimple_operand_bsi (&si, fold_convert (type, fd->loop.n1),
3890 true, NULL_TREE, true, BSI_SAME_STMT);
3891 fd->loop.n2
3892 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.n2),
3893 true, NULL_TREE, true, BSI_SAME_STMT);
3894 fd->loop.step
3895 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.step),
3896 true, NULL_TREE, true, BSI_SAME_STMT);
3897
3898 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
3899 t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
3900 t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
3901 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
3902 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
3903 t = fold_build2 (TRUNC_DIV_EXPR, itype,
3904 fold_build1 (NEGATE_EXPR, itype, t),
3905 fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
3906 else
3907 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
3908 t = fold_convert (itype, t);
917948d3 3909 n = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
953ff289 3910
a68ab351 3911 t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
917948d3 3912 q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
953ff289 3913
3914 t = fold_build2 (MULT_EXPR, itype, q, nthreads);
3915 t = fold_build2 (NE_EXPR, itype, t, n);
3916 t = fold_build2 (PLUS_EXPR, itype, q, t);
917948d3 3917 q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
953ff289 3918
a68ab351 3919 t = build2 (MULT_EXPR, itype, q, threadid);
917948d3 3920 s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
953ff289 3921
3922 t = fold_build2 (PLUS_EXPR, itype, s0, q);
3923 t = fold_build2 (MIN_EXPR, itype, t, n);
3924 e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
3925
953ff289 3926 t = build2 (GE_EXPR, boolean_type_node, s0, e0);
a9b77cd1 3927 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
917948d3 3928 bsi_insert_before (&si, t, BSI_SAME_STMT);
50674e96 3929
917948d3 3930 /* Remove the OMP_FOR statement. */
50674e96 3931 bsi_remove (&si, true);
3932
3933 /* Setup code for sequential iteration goes in SEQ_START_BB. */
917948d3 3934 si = bsi_start (seq_start_bb);
953ff289 3935
3936 t = fold_convert (itype, s0);
3937 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
3938 if (POINTER_TYPE_P (type))
3939 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
3940 fold_convert (sizetype, t));
3941 else
3942 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
3943 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3944 false, BSI_CONTINUE_LINKING);
a68ab351 3945 t = build_gimple_modify_stmt (fd->loop.v, t);
3946 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
3947 if (gimple_in_ssa_p (cfun))
a68ab351 3948 SSA_NAME_DEF_STMT (fd->loop.v) = t;
ac84c062 3949
3950 t = fold_convert (itype, e0);
3951 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
3952 if (POINTER_TYPE_P (type))
3953 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
3954 fold_convert (sizetype, t));
3955 else
3956 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
3957 e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
3958 false, BSI_CONTINUE_LINKING);
953ff289 3959
777f7f9a 3960 /* The code controlling the sequential loop replaces the OMP_CONTINUE. */
3961 si = bsi_last (cont_bb);
3962 t = bsi_stmt (si);
3963 gcc_assert (TREE_CODE (t) == OMP_CONTINUE);
3964 vmain = TREE_OPERAND (t, 1);
3965 vback = TREE_OPERAND (t, 0);
3966
3967 if (POINTER_TYPE_P (type))
3968 t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
3969 fold_convert (sizetype, fd->loop.step));
3970 else
3971 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
3972 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
3973 true, BSI_SAME_STMT);
3974 t = build_gimple_modify_stmt (vback, t);
3975 bsi_insert_before (&si, t, BSI_SAME_STMT);
3976 if (gimple_in_ssa_p (cfun))
3977 SSA_NAME_DEF_STMT (vback) = t;
3978
a68ab351 3979 t = build2 (fd->loop.cond_code, boolean_type_node, vback, e);
a9b77cd1 3980 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
917948d3 3981 bsi_insert_before (&si, t, BSI_SAME_STMT);
953ff289 3982
917948d3 3983 /* Remove the OMP_CONTINUE statement. */
777f7f9a 3984 bsi_remove (&si, true);
50674e96 3985
3986 /* Replace the OMP_RETURN with a barrier, or nothing. */
3987 si = bsi_last (exit_bb);
3988 if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
3989 force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
3990 false, BSI_SAME_STMT);
777f7f9a 3991 bsi_remove (&si, true);
3992
3993 /* Connect all the blocks. */
3994 find_edge (entry_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
3995 find_edge (entry_bb, fin_bb)->flags = EDGE_TRUE_VALUE;
917948d3 3996
e5c95afe 3997 find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
777f7f9a 3998 find_edge (cont_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
3999
4000 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, entry_bb);
4001 set_immediate_dominator (CDI_DOMINATORS, body_bb,
4002 recompute_dominator (CDI_DOMINATORS, body_bb));
4003 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
4004 recompute_dominator (CDI_DOMINATORS, fin_bb));
4005}
4006
4007
4008/* A subroutine of expand_omp_for. Generate code for a parallel
4009 loop with static schedule and a specified chunk size. Given
4010 parameters:
4011
4012 for (V = N1; V cond N2; V += STEP) BODY;
4013
4014 where COND is "<" or ">", we generate pseudocode
4015
4016 if (cond is <)
4017 adj = STEP - 1;
4018 else
4019 adj = STEP + 1;
4020 if ((__typeof (V)) -1 > 0 && cond is >)
4021 n = -(adj + N2 - N1) / -STEP;
4022 else
4023 n = (adj + N2 - N1) / STEP;
953ff289 4024 trip = 0;
4025 V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
4026 here so that V is defined
4027 if the loop is not entered
4028 L0:
4029 s0 = (trip * nthreads + threadid) * CHUNK;
4030 e0 = min(s0 + CHUNK, n);
4031 if (s0 < n) goto L1; else goto L4;
4032 L1:
4033 V = s0 * STEP + N1;
4034 e = e0 * STEP + N1;
4035 L2:
4036 BODY;
4037 V += STEP;
4038 if (V cond e) goto L2; else goto L3;
4039 L3:
4040 trip += 1;
4041 goto L0;
4042 L4:
4043*/
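
 /* A worked example of the chunked schedule above (added
    illustration, values assumed): with CHUNK = 2, nthreads = 2 and
    n = 10, thread 0 executes chunks [0,2), [4,6), [8,10), ... and
    thread 1 executes [2,4), [6,8), ..., each thread bumping its
    private trip count after every chunk until s0 >= n.  */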
4044
777f7f9a 4045static void
4046expand_omp_for_static_chunk (struct omp_region *region,
4047 struct omp_for_data *fd)
953ff289 4048{
4049 tree n, s0, e0, e, t, phi, nphi, args;
4050 tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
a68ab351 4051 tree type, itype, cont, v_main, v_back, v_extra;
50674e96 4052 basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
777f7f9a 4053 basic_block trip_update_bb, cont_bb, fin_bb;
50674e96 4054 block_stmt_iterator si;
917948d3 4055 edge se, re, ene;
953ff289 4056
4057 itype = type = TREE_TYPE (fd->loop.v);
4058 if (POINTER_TYPE_P (type))
4059 itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
953ff289 4060
777f7f9a 4061 entry_bb = region->entry;
4062 se = split_block (entry_bb, last_stmt (entry_bb));
4063 entry_bb = se->src;
4064 iter_part_bb = se->dest;
777f7f9a 4065 cont_bb = region->cont;
4066 gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
4067 gcc_assert (BRANCH_EDGE (iter_part_bb)->dest
4068 == FALLTHRU_EDGE (cont_bb)->dest);
4069 seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
4070 body_bb = single_succ (seq_start_bb);
4071 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
4072 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
4073 fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
4074 trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
777f7f9a 4075 exit_bb = region->exit;
50674e96 4076
50674e96 4077 /* Trip and adjustment setup goes in ENTRY_BB. */
4078 si = bsi_last (entry_bb);
4079 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
50674e96 4080
5039610b 4081 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
a68ab351 4082 t = fold_convert (itype, t);
4083 nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4084 true, BSI_SAME_STMT);
953ff289 4085
5039610b 4086 t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
a68ab351 4087 t = fold_convert (itype, t);
4088 threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4089 true, BSI_SAME_STMT);
4090
4091 fd->loop.n1
4092 = force_gimple_operand_bsi (&si, fold_convert (type, fd->loop.n1),
4093 true, NULL_TREE, true, BSI_SAME_STMT);
4094 fd->loop.n2
4095 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.n2),
4096 true, NULL_TREE, true, BSI_SAME_STMT);
4097 fd->loop.step
4098 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.step),
4099 true, NULL_TREE, true, BSI_SAME_STMT);
917948d3 4100 fd->chunk_size
4101 = force_gimple_operand_bsi (&si, fold_convert (itype, fd->chunk_size),
4102 true, NULL_TREE, true, BSI_SAME_STMT);
4103
4104 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
4105 t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
4106 t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
4107 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
4108 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
4109 t = fold_build2 (TRUNC_DIV_EXPR, itype,
4110 fold_build1 (NEGATE_EXPR, itype, t),
4111 fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
4112 else
4113 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
4114 t = fold_convert (itype, t);
4115 n = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4116 true, BSI_SAME_STMT);
4117
a68ab351 4118 trip_var = create_tmp_var (itype, ".trip");
4119 if (gimple_in_ssa_p (cfun))
4120 {
4121 add_referenced_var (trip_var);
4122 trip_init = make_ssa_name (trip_var, NULL_TREE);
4123 trip_main = make_ssa_name (trip_var, NULL_TREE);
4124 trip_back = make_ssa_name (trip_var, NULL_TREE);
4125 }
953ff289 4126 else
4127 {
4128 trip_init = trip_var;
4129 trip_main = trip_var;
4130 trip_back = trip_var;
4131 }
953ff289 4132
a68ab351 4133 t = build_gimple_modify_stmt (trip_init, build_int_cst (itype, 0));
4134 bsi_insert_before (&si, t, BSI_SAME_STMT);
4135 if (gimple_in_ssa_p (cfun))
4136 SSA_NAME_DEF_STMT (trip_init) = t;
50674e96 4137
4138 t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
4139 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4140 if (POINTER_TYPE_P (type))
4141 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4142 fold_convert (sizetype, t));
4143 else
4144 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
4145 v_extra = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4146 true, BSI_SAME_STMT);
4147
4148 /* Remove the OMP_FOR. */
50674e96 4149 bsi_remove (&si, true);
4150
4151 /* Iteration space partitioning goes in ITER_PART_BB. */
917948d3 4152 si = bsi_last (iter_part_bb);
953ff289 4153
4154 t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
4155 t = fold_build2 (PLUS_EXPR, itype, t, threadid);
4156 t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
917948d3
ZD
4157 s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4158 false, BSI_CONTINUE_LINKING);
953ff289 4159
4160 t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
4161 t = fold_build2 (MIN_EXPR, itype, t, n);
4162 e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4163 false, BSI_CONTINUE_LINKING);
4164
4165 t = build2 (LT_EXPR, boolean_type_node, s0, n);
a9b77cd1 4166 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
917948d3 4167 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
4168
4169 /* Setup code for sequential iteration goes in SEQ_START_BB. */
917948d3 4170 si = bsi_start (seq_start_bb);
953ff289 4171
4172 t = fold_convert (itype, s0);
4173 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4174 if (POINTER_TYPE_P (type))
4175 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4176 fold_convert (sizetype, t));
4177 else
4178 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
4179 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
4180 false, BSI_CONTINUE_LINKING);
a68ab351 4181 t = build_gimple_modify_stmt (fd->loop.v, t);
4182 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
4183 if (gimple_in_ssa_p (cfun))
a68ab351 4184 SSA_NAME_DEF_STMT (fd->loop.v) = t;
953ff289 4185
4186 t = fold_convert (itype, e0);
4187 t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
4188 if (POINTER_TYPE_P (type))
4189 t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
4190 fold_convert (sizetype, t));
4191 else
4192 t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
4193 e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
4194 false, BSI_CONTINUE_LINKING);
953ff289 4195
4196 /* The code controlling the sequential loop goes in CONT_BB,
4197 replacing the OMP_CONTINUE. */
4198 si = bsi_last (cont_bb);
4199 cont = bsi_stmt (si);
4200 gcc_assert (TREE_CODE (cont) == OMP_CONTINUE);
4201 v_main = TREE_OPERAND (cont, 1);
4202 v_back = TREE_OPERAND (cont, 0);
4203
4204 if (POINTER_TYPE_P (type))
4205 t = fold_build2 (POINTER_PLUS_EXPR, type, v_main,
4206 fold_convert (sizetype, fd->loop.step));
4207 else
4208 t = build2 (PLUS_EXPR, type, v_main, fd->loop.step);
4209 t = build_gimple_modify_stmt (v_back, t);
4210 bsi_insert_before (&si, t, BSI_SAME_STMT);
4211 if (gimple_in_ssa_p (cfun))
4212 SSA_NAME_DEF_STMT (v_back) = t;
4213
a68ab351 4214 t = build2 (fd->loop.cond_code, boolean_type_node, v_back, e);
a9b77cd1 4215 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
917948d3 4216 bsi_insert_before (&si, t, BSI_SAME_STMT);
50674e96 4217
917948d3 4218 /* Remove OMP_CONTINUE. */
50674e96 4219 bsi_remove (&si, true);
4220
4221 /* Trip update code goes into TRIP_UPDATE_BB. */
917948d3 4222 si = bsi_start (trip_update_bb);
953ff289 4223
4224 t = build_int_cst (itype, 1);
4225 t = build2 (PLUS_EXPR, itype, trip_main, t);
4226 t = build_gimple_modify_stmt (trip_back, t);
4227 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
4228 if (gimple_in_ssa_p (cfun))
4229 SSA_NAME_DEF_STMT (trip_back) = t;
953ff289 4230
4231 /* Replace the OMP_RETURN with a barrier, or nothing. */
4232 si = bsi_last (exit_bb);
4233 if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
4234 force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
4235 false, BSI_SAME_STMT);
777f7f9a 4236 bsi_remove (&si, true);
953ff289 4237
50674e96 4238 /* Connect the new blocks. */
4239 find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
4240 find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
917948d3 4241
4242 find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
4243 find_edge (cont_bb, trip_update_bb)->flags = EDGE_FALSE_VALUE;
917948d3 4244
e5c95afe 4245 redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
4246
4247 if (gimple_in_ssa_p (cfun))
4248 {
4249 /* When we redirect the edge from trip_update_bb to iter_part_bb, we
4250 remove arguments of the phi nodes in fin_bb. We need to create
4251 appropriate phi nodes in iter_part_bb instead. */
4252 se = single_pred_edge (fin_bb);
4253 re = single_succ_edge (trip_update_bb);
4254 ene = single_succ_edge (entry_bb);
4255
4256 args = PENDING_STMT (re);
4257 PENDING_STMT (re) = NULL_TREE;
4258 for (phi = phi_nodes (fin_bb);
4259 phi && args;
4260 phi = PHI_CHAIN (phi), args = TREE_CHAIN (args))
4261 {
4262 t = PHI_RESULT (phi);
4263 gcc_assert (t == TREE_PURPOSE (args));
4264 nphi = create_phi_node (t, iter_part_bb);
4265 SSA_NAME_DEF_STMT (t) = nphi;
4266
4267 t = PHI_ARG_DEF_FROM_EDGE (phi, se);
4268 /* A special case -- fd->loop.v is not yet computed in
4269 iter_part_bb, we need to use v_extra instead. */
4270 if (t == fd->loop.v)
4271 t = v_extra;
4272 add_phi_arg (nphi, t, ene);
4273 add_phi_arg (nphi, TREE_VALUE (args), re);
4274 }
4275 gcc_assert (!phi && !args);
4276 while ((phi = phi_nodes (fin_bb)) != NULL_TREE)
4277 remove_phi_node (phi, NULL_TREE, false);
4278
4279 /* Make phi node for trip. */
4280 phi = create_phi_node (trip_main, iter_part_bb);
4281 SSA_NAME_DEF_STMT (trip_main) = phi;
4282 add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb));
4283 add_phi_arg (phi, trip_init, single_succ_edge (entry_bb));
4284 }
4285
4286 set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
4287 set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
4288 recompute_dominator (CDI_DOMINATORS, iter_part_bb));
4289 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
4290 recompute_dominator (CDI_DOMINATORS, fin_bb));
4291 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
4292 recompute_dominator (CDI_DOMINATORS, seq_start_bb));
4293 set_immediate_dominator (CDI_DOMINATORS, body_bb,
4294 recompute_dominator (CDI_DOMINATORS, body_bb));
4295}
4296
953ff289 4297
50674e96 4298/* Expand the OpenMP loop defined by REGION. */
953ff289 4299
4300static void
4301expand_omp_for (struct omp_region *region)
4302{
4303 struct omp_for_data fd;
a68ab351 4304 struct omp_for_data_loop *loops;
953ff289 4305
4306 loops
4307 = (struct omp_for_data_loop *)
4308 alloca (TREE_VEC_LENGTH (OMP_FOR_INIT (last_stmt (region->entry)))
4309 * sizeof (struct omp_for_data_loop));
4310
4311 extract_omp_for_data (last_stmt (region->entry), &fd, loops);
21a66e91 4312 region->sched_kind = fd.sched_kind;
953ff289 4313
4314 gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
4315 BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
4316 FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
4317 if (region->cont)
4318 {
4319 gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
4320 BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
4321 FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
4322 }
4323
4324 if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
4325 && !fd.have_ordered
a68ab351 4326 && fd.collapse == 1
e5c95afe 4327 && region->cont != NULL)
4328 {
4329 if (fd.chunk_size == NULL)
777f7f9a 4330 expand_omp_for_static_nochunk (region, &fd);
953ff289 4331 else
777f7f9a 4332 expand_omp_for_static_chunk (region, &fd);
4333 }
4334 else
4335 {
4336 int fn_index, start_ix, next_ix;
4337
4338 gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
4339 fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
4340 ? 3 : fd.sched_kind;
4341 fn_index += fd.have_ordered * 4;
4342 start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
4343 next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
4344 if (fd.iter_type == long_long_unsigned_type_node)
4345 {
4346 start_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_START
4347 - BUILT_IN_GOMP_LOOP_STATIC_START;
4348 next_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
4349 - BUILT_IN_GOMP_LOOP_STATIC_NEXT;
4350 }
777f7f9a 4351 expand_omp_for_generic (region, &fd, start_ix, next_ix);
953ff289 4352 }
4353
4354 update_ssa (TODO_update_ssa_only_virtuals);
4355}
4356
4357
4358/* Expand code for an OpenMP sections directive. In pseudo code, we generate
4359
4360 v = GOMP_sections_start (n);
4361 L0:
4362 switch (v)
4363 {
4364 case 0:
4365 goto L2;
4366 case 1:
4367 section 1;
4368 goto L1;
4369 case 2:
4370 ...
4371 case n:
4372 ...
4373 default:
4374 abort ();
4375 }
4376 L1:
4377 v = GOMP_sections_next ();
4378 goto L0;
4379 L2:
4380 reduction;
4381
50674e96 4382 If this is a combined parallel sections, replace the call to
917948d3 4383 GOMP_sections_start with call to GOMP_sections_next. */
4384
4385static void
50674e96 4386expand_omp_sections (struct omp_region *region)
953ff289 4387{
917948d3 4388 tree label_vec, l1, l2, t, u, sections_stmt, vin, vmain, vnext, cont;
4389 unsigned i, casei, len;
4390 basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
50674e96 4391 block_stmt_iterator si;
777f7f9a 4392 struct omp_region *inner;
e5c95afe 4393 bool exit_reachable = region->cont != NULL;
953ff289 4394
e5c95afe 4395 gcc_assert (exit_reachable == (region->exit != NULL));
777f7f9a 4396 entry_bb = region->entry;
e5c95afe 4397 l0_bb = single_succ (entry_bb);
777f7f9a 4398 l1_bb = region->cont;
4399 l2_bb = region->exit;
4400 if (exit_reachable)
d3c673c7 4401 {
e5c95afe 4402 gcc_assert (single_pred (l2_bb) == l0_bb);
d3c673c7 4403 default_bb = create_empty_bb (l1_bb->prev_bb);
d3c673c7 4404 l1 = tree_block_label (l1_bb);
e5c95afe 4405 l2 = tree_block_label (l2_bb);
4406 }
4407 else
4408 {
4409 default_bb = create_empty_bb (l0_bb);
4410 l1 = NULL_TREE;
4411 l2 = tree_block_label (default_bb);
d3c673c7 4412 }
4413
4414 /* We will build a switch() with enough cases for all the
4415 OMP_SECTION regions, a '0' case to handle the end of more work
4416 and a default case to abort if something goes wrong. */
4417 len = EDGE_COUNT (l0_bb->succs);
4418 label_vec = make_tree_vec (len + 1);
953ff289 4419
4420 /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
4421 OMP_SECTIONS statement. */
50674e96 4422 si = bsi_last (entry_bb);
4423 sections_stmt = bsi_stmt (si);
4424 gcc_assert (TREE_CODE (sections_stmt) == OMP_SECTIONS);
917948d3 4425 vin = OMP_SECTIONS_CONTROL (sections_stmt);
50674e96 4426 if (!is_combined_parallel (region))
953ff289 4427 {
4428 /* If we are not inside a combined parallel+sections region,
4429 call GOMP_sections_start. */
e5c95afe
ZD
4430 t = build_int_cst (unsigned_type_node,
4431 exit_reachable ? len - 1 : len);
953ff289 4432 u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
5039610b 4433 t = build_call_expr (u, 1, t);
953ff289 4434 }
4435 else
4436 {
4437 /* Otherwise, call GOMP_sections_next. */
4438 u = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
4439 t = build_call_expr (u, 0);
4440 }
4441 t = build_gimple_modify_stmt (vin, t);
4442 bsi_insert_after (&si, t, BSI_SAME_STMT);
4443 if (gimple_in_ssa_p (cfun))
4444 SSA_NAME_DEF_STMT (vin) = t;
777f7f9a 4445 bsi_remove (&si, true);
953ff289 4446
4447 /* The switch() statement replacing OMP_SECTIONS_SWITCH goes in L0_BB. */
4448 si = bsi_last (l0_bb);
4449 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTIONS_SWITCH);
4450 if (exit_reachable)
4451 {
4452 cont = last_stmt (l1_bb);
4453 gcc_assert (TREE_CODE (cont) == OMP_CONTINUE);
4454 vmain = TREE_OPERAND (cont, 1);
4455 vnext = TREE_OPERAND (cont, 0);
4456 }
4457 else
4458 {
4459 vmain = vin;
4460 vnext = NULL_TREE;
4461 }
953ff289 4462
917948d3 4463 t = build3 (SWITCH_EXPR, void_type_node, vmain, NULL, label_vec);
4464 bsi_insert_after (&si, t, BSI_SAME_STMT);
4465 bsi_remove (&si, true);
953ff289 4466
4467 i = 0;
4468 if (exit_reachable)
4469 {
4470 t = build3 (CASE_LABEL_EXPR, void_type_node,
4471 build_int_cst (unsigned_type_node, 0), NULL, l2);
4472 TREE_VEC_ELT (label_vec, 0) = t;
4473 i++;
4474 }
d3c673c7 4475
50674e96 4476 /* Convert each OMP_SECTION into a CASE_LABEL_EXPR. */
4477 for (inner = region->inner, casei = 1;
4478 inner;
4479 inner = inner->next, i++, casei++)
953ff289 4480 {
4481 basic_block s_entry_bb, s_exit_bb;
4482
4483 s_entry_bb = inner->entry;
4484 s_exit_bb = inner->exit;
953ff289 4485
777f7f9a 4486 t = tree_block_label (s_entry_bb);
e5c95afe 4487 u = build_int_cst (unsigned_type_node, casei);
953ff289 4488 u = build3 (CASE_LABEL_EXPR, void_type_node, u, NULL, t);
50674e96 4489 TREE_VEC_ELT (label_vec, i) = u;
777f7f9a 4490
50674e96 4491 si = bsi_last (s_entry_bb);
4492 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTION);
4493 gcc_assert (i < len || OMP_SECTION_LAST (bsi_stmt (si)));
4494 bsi_remove (&si, true);
777f7f9a 4495 single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
4496
4497 if (s_exit_bb == NULL)
4498 continue;
4499
4500 si = bsi_last (s_exit_bb);
4501 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
4502 bsi_remove (&si, true);
4503
50674e96 4504 single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
4505 }
4506
50674e96 4507 /* Error handling code goes in DEFAULT_BB. */
777f7f9a 4508 t = tree_block_label (default_bb);
953ff289 4509 u = build3 (CASE_LABEL_EXPR, void_type_node, NULL, NULL, t);
e5c95afe 4510 TREE_VEC_ELT (label_vec, len) = u;
777f7f9a 4511 make_edge (l0_bb, default_bb, 0);
953ff289 4512
777f7f9a 4513 si = bsi_start (default_bb);
5039610b 4514 t = build_call_expr (built_in_decls[BUILT_IN_TRAP], 0);
4515 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
4516
e5c95afe 4517 if (exit_reachable)
d3c673c7 4518 {
e5c95afe 4519 /* Code to get the next section goes in L1_BB. */
4520 si = bsi_last (l1_bb);
4521 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
953ff289 4522
5039610b 4523 t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT], 0);
917948d3 4524 t = build_gimple_modify_stmt (vnext, t);
d3c673c7 4525 bsi_insert_after (&si, t, BSI_SAME_STMT);
4526 if (gimple_in_ssa_p (cfun))
4527 SSA_NAME_DEF_STMT (vnext) = t;
d3c673c7 4528 bsi_remove (&si, true);
50674e96 4529
4530 single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
4531
4532 /* Cleanup function replaces OMP_RETURN in EXIT_BB. */
4533 si = bsi_last (l2_bb);
4534 if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
4535 t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
4536 else
4537 t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
5039610b 4538 t = build_call_expr (t, 0);
4539 bsi_insert_after (&si, t, BSI_SAME_STMT);
4540 bsi_remove (&si, true);
4541 }
50674e96 4542
917948d3 4543 set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
50674e96 4544}
953ff289 4545
953ff289 4546
4547/* Expand code for an OpenMP single directive. We've already expanded
4548 much of the code; here we simply place the GOMP_barrier call. */
4549
4550static void
4551expand_omp_single (struct omp_region *region)
4552{
4553 basic_block entry_bb, exit_bb;
4554 block_stmt_iterator si;
4555 bool need_barrier = false;
4556
4557 entry_bb = region->entry;
4558 exit_bb = region->exit;
4559
4560 si = bsi_last (entry_bb);
4561 /* The terminal barrier at the end of a GOMP_single_copy sequence cannot
4562 be removed. We need to ensure that the thread that entered the single
4563 does not exit before the data is copied out by the other threads. */
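 /* For example (an added illustration):

	#pragma omp single copyprivate (x)
	  x = compute ();

    broadcasts X from the thread that executed the single to the rest
    of the team; the barrier keeps that thread from leaving before
    the others have copied the value out.  */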
4564 if (find_omp_clause (OMP_SINGLE_CLAUSES (bsi_stmt (si)),
4565 OMP_CLAUSE_COPYPRIVATE))
4566 need_barrier = true;
4567 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE);
4568 bsi_remove (&si, true);
4569 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4570
4571 si = bsi_last (exit_bb);
4572 if (!OMP_RETURN_NOWAIT (bsi_stmt (si)) || need_barrier)
4573 force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
4574 false, BSI_SAME_STMT);
4575 bsi_remove (&si, true);
4576 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
4577}
4578
4579
4580/* Generic expansion for OpenMP synchronization directives: master,
4581 ordered and critical. All we need to do here is remove the entry
4582 and exit markers for REGION. */
4583
4584static void
4585expand_omp_synch (struct omp_region *region)
4586{
4587 basic_block entry_bb, exit_bb;
4588 block_stmt_iterator si;
4589
4590 entry_bb = region->entry;
4591 exit_bb = region->exit;
4592
4593 si = bsi_last (entry_bb);
4594 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE
4595 || TREE_CODE (bsi_stmt (si)) == OMP_MASTER
4596 || TREE_CODE (bsi_stmt (si)) == OMP_ORDERED
4597 || TREE_CODE (bsi_stmt (si)) == OMP_CRITICAL);
4598 bsi_remove (&si, true);
4599 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4600
4601 if (exit_bb)
4602 {
4603 si = bsi_last (exit_bb);
4604 gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
4605 bsi_remove (&si, true);
4606 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
4607 }
50674e96 4608}
953ff289 4609
4610/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
4611 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
4612 size of the data type, and thus usable to find the index of the builtin
4613 decl. Returns false if the expression is not of the proper form. */
4614
4615static bool
4616expand_omp_atomic_fetch_op (basic_block load_bb,
4617 tree addr, tree loaded_val,
4618 tree stored_val, int index)
4619{
4620 enum built_in_function base;
4621 tree decl, itype, call;
4622 enum insn_code *optab;
4623 tree rhs;
4624 basic_block store_bb = single_succ (load_bb);
4625 block_stmt_iterator bsi;
4626 tree stmt;
4627
4628 /* We expect to find the following sequences:
4629
4630 load_bb:
4631 OMP_ATOMIC_LOAD (tmp, mem)
4632
4633 store_bb:
4634 val = tmp OP something; (or: something OP tmp)
4635 OMP_ATOMIC_STORE (val)
4636
4637 ???FIXME: Allow a more flexible sequence.
4638 Perhaps use data flow to pick the statements.
4639
4640 */
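 /* For example (an added sketch): "#pragma omp atomic  x += 3;"
    arrives here as

	load_bb:   OMP_ATOMIC_LOAD (tmp, &x)
	store_bb:  val = tmp + 3;  OMP_ATOMIC_STORE (val)

    and is rewritten into a single __sync_fetch_and_add call.  */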
4641
4642 bsi = bsi_after_labels (store_bb);
4643 stmt = bsi_stmt (bsi);
4644 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
4645 return false;
4646 bsi_next (&bsi);
4647 if (TREE_CODE (bsi_stmt (bsi)) != OMP_ATOMIC_STORE)
4648 return false;
4649
4650 if (!operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0), stored_val, 0))
4651 return false;
4652
4653 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
4654
4655 /* Check for one of the supported fetch-op operations. */
4656 switch (TREE_CODE (rhs))
4657 {
4658 case PLUS_EXPR:
4659 case POINTER_PLUS_EXPR:
4660 base = BUILT_IN_FETCH_AND_ADD_N;
4661 optab = sync_add_optab;
4662 break;
4663 case MINUS_EXPR:
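      /* An added, hedged note: the add optab is checked here rather
	 than a subtract optab, presumably because fetch-and-sub can
	 be expanded as fetch-and-add of the negated operand; that
	 rationale is an assumption, not taken from the original.  */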
4664 base = BUILT_IN_FETCH_AND_SUB_N;
4665 optab = sync_add_optab;
4666 break;
4667 case BIT_AND_EXPR:
4668 base = BUILT_IN_FETCH_AND_AND_N;
4669 optab = sync_and_optab;
4670 break;
4671 case BIT_IOR_EXPR:
4672 base = BUILT_IN_FETCH_AND_OR_N;
4673 optab = sync_ior_optab;
4674 break;
4675 case BIT_XOR_EXPR:
4676 base = BUILT_IN_FETCH_AND_XOR_N;
4677 optab = sync_xor_optab;
4678 break;
4679 default:
4680 return false;
4681 }
4682 /* Make sure the expression is of the proper form. */
4683 if (operand_equal_p (TREE_OPERAND (rhs, 0), loaded_val, 0))
4684 rhs = TREE_OPERAND (rhs, 1);
4685 else if (commutative_tree_code (TREE_CODE (rhs))
4686 && operand_equal_p (TREE_OPERAND (rhs, 1), loaded_val, 0))
4687 rhs = TREE_OPERAND (rhs, 0);
4688 else
4689 return false;
4690
4691 decl = built_in_decls[base + index + 1];
4692 itype = TREE_TYPE (TREE_TYPE (decl));
4693
4694 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
4695 return false;
4696
4697 bsi = bsi_last (load_bb);
4698 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
4699 call = build_call_expr (decl, 2, addr, fold_convert (itype, rhs));
4700 force_gimple_operand_bsi (&bsi, call, true, NULL_TREE, true, BSI_SAME_STMT);
4701 bsi_remove (&bsi, true);
4702
4703 bsi = bsi_last (store_bb);
4704 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
4705 bsi_remove (&bsi, true);
4706 bsi = bsi_last (store_bb);
4707 bsi_remove (&bsi, true);
4708
4709 if (gimple_in_ssa_p (cfun))
4710 update_ssa (TODO_update_ssa_no_phi);
4711
4712 return true;
4713}
4714
4715/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
4716
4717 oldval = *addr;
4718 repeat:
4719 newval = rhs; // with oldval replacing *addr in rhs
4720 curval = __sync_val_compare_and_swap (addr, oldval, newval);
4721 if (curval != oldval)
4722 { oldval = curval; goto repeat; }
4723
4724 INDEX is log2 of the size of the data type, and thus usable to find the
4725 index of the builtin decl. */
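
 /* For a floating-point operand the value is view-converted to a
    same-sized integer for the swap, so e.g. (added illustration)
    "#pragma omp atomic  f += 1.0f;" on a 4-byte float compares and
    swaps through the 4-byte integer image of F's bits.  */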
4726
4727static bool
4728expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
4729 tree addr, tree loaded_val, tree stored_val,
4730 int index)
4731{
c18c98c0 4732 tree loadedi, storedi, initial, new_storedi, old_vali;
4733 tree type, itype, cmpxchg, iaddr;
4734 block_stmt_iterator bsi;
4735 basic_block loop_header = single_succ (load_bb);
4736 tree phi, x;
4737 edge e;
4738
4739 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
4740 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
4741 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
4742
4743 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
4744 return false;
4745
4746 /* Load the initial value, replacing the OMP_ATOMIC_LOAD. */
4747 bsi = bsi_last (load_bb);
4748 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
4749 /* For floating-point values, we'll need to view-convert them to integers
4750 so that we can perform the atomic compare and swap. Simplify the
4751 following code by always setting up the "i"ntegral variables. */
4752 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
4753 {
4754 iaddr = create_tmp_var (build_pointer_type (itype), NULL);
4755 x = build_gimple_modify_stmt (iaddr,
4756 fold_convert (TREE_TYPE (iaddr), addr));
4757 force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
4758 true, BSI_SAME_STMT);
4759 DECL_NO_TBAA_P (iaddr) = 1;
4760 DECL_POINTER_ALIAS_SET (iaddr) = 0;
4761 loadedi = create_tmp_var (itype, NULL);
4762 if (gimple_in_ssa_p (cfun))
4763 {
4764 add_referenced_var (iaddr);
4765 add_referenced_var (loadedi);
4766 loadedi = make_ssa_name (loadedi, NULL);
4767 }
4768 }
4769 else
4770 {
4771 iaddr = addr;
4772 loadedi = loaded_val;
4773 }
4774 initial = force_gimple_operand_bsi (&bsi, build_fold_indirect_ref (iaddr),
a509ebb5 4775 true, NULL_TREE, true, BSI_SAME_STMT);
4776
4777 /* Move the value to the LOADEDI temporary. */
4778 if (gimple_in_ssa_p (cfun))
4779 {
4780 gcc_assert (phi_nodes (loop_header) == NULL_TREE);
4781 phi = create_phi_node (loadedi, loop_header);
4782 SSA_NAME_DEF_STMT (loadedi) = phi;
4783 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
4784 initial);
4785 }
4786 else
4787 bsi_insert_before (&bsi,
c18c98c0 4788 build_gimple_modify_stmt (loadedi, initial),
a509ebb5 4789 BSI_SAME_STMT);
4790 if (loadedi != loaded_val)
4791 {
4792 block_stmt_iterator bsi2;
4793
4794 x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
4795 bsi2 = bsi_start (loop_header);
4796 if (gimple_in_ssa_p (cfun))
4797 {
4798 x = force_gimple_operand_bsi (&bsi2, x, true, NULL_TREE,
4799 true, BSI_SAME_STMT);
4800 x = build_gimple_modify_stmt (loaded_val, x);
4801 bsi_insert_before (&bsi2, x, BSI_SAME_STMT);
4802 SSA_NAME_DEF_STMT (loaded_val) = x;
4803 }
4804 else
4805 {
4806 x = build_gimple_modify_stmt (loaded_val, x);
4807 force_gimple_operand_bsi (&bsi2, x, true, NULL_TREE,
4808 true, BSI_SAME_STMT);
4809 }
4810 }
4811 bsi_remove (&bsi, true);
4812
4813 bsi = bsi_last (store_bb);
4814 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
4815
4816 if (iaddr == addr)
4817 storedi = stored_val;
a509ebb5 4818 else
4819 storedi =
4820 force_gimple_operand_bsi (&bsi,
4821 build1 (VIEW_CONVERT_EXPR, itype,
4822 stored_val), true, NULL_TREE, true,
4823 BSI_SAME_STMT);
4824
4825 /* Build the compare&swap statement. */
4826 new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
4827 new_storedi = force_gimple_operand_bsi (&bsi,
4828 fold_convert (itype, new_storedi),
4829 true, NULL_TREE,
4830 true, BSI_SAME_STMT);
a509ebb5
RL
4831
4832 if (gimple_in_ssa_p (cfun))
4833 old_vali = loadedi;
4834 else
4835 {
4836 old_vali = create_tmp_var (itype, NULL);
4837 if (gimple_in_ssa_p (cfun))
4838 add_referenced_var (old_vali);
a509ebb5 4839 x = build_gimple_modify_stmt (old_vali, loadedi);
4840 force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
4841 true, BSI_SAME_STMT);
a509ebb5 4842
4843 x = build_gimple_modify_stmt (loadedi, new_storedi);
4844 force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
4845 true, BSI_SAME_STMT);
4846 }
4847
4848 /* Note that we always perform the comparison as an integer, even for
4849 floating point. This allows the atomic operation to properly
4850 succeed even with NaNs and -0.0. */
4851 x = build2 (NE_EXPR, boolean_type_node, new_storedi, old_vali);
4852 x = build3 (COND_EXPR, void_type_node, x, NULL_TREE, NULL_TREE);
4853 bsi_insert_before (&bsi, x, BSI_SAME_STMT);
4854
4855 /* Update cfg. */
4856 e = single_succ_edge (store_bb);
4857 e->flags &= ~EDGE_FALLTHRU;
4858 e->flags |= EDGE_FALSE_VALUE;
4859
4860 e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);
4861
c18c98c0 4862 /* Copy the new value to loadedi (we already did that before the condition
4863 if we are not in SSA). */
4864 if (gimple_in_ssa_p (cfun))
4865 {
4866 phi = phi_nodes (loop_header);
c18c98c0 4867 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
4868 }
4869
4870 /* Remove OMP_ATOMIC_STORE. */
4871 bsi_remove (&bsi, true);
4872
4873 if (gimple_in_ssa_p (cfun))
4874 update_ssa (TODO_update_ssa_no_phi);
4875
4876 return true;
4877}
4878
4879/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
4880
4881 GOMP_atomic_start ();
4882 *addr = rhs;
4883 GOMP_atomic_end ();
4884
4885 The result is not globally atomic, but works so long as all parallel
4886 references are within #pragma omp atomic directives. According to
4887 responses received from omp@openmp.org, this appears to be within
4888 spec, which makes sense, since that's how several other compilers
4889 handle this situation as well.
4890 LOADED_VAL and ADDR are the operands of OMP_ATOMIC_LOAD we're expanding.
4891 STORED_VAL is the operand of the matching OMP_ATOMIC_STORE.
4892
4893 We replace
4894 OMP_ATOMIC_LOAD (loaded_val, addr) with
4895 loaded_val = *addr;
4896
4897 and replace
4898 OMP_ATOMIC_STORE (stored_val) with
4899 *addr = stored_val;
4900*/
4901
4902static bool
4903expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
4904 tree addr, tree loaded_val, tree stored_val)
4905{
4906 block_stmt_iterator bsi;
4907 tree t;
4908
4909 bsi = bsi_last (load_bb);
4910 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
4911
4912 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
4913 t = build_function_call_expr (t, 0);
4914 force_gimple_operand_bsi (&bsi, t, true, NULL_TREE, true, BSI_SAME_STMT);
4915
4916 t = build_gimple_modify_stmt (loaded_val, build_fold_indirect_ref (addr));
4917 if (gimple_in_ssa_p (cfun))
4918 SSA_NAME_DEF_STMT (loaded_val) = t;
4919 bsi_insert_before (&bsi, t, BSI_SAME_STMT);
4920 bsi_remove (&bsi, true);
4921
4922 bsi = bsi_last (store_bb);
4923 gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
4924
4925 t = build_gimple_modify_stmt (build_fold_indirect_ref (unshare_expr (addr)),
4926 stored_val);
4927 bsi_insert_before (&bsi, t, BSI_SAME_STMT);
4928
4929 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
4930 t = build_function_call_expr (t, 0);
4931 force_gimple_operand_bsi (&bsi, t, true, NULL_TREE, true, BSI_SAME_STMT);
4932 bsi_remove (&bsi, true);
4933
4934 if (gimple_in_ssa_p (cfun))
4935 update_ssa (TODO_update_ssa_no_phi);
4936 return true;
4937}
4938
4939/* Expand an OMP_ATOMIC statement. We try to expand
4940 using expand_omp_atomic_fetch_op. If that fails, we try to
4941 call expand_omp_atomic_pipeline, and if that fails too, the
4942 ultimate fallback is wrapping the operation in a mutex
4943 (expand_omp_atomic_mutex). REGION is the atomic region built
4944 by build_omp_regions_1(). */
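
 /* For instance (an added illustration): a naturally aligned 4-byte
    "int" yields index == 2 below, selecting the "_4" variants of the
    __sync builtins; a type whose size is not a supported power of
    two, or that is under-aligned, ends up in the mutex fallback.  */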
4945
4946static void
4947expand_omp_atomic (struct omp_region *region)
4948{
4949 basic_block load_bb = region->entry, store_bb = region->exit;
4950 tree load = last_stmt (load_bb), store = last_stmt (store_bb);
4951 tree loaded_val = TREE_OPERAND (load, 0);
4952 tree addr = TREE_OPERAND (load, 1);
4953 tree stored_val = TREE_OPERAND (store, 0);
4954 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
4955 HOST_WIDE_INT index;
4956
4957 /* Make sure the type is one of the supported sizes. */
4958 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
4959 index = exact_log2 (index);
4960 if (index >= 0 && index <= 4)
4961 {
4962 unsigned int align = TYPE_ALIGN_UNIT (type);
4963
4964 /* __sync builtins require strict data alignment. */
4965 if (exact_log2 (align) >= index)
4966 {
4967 /* When possible, use specialized atomic update functions. */
4968 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
4969 && store_bb == single_succ (load_bb))
4970 {
4971 if (expand_omp_atomic_fetch_op (load_bb, addr,
4972 loaded_val, stored_val, index))
4973 return;
4974 }
4975
4976 /* If we don't have specialized __sync builtins, try to implement
4977 it as a compare and swap loop. */
4978 if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
4979 loaded_val, stored_val, index))
4980 return;
4981 }
4982 }
4983
4984 /* The ultimate fallback is wrapping the operation in a mutex. */
4985 expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
4986}
4987
953ff289 4988
4989/* Expand the parallel region tree rooted at REGION. Expansion
4990 proceeds in depth-first order. Innermost regions are expanded
4991 first. This way, parallel regions that require a new function to
4992 be created (e.g., OMP_PARALLEL) can be expanded without having any
4993 internal dependencies in their body. */
4994
4995static void
4996expand_omp (struct omp_region *region)
4997{
4998 while (region)
4999 {
5000 /* First, determine whether this is a combined parallel+workshare
5001 region. */
5002 if (region->type == OMP_PARALLEL)
5003 determine_parallel_type (region);
5004
5005 if (region->inner)
5006 expand_omp (region->inner);
5007
777f7f9a 5008 switch (region->type)
50674e96 5009 {
777f7f9a 5010 case OMP_PARALLEL:
5011 expand_omp_taskreg (region);
5012 break;
5013
5014 case OMP_TASK:
5015 expand_omp_taskreg (region);
777f7f9a 5016 break;
50674e96 5017
5018 case OMP_FOR:
5019 expand_omp_for (region);
5020 break;
50674e96 5021
5022 case OMP_SECTIONS:
5023 expand_omp_sections (region);
5024 break;
50674e96 5025
5026 case OMP_SECTION:
5027 /* Individual omp sections are handled together with their
5028 parent OMP_SECTIONS region. */
5029 break;
50674e96 5030
5031 case OMP_SINGLE:
5032 expand_omp_single (region);
5033 break;
50674e96 5034
5035 case OMP_MASTER:
5036 case OMP_ORDERED:
5037 case OMP_CRITICAL:
5038 expand_omp_synch (region);
5039 break;
50674e96 5040
5041 case OMP_ATOMIC_LOAD:
5042 expand_omp_atomic (region);
5043 break;
5044
5045
5046 default:
5047 gcc_unreachable ();
5048 }
8d9c1aec 5049
5050 region = region->next;
5051 }
5052}
5053
5054
5055/* Helper for build_omp_regions. Scan the dominator tree starting at
5056 block BB. PARENT is the region that contains BB. If SINGLE_TREE is
5057 true, the function ends once a single tree is built (otherwise, a
5058 whole forest of OMP constructs may be built). */
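
 /* An added illustration: for

	#pragma omp parallel
	{
	#pragma omp for
	  ...
	}

    this builds an OMP_PARALLEL region whose ->inner is the OMP_FOR
    region; the block holding each construct's OMP_RETURN becomes
    that region's exit.  */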
5059
5060static void
5061build_omp_regions_1 (basic_block bb, struct omp_region *parent,
5062 bool single_tree)
5063{
5064 block_stmt_iterator si;
5065 tree stmt;
5066 basic_block son;
5067
5068 si = bsi_last (bb);
5069 if (!bsi_end_p (si) && OMP_DIRECTIVE_P (bsi_stmt (si)))
5070 {
5071 struct omp_region *region;
777f7f9a 5072 enum tree_code code;
5073
5074 stmt = bsi_stmt (si);
777f7f9a 5075 code = TREE_CODE (stmt);
777f7f9a 5076 if (code == OMP_RETURN)
5077 {
5078 /* STMT is the return point out of region PARENT. Mark it
5079 as the exit point and make PARENT the immediately
5080 enclosing region. */
5081 gcc_assert (parent);
5082 region = parent;
777f7f9a 5083 region->exit = bb;
50674e96 5084 parent = parent->outer;
50674e96 5085 }
5086 else if (code == OMP_ATOMIC_STORE)
5087 {
fa10beec 5088 /* OMP_ATOMIC_STORE is analogous to OMP_RETURN, but matches with
5089 OMP_ATOMIC_LOAD. */
5090 gcc_assert (parent);
5091 gcc_assert (parent->type == OMP_ATOMIC_LOAD);
5092 region = parent;
5093 region->exit = bb;
5094 parent = parent->outer;
5095 }
5096
777f7f9a
RH
5097 else if (code == OMP_CONTINUE)
5098 {
5099 gcc_assert (parent);
5100 parent->cont = bb;
5101 }
e5c95afe
ZD
5102 else if (code == OMP_SECTIONS_SWITCH)
5103 {
5104 /* OMP_SECTIONS_SWITCH is part of OMP_SECTIONS, and we do nothing for
a509ebb5 5105 it. */ ;
e5c95afe 5106 }
50674e96
DN
5107 else
5108 {
5109 /* Otherwise, this directive becomes the parent for a new
5110 region. */
777f7f9a 5111 region = new_omp_region (bb, code, parent);
50674e96
DN
5112 parent = region;
5113 }
50674e96
DN
5114 }
5115
5f40b3cb
ZD
5116 if (single_tree && !parent)
5117 return;
5118
50674e96
DN
5119 for (son = first_dom_son (CDI_DOMINATORS, bb);
5120 son;
5121 son = next_dom_son (CDI_DOMINATORS, son))
5f40b3cb
ZD
5122 build_omp_regions_1 (son, parent, single_tree);
5123}
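
/* For illustration, a sketch of the region tree this walk builds for

       #pragma omp parallel
       {
	 #pragma omp for
	 for (...) { ... }
       }

   OMP_PARALLEL opens a region; the nested OMP_FOR opens a child via
   new_omp_region; the OMP_CONTINUE at the loop latch fills in cont; and
   each OMP_RETURN closes the innermost open region by recording its exit
   block and popping back to the outer region:

       OMP_PARALLEL  (entry, exit)
	 OMP_FOR     (entry, cont, exit)  */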

/* Builds the tree of OMP regions rooted at ROOT, storing it in
   root_omp_region.  */

static void
build_omp_regions_root (basic_block root)
{
  gcc_assert (root_omp_region == NULL);
  build_omp_regions_1 (root, NULL, true);
  gcc_assert (root_omp_region != NULL);
}

/* Expands omp construct (and its subconstructs) starting in HEAD.  */

void
omp_expand_local (basic_block head)
{
  build_omp_regions_root (head);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nOMP region tree\n\n");
      dump_omp_region (dump_file, root_omp_region, 0);
      fprintf (dump_file, "\n");
    }

  remove_exit_barriers (root_omp_region);
  expand_omp (root_omp_region);

  free_omp_regions ();
}

/* Scan the CFG and build a tree of OMP regions, storing the root of
   the tree in root_omp_region.  */

static void
build_omp_regions (void)
{
  gcc_assert (root_omp_region == NULL);
  calculate_dominance_info (CDI_DOMINATORS);
  build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
}


/* Main entry point for expanding OMP-GIMPLE into runtime calls.  */

static unsigned int
execute_expand_omp (void)
{
  build_omp_regions ();

  if (!root_omp_region)
    return 0;

  if (dump_file)
    {
      fprintf (dump_file, "\nOMP region tree\n\n");
      dump_omp_region (dump_file, root_omp_region, 0);
      fprintf (dump_file, "\n");
    }

  remove_exit_barriers (root_omp_region);

  expand_omp (root_omp_region);

  cleanup_tree_cfg ();

  free_omp_regions ();

  return 0;
}

/* OMP expansion -- the default pass, run before creation of SSA form.  */

static bool
gate_expand_omp (void)
{
  return (flag_openmp != 0 && errorcount == 0);
}

struct gimple_opt_pass pass_expand_omp =
{
 {
  GIMPLE_PASS,
  "ompexp",				/* name */
  gate_expand_omp,			/* gate */
  execute_expand_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lomp,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
\f
/* Routines to lower OpenMP directives into OMP-GIMPLE.  */

/* Lower the OpenMP sections directive in *STMT_P.  */

static void
lower_omp_sections (tree *stmt_p, omp_context *ctx)
{
  tree new_stmt, stmt, body, bind, block, ilist, olist, new_body, control;
  tree t, dlist;
  tree_stmt_iterator tsi;
  unsigned i, len;

  stmt = *stmt_p;

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;
  lower_rec_input_clauses (OMP_SECTIONS_CLAUSES (stmt), &ilist, &dlist, ctx);

  tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
  for (len = 0; !tsi_end_p (tsi); len++, tsi_next (&tsi))
    continue;

  tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
  body = alloc_stmt_list ();
  for (i = 0; i < len; i++, tsi_next (&tsi))
    {
      omp_context *sctx;
      tree sec_start, sec_end;

      sec_start = tsi_stmt (tsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      append_to_statement_list (sec_start, &body);

      lower_omp (&OMP_SECTION_BODY (sec_start), sctx);
      append_to_statement_list (OMP_SECTION_BODY (sec_start), &body);
      OMP_SECTION_BODY (sec_start) = NULL;

      if (i == len - 1)
	{
	  tree l = alloc_stmt_list ();
	  lower_lastprivate_clauses (OMP_SECTIONS_CLAUSES (stmt), NULL,
				     &l, ctx);
	  append_to_statement_list (l, &body);
	  OMP_SECTION_LAST (sec_start) = 1;
	}

      sec_end = make_node (OMP_RETURN);
      append_to_statement_list (sec_end, &body);
    }

  block = make_node (BLOCK);
  bind = build3 (BIND_EXPR, void_type_node, NULL, body, block);

  olist = NULL_TREE;
  lower_reduction_clauses (OMP_SECTIONS_CLAUSES (stmt), &olist, ctx);

  pop_gimplify_context (NULL_TREE);
  record_vars_into (ctx->block_vars, ctx->cb.dst_fn);

  new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (new_stmt) = 1;

  new_body = alloc_stmt_list ();
  append_to_statement_list (ilist, &new_body);
  append_to_statement_list (stmt, &new_body);
  append_to_statement_list (make_node (OMP_SECTIONS_SWITCH), &new_body);
  append_to_statement_list (bind, &new_body);

  control = create_tmp_var (unsigned_type_node, ".section");
  t = build2 (OMP_CONTINUE, void_type_node, control, control);
  OMP_SECTIONS_CONTROL (stmt) = control;
  append_to_statement_list (t, &new_body);

  append_to_statement_list (olist, &new_body);
  append_to_statement_list (dlist, &new_body);

  maybe_catch_exception (&new_body);

  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SECTIONS_CLAUSES (stmt),
					     OMP_CLAUSE_NOWAIT);
  append_to_statement_list (t, &new_body);

  BIND_EXPR_BODY (new_stmt) = new_body;
  OMP_SECTIONS_BODY (stmt) = NULL;

  *stmt_p = new_stmt;
}
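
/* For illustration, a sketch of the lowered shape (names abbreviated):

       #pragma omp sections
       {
	 #pragma omp section  a ();
	 #pragma omp section  b ();
       }

   becomes roughly

       <ilist: input clauses>
       OMP_SECTIONS  [control var .section]
       OMP_SECTIONS_SWITCH
       {
	 OMP_SECTION  a ();  OMP_RETURN
	 OMP_SECTION  b ();  <lastprivate list>  OMP_RETURN
       }
       OMP_CONTINUE (.section, .section)
       <olist: reductions> <dlist: destructors>
       OMP_RETURN  [nowait if the clause was given]

   which pass_expand_omp turns into a switch driven by
   GOMP_sections_start/GOMP_sections_next.  */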

/* A subroutine of lower_omp_single.  Expand the simple form of
   an OMP_SINGLE, without a copyprivate clause:

     if (GOMP_single_start ())
       BODY;
     [ GOMP_barrier (); ] -> unless 'nowait' is present.

   FIXME.  It may be better to delay expanding the logic of this until
   pass_expand_omp.  The expanded logic may make the job more difficult
   for a synchronization analysis pass.  */

static void
lower_omp_single_simple (tree single_stmt, tree *pre_p)
{
  tree t;

  t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_START], 0);
  if (TREE_TYPE (t) != boolean_type_node)
    t = fold_build2 (NE_EXPR, boolean_type_node,
		     t, build_int_cst (TREE_TYPE (t), 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      OMP_SINGLE_BODY (single_stmt), NULL);
  gimplify_and_add (t, pre_p);
}
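
/* For illustration: with this helper,

       #pragma omp single
       init ();

   lowers to roughly

       if (GOMP_single_start ())
	 init ();

   followed by the OMP_RETURN emitted in lower_omp_single, whose barrier
   is dropped only when a nowait clause is present.  */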

/* A subroutine of lower_omp_single.  Expand an OMP_SINGLE with a
   copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

   FIXME.  It may be better to delay expanding the logic of this until
   pass_expand_omp.  The expanded logic may make the job more difficult
   for a synchronization analysis pass.  */

static void
lower_omp_single_copy (tree single_stmt, tree *pre_p, omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, copyin_seq;

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label ();
  l1 = create_artificial_label ();
  l2 = create_artificial_label ();

  t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START], 0);
  t = fold_convert (ptr_type, t);
  t = build_gimple_modify_stmt (ctx->receiver_decl, t);
  gimplify_and_add (t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, l0);
  gimplify_and_add (t, pre_p);

  append_to_statement_list (OMP_SINGLE_BODY (single_stmt), pre_p);

  copyin_seq = NULL;
  lower_copyprivate_clauses (OMP_SINGLE_CLAUSES (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr (ctx->sender_decl);
  t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END], 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, l1);
  gimplify_and_add (t, pre_p);

  append_to_statement_list (copyin_seq, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, l2);
  gimplify_and_add (t, pre_p);
}

/* Lower code for an OpenMP single directive.  */

static void
lower_omp_single (tree *stmt_p, omp_context *ctx)
{
  tree t, bind, block, single_stmt = *stmt_p, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  lower_rec_input_clauses (OMP_SINGLE_CLAUSES (single_stmt),
			   &BIND_EXPR_BODY (bind), &dlist, ctx);
  lower_omp (&OMP_SINGLE_BODY (single_stmt), ctx);

  append_to_statement_list (single_stmt, &BIND_EXPR_BODY (bind));

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &BIND_EXPR_BODY (bind), ctx);
  else
    lower_omp_single_simple (single_stmt, &BIND_EXPR_BODY (bind));

  OMP_SINGLE_BODY (single_stmt) = NULL;

  append_to_statement_list (dlist, &BIND_EXPR_BODY (bind));

  maybe_catch_exception (&BIND_EXPR_BODY (bind));

  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SINGLE_CLAUSES (single_stmt),
					     OMP_CLAUSE_NOWAIT);
  append_to_statement_list (t, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);

  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}

/* Lower code for an OpenMP master directive.  */

static void
lower_omp_master (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p, lab = NULL, x;

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_MASTER_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_MASTER_BODY (stmt));
  append_to_statement_list (OMP_MASTER_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_MASTER_BODY (stmt) = NULL;

  x = build1 (LABEL_EXPR, void_type_node, lab);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  x = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (x) = 1;
  append_to_statement_list (x, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);

  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}
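
/* For illustration: the lowering above turns

       #pragma omp master
       x = 1;

   into roughly

       if (omp_get_thread_num () == 0)
	 x = 1;
       lab:

   with OMP_RETURN_NOWAIT set, since master has no implied barrier.  */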

/* Lower code for an OpenMP ordered directive.  */

static void
lower_omp_ordered (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p, x;

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ORDERED_START], 0);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_ORDERED_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_ORDERED_BODY (stmt));
  append_to_statement_list (OMP_ORDERED_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_ORDERED_BODY (stmt) = NULL;

  x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ORDERED_END], 0);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  x = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (x) = 1;
  append_to_statement_list (x, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);

  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}
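
/* For illustration: inside an ordered loop,

       #pragma omp ordered
       emit (i);

   lowers to

       GOMP_ordered_start ();
       emit (i);
       GOMP_ordered_end ();

   again with OMP_RETURN_NOWAIT set; it is ordering, not a barrier, that
   the runtime calls enforce.  */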

/* Gimplify an OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But the NAMED case
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

static GTY((param1_is (tree), param2_is (tree)))
  splay_tree critical_name_mutexes;

static void
lower_omp_critical (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p;
  tree t, lock, unlock, name;

  name = OMP_CRITICAL_NAME (stmt);
  if (name)
    {
      tree decl;
      splay_tree_node n;

      if (!critical_name_mutexes)
	critical_name_mutexes
	  = splay_tree_new_ggc (splay_tree_compare_pointers);

      n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node, NULL);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  varpool_finalize_decl (decl);

	  splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
			     (splay_tree_value) decl);
	}
      else
	decl = (tree) n->value;

      lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_START];
      lock = build_call_expr (lock, 1, build_fold_addr_expr (decl));

      unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_END];
      unlock = build_call_expr (unlock, 1, build_fold_addr_expr (decl));
    }
  else
    {
      lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_START];
      lock = build_call_expr (lock, 0);

      unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_END];
      unlock = build_call_expr (unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  gimplify_and_add (lock, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_CRITICAL_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_CRITICAL_BODY (stmt));
  append_to_statement_list (OMP_CRITICAL_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_CRITICAL_BODY (stmt) = NULL;

  gimplify_and_add (unlock, &BIND_EXPR_BODY (bind));

  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = 1;
  append_to_statement_list (t, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);
  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}
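
/* For illustration: a named critical section

       #pragma omp critical (io)
       write_log ();

   lowers to

       GOMP_critical_name_start (&.gomp_critical_user_io);
       write_log ();
       GOMP_critical_name_end (&.gomp_critical_user_io);

   where .gomp_critical_user_io is the common symbol built above, so all
   translation units that use the name "io" share one lock.  The unnamed
   form uses GOMP_critical_start ()/GOMP_critical_end () instead.  */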

/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
			   tree *dlist, struct omp_context *ctx)
{
  tree clauses, cond, stmts, vinit, t;
  enum tree_code cond_code;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (host_integerp (fd->loop.step, 0))
    {
      HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);

  clauses = OMP_FOR_CLAUSES (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (stmts != NULL)
    {
      append_to_statement_list (*dlist, &stmts);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && host_integerp (fd->loop.n2, 0)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      t = build_gimple_modify_stmt (fd->loop.v, vinit);
      gimplify_and_add (t, body_p);
    }
}
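
/* For illustration: for

       #pragma omp for lastprivate (x)
       for (i = 0; i < n; i++)
	 x = f (i);

   the copy-out emitted above is gated on the inverted loop predicate,
   roughly

       i = vinit;	    <- so threads that run no iterations fail the test
       ... work-shared loop ...
       if (i >= n)	    <- or i == n when the step is known to be +-1
	 <original x> = x;  */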

/* Lower code for an OpenMP loop directive.  */

static void
lower_omp_for (tree *stmt_p, omp_context *ctx)
{
  tree t, stmt, ilist, dlist, new_stmt, *body_p, *rhs_p;
  struct omp_for_data fd;
  int i;

  stmt = *stmt_p;

  push_gimplify_context ();

  lower_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
  lower_omp (&OMP_FOR_BODY (stmt), ctx);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  if (TREE_CODE (OMP_FOR_BODY (stmt)) == BIND_EXPR)
    record_vars_into (BIND_EXPR_VARS (OMP_FOR_BODY (stmt)), ctx->cb.dst_fn);

  new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (new_stmt) = 1;
  body_p = &BIND_EXPR_BODY (new_stmt);

  /* The pre-body and input clauses go before the lowered OMP_FOR.  */
  ilist = NULL;
  dlist = NULL;
  lower_rec_input_clauses (OMP_FOR_CLAUSES (stmt), body_p, &dlist, ctx);
  append_to_statement_list (OMP_FOR_PRE_BODY (stmt), body_p);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
    {
      rhs_p = &GIMPLE_STMT_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (stmt), i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, body_p);

      rhs_p = &TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_COND (stmt), i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, body_p);

      rhs_p = &TREE_OPERAND (GIMPLE_STMT_OPERAND
			     (TREE_VEC_ELT (OMP_FOR_INCR (stmt), i), 1), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, body_p);
    }

  /* Once lowered, extract the bounds and clauses.  */
  extract_omp_for_data (stmt, &fd, NULL);

  lower_omp_for_lastprivate (&fd, body_p, &dlist, ctx);

  append_to_statement_list (stmt, body_p);

  append_to_statement_list (OMP_FOR_BODY (stmt), body_p);

  t = build2 (OMP_CONTINUE, void_type_node, fd.loop.v, fd.loop.v);
  append_to_statement_list (t, body_p);

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (OMP_FOR_CLAUSES (stmt), body_p, ctx);
  append_to_statement_list (dlist, body_p);

  maybe_catch_exception (body_p);

  /* Region exit marker goes at the end of the loop body.  */
  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = fd.have_nowait;
  append_to_statement_list (t, body_p);

  pop_gimplify_context (NULL_TREE);
  record_vars_into (ctx->block_vars, ctx->cb.dst_fn);

  OMP_FOR_BODY (stmt) = NULL_TREE;
  OMP_FOR_PRE_BODY (stmt) = NULL_TREE;
  *stmt_p = new_stmt;
}
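
/* For illustration, the overall shape this function produces:

       <ilist: input clauses> <pre-body>
       <vinit from lower_omp_for_lastprivate>
       OMP_FOR (i = lb; i < ub; i = i + step)
	 <loop body>
       OMP_CONTINUE (i, i)
       <reductions> <dlist: lastprivate/destructors>
       OMP_RETURN [nowait]

   The scheduling itself (chunking, GOMP_loop_*_start/next calls for
   non-static schedules) is left for pass_expand_omp.  */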

/* Callback for walk_stmts.  Check if *TP only contains OMP_FOR
   or OMP_SECTIONS.  */

static tree
check_combined_parallel (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  int *info = wi->info;

  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_FOR:
    case OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }
  return NULL;
}
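
/* For illustration: with the callback above, *INFO ends up 1 (and the
   parallel is later marked combined) for

       #pragma omp parallel
       #pragma omp for
       for (...) { ... }

   but -1 for

       #pragma omp parallel
       {
	 setup ();
	 #pragma omp for
	 for (...) { ... }
       }

   because the extra statement hits the default case.  */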

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  omp_context *ctx;
};

static tree
task_copyfn_copy_decl (tree var, copy_body_data *cb)
{
  struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;

  if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
    return create_tmp_var (TREE_TYPE (var), NULL);

  return var;
}

static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_body_r, &tcctx->cb, NULL);
      new_fields = new_f;
      *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}

/* Create task copyfn.  */

static void
create_task_copyfn (tree task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;

  child_fn = OMP_TASK_COPYFN (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  child_cfun->dont_save_pending_sizes_p = 1;
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  current_function_decl = child_fn;

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = EXPR_LOCATION (task_stmt);

  /* Remap src and dst argument types if needed.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = TREE_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node (tcctx.cb.src_fn);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_region = -1;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = pointer_map_create ();
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  push_cfun (child_cfun);

  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = TREE_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	  src = build_fold_indirect_ref (sarg);
	  src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	  t = build_gimple_modify_stmt (*p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	src = build_fold_indirect_ref (sarg);
	src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	dst = build_fold_indirect_ref (arg);
	dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
	t = build_gimple_modify_stmt (dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	    src = build_fold_indirect_ref (sarg);
	    src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	    if (use_pointer_for_field (decl, NULL) || is_reference (decl))
	      src = build_fold_indirect_ref (src);
	  }
	else
	  src = decl;
	dst = build_fold_indirect_ref (arg);
	dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
	t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	    src = build_fold_indirect_ref (sarg);
	    src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_fold_indirect_ref (src);
	  }
	else
	  src = decl;
	dst = build_fold_indirect_ref (arg);
	dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
	t = build_gimple_modify_stmt (dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	  src = build_fold_indirect_ref (sarg);
	  src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	  src = build_fold_indirect_ref (src);
	  dst = build_fold_indirect_ref (arg);
	  dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
	  ptr = build_fold_indirect_ref (arg);
	  ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
	  t = build_gimple_modify_stmt (ptr, build_fold_addr_expr (dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    pointer_map_destroy (tcctx.cb.decl_map);
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
  current_function_decl = ctx->cb.src_fn;
}
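
/* For illustration, the kind of task that needs a copyfn:

       void f (int n)
       {
	 int a[n];
       #pragma omp task firstprivate (a)
	 use (a);
       }

   The task may run after f's frame is gone, so GOMP_task allocates a
   fresh record and the function built above copies 'a' (and the size
   temporaries its variably-modified type depends on) from the sender
   record into that record before the task body runs.  */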

/* Lower the OpenMP parallel or task directive in *STMT_P.  CTX holds
   context information for the directive.  */

static void
lower_omp_taskreg (tree *stmt_p, omp_context *ctx)
{
  tree clauses, par_bind, par_body, new_body, bind;
  tree olist, ilist, par_olist, par_ilist;
  tree stmt, child_fn, t;

  stmt = *stmt_p;

  clauses = OMP_TASKREG_CLAUSES (stmt);
  par_bind = OMP_TASKREG_BODY (stmt);
  par_body = BIND_EXPR_BODY (par_bind);
  child_fn = ctx->cb.dst_fn;
  if (TREE_CODE (stmt) == OMP_PARALLEL && !OMP_PARALLEL_COMBINED (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.callback = check_combined_parallel;
      wi.info = &ws_num;
      wi.val_only = true;
      walk_stmts (&wi, &par_bind);
      if (ws_num == 1)
	OMP_PARALLEL_COMBINED (stmt) = 1;
    }
  if (ctx->srecord_type)
    create_task_copyfn (stmt, ctx);

  push_gimplify_context ();

  par_olist = NULL_TREE;
  par_ilist = NULL_TREE;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
  lower_omp (&par_body, ctx);
  if (TREE_CODE (stmt) == OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_olist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  record_vars_into (BIND_EXPR_VARS (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      OMP_TASKREG_DATA_ARG (stmt) = ctx->sender_decl;
    }

  olist = NULL_TREE;
  ilist = NULL_TREE;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  /* Once all the expansions are done, sequence all the different
     fragments inside OMP_TASKREG_BODY.  */
  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  append_to_statement_list (ilist, &BIND_EXPR_BODY (bind));

  new_body = alloc_stmt_list ();

  if (ctx->record_type)
    {
      t = build_fold_addr_expr (ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert (TREE_TYPE (ctx->receiver_decl), t);
      t = build_gimple_modify_stmt (ctx->receiver_decl, t);
      append_to_statement_list (t, &new_body);
    }

  append_to_statement_list (par_ilist, &new_body);
  append_to_statement_list (par_body, &new_body);
  append_to_statement_list (par_olist, &new_body);
  maybe_catch_exception (&new_body);
  t = make_node (OMP_RETURN);
  append_to_statement_list (t, &new_body);
  OMP_TASKREG_BODY (stmt) = new_body;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
  append_to_statement_list (olist, &BIND_EXPR_BODY (bind));

  *stmt_p = bind;

  pop_gimplify_context (NULL_TREE);
}
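
/* For illustration, a sketch of the lowered form (the exact sends depend
   on use_pointer_for_field):

       #pragma omp parallel shared (n)
	 body;

   becomes roughly

       .omp_data_o.n = &n;		      <- ilist (send side)
       OMP_PARALLEL [data arg .omp_data_o]
	 .omp_data_i = &.omp_data_o;	      <- receiver fixup above
	 <par_ilist> body <par_olist>
	 OMP_RETURN
       <olist>				      <- copy-back, if any

   and pass_expand_omp later outlines the body into the child function,
   passing &.omp_data_o to GOMP_parallel_start.  */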

/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  */

static tree
lower_omp_2 (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  omp_context *ctx = data;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (TREE_CODE (t) == VAR_DECL
      && ((ctx && DECL_HAS_VALUE_EXPR_P (t))
	  || (task_shared_vars
	      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (ctx && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
  return NULL_TREE;
}

static void
lower_omp_1 (tree *tp, omp_context *ctx, tree_stmt_iterator *tsi)
{
  tree t = *tp;

  if (!t)
    return;

  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OpenMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (errorcount && OMP_DIRECTIVE_P (t))
    {
      *tp = build_empty_stmt ();
      return;
    }

  switch (TREE_CODE (t))
    {
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  lower_omp_1 (tsi_stmt_ptr (i), ctx, &i);
      }
      break;

    case COND_EXPR:
      lower_omp_1 (&COND_EXPR_THEN (t), ctx, NULL);
      lower_omp_1 (&COND_EXPR_ELSE (t), ctx, NULL);
      if ((ctx || task_shared_vars)
	  && walk_tree (&COND_EXPR_COND (t), lower_omp_2, ctx, NULL))
	{
	  tree pre = NULL;
	  gimplify_expr (&COND_EXPR_COND (t), &pre, NULL,
			 is_gimple_condexpr, fb_rvalue);
	  if (pre)
	    {
	      if (tsi)
		tsi_link_before (tsi, pre, TSI_SAME_STMT);
	      else
		{
		  append_to_statement_list (t, &pre);
		  *tp = pre;
		}
	    }
	}
      break;
    case CATCH_EXPR:
      lower_omp_1 (&CATCH_BODY (t), ctx, NULL);
      break;
    case EH_FILTER_EXPR:
      lower_omp_1 (&EH_FILTER_FAILURE (t), ctx, NULL);
      break;
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
      lower_omp_1 (&TREE_OPERAND (t, 0), ctx, NULL);
      lower_omp_1 (&TREE_OPERAND (t, 1), ctx, NULL);
      break;
    case BIND_EXPR:
      lower_omp_1 (&BIND_EXPR_BODY (t), ctx, NULL);
      break;
    case RETURN_EXPR:
      lower_omp_1 (&TREE_OPERAND (t, 0), ctx, NULL);
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
      ctx = maybe_lookup_ctx (t);
      lower_omp_taskreg (tp, ctx);
      break;
    case OMP_FOR:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_for (tp, ctx);
      break;
    case OMP_SECTIONS:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_sections (tp, ctx);
      break;
    case OMP_SINGLE:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_single (tp, ctx);
      break;
    case OMP_MASTER:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_master (tp, ctx);
      break;
    case OMP_ORDERED:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_ordered (tp, ctx);
      break;
    case OMP_CRITICAL:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_critical (tp, ctx);
      break;

    default:
      if ((ctx || task_shared_vars)
	  && walk_tree (tp, lower_omp_2, ctx, NULL))
	{
	  /* The gimplifier doesn't gimplify CALL_EXPR_STATIC_CHAIN.
	     Handle that here.  */
	  tree call = get_call_expr_in (t);
	  if (call
	      && CALL_EXPR_STATIC_CHAIN (call)
	      && walk_tree (&CALL_EXPR_STATIC_CHAIN (call), lower_omp_2,
			    ctx, NULL))
	    {
	      tree pre = NULL;
	      gimplify_expr (&CALL_EXPR_STATIC_CHAIN (call), &pre, NULL,
			     is_gimple_val, fb_rvalue);
	      if (pre)
		{
		  if (tsi)
		    tsi_link_before (tsi, pre, TSI_SAME_STMT);
		  else
		    {
		      append_to_statement_list (t, &pre);
		      lower_omp_1 (&pre, ctx, NULL);
		      *tp = pre;
		      return;
		    }
		}
	    }

	  if (tsi == NULL)
	    gimplify_stmt (tp);
	  else
	    {
	      tree pre = NULL;
	      gimplify_expr (tp, &pre, NULL, is_gimple_stmt, fb_none);
	      if (pre)
		tsi_link_before (tsi, pre, TSI_SAME_STMT);
	    }
	}
      break;
    }
}

static void
lower_omp (tree *stmt_p, omp_context *ctx)
{
  lower_omp_1 (stmt_p, ctx, NULL);
}
\f
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  scan_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
  gcc_assert (taskreg_nesting_level == 0);

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}

static bool
gate_lower_omp (void)
{
  return flag_openmp != 0;
}

struct gimple_opt_pass pass_lower_omp =
{
 {
  GIMPLE_PASS,
  "omplower",				/* name */
  gate_lower_omp,			/* gate */
  execute_lower_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lomp,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
\f
/* The following is a utility to diagnose OpenMP structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (tree *stmt_p, tree branch_ctx, tree label_ctx)
{
  bool exit_p = true;

  if ((label_ctx ? TREE_VALUE (label_ctx) : NULL) == branch_ctx)
    return false;

  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from OpenMP structured block");
  else
    error ("invalid entry to OpenMP structured block");

  *stmt_p = build_empty_stmt ();
  return true;
}

/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
   where in the tree each label is found.  */

static tree
diagnose_sb_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  tree context = (tree) wi->info;
  tree inner_context;
  tree t = *tp;
  int i;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_SECTIONS:
    case OMP_SINGLE:
      walk_tree (&OMP_CLAUSES (t), diagnose_sb_1, wi, NULL);
      /* FALLTHRU */
    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      /* The minimal context here is just a tree of statements.  */
      inner_context = tree_cons (NULL, t, context);
      wi->info = inner_context;
      walk_stmts (wi, &OMP_BODY (t));
      wi->info = context;
      break;

    case OMP_FOR:
      walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_1, wi, NULL);
      inner_context = tree_cons (NULL, t, context);
      wi->info = inner_context;
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
	{
	  walk_tree (&TREE_VEC_ELT (OMP_FOR_INIT (t), i), diagnose_sb_1,
		     wi, NULL);
	  walk_tree (&TREE_VEC_ELT (OMP_FOR_COND (t), i), diagnose_sb_1,
		     wi, NULL);
	  walk_tree (&TREE_VEC_ELT (OMP_FOR_INCR (t), i), diagnose_sb_1,
		     wi, NULL);
	}
      walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
      walk_stmts (wi, &OMP_FOR_BODY (t));
      wi->info = context;
      break;

    case LABEL_EXPR:
      splay_tree_insert (all_labels, (splay_tree_key) LABEL_EXPR_LABEL (t),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  tree context = (tree) wi->info;
  splay_tree_node n;
  tree t = *tp;
  int i;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_SECTIONS:
    case OMP_SINGLE:
      walk_tree (&OMP_CLAUSES (t), diagnose_sb_2, wi, NULL);
      /* FALLTHRU */
    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      wi->info = t;
      walk_stmts (wi, &OMP_BODY (t));
      wi->info = context;
      break;

    case OMP_FOR:
      walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_2, wi, NULL);
      wi->info = t;
      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
	{
	  walk_tree (&TREE_VEC_ELT (OMP_FOR_INIT (t), i), diagnose_sb_2,
		     wi, NULL);
	  walk_tree (&TREE_VEC_ELT (OMP_FOR_COND (t), i), diagnose_sb_2,
		     wi, NULL);
	  walk_tree (&TREE_VEC_ELT (OMP_FOR_INCR (t), i), diagnose_sb_2,
		     wi, NULL);
	}
      walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
      walk_stmts (wi, &OMP_FOR_BODY (t));
      wi->info = context;
      break;

    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (t);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (tp, context, n ? (tree) n->value : NULL_TREE);
      }
      break;

    case SWITCH_EXPR:
      {
	tree vec = SWITCH_LABELS (t);
	int i, len = TREE_VEC_LENGTH (vec);
	for (i = 0; i < len; ++i)
	  {
	    tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (diagnose_sb_0 (tp, context, (tree) n->value))
	      break;
	  }
      }
      break;

    case RETURN_EXPR:
      diagnose_sb_0 (tp, context, NULL_TREE);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

void
diagnose_omp_structured_block_errors (tree fndecl)
{
  tree save_current = current_function_decl;
  struct walk_stmt_info wi;

  current_function_decl = fndecl;

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  wi.callback = diagnose_sb_1;
  walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));

  memset (&wi, 0, sizeof (wi));
  wi.callback = diagnose_sb_2;
  wi.want_locations = true;
  wi.want_return_expr = true;
  walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));

  splay_tree_delete (all_labels);
  all_labels = NULL;

  current_function_decl = save_current;
}
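
/* For illustration, the kind of code these two passes reject:

       goto inside;			  <- "invalid entry to OpenMP
     #pragma omp parallel		      structured block"
       {
	 inside: ;
	 return;			  <- "invalid exit from OpenMP
       }				      structured block"

   Pass 1 records that 'inside' lives in the parallel's context; pass 2
   sees the goto in the NULL (outermost) context and the RETURN_EXPR in
   the parallel's context, and diagnose_sb_0 reports the mismatch.  */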

#include "gt-omp-low.h"