/* Lowering pass for OpenMP directives.  Converts OpenMP directives
   into explicit calls to the runtime library (libgomp) and data
   marshalling to implement data sharing and copying clauses.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "tree-flow.h"
#include "timevar.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "tree-pass.h"
#include "ggc.h"
#include "except.h"
#include "splay-tree.h"
#include "optabs.h"
#include "cfgloop.h"


/* Lowering of OpenMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for parallel regions which are then moved to a new
   function, to be invoked by the thread library.  */
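
/* For illustration only (a rough sketch, not the exact output of this
   pass): a directive such as

       #pragma omp parallel shared (n)
         body;

   is ultimately outlined into a child function that receives the
   shared data through a ".omp_data_s" record, roughly

       .omp_data_o.n = n;
       GOMP_parallel_start (foo._omp_fn.0, &.omp_data_o, 0);
       foo._omp_fn.0 (&.omp_data_o);
       GOMP_parallel_end ();

   The exact libgomp entry points and the argument marshalling depend
   on the clauses involved.  */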

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

typedef struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;
} omp_context;
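
/* For example (illustrative): given

       #pragma omp parallel
       #pragma omp for
       for (...) ...

   the omp_context built for the "omp for" has OUTER pointing at the
   context of the enclosing "omp parallel", and a DEPTH one greater.  */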


struct omp_for_data_loop
{
  tree v, n1, n2, step;
  enum tree_code cond_code;
};

/* A structure describing the main elements of a parallel loop.  */

struct omp_for_data
{
  struct omp_for_data_loop loop;
  tree chunk_size;
  gimple for_stmt;
  tree pre, iter_type;
  int collapse;
  bool have_nowait, have_ordered;
  enum omp_clause_schedule_kind sched_kind;
  struct omp_for_data_loop *loops;
};


static splay_tree all_contexts;
static int taskreg_nesting_level;
struct omp_region *root_omp_region;
static bitmap task_shared_vars;

static void scan_omp (gimple_seq, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS \
  case GIMPLE_BIND: \
  case GIMPLE_TRY: \
  case GIMPLE_CATCH: \
  case GIMPLE_EH_FILTER: \
    /* The sub-statements for these should be walked.  */ \
    *handled_ops_p = false; \
    break;

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Find an OpenMP clause of type KIND within CLAUSES.  */

tree
find_omp_clause (tree clauses, enum omp_clause_code kind)
{
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == kind)
      return clauses;

  return NULL_TREE;
}

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
	 || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if REGION is a combined parallel+workshare region.  */

static inline bool
is_combined_parallel (struct omp_region *region)
{
  return region->is_combined_parallel;
}


/* Extract the header elements of parallel loop FOR_STMT and store
   them into *FD.  */

static void
extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
		      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;
  location_t loc = gimple_location (for_stmt);

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  fd->collapse = gimple_omp_for_collapse (for_stmt);
  if (fd->collapse > 1)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  fd->have_nowait = fd->have_ordered = false;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->chunk_size = NULL_TREE;
  collapse_iter = NULL;
  collapse_count = NULL;

  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
	fd->have_nowait = true;
	break;
      case OMP_CLAUSE_ORDERED:
	fd->have_ordered = true;
	break;
      case OMP_CLAUSE_SCHEDULE:
	fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
	fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
	break;
      case OMP_CLAUSE_COLLAPSE:
	if (fd->collapse > 1)
	  {
	    collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
	    collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
	  }
      default:
	break;
      }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
	 static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered
	  || fd->collapse > 1)
	fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
			 ? integer_zero_node : integer_one_node;
    }

  for (i = 0; i < fd->collapse; i++)
    {
      if (fd->collapse == 1)
	loop = &fd->loop;
      else if (loops != NULL)
	loop = loops + i;
      else
	loop = &dummy_loop;


      loop->v = gimple_omp_for_index (for_stmt, i);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = gimple_omp_for_initial (for_stmt, i);

      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
      loop->n2 = gimple_omp_for_final (for_stmt, i);
      switch (loop->cond_code)
	{
	case LT_EXPR:
	case GT_EXPR:
	  break;
	case LE_EXPR:
	  if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
	    loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, 1);
	  else
	    loop->n2 = fold_build2_loc (loc,
					PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
					build_int_cst (TREE_TYPE (loop->n2), 1));
	  loop->cond_code = LT_EXPR;
	  break;
	case GE_EXPR:
	  if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
	    loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, -1);
	  else
	    loop->n2 = fold_build2_loc (loc,
					MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
					build_int_cst (TREE_TYPE (loop->n2), 1));
	  loop->cond_code = GT_EXPR;
	  break;
	default:
	  gcc_unreachable ();
	}

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      switch (TREE_CODE (t))
	{
	case PLUS_EXPR:
	case POINTER_PLUS_EXPR:
	  loop->step = TREE_OPERAND (t, 1);
	  break;
	case MINUS_EXPR:
	  loop->step = TREE_OPERAND (t, 1);
	  loop->step = fold_build1_loc (loc,
					NEGATE_EXPR, TREE_TYPE (loop->step),
					loop->step);
	  break;
	default:
	  gcc_unreachable ();
	}

      if (iter_type != long_long_unsigned_type_node)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
	    iter_type = long_long_unsigned_type_node;
	  else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
		   && TYPE_PRECISION (TREE_TYPE (loop->v))
		      >= TYPE_PRECISION (iter_type))
	    {
	      tree n;

	      if (loop->cond_code == LT_EXPR)
		n = fold_build2_loc (loc,
				     PLUS_EXPR, TREE_TYPE (loop->v),
				     loop->n2, loop->step);
	      else
		n = loop->n1;
	      if (TREE_CODE (n) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
		iter_type = long_long_unsigned_type_node;
	    }
	  else if (TYPE_PRECISION (TREE_TYPE (loop->v))
		   > TYPE_PRECISION (iter_type))
	    {
	      tree n1, n2;

	      if (loop->cond_code == LT_EXPR)
		{
		  n1 = loop->n1;
		  n2 = fold_build2_loc (loc,
					PLUS_EXPR, TREE_TYPE (loop->v),
					loop->n2, loop->step);
		}
	      else
		{
		  n1 = fold_build2_loc (loc,
					MINUS_EXPR, TREE_TYPE (loop->v),
					loop->n2, loop->step);
		  n2 = loop->n1;
		}
	      if (TREE_CODE (n1) != INTEGER_CST
		  || TREE_CODE (n2) != INTEGER_CST
		  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
		  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
		iter_type = long_long_unsigned_type_node;
	    }
	}

      if (collapse_count && *collapse_count == NULL)
	{
	  if ((i == 0 || count != NULL_TREE)
	      && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
	      && TREE_CONSTANT (loop->n1)
	      && TREE_CONSTANT (loop->n2)
	      && TREE_CODE (loop->step) == INTEGER_CST)
	    {
	      tree itype = TREE_TYPE (loop->v);

	      if (POINTER_TYPE_P (itype))
		itype
		  = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
	      t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
	      t = fold_build2_loc (loc,
				   PLUS_EXPR, itype,
				   fold_convert_loc (loc, itype, loop->step), t);
	      t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
				   fold_convert_loc (loc, itype, loop->n2));
	      t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
				   fold_convert_loc (loc, itype, loop->n1));
	      if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
				     fold_build1_loc (loc, NEGATE_EXPR, itype, t),
				     fold_build1_loc (loc, NEGATE_EXPR, itype,
						      fold_convert_loc (loc, itype,
									loop->step)));
	      else
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
				     fold_convert_loc (loc, itype, loop->step));
	      t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
	      if (count != NULL_TREE)
		count = fold_build2_loc (loc,
					 MULT_EXPR, long_long_unsigned_type_node,
					 count, t);
	      else
		count = t;
	      if (TREE_CODE (count) != INTEGER_CST)
		count = NULL_TREE;
	    }
	  else
	    count = NULL_TREE;
	}
    }

  if (count)
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
	iter_type = long_long_unsigned_type_node;
      else
	iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
	*collapse_count = fold_convert_loc (loc, iter_type, count);
      else
	*collapse_count = create_tmp_var (iter_type, ".count");
    }

  if (fd->collapse > 1)
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
}
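
/* Illustrative example (not generated code): for

       #pragma omp for collapse (2)
       for (i = 0; i <= N; i++)
	 for (j = 0; j < M; j++)
	   ...

   the LE_EXPR condition of the outer loop is canonicalized above to
   i < N + 1, and when the bounds and steps are compile-time constants
   the collapsed iteration count (N + 1) * M is precomputed, so the
   whole nest can be driven by a single .iter/.count pair.  */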


/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
   is the immediate dominator of PAR_ENTRY_BB, return true if there
   are no data dependencies that would prevent expanding the parallel
   directive at PAR_ENTRY_BB as a combined parallel+workshare region.

   When expanding a combined parallel+workshare region, the call to
   the child function may need additional arguments in the case of
   GIMPLE_OMP_FOR regions.  In some cases, these arguments are
   computed out of variables passed in from the parent to the child
   via 'struct .omp_data_s'.  For instance:

	#pragma omp parallel for schedule (guided, i * 4)
	for (j ...)

   Is lowered into:

	# BLOCK 2 (PAR_ENTRY_BB)
	.omp_data_o.i = i;
	#pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)

	# BLOCK 3 (WS_ENTRY_BB)
	.omp_data_i = &.omp_data_o;
	D.1667 = .omp_data_i->i;
	D.1598 = D.1667 * 4;
	#pragma omp for schedule (guided, D.1598)

   When we outline the parallel region, the call to the child function
   'bar.omp_fn.0' will need the value D.1598 in its argument list, but
   that value is computed *after* the call site.  So, in principle we
   cannot do the transformation.

   To see whether the code in WS_ENTRY_BB blocks the combined
   parallel+workshare call, we collect all the variables used in the
   GIMPLE_OMP_FOR header and check whether they appear on the LHS of
   any statement in WS_ENTRY_BB.  If so, then we cannot emit the
   combined call.

   FIXME.  If we had the SSA form built at this point, we could merely
   hoist the code in block 3 into block 2 and be done with it.  But at
   this point we don't have dataflow information and though we could
   hack something up here, it is really not worth the aggravation.  */

static bool
workshare_safe_to_combine_p (basic_block ws_entry_bb)
{
  struct omp_for_data fd;
  gimple ws_stmt = last_stmt (ws_entry_bb);

  if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    return true;

  gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);

  extract_omp_for_data (ws_stmt, &fd, NULL);

  if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
    return false;
  if (fd.iter_type != long_integer_type_node)
    return false;

  /* FIXME.  We give up too easily here.  If any of these arguments
     are not constants, they will likely involve variables that have
     been mapped into fields of .omp_data_s for sharing with the child
     function.  With appropriate data flow, it would be possible to
     see through this.  */
  if (!is_gimple_min_invariant (fd.loop.n1)
      || !is_gimple_min_invariant (fd.loop.n2)
      || !is_gimple_min_invariant (fd.loop.step)
      || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
    return false;

  return true;
}


/* Collect additional arguments needed to emit a combined
   parallel+workshare call.  WS_STMT is the workshare directive being
   expanded.  */

static VEC(tree,gc) *
get_ws_args_for (gimple ws_stmt)
{
  tree t;
  location_t loc = gimple_location (ws_stmt);
  VEC(tree,gc) *ws_args;

  if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
    {
      struct omp_for_data fd;

      extract_omp_for_data (ws_stmt, &fd, NULL);

      ws_args = VEC_alloc (tree, gc, 3 + (fd.chunk_size != 0));

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n1);
      VEC_quick_push (tree, ws_args, t);

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n2);
      VEC_quick_push (tree, ws_args, t);

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
      VEC_quick_push (tree, ws_args, t);

      if (fd.chunk_size)
	{
	  t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
	  VEC_quick_push (tree, ws_args, t);
	}

      return ws_args;
    }
  else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    {
      /* Number of sections is equal to the number of edges from the
	 GIMPLE_OMP_SECTIONS_SWITCH statement, except for the one to
	 the exit of the sections region.  */
      basic_block bb = single_succ (gimple_bb (ws_stmt));
      t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
      ws_args = VEC_alloc (tree, gc, 1);
      VEC_quick_push (tree, ws_args, t);
      return ws_args;
    }

  gcc_unreachable ();
}
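
/* For illustration (a sketch, not produced verbatim): for

       #pragma omp parallel for schedule (dynamic, 4)
       for (i = 0; i < n; i++) ...

   the vector built above holds the loop bounds, step and chunk size
   converted to long, i.e. roughly { (long) 0, (long) n, (long) 1,
   (long) 4 }, ready to be appended to the combined library call;
   for a sections region it holds just the section count.  */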


/* Discover whether REGION is a combined parallel+workshare region.  */

static void
determine_parallel_type (struct omp_region *region)
{
  basic_block par_entry_bb, par_exit_bb;
  basic_block ws_entry_bb, ws_exit_bb;

  if (region == NULL || region->inner == NULL
      || region->exit == NULL || region->inner->exit == NULL
      || region->inner->cont == NULL)
    return;

  /* We only support parallel+for and parallel+sections.  */
  if (region->type != GIMPLE_OMP_PARALLEL
      || (region->inner->type != GIMPLE_OMP_FOR
	  && region->inner->type != GIMPLE_OMP_SECTIONS))
    return;

  /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
     WS_EXIT_BB -> PAR_EXIT_BB.  */
  par_entry_bb = region->entry;
  par_exit_bb = region->exit;
  ws_entry_bb = region->inner->entry;
  ws_exit_bb = region->inner->exit;

  if (single_succ (par_entry_bb) == ws_entry_bb
      && single_succ (ws_exit_bb) == par_exit_bb
      && workshare_safe_to_combine_p (ws_entry_bb)
      && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
	  || (last_and_only_stmt (ws_entry_bb)
	      && last_and_only_stmt (par_exit_bb))))
    {
      gimple ws_stmt = last_stmt (ws_entry_bb);

      if (region->inner->type == GIMPLE_OMP_FOR)
	{
	  /* If this is a combined parallel loop, we need to determine
	     whether or not to use the combined library calls.  There
	     are two cases where we do not apply the transformation:
	     static loops and any kind of ordered loop.  In the first
	     case, we already open code the loop so there is no need
	     to do anything else.  In the latter case, the combined
	     parallel loop call would still need extra synchronization
	     to implement ordered semantics, so there would not be any
	     gain in using the combined call.  */
	  tree clauses = gimple_omp_for_clauses (ws_stmt);
	  tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
	  if (c == NULL
	      || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
	      || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
	    {
	      region->is_combined_parallel = false;
	      region->inner->is_combined_parallel = false;
	      return;
	    }
	}

      region->is_combined_parallel = true;
      region->inner->is_combined_parallel = true;
      region->ws_args = get_ws_args_for (ws_stmt);
    }
}


/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Return true if DECL is a reference type.  */

static inline bool
is_reference (tree decl)
{
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}

/* Lookup variables in the decl or field splay trees.  The "maybe" form
   allows the variable not to have been entered; otherwise we assert
   that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map,
			 (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return n ? (tree) n->value : NULL_TREE;
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (!TREE_READONLY (decl) && shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		return true;
	    }
	}

      /* For tasks avoid using copy-in/out, unless they are readonly
	 (in which case just copy-in is used).  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (!TREE_READONLY (decl) && is_task_ctx (shared_ctx))
	{
	  tree outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
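
/* Illustrative note (not part of the pass logic): for code like

       int a[100]; int i = 0;
       #pragma omp parallel shared (a, i)

   the array A is passed to the child by pointer (aggregate type),
   while the scalar I, as long as its address is never taken, can use
   copy-in/copy-out through a field of .omp_data_s.  */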

/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;

  return copy;
}

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL);
  if (by_ref)
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if (ctx->outer)
    x = lookup_decl (var, ctx->outer);
  else if (is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else
    gcc_unreachable ();

  if (is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  tree field = lookup_sfield (var, ctx);
  return build3 (COMPONENT_REF, TREE_TYPE (field),
		 ctx->sender_decl, field, NULL);
}
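
/* For illustration (a sketch): for a shared int I passed by value,
   the sender side stores ".omp_data_o.i = i" while the receiver side
   reads ".omp_data_i->i"; when I is passed by reference, an extra
   indirection "*(.omp_data_i->i)" is built by build_receiver_ref.  */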

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;

  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));

  type = TREE_TYPE (var);
  if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      DECL_ALIGN (field) = DECL_ALIGN (var);
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    DECL_ALIGN (field) = TYPE_ALIGN (type);

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  DECL_ALIGN (sfield) = DECL_ALIGN (field);
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (var),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, (splay_tree_key) var,
		       (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
		       (splay_tree_value) sfield);
}
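
/* Note on MASK above (inferred from the uses in scan_sharing_clauses):
   bit 0 requests a field in CTX->RECORD_TYPE (the record the task or
   parallel body receives), bit 1 a field in CTX->SRECORD_TYPE (the
   record the encountering thread sends); 3 requests both.  */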

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}


/* Return the parallel region associated with STMT.  */

/* Debugging dumps for parallel regions.  */
void dump_omp_region (FILE *, struct omp_region *, int);
void debug_omp_region (struct omp_region *);
void debug_all_omp_regions (void);

/* Dump the parallel region tree rooted at REGION.  */

void
dump_omp_region (FILE *file, struct omp_region *region, int indent)
{
  fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
	   gimple_code_name[region->type]);

  if (region->inner)
    dump_omp_region (file, region->inner, indent + 4);

  if (region->cont)
    {
      fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
	       region->cont->index);
    }

  if (region->exit)
    fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
	     region->exit->index);
  else
    fprintf (file, "%*s[no exit marker]\n", indent, "");

  if (region->next)
    dump_omp_region (file, region->next, indent);
}
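
/* For a parallel region enclosing a loop, the dump above looks roughly
   like (assumed output, block numbers made up):

       bb 2: GIMPLE_OMP_PARALLEL
	   bb 3: GIMPLE_OMP_FOR
	   bb 5: GIMPLE_OMP_CONTINUE
	   bb 6: GIMPLE_OMP_RETURN
       bb 7: GIMPLE_OMP_RETURN  */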

DEBUG_FUNCTION void
debug_omp_region (struct omp_region *region)
{
  dump_omp_region (stderr, region, 0);
}

DEBUG_FUNCTION void
debug_all_omp_regions (void)
{
  dump_omp_region (stderr, root_omp_region, 0);
}


/* Create a new parallel region starting at STMT inside region PARENT.  */

struct omp_region *
new_omp_region (basic_block bb, enum gimple_code type,
		struct omp_region *parent)
{
  struct omp_region *region = XCNEW (struct omp_region);

  region->outer = parent;
  region->entry = bb;
  region->type = type;

  if (parent)
    {
      /* This is a nested region.  Add it to the list of inner
	 regions in PARENT.  */
      region->next = parent->inner;
      parent->inner = region;
    }
  else
    {
      /* This is a toplevel region.  Add it to the list of toplevel
	 regions in ROOT_OMP_REGION.  */
      region->next = root_omp_region;
      root_omp_region = region;
    }

  return region;
}

/* Release the memory associated with the region tree rooted at REGION.  */

static void
free_omp_region_1 (struct omp_region *region)
{
  struct omp_region *i, *n;

  for (i = region->inner; i ; i = n)
    {
      n = i->next;
      free_omp_region_1 (i);
    }

  free (region);
}

/* Release the memory for the entire omp region tree.  */

void
free_omp_regions (void)
{
  struct omp_region *r, *n;
  for (r = root_omp_region; r ; r = n)
    {
      n = r->next;
      free_omp_region_1 (r);
    }
  root_omp_region = NULL;
}


/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_get_node (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = pointer_map_create ();

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gimple task_stmt)
{
  struct function *child_cfun;
  tree child_fn, old_fn;
  gimple_seq seq, new_seq;
  gimple bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* Inform the callgraph about the new function.  */
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties
    = cfun->curr_properties;

  old_fn = current_function_decl;
  push_cfun (child_cfun);
  current_function_decl = child_fn;
  bind = gimplify_body (&DECL_SAVED_TREE (child_fn), child_fn, false);
  seq = gimple_seq_alloc ();
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();
  current_function_decl = old_fn;

  cgraph_add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  pointer_map_destroy (ctx->cb.decl_map);

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (ctx->stmt);

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  by_ref = use_pointer_for_field (decl, ctx);
	  if (! TREE_READONLY (decl)
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || is_reference (decl))
	    {
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (scan_array_reductions)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  scan_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	  scan_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	}
      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	scan_omp (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
}
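
/* Illustrative summary (not exhaustive): for
   "#pragma omp parallel shared (a) firstprivate (b) private (c)",
   the first pass above gives A and B fields in .omp_data_s (A by
   pointer if, e.g., it is addressable) plus local replacements in the
   child context, while C only gets a local replacement.  */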

/* Create a new name for omp child function.  Returns an identifier.  */

static GTY(()) unsigned int tmp_ompfn_id_num;

static tree
create_omp_child_function_name (bool task_copy)
{
  return (clone_function_name (current_function_decl,
			       task_copy ? "_omp_cpyfn" : "_omp_fn"));
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt),
		     FUNCTION_DECL, name, type);

  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NAMELESS (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  pop_cfun ();
}
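
/* The decl built above corresponds roughly to (sketch; names follow
   create_omp_child_function_name, e.g. for a function "foo"):

       static void foo._omp_fn.0 (void *.omp_data_i);

   and, for a task copy function, the two-pointer variant

       static void foo._omp_cpyfn.1 (void *.omp_data_o,
				     void *.omp_data_i);  */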


/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gimple stmt = gsi_stmt (*gsi);

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && find_omp_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  ctx = new_omp_context (stmt, outer_ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
}

/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  /* Ignore task directives with empty bodies.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt)))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  ctx = new_omp_context (stmt, outer_ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
                         TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
  else
    {
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
        if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
            || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
          {
            *q = *p;
            *p = TREE_CHAIN (*p);
            TREE_CHAIN (*q) = NULL_TREE;
            q = &TREE_CHAIN (*q);
          }
        else
          p = &DECL_CHAIN (*p);
      *p = vla_fields;
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
        layout_type (ctx->srecord_type);
      t = fold_convert_loc (loc, long_integer_type_node,
                            TYPE_SIZE_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node,
                         TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
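
/* For illustration (a sketch, with invented names): given

	void f (int n)
	{
	  char buf[n];				-- variable length
	  int i;
	#pragma omp task firstprivate (i, buf)
	  ...
	}

   the field for BUF has no constant TYPE_SIZE_UNIT, so the loop above
   moves it behind the field for I.  Keeping the constant-sized fields
   at fixed offsets up front means the record's leading layout is known
   at compile time, while the VLA data trails at variable offsets.  */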


/* Scan an OpenMP loop directive.  */

static void
scan_omp_for (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;

  ctx = new_omp_context (stmt, outer_ctx);

  scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);

  scan_omp (gimple_omp_for_pre_body (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body (stmt), ctx);
}

/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}


/* Check OpenMP nesting restrictions.  */
static void
check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_CALL:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_FOR:
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_ORDERED:
          case GIMPLE_OMP_MASTER:
          case GIMPLE_OMP_TASK:
            if (is_gimple_call (stmt))
              {
                warning (0, "barrier region may not be closely nested inside "
                            "of work-sharing, critical, ordered, master or "
                            "explicit task region");
                return;
              }
            warning (0, "work-sharing region may not be closely nested inside "
                        "of work-sharing, critical, ordered, master or explicit "
                        "task region");
            return;
          case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_FOR:
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_TASK:
            warning (0, "master region may not be closely nested inside "
                        "of work-sharing or explicit task region");
            return;
          case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_ORDERED:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_CRITICAL:
          case GIMPLE_OMP_TASK:
            warning (0, "ordered region may not be closely nested inside "
                        "of critical or explicit task region");
            return;
          case GIMPLE_OMP_FOR:
            if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
                                 OMP_CLAUSE_ORDERED) == NULL)
              warning (0, "ordered region must be closely nested inside "
                          "a loop region with an ordered clause");
            return;
          case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_CRITICAL:
      for (; ctx != NULL; ctx = ctx->outer)
        if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
            && (gimple_omp_critical_name (stmt)
                == gimple_omp_critical_name (ctx->stmt)))
          {
            warning (0, "critical region may not be nested inside a critical "
                        "region with the same name");
            return;
          }
      break;
    default:
      break;
    }
}
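
/* An informal example of what this catches (illustrative only):

	#pragma omp critical
	  {
	#pragma omp ordered			-- warning: ordered region may
	    x++;				-- not be closely nested inside
	  }					-- of critical ...

   Each loop above walks outward through the enclosing contexts and
   stops at the innermost parallel, because the restrictions are
   stated in terms of closely nested regions.  */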


/* Helper function for scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OpenMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
        *tp = remap_decl (t, &ctx->cb);
      break;

    default:
      if (ctx && TYPE_P (t))
        *tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
        {
          *walk_subtrees = 1;
          if (ctx)
            {
              tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
              if (tem != TREE_TYPE (t))
                {
                  if (TREE_CODE (t) == INTEGER_CST)
                    *tp = build_int_cst_wide (tem,
                                              TREE_INT_CST_LOW (t),
                                              TREE_INT_CST_HIGH (t));
                  else
                    TREE_TYPE (t) = tem;
                }
            }
        }
      break;
    }

  return NULL_TREE;
}


/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OpenMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                 struct walk_stmt_info *wi)
{
  gimple stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the OpenMP nesting restrictions.  */
  if (ctx != NULL)
    {
      if (is_gimple_omp (stmt))
        check_omp_nesting_restrictions (stmt, ctx);
      else if (is_gimple_call (stmt))
        {
          tree fndecl = gimple_call_fndecl (stmt);
          if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
            check_omp_nesting_restrictions (stmt, ctx);
        }
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      scan_omp_for (stmt, ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (stmt, ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (stmt, ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
        tree var;

        *handled_ops_p = false;
        if (ctx)
          for (var = gimple_bind_vars (stmt); var ; var = DECL_CHAIN (var))
            insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}


/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OpenMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq body, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq (body, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}

/* Re-gimplification and code generation routines.  */

/* Build a call to GOMP_barrier.  */

static tree
build_omp_barrier (void)
{
  return build_call_expr (built_in_decls[BUILT_IN_GOMP_BARRIER], 0);
}

/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}


/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs shown for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	    iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;


   ** This is a problem.  The symbol iD.1562 cannot be referenced
      inside the body of the outer parallel region.  But since we are
      emitting this copy operation while expanding the inner parallel
      directive, we need to access the CTX structure of the outer
      parallel directive to get the correct mapping:

	.omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}


/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}


/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  location_t loc = OMP_CLAUSE_LOCATION (clause);
  switch (OMP_CLAUSE_REDUCTION_CODE (clause))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          REAL_VALUE_TYPE max, min;
          if (HONOR_INFINITIES (TYPE_MODE (type)))
            {
              real_inf (&max);
              real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
            }
          else
            real_maxval (&min, 1, TYPE_MODE (type));
          return build_real (type, min);
        }
      else
        {
          gcc_assert (INTEGRAL_TYPE_P (type));
          return TYPE_MIN_VALUE (type);
        }

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          REAL_VALUE_TYPE max;
          if (HONOR_INFINITIES (TYPE_MODE (type)))
            real_inf (&max);
          else
            real_maxval (&max, 0, TYPE_MODE (type));
          return build_real (type, max);
        }
      else
        {
          gcc_assert (INTEGRAL_TYPE_P (type));
          return TYPE_MAX_VALUE (type);
        }

    default:
      gcc_unreachable ();
    }
}
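
/* A worked example of the mapping above (an informative restatement
   of the code, nothing more): for

	#pragma omp parallel for reduction (+:sum) reduction (max:m)

   each thread's private SUM starts at 0, the identity of +, while
   each private M starts at the smallest value of its type: -inf when
   the floating-point type honors infinities, otherwise the most
   negative finite value, or TYPE_MIN_VALUE for integers.  */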

/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
                         omp_context *ctx)
{
  gimple_stmt_iterator diter;
  tree c, dtor, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  int pass;

  *dlist = gimple_seq_alloc ();
  diter = gsi_start (*dlist);
  copyin_seq = NULL;

  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  */
  for (pass = 0; pass < 2; ++pass)
    {
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
        {
          enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
          tree var, new_var;
          bool by_ref;
          location_t clause_loc = OMP_CLAUSE_LOCATION (c);

          switch (c_kind)
            {
            case OMP_CLAUSE_PRIVATE:
              if (OMP_CLAUSE_PRIVATE_DEBUG (c))
                continue;
              break;
            case OMP_CLAUSE_SHARED:
              if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
                {
                  gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
                  continue;
                }
              /* FALLTHRU */
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_COPYIN:
            case OMP_CLAUSE_REDUCTION:
              break;
            case OMP_CLAUSE_LASTPRIVATE:
              if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
                {
                  lastprivate_firstprivate = true;
                  if (pass != 0)
                    continue;
                }
              break;
            default:
              continue;
            }

          new_var = var = OMP_CLAUSE_DECL (c);
          if (c_kind != OMP_CLAUSE_COPYIN)
            new_var = lookup_decl (var, ctx);

          if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
            {
              if (pass != 0)
                continue;
            }
          else if (is_variable_sized (var))
            {
              /* For variable sized types, we need to allocate the
                 actual storage here.  Call alloca and store the
                 result in the pointer decl that we created elsewhere.  */
              if (pass == 0)
                continue;

              if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
                {
                  gimple stmt;
                  tree tmp;

                  ptr = DECL_VALUE_EXPR (new_var);
                  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
                  ptr = TREE_OPERAND (ptr, 0);
                  gcc_assert (DECL_P (ptr));
                  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

                  /* void *tmp = __builtin_alloca */
                  stmt
                    = gimple_build_call (built_in_decls[BUILT_IN_ALLOCA], 1, x);
                  tmp = create_tmp_var_raw (ptr_type_node, NULL);
                  gimple_add_tmp_var (tmp);
                  gimple_call_set_lhs (stmt, tmp);

                  gimple_seq_add_stmt (ilist, stmt);

                  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
                  gimplify_assign (ptr, x, ilist);
                }
            }
          else if (is_reference (var))
            {
              /* For references that are being privatized for Fortran,
                 allocate new backing storage for the new pointer
                 variable.  This allows us to avoid changing all the
                 code that expects a pointer to something that expects
                 a direct variable.  Note that this doesn't apply to
                 C++, since reference types are disallowed in data
                 sharing clauses there, except for NRV optimized
                 return values.  */
              if (pass == 0)
                continue;

              x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
              if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
                {
                  x = build_receiver_ref (var, false, ctx);
                  x = build_fold_addr_expr_loc (clause_loc, x);
                }
              else if (TREE_CONSTANT (x))
                {
                  const char *name = NULL;
                  if (DECL_NAME (var))
                    name = IDENTIFIER_POINTER (DECL_NAME (new_var));

                  x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
                                          name);
                  gimple_add_tmp_var (x);
                  TREE_ADDRESSABLE (x) = 1;
                  x = build_fold_addr_expr_loc (clause_loc, x);
                }
              else
                {
                  x = build_call_expr_loc (clause_loc,
                                           built_in_decls[BUILT_IN_ALLOCA], 1, x);
                }

              x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
              gimplify_assign (new_var, x, ilist);

              new_var = build_simple_mem_ref_loc (clause_loc, new_var);
            }
          else if (c_kind == OMP_CLAUSE_REDUCTION
                   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            {
              if (pass == 0)
                continue;
            }
          else if (pass != 0)
            continue;

          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
              /* Shared global vars are just accessed directly.  */
              if (is_global_var (new_var))
                break;
              /* Set up the DECL_VALUE_EXPR for shared variables now.  This
                 needs to be delayed until after fixup_child_record_type so
                 that we get the correct type during the dereference.  */
              by_ref = use_pointer_for_field (var, ctx);
              x = build_receiver_ref (var, by_ref, ctx);
              SET_DECL_VALUE_EXPR (new_var, x);
              DECL_HAS_VALUE_EXPR_P (new_var) = 1;

              /* ??? If VAR is not passed by reference, and the variable
                 hasn't been initialized yet, then we'll get a warning for
                 the store into the omp_data_s structure.  Ideally, we'd be
                 able to notice this and not store anything at all, but
                 we're generating code too early.  Suppress the warning.  */
              if (!by_ref)
                TREE_NO_WARNING (var) = 1;
              break;

            case OMP_CLAUSE_LASTPRIVATE:
              if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
                break;
              /* FALLTHRU */

            case OMP_CLAUSE_PRIVATE:
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
                x = build_outer_var_ref (var, ctx);
              else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
                {
                  if (is_task_ctx (ctx))
                    x = build_receiver_ref (var, false, ctx);
                  else
                    x = build_outer_var_ref (var, ctx);
                }
              else
                x = NULL;
              x = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
              if (x)
                gimplify_and_add (x, ilist);
              /* FALLTHRU */

            do_dtor:
              x = lang_hooks.decls.omp_clause_dtor (c, new_var);
              if (x)
                {
                  gimple_seq tseq = NULL;

                  dtor = x;
                  gimplify_stmt (&dtor, &tseq);
                  gsi_insert_seq_before (&diter, tseq, GSI_SAME_STMT);
                }
              break;

            case OMP_CLAUSE_FIRSTPRIVATE:
              if (is_task_ctx (ctx))
                {
                  if (is_reference (var) || is_variable_sized (var))
                    goto do_dtor;
                  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
                                                                          ctx))
                           || use_pointer_for_field (var, NULL))
                    {
                      x = build_receiver_ref (var, false, ctx);
                      SET_DECL_VALUE_EXPR (new_var, x);
                      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
                      goto do_dtor;
                    }
                }
              x = build_outer_var_ref (var, ctx);
              x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
              gimplify_and_add (x, ilist);
              goto do_dtor;
              break;

            case OMP_CLAUSE_COPYIN:
              by_ref = use_pointer_for_field (var, NULL);
              x = build_receiver_ref (var, by_ref, ctx);
              x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
              append_to_statement_list (x, &copyin_seq);
              copyin_by_ref |= by_ref;
              break;

            case OMP_CLAUSE_REDUCTION:
              if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
                {
                  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
                  x = build_outer_var_ref (var, ctx);

                  if (is_reference (var))
                    x = build_fold_addr_expr_loc (clause_loc, x);
                  SET_DECL_VALUE_EXPR (placeholder, x);
                  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                  lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
                  gimple_seq_add_seq (ilist,
                                      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
                  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
                  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
                }
              else
                {
                  x = omp_reduction_init (c, TREE_TYPE (new_var));
                  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
                  gimplify_assign (new_var, x, ilist);
                }
              break;

            default:
              gcc_unreachable ();
            }
        }
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
      x = build2 (NE_EXPR, boolean_type_node, x,
                  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  */
  if (copyin_by_ref || lastprivate_firstprivate)
    gimplify_and_add (build_omp_barrier (), ilist);
}
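
/* A sketch of the receiver-side code this builds (the variable names
   are invented): for #pragma omp parallel firstprivate (a) private (b),
   ILIST in the child function starts out roughly as

	a.1 = .omp_data_i->a;			-- copy-in for A
	-- default-construct B, if B's type has a constructor

   while the matching destructor calls, if any, accumulate in DLIST to
   run at region exit.  */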


/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
                           omp_context *ctx)
{
  tree x, c, label = NULL;
  bool par_clauses = false;

  /* Early exit if there are no lastprivate clauses.  */
  clauses = find_omp_clause (clauses, OMP_CLAUSE_LASTPRIVATE);
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
         with its parallel.  In that case, look for the clauses on the
         parallel statement itself.  */
      if (is_parallel_ctx (ctx))
        return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
        return;

      clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
                                 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
        return;
      par_clauses = true;
    }

  if (predicate)
    {
      gimple stmt;
      tree label_true, arm1, arm2;

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      arm1 = TREE_OPERAND (predicate, 0);
      arm2 = TREE_OPERAND (predicate, 1);
      gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
      gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
      stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
                                label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
        {
          var = OMP_CLAUSE_DECL (c);
          new_var = lookup_decl (var, ctx);

          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            {
              lower_omp (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
              gimple_seq_add_seq (stmt_list,
                                  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
            }
          OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;

          x = build_outer_var_ref (var, ctx);
          if (is_reference (var))
            new_var = build_simple_mem_ref_loc (clause_loc, new_var);
          x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
          gimplify_and_add (x, stmt_list);
        }
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
        {
          /* If this was a workshare clause, see if it had been combined
             with its parallel.  In that case, continue looking for the
             clauses also on the parallel statement itself.  */
          if (is_parallel_ctx (ctx))
            break;

          ctx = ctx->outer;
          if (ctx == NULL || !is_parallel_ctx (ctx))
            break;

          c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
                               OMP_CLAUSE_LASTPRIVATE);
          par_clauses = true;
        }
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}
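
/* Illustration of the PREDICATE guard built above (labels and
   temporaries invented): for a lowered omp for with lastprivate (x),
   the copy-out becomes roughly

	if (.iter == .last_iter) goto L_true; else goto L_done;
	L_true:
	  x = x.3;				-- private copy to original
	L_done:

   so only the thread that ran the sequentially last iteration writes
   back to the original list item.  */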


/* Generate code to implement the REDUCTION clauses.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple stmt;
  tree x, c;
  int count = 0;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
        if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            /* Never use OMP_ATOMIC for array reductions.  */
            count = -1;
            break;
          }
        count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
        continue;

      var = OMP_CLAUSE_DECL (c);
      new_var = lookup_decl (var, ctx);
      if (is_reference (var))
        new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
         identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
        code = PLUS_EXPR;

      if (count == 1)
        {
          tree addr = build_fold_addr_expr_loc (clause_loc, ref);

          addr = save_expr (addr);
          ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
          x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
          x = build2 (OMP_ATOMIC, void_type_node, addr, x);
          gimplify_and_add (x, stmt_seqp);
          return;
        }

      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
        {
          tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

          if (is_reference (var))
            ref = build_fold_addr_expr_loc (clause_loc, ref);
          SET_DECL_VALUE_EXPR (placeholder, ref);
          DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
          lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
          OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
          OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
        }
      else
        {
          x = build2 (code, TREE_TYPE (ref), ref, new_var);
          ref = build_outer_var_ref (var, ctx);
          gimplify_assign (ref, x, &sub_seq);
        }
    }

  stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ATOMIC_START], 0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ATOMIC_END], 0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
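
/* Informal sketch of the two strategies chosen above: with exactly
   one scalar reduction clause, each thread merges its partial result
   with a single atomic update, roughly

	#pragma omp atomic
	  sum = sum + sum.5;			-- sum.5 invented name

   while with several clauses, or an array reduction, all the merge
   statements are instead wrapped between one GOMP_atomic_start () and
   GOMP_atomic_end () pair.  */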


/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
                           omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
        continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
        {
          x = build_fold_addr_expr_loc (clause_loc, new_var);
          x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
        }
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
        {
          ref = fold_convert_loc (clause_loc,
                                  build_pointer_type (TREE_TYPE (new_var)),
                                  ref);
          ref = build_fold_indirect_ref_loc (clause_loc, ref);
        }
      if (is_reference (var))
        {
          ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
          ref = build_simple_mem_ref_loc (clause_loc, ref);
          new_var = build_simple_mem_ref_loc (clause_loc, new_var);
        }
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
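
/* For illustration: in

	#pragma omp single copyprivate (x)

   the thread that executes the single region stores X (or its
   address, when passed by reference) into the broadcast record via
   SLIST, and every other thread copies it back out in RLIST, so all
   private copies of X leave the construct with the same value.  */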


/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
                    omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            break;
          continue;
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_LASTPRIVATE:
        case OMP_CLAUSE_REDUCTION:
          break;
        default:
          continue;
        }

      val = OMP_CLAUSE_DECL (c);
      var = lookup_decl_in_outer_ctx (val, ctx);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
          && is_global_var (var))
        continue;
      if (is_variable_sized (val))
        continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
          do_in = true;
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          if (by_ref || is_reference (val))
            {
              if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
                continue;
              do_in = true;
            }
          else
            {
              do_out = true;
              if (lang_hooks.decls.omp_private_outer_ref (val))
                do_in = true;
            }
          break;

        case OMP_CLAUSE_REDUCTION:
          do_in = true;
          do_out = !(by_ref || is_reference (val));
          break;

        default:
          gcc_unreachable ();
        }

      if (do_in)
        {
          ref = build_sender_ref (val, ctx);
          x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
          gimplify_assign (ref, x, ilist);
          if (is_task_ctx (ctx))
            DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
        }

      if (do_out)
        {
          ref = build_sender_ref (val, ctx);
          gimplify_assign (var, ref, olist);
        }
    }
}

/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
        continue;

      /* If CTX is a nested parallel directive, find the immediately
         enclosing parallel or workshare construct that contains a
         mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      if (use_pointer_for_field (ovar, ctx))
        {
          x = build_sender_ref (ovar, ctx);
          var = build_fold_addr_expr (var);
          gimplify_assign (x, var, ilist);
        }
      else
        {
          x = build_sender_ref (ovar, ctx);
          gimplify_assign (x, var, ilist);

          if (!TREE_READONLY (var)
              /* We don't need to receive a new reference to a result
                 or parm decl.  In fact we may not store to it as we will
                 invalidate any pending RSO and generate wrong gimple
                 during inlining.  */
              && !((TREE_CODE (var) == RESULT_DECL
                    || TREE_CODE (var) == PARM_DECL)
                   && DECL_BY_REFERENCE (var)))
            {
              x = build_sender_ref (ovar, ctx);
              gimplify_assign (var, x, olist);
            }
        }
    }
}


/* A convenience function to build an empty GIMPLE_COND with just the
   condition.  */

static gimple
gimple_build_cond_empty (tree cond)
{
  enum tree_code pred_code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
  return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
}


/* Build the function calls to GOMP_parallel_start etc to actually
   generate the parallel operation.  REGION is the parallel region
   being expanded.  BB is the block where the code should be
   inserted.  WS_ARGS will be set if this is a call to a combined
   parallel+workshare construct; it contains the list of additional
   arguments needed by the workshare construct.  */

static void
expand_parallel_call (struct omp_region *region, basic_block bb,
                      gimple entry_stmt, VEC(tree,gc) *ws_args)
{
  tree t, t1, t2, val, cond, c, clauses;
  gimple_stmt_iterator gsi;
  gimple stmt;
  int start_ix;
  location_t clause_loc;
  VEC(tree,gc) *args;

  clauses = gimple_omp_parallel_clauses (entry_stmt);

  /* Determine what flavor of GOMP_parallel_start we will be
     emitting.  */
  start_ix = BUILT_IN_GOMP_PARALLEL_START;
  if (is_combined_parallel (region))
    {
      switch (region->inner->type)
        {
        case GIMPLE_OMP_FOR:
          gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
          start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
                     + (region->inner->sched_kind
                        == OMP_CLAUSE_SCHEDULE_RUNTIME
                        ? 3 : region->inner->sched_kind);
          break;
        case GIMPLE_OMP_SECTIONS:
          start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
          break;
        default:
          gcc_unreachable ();
        }
    }

  /* By default, the value of NUM_THREADS is zero (selected at run time)
     and there is no conditional.  */
  cond = NULL_TREE;
  val = build_int_cst (unsigned_type_node, 0);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = OMP_CLAUSE_IF_EXPR (c);

  c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
  if (c)
    {
      val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
      clause_loc = OMP_CLAUSE_LOCATION (c);
    }
  else
    clause_loc = gimple_location (entry_stmt);

  /* Ensure 'val' is of the correct type.  */
  val = fold_convert_loc (clause_loc, unsigned_type_node, val);

  /* If we found the clause 'if (cond)', build either
     (cond != 0) or (cond ? val : 1u).  */
  if (cond)
    {
      gimple_stmt_iterator gsi;

      cond = gimple_boolify (cond);

      if (integer_zerop (val))
        val = fold_build2_loc (clause_loc,
                               EQ_EXPR, unsigned_type_node, cond,
                               build_int_cst (TREE_TYPE (cond), 0));
      else
        {
          basic_block cond_bb, then_bb, else_bb;
          edge e, e_then, e_else;
          tree tmp_then, tmp_else, tmp_join, tmp_var;

          tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
          if (gimple_in_ssa_p (cfun))
            {
              tmp_then = make_ssa_name (tmp_var, NULL);
              tmp_else = make_ssa_name (tmp_var, NULL);
              tmp_join = make_ssa_name (tmp_var, NULL);
            }
          else
            {
              tmp_then = tmp_var;
              tmp_else = tmp_var;
              tmp_join = tmp_var;
            }

          e = split_block (bb, NULL);
          cond_bb = e->src;
          bb = e->dest;
          remove_edge (e);

          then_bb = create_empty_bb (cond_bb);
          else_bb = create_empty_bb (then_bb);
          set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
          set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);

          stmt = gimple_build_cond_empty (cond);
          gsi = gsi_start_bb (cond_bb);
          gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

          gsi = gsi_start_bb (then_bb);
          stmt = gimple_build_assign (tmp_then, val);
          gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

          gsi = gsi_start_bb (else_bb);
          stmt = gimple_build_assign
                   (tmp_else, build_int_cst (unsigned_type_node, 1));
          gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

          make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
          make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
          e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
          e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);

          if (gimple_in_ssa_p (cfun))
            {
              gimple phi = create_phi_node (tmp_join, bb);
              SSA_NAME_DEF_STMT (tmp_join) = phi;
              add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
              add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
            }

          val = tmp_join;
        }

      gsi = gsi_start_bb (bb);
      val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
                                      false, GSI_CONTINUE_LINKING);
    }

  gsi = gsi_last_bb (bb);
  t = gimple_omp_parallel_data_arg (entry_stmt);
  if (t == NULL)
    t1 = null_pointer_node;
  else
    t1 = build_fold_addr_expr (t);
  t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));

  args = VEC_alloc (tree, gc, 3 + VEC_length (tree, ws_args));
  VEC_quick_push (tree, args, t2);
  VEC_quick_push (tree, args, t1);
  VEC_quick_push (tree, args, val);
  VEC_splice (tree, args, ws_args);

  t = build_call_expr_loc_vec (UNKNOWN_LOCATION,
                               built_in_decls[start_ix], args);

  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                            false, GSI_CONTINUE_LINKING);

  t = gimple_omp_parallel_data_arg (entry_stmt);
  if (t == NULL)
    t = null_pointer_node;
  else
    t = build_fold_addr_expr (t);
  t = build_call_expr_loc (gimple_location (entry_stmt),
                           gimple_omp_parallel_child_fn (entry_stmt), 1, t);
  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                            false, GSI_CONTINUE_LINKING);

  t = build_call_expr_loc (gimple_location (entry_stmt),
                           built_in_decls[BUILT_IN_GOMP_PARALLEL_END], 0);
  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                            false, GSI_CONTINUE_LINKING);
}
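
/* The net effect of the expansion above, as pseudo-C with an invented
   record name:

	GOMP_parallel_start (child_fn, &.omp_data_o, nthreads);
	child_fn (&.omp_data_o);		-- the master joins its team
	GOMP_parallel_end ();

   NTHREADS is 0 to let the runtime choose, and for a combined
   parallel+workshare region one of the fused
   GOMP_parallel_loop_*_start or GOMP_parallel_sections_start entry
   points is called instead, with WS_ARGS appended to the arguments.  */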


/* Build the function call to GOMP_task to actually generate the task
   operation.  BB is the block where the code should be inserted.  */

static void
expand_task_call (basic_block bb, gimple entry_stmt)
{
  tree t, t1, t2, t3, flags, cond, c, clauses;
  gimple_stmt_iterator gsi;
  location_t loc = gimple_location (entry_stmt);

  clauses = gimple_omp_task_clauses (entry_stmt);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
  else
    cond = boolean_true_node;

  c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
  flags = build_int_cst (unsigned_type_node, (c ? 1 : 0));

  gsi = gsi_last_bb (bb);
  t = gimple_omp_task_data_arg (entry_stmt);
  if (t == NULL)
    t2 = null_pointer_node;
  else
    t2 = build_fold_addr_expr_loc (loc, t);
  t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
  t = gimple_omp_task_copy_fn (entry_stmt);
  if (t == NULL)
    t3 = null_pointer_node;
  else
    t3 = build_fold_addr_expr_loc (loc, t);

  t = build_call_expr (built_in_decls[BUILT_IN_GOMP_TASK], 7, t1, t2, t3,
                       gimple_omp_task_arg_size (entry_stmt),
                       gimple_omp_task_arg_align (entry_stmt), cond, flags);

  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                            false, GSI_CONTINUE_LINKING);
}
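
/* Sketch of the emitted call, following the order of the arguments
   built above:

	GOMP_task (child_fn, &.omp_data_o, copy_fn,
		   arg_size, arg_align, if_cond, flags);

   COPY_FN is null unless firstprivate data must be constructed into
   the task's own record, IF_COND defaults to true when no if clause
   is present, and bit 0 of FLAGS marks an untied task.  */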


/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = built_in_decls[BUILT_IN_TRAP];

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
                        GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}

/* Chain all the DECLs in V by their TREE_CHAIN fields.  */

static tree
vec2chain (VEC(tree,gc) *v)
{
  tree chain = NULL_TREE, t;
  unsigned ix;

  FOR_EACH_VEC_ELT_REVERSE (tree, v, ix, t)
    {
      DECL_CHAIN (t) = chain;
      chain = t;
    }

  return chain;
}


/* Remove barriers in REGION->EXIT's block.  Note that this is only
   valid for GIMPLE_OMP_PARALLEL regions.  Since the end of a parallel region
   is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
   left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
   removed.  */

static void
remove_exit_barrier (struct omp_region *region)
{
  gimple_stmt_iterator gsi;
  basic_block exit_bb;
  edge_iterator ei;
  edge e;
  gimple stmt;
  int any_addressable_vars = -1;

  exit_bb = region->exit;

  /* If the parallel region doesn't return, we don't have REGION->EXIT
     block at all.  */
  if (! exit_bb)
    return;

  /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN.  The
     workshare's GIMPLE_OMP_RETURN will be in a preceding block.  The kinds of
     statements that can appear in between are extremely limited -- no
     memory operations at all.  Here, we allow nothing at all, so the
     only thing we allow to precede this GIMPLE_OMP_RETURN is a label.  */
  gsi = gsi_last_bb (exit_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
  gsi_prev (&gsi);
  if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
    return;

  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
        continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_OMP_RETURN
          && !gimple_omp_return_nowait_p (stmt))
        {
          /* OpenMP 3.0 tasks unfortunately prevent this optimization
             in many cases.  If there could be tasks queued, the barrier
             might be needed to let the tasks run before some local
             variable of the parallel that the task uses as shared
             runs out of scope.  The task can be spawned either
             from within the current function (this would be easy to check)
             or from some function it calls and gets passed an address
             of such a variable.  */
          if (any_addressable_vars < 0)
            {
              gimple parallel_stmt = last_stmt (region->entry);
              tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
              tree local_decls, block, decl;
              unsigned ix;

              any_addressable_vars = 0;
              FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (child_fun), ix, decl)
                if (TREE_ADDRESSABLE (decl))
                  {
                    any_addressable_vars = 1;
                    break;
                  }
              for (block = gimple_block (stmt);
                   !any_addressable_vars
                   && block
                   && TREE_CODE (block) == BLOCK;
                   block = BLOCK_SUPERCONTEXT (block))
                {
                  for (local_decls = BLOCK_VARS (block);
                       local_decls;
                       local_decls = DECL_CHAIN (local_decls))
                    if (TREE_ADDRESSABLE (local_decls))
                      {
                        any_addressable_vars = 1;
                        break;
                      }
                  if (block == gimple_block (parallel_stmt))
                    break;
                }
            }
          if (!any_addressable_vars)
            gimple_omp_return_set_nowait (stmt);
        }
    }
}

static void
remove_exit_barriers (struct omp_region *region)
{
  if (region->type == GIMPLE_OMP_PARALLEL)
    remove_exit_barrier (region);

  if (region->inner)
    {
      region = region->inner;
      remove_exit_barriers (region);
      while (region->next)
        {
          region = region->next;
          remove_exit_barriers (region);
        }
    }
}
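
/* Example of the redundancy being removed (informative only):

	#pragma omp parallel
	  {
	#pragma omp for
	    for (i = 0; i < n; i++) ...		-- implicit barrier here
	  }					-- parallel's own barrier

   Two back-to-back barriers are pointless, so the workshare's
   GIMPLE_OMP_RETURN is marked nowait -- unless queued tasks might
   still be using addressable locals of the parallel, in which case
   the inner barrier has to stay.  */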

/* Optimize omp_get_thread_num () and omp_get_num_threads ()
   calls.  These can't be declared as const functions, but
   within one parallel body they are constant, so they can be
   transformed there into __builtin_omp_get_{thread_num,num_threads} ()
   which are declared const.  Similarly for task body, except
   that in untied task omp_get_thread_num () can change at any task
   scheduling point.  */

static void
optimize_omp_library_calls (gimple entry_stmt)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  tree thr_num_id
    = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM]);
  tree num_thr_id
    = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS]);
  bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
                      && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
                                          OMP_CLAUSE_UNTIED) != NULL);

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple call = gsi_stmt (gsi);
        tree decl;

        if (is_gimple_call (call)
            && (decl = gimple_call_fndecl (call))
            && DECL_EXTERNAL (decl)
            && TREE_PUBLIC (decl)
            && DECL_INITIAL (decl) == NULL)
          {
            tree built_in;

            if (DECL_NAME (decl) == thr_num_id)
              {
                /* In #pragma omp task untied omp_get_thread_num () can change
                   during the execution of the task region.  */
                if (untied_task)
                  continue;
                built_in = built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM];
              }
            else if (DECL_NAME (decl) == num_thr_id)
              built_in = built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS];
            else
              continue;

            if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
                || gimple_call_num_args (call) != 0)
              continue;

            if (flag_exceptions && !TREE_NOTHROW (decl))
              continue;

            if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
                || !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
                                        TREE_TYPE (TREE_TYPE (built_in))))
              continue;

            gimple_call_set_fndecl (call, built_in);
          }
      }
}
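
/* Illustration: within one parallel body,

	a = omp_get_thread_num ();
	...
	b = omp_get_thread_num ();

   both calls must yield the same value, so once they are redirected
   to the const __builtin_omp_get_thread_num (), later optimizers are
   free to combine them into a single call.  */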

/* Expand the OpenMP parallel or task directive starting at REGION.  */

static void
expand_omp_taskreg (struct omp_region *region)
{
  basic_block entry_bb, exit_bb, new_bb;
  struct function *child_cfun;
  tree child_fn, block, t;
  tree save_current;
  gimple_stmt_iterator gsi;
  gimple entry_stmt, stmt;
  edge e;
  VEC(tree,gc) *ws_args;

  entry_stmt = last_stmt (region->entry);
  child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  /* If this function has already been instrumented, make sure
     the child function isn't instrumented again.  */
  child_cfun->after_tree_profile = cfun->after_tree_profile;

  entry_bb = region->entry;
  exit_bb = region->exit;

  if (is_combined_parallel (region))
    ws_args = region->ws_args;
  else
    ws_args = NULL;

  if (child_cfun->cfg)
    {
      /* Due to inlining, it may happen that we have already outlined
	 the region, in which case all we need to do is make the
	 sub-graph unreachable and emit the parallel call.  */
      edge entry_succ_e, exit_succ_e;
      gimple_stmt_iterator gsi;

      entry_succ_e = single_succ_edge (entry_bb);

      gsi = gsi_last_bb (entry_bb);
      gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
		  || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
      gsi_remove (&gsi, true);

      new_bb = entry_bb;
      if (exit_bb)
	{
	  exit_succ_e = single_succ_edge (exit_bb);
	  make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
	}
      remove_edge_and_dominated_blocks (entry_succ_e);
    }
  else
    {
      unsigned srcidx, dstidx, num;

      /* If the parallel region needs data sent from the parent
	 function, then the very first statement (except possible
	 tree profile counter updates) of the parallel body
	 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
	 &.OMP_DATA_O is passed as an argument to the child function,
	 we need to replace it with the argument as seen by the child
	 function.

	 In most cases, this will end up being the identity assignment
	 .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
	 a function call that has been inlined, the original PARM_DECL
	 .OMP_DATA_I may have been converted into a different local
	 variable, in which case we need to keep the assignment.  */
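      /* Editorial sketch (not from the original source; names illustrative):
	 for

	     #pragma omp parallel shared(a)

	 the parent stores a into the .OMP_DATA_O record and, with the
	 classic libgomp interface, calls
	 GOMP_parallel_start (child_fn, &.OMP_DATA_O, 0); the child body
	 begins with .OMP_DATA_I = &.OMP_DATA_O, and the code below rewrites
	 that right-hand side to the child's own incoming data parameter.  */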
      if (gimple_omp_taskreg_data_arg (entry_stmt))
	{
	  basic_block entry_succ_bb = single_succ (entry_bb);
	  gimple_stmt_iterator gsi;
	  tree arg, narg;
	  gimple parcopy_stmt = NULL;

	  for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
	    {
	      gimple stmt;

	      gcc_assert (!gsi_end_p (gsi));
	      stmt = gsi_stmt (gsi);
	      if (gimple_code (stmt) != GIMPLE_ASSIGN)
		continue;

	      if (gimple_num_ops (stmt) == 2)
		{
		  tree arg = gimple_assign_rhs1 (stmt);

		  /* We ignore the subcode because we're
		     effectively doing a STRIP_NOPS.  */

		  if (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_OPERAND (arg, 0)
			 == gimple_omp_taskreg_data_arg (entry_stmt))
		    {
		      parcopy_stmt = stmt;
		      break;
		    }
		}
	    }
	  gcc_assert (parcopy_stmt != NULL);
	  arg = DECL_ARGUMENTS (child_fn);

	  if (!gimple_in_ssa_p (cfun))
	    {
	      if (gimple_assign_lhs (parcopy_stmt) == arg)
		gsi_remove (&gsi, true);
	      else
		{
		  /* ?? Is setting the subcode really necessary ??  */
		  gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
		  gimple_assign_set_rhs1 (parcopy_stmt, arg);
		}
	    }
	  else
	    {
	      /* If we are in ssa form, we must load the value from the default
		 definition of the argument.  That should not be defined now,
		 since the argument is not used uninitialized.  */
	      gcc_assert (gimple_default_def (cfun, arg) == NULL);
	      narg = make_ssa_name (arg, gimple_build_nop ());
	      set_default_def (arg, narg);
	      /* ?? Is setting the subcode really necessary ??  */
	      gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
	      gimple_assign_set_rhs1 (parcopy_stmt, narg);
	      update_stmt (parcopy_stmt);
	    }
	}

      /* Declare local variables needed in CHILD_CFUN.  */
      block = DECL_INITIAL (child_fn);
      BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
      /* The gimplifier could record temporaries in parallel/task block
	 rather than in containing function's local_decls chain,
	 which would mean cgraph missed finalizing them.  Do it now.  */
      for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == VAR_DECL
	    && TREE_STATIC (t)
	    && !DECL_EXTERNAL (t))
	  varpool_finalize_decl (t);
      DECL_SAVED_TREE (child_fn) = NULL;
      gimple_set_body (child_fn, bb_seq (single_succ (entry_bb)));
      TREE_USED (block) = 1;

      /* Reset DECL_CONTEXT on function arguments.  */
      for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
	 so that it can be moved to the child function.  */
      gsi = gsi_last_bb (entry_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
			   || gimple_code (stmt) == GIMPLE_OMP_TASK));
      gsi_remove (&gsi, true);
      e = split_block (entry_bb, stmt);
      entry_bb = e->dest;
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR.  */
      if (exit_bb)
	{
	  gsi = gsi_last_bb (exit_bb);
	  gcc_assert (!gsi_end_p (gsi)
		      && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
	  stmt = gimple_build_return (NULL);
	  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (&gsi, true);
	}

      /* Move the parallel region into CHILD_CFUN.  */

      if (gimple_in_ssa_p (cfun))
	{
	  push_cfun (child_cfun);
	  init_tree_ssa (child_cfun);
	  init_ssa_operands ();
	  cfun->gimple_df->in_ssa_p = true;
	  pop_cfun ();
	  block = NULL_TREE;
	}
      else
	block = gimple_block (entry_stmt);

      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
      if (exit_bb)
	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;

      /* Remove non-local VAR_DECLs from child_cfun->local_decls list.  */
      num = VEC_length (tree, child_cfun->local_decls);
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
	{
	  t = VEC_index (tree, child_cfun->local_decls, srcidx);
	  if (DECL_CONTEXT (t) == cfun->decl)
	    continue;
	  if (srcidx != dstidx)
	    VEC_replace (tree, child_cfun->local_decls, dstidx, t);
	  dstidx++;
	}
      if (dstidx != num)
	VEC_truncate (tree, child_cfun->local_decls, dstidx);

      /* Inform the callgraph about the new function.  */
      DECL_STRUCT_FUNCTION (child_fn)->curr_properties
	= cfun->curr_properties;
      cgraph_add_new_function (child_fn, true);

      /* Fix the callgraph edges for child_cfun.  Those for cfun will be
	 fixed in a following pass.  */
      push_cfun (child_cfun);
      save_current = current_function_decl;
      current_function_decl = child_fn;
      if (optimize)
	optimize_omp_library_calls (entry_stmt);
      rebuild_cgraph_edges ();

      /* Some EH regions might become dead, see PR34608.  If
	 pass_cleanup_cfg isn't the first pass to happen with the
	 new child, these dead EH edges might cause problems.
	 Clean them up now.  */
      if (flag_exceptions)
	{
	  basic_block bb;
	  bool changed = false;

	  FOR_EACH_BB (bb)
	    changed |= gimple_purge_dead_eh_edges (bb);
	  if (changed)
	    cleanup_tree_cfg ();
	}
      if (gimple_in_ssa_p (cfun))
	update_ssa (TODO_update_ssa);
      current_function_decl = save_current;
      pop_cfun ();
    }

  /* Emit a library call to launch the children threads.  */
  if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
    expand_parallel_call (region, new_bb, entry_stmt, ws_args);
  else
    expand_task_call (new_bb, entry_stmt);
  update_ssa (TODO_update_ssa_only_virtuals);
}


/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with any schedule.  Given parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
	if (more) goto L0; else goto L3;
    L0:
	V = istart0;
	iend = iend0;
    L1:
	BODY;
	V += STEP;
	if (V cond iend) goto L1; else goto L2;
    L2:
	if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
    L3:

    If this is a combined omp parallel loop, instead of the call to
    GOMP_loop_foo_start, we call GOMP_loop_foo_next.

    For collapsed loops, given parameters:
      collapse(3)
      for (V1 = N11; V1 cond1 N12; V1 += STEP1)
	for (V2 = N21; V2 cond2 N22; V2 += STEP2)
	  for (V3 = N31; V3 cond3 N32; V3 += STEP3)
	    BODY;

    we generate pseudocode

	if (cond3 is <)
	  adj = STEP3 - 1;
	else
	  adj = STEP3 + 1;
	count3 = (adj + N32 - N31) / STEP3;
	if (cond2 is <)
	  adj = STEP2 - 1;
	else
	  adj = STEP2 + 1;
	count2 = (adj + N22 - N21) / STEP2;
	if (cond1 is <)
	  adj = STEP1 - 1;
	else
	  adj = STEP1 + 1;
	count1 = (adj + N12 - N11) / STEP1;
	count = count1 * count2 * count3;
	more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
	if (more) goto L0; else goto L3;
    L0:
	V = istart0;
	T = V;
	V3 = N31 + (T % count3) * STEP3;
	T = T / count3;
	V2 = N21 + (T % count2) * STEP2;
	T = T / count2;
	V1 = N11 + T * STEP1;
	iend = iend0;
    L1:
	BODY;
	V += 1;
	if (V < iend) goto L10; else goto L2;
    L10:
	V3 += STEP3;
	if (V3 cond3 N32) goto L1; else goto L11;
    L11:
	V3 = N31;
	V2 += STEP2;
	if (V2 cond2 N22) goto L1; else goto L12;
    L12:
	V2 = N21;
	V1 += STEP1;
	goto L1;
    L2:
	if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
    L3:

 */
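/* Editorial worked example (not from the original source): with
   collapse(3) and count3 = 4, count2 = 3, count1 = 2, count is 24, and
   logical iteration T = 17 decodes as V3-index = 17 % 4 = 1, then
   T = 17 / 4 = 4, V2-index = 4 % 3 = 1, T = 4 / 3 = 1, V1-index = 1;
   i.e. the flattened index recovers each original loop's position in
   row-major order (1*12 + 1*4 + 1 = 17), so the runtime only ever hands
   out [istart0, iend0) ranges of the single collapsed space.  */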

static void
expand_omp_for_generic (struct omp_region *region,
			struct omp_for_data *fd,
			enum built_in_function start_fn,
			enum built_in_function next_fn)
{
  tree type, istart0, iend0, iend;
  tree t, vmain, vback, bias = NULL_TREE;
  basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
  basic_block l2_bb = NULL, l3_bb = NULL;
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool in_combined_parallel = is_combined_parallel (region);
  bool broken_loop = region->cont == NULL;
  edge e, ne;
  tree *counts = NULL;
  int i;

  gcc_assert (!broken_loop || !in_combined_parallel);
  gcc_assert (fd->iter_type == long_integer_type_node
	      || !in_combined_parallel);

  type = TREE_TYPE (fd->loop.v);
  istart0 = create_tmp_var (fd->iter_type, ".istart0");
  iend0 = create_tmp_var (fd->iter_type, ".iend0");
  TREE_ADDRESSABLE (istart0) = 1;
  TREE_ADDRESSABLE (iend0) = 1;
  if (gimple_in_ssa_p (cfun))
    {
      add_referenced_var (istart0);
      add_referenced_var (iend0);
    }

  /* See if we need to bias by LLONG_MIN.  */
  if (fd->iter_type == long_long_unsigned_type_node
      && TREE_CODE (type) == INTEGER_TYPE
      && !TYPE_UNSIGNED (type))
    {
      tree n1, n2;

      if (fd->loop.cond_code == LT_EXPR)
	{
	  n1 = fd->loop.n1;
	  n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
	}
      else
	{
	  n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
	  n2 = fd->loop.n1;
	}
      if (TREE_CODE (n1) != INTEGER_CST
	  || TREE_CODE (n2) != INTEGER_CST
	  || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
	bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
    }
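  /* Editorial note (not from the original source): the bias makes a signed
     iteration space usable with the unsigned GOMP_loop_ull_* interface.
     E.g. for a signed long long loop over [-10, 10), adding
     bias = LLONG_MIN maps the bounds to unsigned values whose ordering
     matches the signed ordering, and the bias is subtracted again from
     each istart0/iend0 pair the runtime hands back.  */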

  entry_bb = region->entry;
  cont_bb = region->cont;
  collapse_bb = NULL;
  gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
  gcc_assert (broken_loop
	      || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
  l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
  l1_bb = single_succ (l0_bb);
  if (!broken_loop)
    {
      l2_bb = create_empty_bb (cont_bb);
      gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
      gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
    }
  else
    l2_bb = NULL;
  l3_bb = BRANCH_EDGE (entry_bb)->dest;
  exit_bb = region->exit;

  gsi = gsi_last_bb (entry_bb);

  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
  if (fd->collapse > 1)
    {
      /* collapsed loops need work for expansion in SSA form.  */
      gcc_assert (!gimple_in_ssa_p (cfun));
      counts = (tree *) alloca (fd->collapse * sizeof (tree));
      for (i = 0; i < fd->collapse; i++)
	{
	  tree itype = TREE_TYPE (fd->loops[i].v);

	  if (POINTER_TYPE_P (itype))
	    itype = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
	  t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
				     ? -1 : 1));
	  t = fold_build2 (PLUS_EXPR, itype,
			   fold_convert (itype, fd->loops[i].step), t);
	  t = fold_build2 (PLUS_EXPR, itype, t,
			   fold_convert (itype, fd->loops[i].n2));
	  t = fold_build2 (MINUS_EXPR, itype, t,
			   fold_convert (itype, fd->loops[i].n1));
	  if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
	    t = fold_build2 (TRUNC_DIV_EXPR, itype,
			     fold_build1 (NEGATE_EXPR, itype, t),
			     fold_build1 (NEGATE_EXPR, itype,
					  fold_convert (itype,
							fd->loops[i].step)));
	  else
	    t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
			     fold_convert (itype, fd->loops[i].step));
	  t = fold_convert (type, t);
	  if (TREE_CODE (t) == INTEGER_CST)
	    counts[i] = t;
	  else
	    {
	      counts[i] = create_tmp_var (type, ".count");
	      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
					    true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (counts[i], t);
	      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
	    }
	  if (SSA_VAR_P (fd->loop.n2))
	    {
	      if (i == 0)
		t = counts[0];
	      else
		{
		  t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
		  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
						true, GSI_SAME_STMT);
		}
	      stmt = gimple_build_assign (fd->loop.n2, t);
	      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
	    }
	}
    }
  if (in_combined_parallel)
    {
      /* In a combined parallel loop, emit a call to
	 GOMP_loop_foo_next.  */
      t = build_call_expr (built_in_decls[next_fn], 2,
			   build_fold_addr_expr (istart0),
			   build_fold_addr_expr (iend0));
    }
  else
    {
      tree t0, t1, t2, t3, t4;
      /* If this is not a combined parallel loop, emit a call to
	 GOMP_loop_foo_start in ENTRY_BB.  */
      t4 = build_fold_addr_expr (iend0);
      t3 = build_fold_addr_expr (istart0);
      t2 = fold_convert (fd->iter_type, fd->loop.step);
      if (POINTER_TYPE_P (type)
	  && TYPE_PRECISION (type) != TYPE_PRECISION (fd->iter_type))
	{
	  /* Avoid casting pointers to integer of a different size.  */
	  tree itype
	    = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
	  t1 = fold_convert (fd->iter_type, fold_convert (itype, fd->loop.n2));
	  t0 = fold_convert (fd->iter_type, fold_convert (itype, fd->loop.n1));
	}
      else
	{
	  t1 = fold_convert (fd->iter_type, fd->loop.n2);
	  t0 = fold_convert (fd->iter_type, fd->loop.n1);
	}
      if (bias)
	{
	  t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
	  t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
	}
      if (fd->iter_type == long_integer_type_node)
	{
	  if (fd->chunk_size)
	    {
	      t = fold_convert (fd->iter_type, fd->chunk_size);
	      t = build_call_expr (built_in_decls[start_fn], 6,
				   t0, t1, t2, t, t3, t4);
	    }
	  else
	    t = build_call_expr (built_in_decls[start_fn], 5,
				 t0, t1, t2, t3, t4);
	}
      else
	{
	  tree t5;
	  tree c_bool_type;

	  /* The GOMP_loop_ull_*start functions have an additional boolean
	     argument, true for < loops and false for > loops.
	     In Fortran, the C bool type can be different from
	     boolean_type_node.  */
	  c_bool_type = TREE_TYPE (TREE_TYPE (built_in_decls[start_fn]));
	  t5 = build_int_cst (c_bool_type,
			      fd->loop.cond_code == LT_EXPR ? 1 : 0);
	  if (fd->chunk_size)
	    {
	      t = fold_convert (fd->iter_type, fd->chunk_size);
	      t = build_call_expr (built_in_decls[start_fn], 7,
				   t5, t0, t1, t2, t, t3, t4);
	    }
	  else
	    t = build_call_expr (built_in_decls[start_fn], 6,
				 t5, t0, t1, t2, t3, t4);
	}
    }
  if (TREE_TYPE (t) != boolean_type_node)
    t = fold_build2 (NE_EXPR, boolean_type_node,
		     t, build_int_cst (TREE_TYPE (t), 0));
  t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				true, GSI_SAME_STMT);
  gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_FOR statement.  */
  gsi_remove (&gsi, true);

  /* Iteration setup for sequential loop goes in L0_BB.  */
  gsi = gsi_start_bb (l0_bb);
  t = istart0;
  if (bias)
    t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
  if (POINTER_TYPE_P (type))
    t = fold_convert (lang_hooks.types.type_for_size (TYPE_PRECISION (type),
						      0), t);
  t = fold_convert (type, t);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
				false, GSI_CONTINUE_LINKING);
  stmt = gimple_build_assign (fd->loop.v, t);
  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

  t = iend0;
  if (bias)
    t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
  if (POINTER_TYPE_P (type))
    t = fold_convert (lang_hooks.types.type_for_size (TYPE_PRECISION (type),
						      0), t);
  t = fold_convert (type, t);
  iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				   false, GSI_CONTINUE_LINKING);
  if (fd->collapse > 1)
    {
      tree tem = create_tmp_var (type, ".tem");

      stmt = gimple_build_assign (tem, fd->loop.v);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
      for (i = fd->collapse - 1; i >= 0; i--)
	{
	  tree vtype = TREE_TYPE (fd->loops[i].v), itype;
	  itype = vtype;
	  if (POINTER_TYPE_P (vtype))
	    itype = lang_hooks.types.type_for_size (TYPE_PRECISION (vtype), 0);
	  t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
	  t = fold_convert (itype, t);
	  t = fold_build2 (MULT_EXPR, itype, t,
			   fold_convert (itype, fd->loops[i].step));
	  if (POINTER_TYPE_P (vtype))
	    t = fold_build_pointer_plus (fd->loops[i].n1, t);
	  else
	    t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
	  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
					false, GSI_CONTINUE_LINKING);
	  stmt = gimple_build_assign (fd->loops[i].v, t);
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
	  if (i != 0)
	    {
	      t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
	      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
					    false, GSI_CONTINUE_LINKING);
	      stmt = gimple_build_assign (tem, t);
	      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
	    }
	}
    }

  if (!broken_loop)
    {
      /* Code to control the increment and predicate for the sequential
	 loop goes in the CONT_BB.  */
      gsi = gsi_last_bb (cont_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
      vmain = gimple_omp_continue_control_use (stmt);
      vback = gimple_omp_continue_control_def (stmt);

      if (POINTER_TYPE_P (type))
	t = fold_build_pointer_plus (vmain, fd->loop.step);
      else
	t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
				    true, GSI_SAME_STMT);
      stmt = gimple_build_assign (vback, t);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

      t = build2 (fd->loop.cond_code, boolean_type_node, vback, iend);
      stmt = gimple_build_cond_empty (t);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

      /* Remove GIMPLE_OMP_CONTINUE.  */
      gsi_remove (&gsi, true);

      if (fd->collapse > 1)
	{
	  basic_block last_bb, bb;

	  last_bb = cont_bb;
	  for (i = fd->collapse - 1; i >= 0; i--)
	    {
	      tree vtype = TREE_TYPE (fd->loops[i].v);

	      bb = create_empty_bb (last_bb);
	      gsi = gsi_start_bb (bb);

	      if (i < fd->collapse - 1)
		{
		  e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
		  e->probability = REG_BR_PROB_BASE / 8;

		  t = fd->loops[i + 1].n1;
		  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
						false, GSI_CONTINUE_LINKING);
		  stmt = gimple_build_assign (fd->loops[i + 1].v, t);
		  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
		}
	      else
		collapse_bb = bb;

	      set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);

	      if (POINTER_TYPE_P (vtype))
		t = fold_build_pointer_plus (fd->loops[i].v, fd->loops[i].step);
	      else
		t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v,
				 fd->loops[i].step);
	      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
					    false, GSI_CONTINUE_LINKING);
	      stmt = gimple_build_assign (fd->loops[i].v, t);
	      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

	      if (i > 0)
		{
		  t = fd->loops[i].n2;
		  t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
						false, GSI_CONTINUE_LINKING);
		  t = fold_build2 (fd->loops[i].cond_code, boolean_type_node,
				   fd->loops[i].v, t);
		  stmt = gimple_build_cond_empty (t);
		  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
		  e = make_edge (bb, l1_bb, EDGE_TRUE_VALUE);
		  e->probability = REG_BR_PROB_BASE * 7 / 8;
		}
	      else
		make_edge (bb, l1_bb, EDGE_FALLTHRU);
	      last_bb = bb;
	    }
	}

      /* Emit code to get the next parallel iteration in L2_BB.  */
      gsi = gsi_start_bb (l2_bb);

      t = build_call_expr (built_in_decls[next_fn], 2,
			   build_fold_addr_expr (istart0),
			   build_fold_addr_expr (iend0));
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    false, GSI_CONTINUE_LINKING);
      if (TREE_TYPE (t) != boolean_type_node)
	t = fold_build2 (NE_EXPR, boolean_type_node,
			 t, build_int_cst (TREE_TYPE (t), 0));
      stmt = gimple_build_cond_empty (t);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
    }

  /* Add the loop cleanup function.  */
  gsi = gsi_last_bb (exit_bb);
  if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
    t = built_in_decls[BUILT_IN_GOMP_LOOP_END_NOWAIT];
  else
    t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
  stmt = gimple_build_call (t, 0);
  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
  gsi_remove (&gsi, true);

  /* Connect the new blocks.  */
  find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;

  if (!broken_loop)
    {
      gimple_seq phis;

      e = find_edge (cont_bb, l3_bb);
      ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);

      phis = phi_nodes (l3_bb);
      for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
		   PHI_ARG_DEF_FROM_EDGE (phi, e));
	}
      remove_edge (e);

      make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
      if (fd->collapse > 1)
	{
	  e = find_edge (cont_bb, l1_bb);
	  remove_edge (e);
	  e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
	}
      else
	{
	  e = find_edge (cont_bb, l1_bb);
	  e->flags = EDGE_TRUE_VALUE;
	}
      e->probability = REG_BR_PROB_BASE * 7 / 8;
      find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
      make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);

      set_immediate_dominator (CDI_DOMINATORS, l2_bb,
			       recompute_dominator (CDI_DOMINATORS, l2_bb));
      set_immediate_dominator (CDI_DOMINATORS, l3_bb,
			       recompute_dominator (CDI_DOMINATORS, l3_bb));
      set_immediate_dominator (CDI_DOMINATORS, l0_bb,
			       recompute_dominator (CDI_DOMINATORS, l0_bb));
      set_immediate_dominator (CDI_DOMINATORS, l1_bb,
			       recompute_dominator (CDI_DOMINATORS, l1_bb));
    }
}



/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and no specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	if ((__typeof (V)) -1 > 0 && cond is >)
	  n = -(adj + N2 - N1) / -STEP;
	else
	  n = (adj + N2 - N1) / STEP;
	q = n / nthreads;
	tt = n % nthreads;
	if (threadid < tt) goto L3; else goto L4;
    L3:
	tt = 0;
	q = q + 1;
    L4:
	s0 = q * threadid + tt;
	e0 = s0 + q;
	V = s0 * STEP + N1;
	if (s0 >= e0) goto L2; else goto L0;
    L0:
	e = e0 * STEP + N1;
    L1:
	BODY;
	V += STEP;
	if (V cond e) goto L1;
    L2:
*/
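/* Editorial worked example (not from the original source): for n = 10
   iterations on nthreads = 4, q = 2 and tt = 2, so threads 0 and 1 each
   take q + 1 = 3 iterations ([0,3) and [3,6)) while threads 2 and 3 take
   2 each ([6,8) and [8,10)); every iteration is covered exactly once
   with no runtime calls beyond omp_get_num_threads/omp_get_thread_num.  */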

static void
expand_omp_for_static_nochunk (struct omp_region *region,
			       struct omp_for_data *fd)
{
  tree n, q, s0, e0, e, t, tt, nthreads, threadid;
  tree type, itype, vmain, vback;
  basic_block entry_bb, second_bb, third_bb, exit_bb, seq_start_bb;
  basic_block body_bb, cont_bb;
  basic_block fin_bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge ep;

  itype = type = TREE_TYPE (fd->loop.v);
  if (POINTER_TYPE_P (type))
    itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);

  entry_bb = region->entry;
  cont_bb = region->cont;
  gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
  gcc_assert (BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
  seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
  body_bb = single_succ (seq_start_bb);
  gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
  gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
  fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
  exit_bb = region->exit;

  /* Iteration space partitioning goes in ENTRY_BB.  */
  gsi = gsi_last_bb (entry_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);

  t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
  t = fold_convert (itype, t);
  nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
  t = fold_convert (itype, t);
  threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  fd->loop.n1
    = force_gimple_operand_gsi (&gsi, fold_convert (type, fd->loop.n1),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->loop.n2
    = force_gimple_operand_gsi (&gsi, fold_convert (itype, fd->loop.n2),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->loop.step
    = force_gimple_operand_gsi (&gsi, fold_convert (itype, fd->loop.step),
				true, NULL_TREE, true, GSI_SAME_STMT);

  t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
  t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
  t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
  if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
    t = fold_build2 (TRUNC_DIV_EXPR, itype,
		     fold_build1 (NEGATE_EXPR, itype, t),
		     fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
  else
    t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
  t = fold_convert (itype, t);
  n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  q = create_tmp_var (itype, "q");
  t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
  gsi_insert_before (&gsi, gimple_build_assign (q, t), GSI_SAME_STMT);

  tt = create_tmp_var (itype, "tt");
  t = fold_build2 (TRUNC_MOD_EXPR, itype, n, nthreads);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
  gsi_insert_before (&gsi, gimple_build_assign (tt, t), GSI_SAME_STMT);

  t = build2 (LT_EXPR, boolean_type_node, threadid, tt);
  stmt = gimple_build_cond_empty (t);
  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

  second_bb = split_block (entry_bb, stmt)->dest;
  gsi = gsi_last_bb (second_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);

  gsi_insert_before (&gsi, gimple_build_assign (tt, build_int_cst (itype, 0)),
		     GSI_SAME_STMT);
  stmt = gimple_build_assign_with_ops (PLUS_EXPR, q, q,
				       build_int_cst (itype, 1));
  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

  third_bb = split_block (second_bb, stmt)->dest;
  gsi = gsi_last_bb (third_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);

  t = build2 (MULT_EXPR, itype, q, threadid);
  t = build2 (PLUS_EXPR, itype, t, tt);
  s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = fold_build2 (PLUS_EXPR, itype, s0, q);
  e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = build2 (GE_EXPR, boolean_type_node, s0, e0);
  gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_FOR statement.  */
  gsi_remove (&gsi, true);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  gsi = gsi_start_bb (seq_start_bb);

  t = fold_convert (itype, s0);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (fd->loop.n1, t);
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
				false, GSI_CONTINUE_LINKING);
  stmt = gimple_build_assign (fd->loop.v, t);
  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

  t = fold_convert (itype, e0);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (fd->loop.n1, t);
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				false, GSI_CONTINUE_LINKING);

  /* The code controlling the sequential loop replaces the
     GIMPLE_OMP_CONTINUE.  */
  gsi = gsi_last_bb (cont_bb);
  stmt = gsi_stmt (gsi);
  gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
  vmain = gimple_omp_continue_control_use (stmt);
  vback = gimple_omp_continue_control_def (stmt);

  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (vmain, fd->loop.step);
  else
    t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
				true, GSI_SAME_STMT);
  stmt = gimple_build_assign (vback, t);
  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

  t = build2 (fd->loop.cond_code, boolean_type_node, vback, e);
  gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_CONTINUE statement.  */
  gsi_remove (&gsi, true);

  /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing.  */
  gsi = gsi_last_bb (exit_bb);
  if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
    force_gimple_operand_gsi (&gsi, build_omp_barrier (), false, NULL_TREE,
			      false, GSI_SAME_STMT);
  gsi_remove (&gsi, true);

  /* Connect all the blocks.  */
  ep = make_edge (entry_bb, third_bb, EDGE_FALSE_VALUE);
  ep->probability = REG_BR_PROB_BASE / 4 * 3;
  ep = find_edge (entry_bb, second_bb);
  ep->flags = EDGE_TRUE_VALUE;
  ep->probability = REG_BR_PROB_BASE / 4;
  find_edge (third_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
  find_edge (third_bb, fin_bb)->flags = EDGE_TRUE_VALUE;

  find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (cont_bb, fin_bb)->flags = EDGE_FALSE_VALUE;

  set_immediate_dominator (CDI_DOMINATORS, second_bb, entry_bb);
  set_immediate_dominator (CDI_DOMINATORS, third_bb, entry_bb);
  set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, third_bb);
  set_immediate_dominator (CDI_DOMINATORS, body_bb,
			   recompute_dominator (CDI_DOMINATORS, body_bb));
  set_immediate_dominator (CDI_DOMINATORS, fin_bb,
			   recompute_dominator (CDI_DOMINATORS, fin_bb));
}


/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and a specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	if ((__typeof (V)) -1 > 0 && cond is >)
	  n = -(adj + N2 - N1) / -STEP;
	else
	  n = (adj + N2 - N1) / STEP;
	trip = 0;
	V = threadid * CHUNK * STEP + N1;  -- this extra definition of V is
					      here so that V is defined
					      if the loop is not entered
    L0:
	s0 = (trip * nthreads + threadid) * CHUNK;
	e0 = min(s0 + CHUNK, n);
	if (s0 < n) goto L1; else goto L4;
    L1:
	V = s0 * STEP + N1;
	e = e0 * STEP + N1;
    L2:
	BODY;
	V += STEP;
	if (V cond e) goto L2; else goto L3;
    L3:
	trip += 1;
	goto L0;
    L4:
*/
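/* Editorial worked example (not from the original source): for n = 10,
   nthreads = 2 and CHUNK = 2, thread 0 computes s0 = 0, 4, 8 on trips
   0, 1, 2 while thread 1 computes s0 = 2, 6, 10; on trip 2 thread 1
   sees s0 >= n and exits, so the chunks [0,2) [2,4) [4,6) [6,8) [8,10)
   are dealt out round-robin without any libgomp bookkeeping calls.  */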

static void
expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
{
  tree n, s0, e0, e, t;
  tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
  tree type, itype, v_main, v_back, v_extra;
  basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
  basic_block trip_update_bb, cont_bb, fin_bb;
  gimple_stmt_iterator si;
  gimple stmt;
  edge se;

  itype = type = TREE_TYPE (fd->loop.v);
  if (POINTER_TYPE_P (type))
    itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);

  entry_bb = region->entry;
  se = split_block (entry_bb, last_stmt (entry_bb));
  entry_bb = se->src;
  iter_part_bb = se->dest;
  cont_bb = region->cont;
  gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
  gcc_assert (BRANCH_EDGE (iter_part_bb)->dest
	      == FALLTHRU_EDGE (cont_bb)->dest);
  seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
  body_bb = single_succ (seq_start_bb);
  gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
  gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
  fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
  trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
  exit_bb = region->exit;

  /* Trip and adjustment setup goes in ENTRY_BB.  */
  si = gsi_last_bb (entry_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);

  t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
  t = fold_convert (itype, t);
  nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
  t = fold_convert (itype, t);
  threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  fd->loop.n1
    = force_gimple_operand_gsi (&si, fold_convert (type, fd->loop.n1),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->loop.n2
    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->loop.n2),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->loop.step
    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->loop.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->chunk_size
    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
				true, NULL_TREE, true, GSI_SAME_STMT);

  t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
  t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
  t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
  if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
    t = fold_build2 (TRUNC_DIV_EXPR, itype,
		     fold_build1 (NEGATE_EXPR, itype, t),
		     fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
  else
    t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
  t = fold_convert (itype, t);
  n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				true, GSI_SAME_STMT);

  trip_var = create_tmp_var (itype, ".trip");
  if (gimple_in_ssa_p (cfun))
    {
      add_referenced_var (trip_var);
      trip_init = make_ssa_name (trip_var, NULL);
      trip_main = make_ssa_name (trip_var, NULL);
      trip_back = make_ssa_name (trip_var, NULL);
    }
  else
    {
      trip_init = trip_var;
      trip_main = trip_var;
      trip_back = trip_var;
    }

  stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);

  t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (fd->loop.n1, t);
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				      true, GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_FOR.  */
  gsi_remove (&si, true);

  /* Iteration space partitioning goes in ITER_PART_BB.  */
  si = gsi_last_bb (iter_part_bb);

  t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
  t = fold_build2 (PLUS_EXPR, itype, t, threadid);
  t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
  s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				 false, GSI_CONTINUE_LINKING);

  t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
  t = fold_build2 (MIN_EXPR, itype, t, n);
  e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				 false, GSI_CONTINUE_LINKING);

  t = build2 (LT_EXPR, boolean_type_node, s0, n);
  gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  si = gsi_start_bb (seq_start_bb);

  t = fold_convert (itype, s0);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (fd->loop.n1, t);
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  t = force_gimple_operand_gsi (&si, t, false, NULL_TREE,
				false, GSI_CONTINUE_LINKING);
  stmt = gimple_build_assign (fd->loop.v, t);
  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);

  t = fold_convert (itype, e0);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (fd->loop.n1, t);
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				false, GSI_CONTINUE_LINKING);

  /* The code controlling the sequential loop goes in CONT_BB,
     replacing the GIMPLE_OMP_CONTINUE.  */
  si = gsi_last_bb (cont_bb);
  stmt = gsi_stmt (si);
  gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
  v_main = gimple_omp_continue_control_use (stmt);
  v_back = gimple_omp_continue_control_def (stmt);

  if (POINTER_TYPE_P (type))
    t = fold_build_pointer_plus (v_main, fd->loop.step);
  else
    t = fold_build2 (PLUS_EXPR, type, v_main, fd->loop.step);
  stmt = gimple_build_assign (v_back, t);
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);

  t = build2 (fd->loop.cond_code, boolean_type_node, v_back, e);
  gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove GIMPLE_OMP_CONTINUE.  */
  gsi_remove (&si, true);

  /* Trip update code goes into TRIP_UPDATE_BB.  */
  si = gsi_start_bb (trip_update_bb);

  t = build_int_cst (itype, 1);
  t = build2 (PLUS_EXPR, itype, trip_main, t);
  stmt = gimple_build_assign (trip_back, t);
  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);

  /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing.  */
  si = gsi_last_bb (exit_bb);
  if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
    force_gimple_operand_gsi (&si, build_omp_barrier (), false, NULL_TREE,
			      false, GSI_SAME_STMT);
  gsi_remove (&si, true);

  /* Connect the new blocks.  */
  find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;

  find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (cont_bb, trip_update_bb)->flags = EDGE_FALSE_VALUE;

  redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);

  if (gimple_in_ssa_p (cfun))
    {
      gimple_stmt_iterator psi;
      gimple phi;
      edge re, ene;
      edge_var_map_vector head;
      edge_var_map *vm;
      size_t i;

      /* When we redirect the edge from trip_update_bb to iter_part_bb, we
	 remove arguments of the phi nodes in fin_bb.  We need to create
	 appropriate phi nodes in iter_part_bb instead.  */
      se = single_pred_edge (fin_bb);
      re = single_succ_edge (trip_update_bb);
      head = redirect_edge_var_map_vector (re);
      ene = single_succ_edge (entry_bb);

      psi = gsi_start_phis (fin_bb);
      for (i = 0; !gsi_end_p (psi) && VEC_iterate (edge_var_map, head, i, vm);
	   gsi_next (&psi), ++i)
	{
	  gimple nphi;
	  source_location locus;

	  phi = gsi_stmt (psi);
	  t = gimple_phi_result (phi);
	  gcc_assert (t == redirect_edge_var_map_result (vm));
	  nphi = create_phi_node (t, iter_part_bb);
	  SSA_NAME_DEF_STMT (t) = nphi;

	  t = PHI_ARG_DEF_FROM_EDGE (phi, se);
	  locus = gimple_phi_arg_location_from_edge (phi, se);

	  /* A special case -- fd->loop.v is not yet computed in
	     iter_part_bb, we need to use v_extra instead.  */
	  if (t == fd->loop.v)
	    t = v_extra;
	  add_phi_arg (nphi, t, ene, locus);
	  locus = redirect_edge_var_map_location (vm);
	  add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
	}
      gcc_assert (!gsi_end_p (psi) && i == VEC_length (edge_var_map, head));
      redirect_edge_var_map_clear (re);
      while (1)
	{
	  psi = gsi_start_phis (fin_bb);
	  if (gsi_end_p (psi))
	    break;
	  remove_phi_node (&psi, false);
	}

      /* Make phi node for trip.  */
      phi = create_phi_node (trip_main, iter_part_bb);
      SSA_NAME_DEF_STMT (trip_main) = phi;
      add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb),
		   UNKNOWN_LOCATION);
      add_phi_arg (phi, trip_init, single_succ_edge (entry_bb),
		   UNKNOWN_LOCATION);
    }

  set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
  set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
			   recompute_dominator (CDI_DOMINATORS, iter_part_bb));
  set_immediate_dominator (CDI_DOMINATORS, fin_bb,
			   recompute_dominator (CDI_DOMINATORS, fin_bb));
  set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
			   recompute_dominator (CDI_DOMINATORS, seq_start_bb));
  set_immediate_dominator (CDI_DOMINATORS, body_bb,
			   recompute_dominator (CDI_DOMINATORS, body_bb));
}


/* Expand the OpenMP loop defined by REGION.  */

static void
expand_omp_for (struct omp_region *region)
{
  struct omp_for_data fd;
  struct omp_for_data_loop *loops;

  loops
    = (struct omp_for_data_loop *)
      alloca (gimple_omp_for_collapse (last_stmt (region->entry))
	      * sizeof (struct omp_for_data_loop));
  extract_omp_for_data (last_stmt (region->entry), &fd, loops);
  region->sched_kind = fd.sched_kind;

  gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
  BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
  FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
  if (region->cont)
    {
      gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
      BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
      FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
    }

  if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
      && !fd.have_ordered
      && fd.collapse == 1
      && region->cont != NULL)
    {
      if (fd.chunk_size == NULL)
	expand_omp_for_static_nochunk (region, &fd);
      else
	expand_omp_for_static_chunk (region, &fd);
    }
  else
    {
      int fn_index, start_ix, next_ix;

      gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
      fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
		 ? 3 : fd.sched_kind;
      fn_index += fd.have_ordered * 4;
      start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
      next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
      if (fd.iter_type == long_long_unsigned_type_node)
	{
	  start_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_START
		      - BUILT_IN_GOMP_LOOP_STATIC_START;
	  next_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
		     - BUILT_IN_GOMP_LOOP_STATIC_NEXT;
	}
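      /* Editorial note (not from the original source): the index arithmetic
	 above picks the libgomp entry points by schedule, e.g.
	 schedule(dynamic) yields fn_index 1 and thus the
	 GOMP_loop_dynamic_start/GOMP_loop_dynamic_next pair, while an
	 ordered clause shifts the pair to the GOMP_loop_ordered_*
	 variants four slots later.  */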
      expand_omp_for_generic (region, &fd, (enum built_in_function) start_ix,
			      (enum built_in_function) next_ix);
    }

  update_ssa (TODO_update_ssa_only_virtuals);
}


/* Expand code for an OpenMP sections directive.  In pseudo code, we generate

	v = GOMP_sections_start (n);
    L0:
	switch (v)
	  {
	  case 0:
	    goto L2;
	  case 1:
	    section 1;
	    goto L1;
	  case 2:
	    ...
	  case n:
	    ...
	  default:
	    abort ();
	  }
    L1:
	v = GOMP_sections_next ();
	goto L0;
    L2:
	reduction;

    If this is a combined parallel sections, replace the call to
    GOMP_sections_start with a call to GOMP_sections_next.  */
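/* Editorial illustration (not from the original source): a directive with
   three section blocks becomes v = GOMP_sections_start (3) plus a switch
   whose cases 1..3 hold the section bodies; each thread keeps fetching a
   fresh v from GOMP_sections_next () until it receives 0 and jumps to the
   exit label L2.  */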

static void
expand_omp_sections (struct omp_region *region)
{
  tree t, u, vin = NULL, vmain, vnext, l2;
  VEC (tree,heap) *label_vec;
  unsigned len;
  basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
  gimple_stmt_iterator si, switch_si;
  gimple sections_stmt, stmt, cont;
  edge_iterator ei;
  edge e;
  struct omp_region *inner;
  unsigned i, casei;
  bool exit_reachable = region->cont != NULL;

  gcc_assert (exit_reachable == (region->exit != NULL));
  entry_bb = region->entry;
  l0_bb = single_succ (entry_bb);
  l1_bb = region->cont;
  l2_bb = region->exit;
  if (exit_reachable)
    {
      if (single_pred_p (l2_bb) && single_pred (l2_bb) == l0_bb)
	l2 = gimple_block_label (l2_bb);
      else
	{
	  /* This can happen if there are reductions.  */
	  len = EDGE_COUNT (l0_bb->succs);
	  gcc_assert (len > 0);
	  e = EDGE_SUCC (l0_bb, len - 1);
	  si = gsi_last_bb (e->dest);
	  l2 = NULL_TREE;
	  if (gsi_end_p (si)
	      || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
	    l2 = gimple_block_label (e->dest);
	  else
	    FOR_EACH_EDGE (e, ei, l0_bb->succs)
	      {
		si = gsi_last_bb (e->dest);
		if (gsi_end_p (si)
		    || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
		  {
		    l2 = gimple_block_label (e->dest);
		    break;
		  }
	      }
	}
      default_bb = create_empty_bb (l1_bb->prev_bb);
    }
  else
    {
      default_bb = create_empty_bb (l0_bb);
      l2 = gimple_block_label (default_bb);
    }

  /* We will build a switch() with enough cases for all the
     GIMPLE_OMP_SECTION regions, a '0' case to handle the end of more work
     and a default case to abort if something goes wrong.  */
  len = EDGE_COUNT (l0_bb->succs);

  /* Use VEC_quick_push on label_vec throughout, since we know the size
     in advance.  */
  label_vec = VEC_alloc (tree, heap, len);

  /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
     GIMPLE_OMP_SECTIONS statement.  */
  si = gsi_last_bb (entry_bb);
  sections_stmt = gsi_stmt (si);
  gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
  vin = gimple_omp_sections_control (sections_stmt);
  if (!is_combined_parallel (region))
    {
      /* If we are not inside a combined parallel+sections region,
	 call GOMP_sections_start.  */
      t = build_int_cst (unsigned_type_node,
			 exit_reachable ? len - 1 : len);
      u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
      stmt = gimple_build_call (u, 1, t);
    }
  else
    {
      /* Otherwise, call GOMP_sections_next.  */
      u = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
      stmt = gimple_build_call (u, 0);
    }
  gimple_call_set_lhs (stmt, vin);
  gsi_insert_after (&si, stmt, GSI_SAME_STMT);
  gsi_remove (&si, true);

  /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
     L0_BB.  */
  switch_si = gsi_last_bb (l0_bb);
  gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
  if (exit_reachable)
    {
      cont = last_stmt (l1_bb);
      gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
      vmain = gimple_omp_continue_control_use (cont);
      vnext = gimple_omp_continue_control_def (cont);
    }
  else
    {
      vmain = vin;
      vnext = NULL_TREE;
    }

  i = 0;
  if (exit_reachable)
    {
      t = build_case_label (build_int_cst (unsigned_type_node, 0), NULL, l2);
      VEC_quick_push (tree, label_vec, t);
      i++;
    }

  /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR.  */
  for (inner = region->inner, casei = 1;
       inner;
       inner = inner->next, i++, casei++)
    {
      basic_block s_entry_bb, s_exit_bb;

      /* Skip optional reduction region.  */
      if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
	{
	  --i;
	  --casei;
	  continue;
	}

      s_entry_bb = inner->entry;
      s_exit_bb = inner->exit;

      t = gimple_block_label (s_entry_bb);
      u = build_int_cst (unsigned_type_node, casei);
      u = build_case_label (u, NULL, t);
      VEC_quick_push (tree, label_vec, u);

      si = gsi_last_bb (s_entry_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
      gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
      gsi_remove (&si, true);
      single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;

      if (s_exit_bb == NULL)
	continue;

      si = gsi_last_bb (s_exit_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
      gsi_remove (&si, true);

      single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
    }

  /* Error handling code goes in DEFAULT_BB.  */
  t = gimple_block_label (default_bb);
  u = build_case_label (NULL, NULL, t);
  make_edge (l0_bb, default_bb, 0);

  stmt = gimple_build_switch_vec (vmain, u, label_vec);
  gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
  gsi_remove (&switch_si, true);
  VEC_free (tree, heap, label_vec);

  si = gsi_start_bb (default_bb);
  stmt = gimple_build_call (built_in_decls[BUILT_IN_TRAP], 0);
  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);

  if (exit_reachable)
    {
      /* Code to get the next section goes in L1_BB.  */
      si = gsi_last_bb (l1_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);

      stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT], 0);
      gimple_call_set_lhs (stmt, vnext);
      gsi_insert_after (&si, stmt, GSI_SAME_STMT);
      gsi_remove (&si, true);

      single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;

      /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB.  */
      si = gsi_last_bb (l2_bb);
      if (gimple_omp_return_nowait_p (gsi_stmt (si)))
	t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
      else
	t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
      stmt = gimple_build_call (t, 0);
      gsi_insert_after (&si, stmt, GSI_SAME_STMT);
      gsi_remove (&si, true);
    }

  set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
}

953ff289 4880
953ff289 4881
777f7f9a
RH
4882/* Expand code for an OpenMP single directive. We've already expanded
4883 much of the code; here we simply place the GOMP_barrier call.  */
4884
4885static void
4886expand_omp_single (struct omp_region *region)
4887{
4888 basic_block entry_bb, exit_bb;
726a989a 4889 gimple_stmt_iterator si;
777f7f9a
RH
4890 bool need_barrier = false;
4891
4892 entry_bb = region->entry;
4893 exit_bb = region->exit;
4894
726a989a 4895 si = gsi_last_bb (entry_bb);
777f7f9a
RH
4896 /* The terminal barrier at the end of a GOMP_single_copy sequence cannot
4897 be removed. We need to ensure that the thread that entered the single
4898 does not exit before the data is copied out by the other threads. */
726a989a 4899 if (find_omp_clause (gimple_omp_single_clauses (gsi_stmt (si)),
777f7f9a
RH
4900 OMP_CLAUSE_COPYPRIVATE))
4901 need_barrier = true;
726a989a
RB
4902 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
4903 gsi_remove (&si, true);
777f7f9a
RH
4904 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4905
726a989a
RB
4906 si = gsi_last_bb (exit_bb);
4907 if (!gimple_omp_return_nowait_p (gsi_stmt (si)) || need_barrier)
4908 force_gimple_operand_gsi (&si, build_omp_barrier (), false, NULL_TREE,
4909 false, GSI_SAME_STMT);
4910 gsi_remove (&si, true);
777f7f9a
RH
4911 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
4912}
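/* An illustrative sketch, not part of the pass: the shape of the expanded
   single region as C.  GOMP_single_start returns true in exactly one of
   the threads that reach it; the trailing barrier is dropped when the
   directive carried "nowait" and no copyprivate clause forces it.  The
   function name is invented for the example.  */
#if 0
#include <stdbool.h>

extern bool GOMP_single_start (void);
extern void GOMP_barrier (void);

static void
single_region_expanded (void)
{
  if (GOMP_single_start ())
    {
      /* BODY of the single region runs in one thread only.  */
    }
  GOMP_barrier ();   /* omitted when OMP_CLAUSE_NOWAIT was present */
}
#endif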
4913
4914
4915/* Generic expansion for OpenMP synchronization directives: master,
4916 ordered and critical. All we need to do here is remove the entry
4917 and exit markers for REGION. */
50674e96
DN
4918
4919static void
4920expand_omp_synch (struct omp_region *region)
4921{
4922 basic_block entry_bb, exit_bb;
726a989a 4923 gimple_stmt_iterator si;
50674e96 4924
777f7f9a
RH
4925 entry_bb = region->entry;
4926 exit_bb = region->exit;
50674e96 4927
726a989a
RB
4928 si = gsi_last_bb (entry_bb);
4929 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
4930 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
4931 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
4932 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL);
4933 gsi_remove (&si, true);
50674e96
DN
4934 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4935
d3c673c7
JJ
4936 if (exit_bb)
4937 {
726a989a
RB
4938 si = gsi_last_bb (exit_bb);
4939 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
4940 gsi_remove (&si, true);
d3c673c7
JJ
4941 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
4942 }
50674e96 4943}
953ff289 4944
a509ebb5
RL
4945/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
4946 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
4947 size of the data type, and thus usable to find the index of the builtin
4948 decl. Returns false if the expression is not of the proper form. */
4949
4950static bool
4951expand_omp_atomic_fetch_op (basic_block load_bb,
4952 tree addr, tree loaded_val,
4953 tree stored_val, int index)
4954{
4955 enum built_in_function base;
4956 tree decl, itype, call;
f9621cc4 4957 direct_optab optab;
a509ebb5
RL
4958 tree rhs;
4959 basic_block store_bb = single_succ (load_bb);
726a989a
RB
4960 gimple_stmt_iterator gsi;
4961 gimple stmt;
db3927fb 4962 location_t loc;
a509ebb5
RL
4963
4964 /* We expect to find the following sequences:
b8698a0f 4965
a509ebb5 4966 load_bb:
726a989a 4967 GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)
a509ebb5
RL
4968
4969 store_bb:
4970 val = tmp OP something; (or: something OP tmp)
b8698a0f 4971 GIMPLE_OMP_ATOMIC_STORE (val)
a509ebb5 4972
b8698a0f 4973 ???FIXME: Allow a more flexible sequence.
a509ebb5 4974 Perhaps use data flow to pick the statements.
b8698a0f 4975
a509ebb5
RL
4976 */
4977
726a989a
RB
4978 gsi = gsi_after_labels (store_bb);
4979 stmt = gsi_stmt (gsi);
db3927fb 4980 loc = gimple_location (stmt);
726a989a 4981 if (!is_gimple_assign (stmt))
a509ebb5 4982 return false;
726a989a
RB
4983 gsi_next (&gsi);
4984 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
a509ebb5
RL
4985 return false;
4986
726a989a 4987 if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
a509ebb5
RL
4988 return false;
4989
a509ebb5 4990 /* Check for one of the supported fetch-op operations. */
726a989a 4991 switch (gimple_assign_rhs_code (stmt))
a509ebb5
RL
4992 {
4993 case PLUS_EXPR:
4994 case POINTER_PLUS_EXPR:
e0a8ecf2 4995 base = BUILT_IN_SYNC_FETCH_AND_ADD_N;
a509ebb5
RL
4996 optab = sync_add_optab;
4997 break;
4998 case MINUS_EXPR:
e0a8ecf2 4999 base = BUILT_IN_SYNC_FETCH_AND_SUB_N;
a509ebb5
RL
5000 optab = sync_add_optab;
5001 break;
5002 case BIT_AND_EXPR:
e0a8ecf2 5003 base = BUILT_IN_SYNC_FETCH_AND_AND_N;
a509ebb5
RL
5004 optab = sync_and_optab;
5005 break;
5006 case BIT_IOR_EXPR:
e0a8ecf2 5007 base = BUILT_IN_SYNC_FETCH_AND_OR_N;
a509ebb5
RL
5008 optab = sync_ior_optab;
5009 break;
5010 case BIT_XOR_EXPR:
e0a8ecf2 5011 base = BUILT_IN_SYNC_FETCH_AND_XOR_N;
a509ebb5
RL
5012 optab = sync_xor_optab;
5013 break;
5014 default:
5015 return false;
5016 }
5017 /* Make sure the expression is of the proper form. */
726a989a
RB
5018 if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
5019 rhs = gimple_assign_rhs2 (stmt);
5020 else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
5021 && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
5022 rhs = gimple_assign_rhs1 (stmt);
a509ebb5
RL
5023 else
5024 return false;
5025
5026 decl = built_in_decls[base + index + 1];
20790697
JJ
5027 if (decl == NULL_TREE)
5028 return false;
a509ebb5
RL
5029 itype = TREE_TYPE (TREE_TYPE (decl));
5030
f9621cc4 5031 if (direct_optab_handler (optab, TYPE_MODE (itype)) == CODE_FOR_nothing)
a509ebb5
RL
5032 return false;
5033
726a989a
RB
5034 gsi = gsi_last_bb (load_bb);
5035 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
db3927fb
AH
5036 call = build_call_expr_loc (loc,
5037 decl, 2, addr,
5038 fold_convert_loc (loc, itype, rhs));
5039 call = fold_convert_loc (loc, void_type_node, call);
726a989a
RB
5040 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
5041 gsi_remove (&gsi, true);
a509ebb5 5042
726a989a
RB
5043 gsi = gsi_last_bb (store_bb);
5044 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
5045 gsi_remove (&gsi, true);
5046 gsi = gsi_last_bb (store_bb);
5047 gsi_remove (&gsi, true);
a509ebb5
RL
5048
5049 if (gimple_in_ssa_p (cfun))
5050 update_ssa (TODO_update_ssa_no_phi);
5051
5052 return true;
5053}
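/* An illustrative, self-contained sketch, not part of the pass: the
   result of the fetch-op path above for "#pragma omp atomic x += n" on a
   long.  The builtin is the one selected via BUILT_IN_SYNC_FETCH_AND_ADD_N
   plus the size index; its result is discarded, matching the
   fold_convert to void_type_node above.  Names are invented.  */
#if 0
#include <assert.h>

static long x;

static void
atomic_add_expanded (long n)
{
  (void) __sync_fetch_and_add (&x, n);
}

int
main (void)
{
  atomic_add_expanded (5);
  atomic_add_expanded (-2);
  assert (x == 3);
  return 0;
}
#endif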
5054
5055/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
5056
5057 oldval = *addr;
5058 repeat:
5059 newval = rhs; // with oldval replacing *addr in rhs
5060 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5061 if (oldval != newval)
5062 goto repeat;
5063
5064 INDEX is log2 of the size of the data type, and thus usable to find the
5065 index of the builtin decl. */
5066
5067static bool
5068expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
5069 tree addr, tree loaded_val, tree stored_val,
5070 int index)
5071{
c18c98c0 5072 tree loadedi, storedi, initial, new_storedi, old_vali;
a509ebb5 5073 tree type, itype, cmpxchg, iaddr;
726a989a 5074 gimple_stmt_iterator si;
a509ebb5 5075 basic_block loop_header = single_succ (load_bb);
726a989a 5076 gimple phi, stmt;
a509ebb5
RL
5077 edge e;
5078
e0a8ecf2 5079 cmpxchg = built_in_decls[BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N + index + 1];
20790697
JJ
5080 if (cmpxchg == NULL_TREE)
5081 return false;
a509ebb5
RL
5082 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5083 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5084
f9621cc4
RS
5085 if (direct_optab_handler (sync_compare_and_swap_optab, TYPE_MODE (itype))
5086 == CODE_FOR_nothing)
a509ebb5
RL
5087 return false;
5088
726a989a
RB
5089 /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD. */
5090 si = gsi_last_bb (load_bb);
5091 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
5092
c18c98c0
JJ
5093 /* For floating-point values, we'll need to view-convert them to integers
5094 so that we can perform the atomic compare and swap. Simplify the
5095 following code by always setting up the "i"ntegral variables. */
5096 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5097 {
726a989a
RB
5098 tree iaddr_val;
5099
5b21f0f3
RG
5100 iaddr = create_tmp_var (build_pointer_type_for_mode (itype, ptr_mode,
5101 true), NULL);
726a989a
RB
5102 iaddr_val
5103 = force_gimple_operand_gsi (&si,
5104 fold_convert (TREE_TYPE (iaddr), addr),
5105 false, NULL_TREE, true, GSI_SAME_STMT);
5106 stmt = gimple_build_assign (iaddr, iaddr_val);
5107 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
c18c98c0
JJ
5108 loadedi = create_tmp_var (itype, NULL);
5109 if (gimple_in_ssa_p (cfun))
5110 {
5111 add_referenced_var (iaddr);
5112 add_referenced_var (loadedi);
5113 loadedi = make_ssa_name (loadedi, NULL);
5114 }
5115 }
5116 else
5117 {
5118 iaddr = addr;
5119 loadedi = loaded_val;
5120 }
726a989a 5121
70f34814
RG
5122 initial
5123 = force_gimple_operand_gsi (&si,
5124 build2 (MEM_REF, TREE_TYPE (TREE_TYPE (iaddr)),
5125 iaddr,
5126 build_int_cst (TREE_TYPE (iaddr), 0)),
5127 true, NULL_TREE, true, GSI_SAME_STMT);
c18c98c0
JJ
5128
5129 /* Move the value to the LOADEDI temporary. */
a509ebb5
RL
5130 if (gimple_in_ssa_p (cfun))
5131 {
726a989a 5132 gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
c18c98c0
JJ
5133 phi = create_phi_node (loadedi, loop_header);
5134 SSA_NAME_DEF_STMT (loadedi) = phi;
a509ebb5
RL
5135 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
5136 initial);
5137 }
5138 else
726a989a
RB
5139 gsi_insert_before (&si,
5140 gimple_build_assign (loadedi, initial),
5141 GSI_SAME_STMT);
c18c98c0
JJ
5142 if (loadedi != loaded_val)
5143 {
726a989a
RB
5144 gimple_stmt_iterator gsi2;
5145 tree x;
c18c98c0
JJ
5146
5147 x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
726a989a 5148 gsi2 = gsi_start_bb (loop_header);
c18c98c0
JJ
5149 if (gimple_in_ssa_p (cfun))
5150 {
726a989a
RB
5151 gimple stmt;
5152 x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
5153 true, GSI_SAME_STMT);
5154 stmt = gimple_build_assign (loaded_val, x);
5155 gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
c18c98c0
JJ
5156 }
5157 else
5158 {
726a989a
RB
5159 x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
5160 force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
5161 true, GSI_SAME_STMT);
c18c98c0
JJ
5162 }
5163 }
726a989a 5164 gsi_remove (&si, true);
a509ebb5 5165
726a989a
RB
5166 si = gsi_last_bb (store_bb);
5167 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
a509ebb5 5168
c18c98c0
JJ
5169 if (iaddr == addr)
5170 storedi = stored_val;
a509ebb5 5171 else
c18c98c0 5172 storedi =
726a989a 5173 force_gimple_operand_gsi (&si,
c18c98c0
JJ
5174 build1 (VIEW_CONVERT_EXPR, itype,
5175 stored_val), true, NULL_TREE, true,
726a989a 5176 GSI_SAME_STMT);
a509ebb5
RL
5177
5178 /* Build the compare&swap statement. */
5179 new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
726a989a 5180 new_storedi = force_gimple_operand_gsi (&si,
587aa063
RG
5181 fold_convert (TREE_TYPE (loadedi),
5182 new_storedi),
a509ebb5 5183 true, NULL_TREE,
726a989a 5184 true, GSI_SAME_STMT);
a509ebb5
RL
5185
5186 if (gimple_in_ssa_p (cfun))
5187 old_vali = loadedi;
5188 else
5189 {
587aa063 5190 old_vali = create_tmp_var (TREE_TYPE (loadedi), NULL);
c18c98c0
JJ
5191 if (gimple_in_ssa_p (cfun))
5192 add_referenced_var (old_vali);
726a989a
RB
5193 stmt = gimple_build_assign (old_vali, loadedi);
5194 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5 5195
726a989a
RB
5196 stmt = gimple_build_assign (loadedi, new_storedi);
5197 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5
RL
5198 }
5199
5200 /* Note that we always perform the comparison as an integer, even for
b8698a0f 5201 floating point. This allows the atomic operation to properly
a509ebb5 5202 succeed even with NaNs and -0.0. */
726a989a
RB
5203 stmt = gimple_build_cond_empty
5204 (build2 (NE_EXPR, boolean_type_node,
5205 new_storedi, old_vali));
5206 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5
RL
5207
5208 /* Update cfg. */
5209 e = single_succ_edge (store_bb);
5210 e->flags &= ~EDGE_FALLTHRU;
5211 e->flags |= EDGE_FALSE_VALUE;
5212
5213 e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);
5214
c18c98c0 5215 /* Copy the new value to loadedi (we already did that before the condition
a509ebb5
RL
5216 if we are not in SSA). */
5217 if (gimple_in_ssa_p (cfun))
5218 {
726a989a 5219 phi = gimple_seq_first_stmt (phi_nodes (loop_header));
c18c98c0 5220 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
a509ebb5
RL
5221 }
5222
726a989a
RB
5223 /* Remove GIMPLE_OMP_ATOMIC_STORE. */
5224 gsi_remove (&si, true);
a509ebb5
RL
5225
5226 if (gimple_in_ssa_p (cfun))
5227 update_ssa (TODO_update_ssa_no_phi);
5228
5229 return true;
5230}
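/* An illustrative sketch, not part of the pass: the compare-and-swap loop
   built above, written as C for a 4-byte float.  The union stands in for
   the VIEW_CONVERT_EXPRs, and the pointer cast mirrors the IADDR
   temporary; comparing the integer images keeps the loop correct for
   NaNs and -0.0, exactly as the comment in the code notes.  */
#if 0
static float x;

static void
atomic_mul_expanded (float rhs)
{
  union { float f; unsigned int i; } oldv, newv;
  unsigned int prev;

  oldv.f = x;                          /* initial load of *iaddr */
  for (;;)
    {
      newv.f = oldv.f * rhs;           /* recompute rhs with loaded value */
      prev = __sync_val_compare_and_swap ((unsigned int *) &x,
                                          oldv.i, newv.i);
      if (prev == oldv.i)              /* new_storedi == old_vali: done */
        break;
      oldv.i = prev;                   /* retry with the value we saw */
    }
}
#endif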
5231
5232/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
5233
5234 GOMP_atomic_start ();
5235 *addr = rhs;
5236 GOMP_atomic_end ();
5237
5238 The result is not globally atomic, but works so long as all parallel
5239 references are within #pragma omp atomic directives. According to
 5240 responses received from omp@openmp.org, this appears to be within
 5241 spec; that makes sense, since it is how several other compilers
b8698a0f 5242 handle this situation as well.
726a989a
RB
5243 LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
5244 expanding. STORED_VAL is the operand of the matching
5245 GIMPLE_OMP_ATOMIC_STORE.
a509ebb5 5246
b8698a0f
L
5247 We replace
5248 GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with
a509ebb5
RL
5249 loaded_val = *addr;
5250
5251 and replace
726a989a 5252 GIMPLE_OMP_ATOMIC_STORE (stored_val) with
b8698a0f 5253 *addr = stored_val;
a509ebb5
RL
5254*/
5255
5256static bool
5257expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
5258 tree addr, tree loaded_val, tree stored_val)
5259{
726a989a
RB
5260 gimple_stmt_iterator si;
5261 gimple stmt;
a509ebb5
RL
5262 tree t;
5263
726a989a
RB
5264 si = gsi_last_bb (load_bb);
5265 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
a509ebb5
RL
5266
5267 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
3bb06db4 5268 t = build_call_expr (t, 0);
726a989a 5269 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
a509ebb5 5270
70f34814 5271 stmt = gimple_build_assign (loaded_val, build_simple_mem_ref (addr));
726a989a
RB
5272 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
5273 gsi_remove (&si, true);
a509ebb5 5274
726a989a
RB
5275 si = gsi_last_bb (store_bb);
5276 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
a509ebb5 5277
70f34814
RG
5278 stmt = gimple_build_assign (build_simple_mem_ref (unshare_expr (addr)),
5279 stored_val);
726a989a 5280 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5
RL
5281
5282 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
3bb06db4 5283 t = build_call_expr (t, 0);
726a989a
RB
5284 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
5285 gsi_remove (&si, true);
a509ebb5
RL
5286
5287 if (gimple_in_ssa_p (cfun))
5288 update_ssa (TODO_update_ssa_no_phi);
5289 return true;
5290}
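/* An illustrative sketch, not part of the pass: the mutex fallback in
   source form.  GOMP_atomic_start and GOMP_atomic_end are the libgomp
   entry points used above; they take and release a single global lock,
   which is why the result is only atomic with respect to other atomic
   directives.  The function name is invented.  */
#if 0
extern void GOMP_atomic_start (void);
extern void GOMP_atomic_end (void);

static long double x;

static void
atomic_update_expanded (long double rhs)
{
  GOMP_atomic_start ();
  x = x + rhs;   /* loaded_val = *addr; ...; *addr = stored_val */
  GOMP_atomic_end ();
}
#endif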
5291
b8698a0f
L
5292/* Expand a GIMPLE_OMP_ATOMIC statement.  We first try to expand it
5293 using expand_omp_atomic_fetch_op.  If that fails, we try to
a509ebb5
RL
5294 call expand_omp_atomic_pipeline, and if it fails too, the
5295 ultimate fallback is wrapping the operation in a mutex
b8698a0f
L
5296 (expand_omp_atomic_mutex). REGION is the atomic region built
5297 by build_omp_regions_1(). */
a509ebb5
RL
5298
5299static void
5300expand_omp_atomic (struct omp_region *region)
5301{
5302 basic_block load_bb = region->entry, store_bb = region->exit;
726a989a
RB
5303 gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
5304 tree loaded_val = gimple_omp_atomic_load_lhs (load);
5305 tree addr = gimple_omp_atomic_load_rhs (load);
5306 tree stored_val = gimple_omp_atomic_store_val (store);
a509ebb5
RL
5307 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5308 HOST_WIDE_INT index;
5309
5310 /* Make sure the type is one of the supported sizes. */
5311 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5312 index = exact_log2 (index);
5313 if (index >= 0 && index <= 4)
5314 {
5315 unsigned int align = TYPE_ALIGN_UNIT (type);
5316
5317 /* __sync builtins require strict data alignment. */
5318 if (exact_log2 (align) >= index)
5319 {
5320 /* When possible, use specialized atomic update functions. */
5321 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5322 && store_bb == single_succ (load_bb))
5323 {
5324 if (expand_omp_atomic_fetch_op (load_bb, addr,
5325 loaded_val, stored_val, index))
5326 return;
5327 }
5328
5329 /* If we don't have specialized __sync builtins, try and implement
5330 as a compare and swap loop. */
5331 if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
5332 loaded_val, stored_val, index))
5333 return;
5334 }
5335 }
5336
5337 /* The ultimate fallback is wrapping the operation in a mutex. */
5338 expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
5339}
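/* An illustrative, self-contained sketch, not part of the pass: the size
   and alignment gate above as plain C.  Only power-of-two sizes of 1, 2,
   4, 8 and 16 bytes (index 0..4) have __sync builtins, and the object
   must be at least naturally aligned for them to be usable.  The helper
   names are invented.  */
#if 0
#include <stdbool.h>

static int
exact_log2_example (unsigned long v)
{
  int l = 0;
  if (v == 0 || (v & (v - 1)) != 0)
    return -1;                  /* not a power of two */
  while (v >>= 1)
    l++;
  return l;
}

static bool
sync_builtins_usable (unsigned long size, unsigned long align)
{
  int index = exact_log2_example (size);
  return index >= 0 && index <= 4
         && exact_log2_example (align) >= index;
}
#endif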
5340
953ff289 5341
50674e96
DN
5342/* Expand the parallel region tree rooted at REGION. Expansion
5343 proceeds in depth-first order. Innermost regions are expanded
5344 first. This way, parallel regions that require a new function to
726a989a 5345 be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
50674e96
DN
5346 internal dependencies in their body. */
5347
5348static void
5349expand_omp (struct omp_region *region)
5350{
5351 while (region)
5352 {
b357f682
JJ
5353 location_t saved_location;
5354
068e1875
ZD
5355 /* First, determine whether this is a combined parallel+workshare
5356 region. */
726a989a 5357 if (region->type == GIMPLE_OMP_PARALLEL)
068e1875
ZD
5358 determine_parallel_type (region);
5359
50674e96
DN
5360 if (region->inner)
5361 expand_omp (region->inner);
5362
b357f682 5363 saved_location = input_location;
726a989a
RB
5364 if (gimple_has_location (last_stmt (region->entry)))
5365 input_location = gimple_location (last_stmt (region->entry));
b357f682 5366
777f7f9a 5367 switch (region->type)
50674e96 5368 {
726a989a
RB
5369 case GIMPLE_OMP_PARALLEL:
5370 case GIMPLE_OMP_TASK:
a68ab351
JJ
5371 expand_omp_taskreg (region);
5372 break;
5373
726a989a 5374 case GIMPLE_OMP_FOR:
777f7f9a
RH
5375 expand_omp_for (region);
5376 break;
50674e96 5377
726a989a 5378 case GIMPLE_OMP_SECTIONS:
777f7f9a
RH
5379 expand_omp_sections (region);
5380 break;
50674e96 5381
726a989a 5382 case GIMPLE_OMP_SECTION:
777f7f9a 5383 /* Individual omp sections are handled together with their
726a989a 5384 parent GIMPLE_OMP_SECTIONS region. */
777f7f9a 5385 break;
50674e96 5386
726a989a 5387 case GIMPLE_OMP_SINGLE:
777f7f9a
RH
5388 expand_omp_single (region);
5389 break;
50674e96 5390
726a989a
RB
5391 case GIMPLE_OMP_MASTER:
5392 case GIMPLE_OMP_ORDERED:
5393 case GIMPLE_OMP_CRITICAL:
777f7f9a
RH
5394 expand_omp_synch (region);
5395 break;
50674e96 5396
726a989a 5397 case GIMPLE_OMP_ATOMIC_LOAD:
a509ebb5
RL
5398 expand_omp_atomic (region);
5399 break;
5400
777f7f9a
RH
5401 default:
5402 gcc_unreachable ();
5403 }
8d9c1aec 5404
b357f682 5405 input_location = saved_location;
50674e96
DN
5406 region = region->next;
5407 }
5408}
5409
5410
5411/* Helper for build_omp_regions. Scan the dominator tree starting at
5f40b3cb
ZD
5412 block BB. PARENT is the region that contains BB. If SINGLE_TREE is
5413 true, the function ends once a single tree is built (otherwise, the whole
5414 forest of OMP constructs may be built). */
50674e96
DN
5415
5416static void
5f40b3cb
ZD
5417build_omp_regions_1 (basic_block bb, struct omp_region *parent,
5418 bool single_tree)
50674e96 5419{
726a989a
RB
5420 gimple_stmt_iterator gsi;
5421 gimple stmt;
50674e96
DN
5422 basic_block son;
5423
726a989a
RB
5424 gsi = gsi_last_bb (bb);
5425 if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
50674e96
DN
5426 {
5427 struct omp_region *region;
726a989a 5428 enum gimple_code code;
50674e96 5429
726a989a
RB
5430 stmt = gsi_stmt (gsi);
5431 code = gimple_code (stmt);
5432 if (code == GIMPLE_OMP_RETURN)
50674e96
DN
5433 {
5434 /* STMT is the return point out of region PARENT. Mark it
5435 as the exit point and make PARENT the immediately
5436 enclosing region. */
5437 gcc_assert (parent);
5438 region = parent;
777f7f9a 5439 region->exit = bb;
50674e96 5440 parent = parent->outer;
50674e96 5441 }
726a989a 5442 else if (code == GIMPLE_OMP_ATOMIC_STORE)
a509ebb5 5443 {
726a989a
RB
5444 /* GIMPLE_OMP_ATOMIC_STORE is analogous to
5445 GIMPLE_OMP_RETURN, but matches with
5446 GIMPLE_OMP_ATOMIC_LOAD. */
a509ebb5 5447 gcc_assert (parent);
726a989a 5448 gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
a509ebb5
RL
5449 region = parent;
5450 region->exit = bb;
5451 parent = parent->outer;
5452 }
5453
726a989a 5454 else if (code == GIMPLE_OMP_CONTINUE)
777f7f9a
RH
5455 {
5456 gcc_assert (parent);
5457 parent->cont = bb;
5458 }
726a989a 5459 else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
e5c95afe 5460 {
726a989a
RB
5461 /* GIMPLE_OMP_SECTIONS_SWITCH is part of
5462 GIMPLE_OMP_SECTIONS, and we do nothing for it. */
5463 ;
e5c95afe 5464 }
50674e96
DN
5465 else
5466 {
5467 /* Otherwise, this directive becomes the parent for a new
5468 region. */
777f7f9a 5469 region = new_omp_region (bb, code, parent);
50674e96
DN
5470 parent = region;
5471 }
50674e96
DN
5472 }
5473
5f40b3cb
ZD
5474 if (single_tree && !parent)
5475 return;
5476
50674e96
DN
5477 for (son = first_dom_son (CDI_DOMINATORS, bb);
5478 son;
5479 son = next_dom_son (CDI_DOMINATORS, son))
5f40b3cb
ZD
5480 build_omp_regions_1 (son, parent, single_tree);
5481}
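/* An illustrative sketch, not part of the pass: how the statements that
   terminate basic blocks map onto the region fields set above, for a
   parallel region containing a loop.  The pragmas are real; the
   annotations name the GIMPLE statement that opens or closes each
   region.  */
#if 0
void
region_tree_example (int n)
{
#pragma omp parallel       /* GIMPLE_OMP_PARALLEL: new region, PARENT */
  {
#pragma omp for            /* GIMPLE_OMP_FOR: child region's ->entry */
    for (int i = 0; i < n; i++)
      ;                    /* GIMPLE_OMP_CONTINUE: child ->cont,
                              GIMPLE_OMP_RETURN: child ->exit */
  }                        /* GIMPLE_OMP_RETURN: parent ->exit */
}
#endif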
5482
5483/* Builds the tree of OMP regions rooted at ROOT, storing it to
5484 root_omp_region. */
5485
5486static void
5487build_omp_regions_root (basic_block root)
5488{
5489 gcc_assert (root_omp_region == NULL);
5490 build_omp_regions_1 (root, NULL, true);
5491 gcc_assert (root_omp_region != NULL);
50674e96
DN
5492}
5493
5f40b3cb
ZD
5494/* Expands an OMP construct (and its subconstructs) starting in HEAD.  */
5495
5496void
5497omp_expand_local (basic_block head)
5498{
5499 build_omp_regions_root (head);
5500 if (dump_file && (dump_flags & TDF_DETAILS))
5501 {
5502 fprintf (dump_file, "\nOMP region tree\n\n");
5503 dump_omp_region (dump_file, root_omp_region, 0);
5504 fprintf (dump_file, "\n");
5505 }
5506
5507 remove_exit_barriers (root_omp_region);
5508 expand_omp (root_omp_region);
5509
5510 free_omp_regions ();
5511}
50674e96
DN
5512
5513/* Scan the CFG and build a tree of OMP regions rooted at
5514 ENTRY_BLOCK_PTR, storing it in root_omp_region.  */
5515
5516static void
5517build_omp_regions (void)
5518{
777f7f9a 5519 gcc_assert (root_omp_region == NULL);
50674e96 5520 calculate_dominance_info (CDI_DOMINATORS);
5f40b3cb 5521 build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
50674e96
DN
5522}
5523
50674e96
DN
5524/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
5525
c2924966 5526static unsigned int
50674e96
DN
5527execute_expand_omp (void)
5528{
5529 build_omp_regions ();
5530
777f7f9a
RH
5531 if (!root_omp_region)
5532 return 0;
50674e96 5533
777f7f9a
RH
5534 if (dump_file)
5535 {
5536 fprintf (dump_file, "\nOMP region tree\n\n");
5537 dump_omp_region (dump_file, root_omp_region, 0);
5538 fprintf (dump_file, "\n");
50674e96 5539 }
777f7f9a
RH
5540
5541 remove_exit_barriers (root_omp_region);
5542
5543 expand_omp (root_omp_region);
5544
777f7f9a
RH
5545 cleanup_tree_cfg ();
5546
5547 free_omp_regions ();
5548
c2924966 5549 return 0;
50674e96
DN
5550}
5551
917948d3
ZD
5552/* OMP expansion -- the default pass, run before creation of SSA form. */
5553
50674e96
DN
5554static bool
5555gate_expand_omp (void)
5556{
1da2ed5f 5557 return (flag_openmp != 0 && !seen_error ());
50674e96
DN
5558}
5559
b8698a0f 5560struct gimple_opt_pass pass_expand_omp =
50674e96 5561{
8ddbbcae
JH
5562 {
5563 GIMPLE_PASS,
50674e96
DN
5564 "ompexp", /* name */
5565 gate_expand_omp, /* gate */
5566 execute_expand_omp, /* execute */
5567 NULL, /* sub */
5568 NULL, /* next */
5569 0, /* static_pass_number */
7072a650 5570 TV_NONE, /* tv_id */
50674e96 5571 PROP_gimple_any, /* properties_required */
535b544a 5572 0, /* properties_provided */
50674e96
DN
5573 0, /* properties_destroyed */
5574 0, /* todo_flags_start */
22c5fa5f 5575 0 /* todo_flags_finish */
8ddbbcae 5576 }
50674e96
DN
5577};
5578\f
5579/* Routines to lower OpenMP directives into OMP-GIMPLE. */
5580
726a989a
RB
5581/* Lower the OpenMP sections directive in the current statement in GSI_P.
5582 CTX is the enclosing OMP context for the current statement. */
50674e96
DN
5583
5584static void
726a989a 5585lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 5586{
726a989a
RB
5587 tree block, control;
5588 gimple_stmt_iterator tgsi;
50674e96 5589 unsigned i, len;
726a989a
RB
5590 gimple stmt, new_stmt, bind, t;
5591 gimple_seq ilist, dlist, olist, new_body, body;
d406b663 5592 struct gimplify_ctx gctx;
50674e96 5593
726a989a 5594 stmt = gsi_stmt (*gsi_p);
50674e96 5595
d406b663 5596 push_gimplify_context (&gctx);
50674e96
DN
5597
5598 dlist = NULL;
5599 ilist = NULL;
726a989a
RB
5600 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5601 &ilist, &dlist, ctx);
50674e96 5602
726a989a
RB
5603 tgsi = gsi_start (gimple_omp_body (stmt));
5604 for (len = 0; !gsi_end_p (tgsi); len++, gsi_next (&tgsi))
50674e96
DN
5605 continue;
5606
726a989a
RB
5607 tgsi = gsi_start (gimple_omp_body (stmt));
5608 body = NULL;
5609 for (i = 0; i < len; i++, gsi_next (&tgsi))
50674e96
DN
5610 {
5611 omp_context *sctx;
726a989a 5612 gimple sec_start;
50674e96 5613
726a989a 5614 sec_start = gsi_stmt (tgsi);
50674e96
DN
5615 sctx = maybe_lookup_ctx (sec_start);
5616 gcc_assert (sctx);
5617
726a989a 5618 gimple_seq_add_stmt (&body, sec_start);
777f7f9a 5619
726a989a
RB
5620 lower_omp (gimple_omp_body (sec_start), sctx);
5621 gimple_seq_add_seq (&body, gimple_omp_body (sec_start));
5622 gimple_omp_set_body (sec_start, NULL);
50674e96
DN
5623
5624 if (i == len - 1)
5625 {
726a989a
RB
5626 gimple_seq l = NULL;
5627 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
50674e96 5628 &l, ctx);
726a989a
RB
5629 gimple_seq_add_seq (&body, l);
5630 gimple_omp_section_set_last (sec_start);
50674e96 5631 }
b8698a0f 5632
726a989a 5633 gimple_seq_add_stmt (&body, gimple_build_omp_return (false));
50674e96 5634 }
953ff289
DN
5635
5636 block = make_node (BLOCK);
726a989a 5637 bind = gimple_build_bind (NULL, body, block);
953ff289 5638
726a989a
RB
5639 olist = NULL;
5640 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 5641
b357f682 5642 block = make_node (BLOCK);
726a989a 5643 new_stmt = gimple_build_bind (NULL, NULL, block);
50674e96 5644
b357f682 5645 pop_gimplify_context (new_stmt);
726a989a
RB
5646 gimple_bind_append_vars (new_stmt, ctx->block_vars);
5647 BLOCK_VARS (block) = gimple_bind_vars (bind);
b357f682
JJ
5648 if (BLOCK_VARS (block))
5649 TREE_USED (block) = 1;
5650
726a989a
RB
5651 new_body = NULL;
5652 gimple_seq_add_seq (&new_body, ilist);
5653 gimple_seq_add_stmt (&new_body, stmt);
5654 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5655 gimple_seq_add_stmt (&new_body, bind);
777f7f9a 5656
e5c95afe 5657 control = create_tmp_var (unsigned_type_node, ".section");
726a989a
RB
5658 t = gimple_build_omp_continue (control, control);
5659 gimple_omp_sections_set_control (stmt, control);
5660 gimple_seq_add_stmt (&new_body, t);
777f7f9a 5661
726a989a
RB
5662 gimple_seq_add_seq (&new_body, olist);
5663 gimple_seq_add_seq (&new_body, dlist);
50674e96 5664
726a989a 5665 new_body = maybe_catch_exception (new_body);
4a31b7ee 5666
726a989a
RB
5667 t = gimple_build_omp_return
5668 (!!find_omp_clause (gimple_omp_sections_clauses (stmt),
5669 OMP_CLAUSE_NOWAIT));
5670 gimple_seq_add_stmt (&new_body, t);
777f7f9a 5671
726a989a
RB
5672 gimple_bind_set_body (new_stmt, new_body);
5673 gimple_omp_set_body (stmt, NULL);
50674e96 5674
726a989a 5675 gsi_replace (gsi_p, new_stmt, true);
953ff289
DN
5676}
5677
5678
50674e96 5679/* A subroutine of lower_omp_single. Expand the simple form of
726a989a 5680 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
953ff289
DN
5681
5682 if (GOMP_single_start ())
5683 BODY;
5684 [ GOMP_barrier (); ] -> unless 'nowait' is present.
50674e96
DN
5685
5686 FIXME. It may be better to delay expanding the logic of this until
5687 pass_expand_omp. The expanded logic may make the job more difficult
5688 for a synchronization analysis pass.  */
953ff289
DN
5689
5690static void
726a989a 5691lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
953ff289 5692{
c2255bc4
AH
5693 location_t loc = gimple_location (single_stmt);
5694 tree tlabel = create_artificial_label (loc);
5695 tree flabel = create_artificial_label (loc);
726a989a
RB
5696 gimple call, cond;
5697 tree lhs, decl;
5698
5699 decl = built_in_decls[BUILT_IN_GOMP_SINGLE_START];
5700 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
5701 call = gimple_build_call (decl, 0);
5702 gimple_call_set_lhs (call, lhs);
5703 gimple_seq_add_stmt (pre_p, call);
5704
5705 cond = gimple_build_cond (EQ_EXPR, lhs,
db3927fb
AH
5706 fold_convert_loc (loc, TREE_TYPE (lhs),
5707 boolean_true_node),
726a989a
RB
5708 tlabel, flabel);
5709 gimple_seq_add_stmt (pre_p, cond);
5710 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
5711 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
5712 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
953ff289
DN
5713}
5714
50674e96
DN
5715
5716/* A subroutine of lower_omp_single. Expand the simple form of
726a989a 5717 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
953ff289
DN
5718
5719 #pragma omp single copyprivate (a, b, c)
5720
5721 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
5722
5723 {
5724 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
5725 {
5726 BODY;
5727 copyout.a = a;
5728 copyout.b = b;
5729 copyout.c = c;
5730 GOMP_single_copy_end (&copyout);
5731 }
5732 else
5733 {
5734 a = copyout_p->a;
5735 b = copyout_p->b;
5736 c = copyout_p->c;
5737 }
5738 GOMP_barrier ();
5739 }
50674e96
DN
5740
5741 FIXME. It may be better to delay expanding the logic of this until
5742 pass_expand_omp. The expanded logic may make the job more difficult
5743 for a synchronization analysis pass.  */
953ff289
DN
5744
5745static void
726a989a 5746lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
953ff289 5747{
726a989a
RB
5748 tree ptr_type, t, l0, l1, l2;
5749 gimple_seq copyin_seq;
c2255bc4 5750 location_t loc = gimple_location (single_stmt);
953ff289
DN
5751
5752 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
5753
5754 ptr_type = build_pointer_type (ctx->record_type);
5755 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
5756
c2255bc4
AH
5757 l0 = create_artificial_label (loc);
5758 l1 = create_artificial_label (loc);
5759 l2 = create_artificial_label (loc);
953ff289 5760
db3927fb
AH
5761 t = build_call_expr_loc (loc, built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START], 0);
5762 t = fold_convert_loc (loc, ptr_type, t);
726a989a 5763 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289
DN
5764
5765 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
5766 build_int_cst (ptr_type, 0));
5767 t = build3 (COND_EXPR, void_type_node, t,
5768 build_and_jump (&l0), build_and_jump (&l1));
5769 gimplify_and_add (t, pre_p);
5770
726a989a 5771 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 5772
726a989a 5773 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289
DN
5774
5775 copyin_seq = NULL;
726a989a 5776 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
953ff289
DN
5777 &copyin_seq, ctx);
5778
db3927fb
AH
5779 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
5780 t = build_call_expr_loc (loc, built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END],
5781 1, t);
953ff289
DN
5782 gimplify_and_add (t, pre_p);
5783
5784 t = build_and_jump (&l2);
5785 gimplify_and_add (t, pre_p);
5786
726a989a 5787 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 5788
726a989a 5789 gimple_seq_add_seq (pre_p, copyin_seq);
953ff289 5790
726a989a 5791 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
953ff289
DN
5792}
5793
50674e96 5794
953ff289
DN
5795/* Lower code for an OpenMP single directive.  */
5796
5797static void
726a989a 5798lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 5799{
726a989a
RB
5800 tree block;
5801 gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
5802 gimple_seq bind_body, dlist;
d406b663 5803 struct gimplify_ctx gctx;
953ff289 5804
d406b663 5805 push_gimplify_context (&gctx);
953ff289 5806
726a989a
RB
5807 bind_body = NULL;
5808 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
5809 &bind_body, &dlist, ctx);
5810 lower_omp (gimple_omp_body (single_stmt), ctx);
953ff289 5811
726a989a 5812 gimple_seq_add_stmt (&bind_body, single_stmt);
953ff289
DN
5813
5814 if (ctx->record_type)
726a989a 5815 lower_omp_single_copy (single_stmt, &bind_body, ctx);
953ff289 5816 else
726a989a
RB
5817 lower_omp_single_simple (single_stmt, &bind_body);
5818
5819 gimple_omp_set_body (single_stmt, NULL);
953ff289 5820
726a989a 5821 gimple_seq_add_seq (&bind_body, dlist);
777f7f9a 5822
726a989a 5823 bind_body = maybe_catch_exception (bind_body);
777f7f9a 5824
b8698a0f 5825 t = gimple_build_omp_return
726a989a
RB
5826 (!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
5827 OMP_CLAUSE_NOWAIT));
5828 gimple_seq_add_stmt (&bind_body, t);
4a31b7ee 5829
726a989a
RB
5830 block = make_node (BLOCK);
5831 bind = gimple_build_bind (NULL, bind_body, block);
777f7f9a 5832
953ff289 5833 pop_gimplify_context (bind);
50674e96 5834
726a989a
RB
5835 gimple_bind_append_vars (bind, ctx->block_vars);
5836 BLOCK_VARS (block) = ctx->block_vars;
5837 gsi_replace (gsi_p, bind, true);
b357f682
JJ
5838 if (BLOCK_VARS (block))
5839 TREE_USED (block) = 1;
953ff289
DN
5840}
5841
50674e96 5842
953ff289
DN
5843/* Lower code for an OpenMP master directive.  */
5844
5845static void
726a989a 5846lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 5847{
726a989a
RB
5848 tree block, lab = NULL, x;
5849 gimple stmt = gsi_stmt (*gsi_p), bind;
db3927fb 5850 location_t loc = gimple_location (stmt);
726a989a 5851 gimple_seq tseq;
d406b663 5852 struct gimplify_ctx gctx;
953ff289 5853
d406b663 5854 push_gimplify_context (&gctx);
953ff289
DN
5855
5856 block = make_node (BLOCK);
726a989a
RB
5857 bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
5858 block);
777f7f9a 5859
db3927fb 5860 x = build_call_expr_loc (loc, built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
953ff289
DN
5861 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
5862 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
726a989a
RB
5863 tseq = NULL;
5864 gimplify_and_add (x, &tseq);
5865 gimple_bind_add_seq (bind, tseq);
953ff289 5866
726a989a
RB
5867 lower_omp (gimple_omp_body (stmt), ctx);
5868 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
5869 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
5870 gimple_omp_set_body (stmt, NULL);
953ff289 5871
726a989a 5872 gimple_bind_add_stmt (bind, gimple_build_label (lab));
777f7f9a 5873
726a989a 5874 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
777f7f9a 5875
953ff289 5876 pop_gimplify_context (bind);
50674e96 5877
726a989a
RB
5878 gimple_bind_append_vars (bind, ctx->block_vars);
5879 BLOCK_VARS (block) = ctx->block_vars;
5880 gsi_replace (gsi_p, bind, true);
953ff289
DN
5881}
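/* An illustrative sketch, not part of the pass: the lowered shape of a
   master construct.  Only the thread whose omp_get_thread_num () is 0
   executes the body, and the OMP_RETURN built above carries "nowait",
   so no barrier is implied.  The function name is invented.  */
#if 0
extern int omp_get_thread_num (void);

static void
master_lowered (void)
{
  if (omp_get_thread_num () == 0)
    {
      /* BODY of "#pragma omp master".  */
    }
}
#endif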
5882
50674e96 5883
953ff289
DN
5884/* Lower code for an OpenMP ordered directive.  */
5885
5886static void
726a989a 5887lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 5888{
726a989a
RB
5889 tree block;
5890 gimple stmt = gsi_stmt (*gsi_p), bind, x;
d406b663 5891 struct gimplify_ctx gctx;
953ff289 5892
d406b663 5893 push_gimplify_context (&gctx);
953ff289
DN
5894
5895 block = make_node (BLOCK);
726a989a
RB
5896 bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
5897 block);
777f7f9a 5898
726a989a
RB
5899 x = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ORDERED_START], 0);
5900 gimple_bind_add_stmt (bind, x);
953ff289 5901
726a989a
RB
5902 lower_omp (gimple_omp_body (stmt), ctx);
5903 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
5904 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
5905 gimple_omp_set_body (stmt, NULL);
953ff289 5906
726a989a
RB
5907 x = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ORDERED_END], 0);
5908 gimple_bind_add_stmt (bind, x);
777f7f9a 5909
726a989a 5910 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
777f7f9a 5911
953ff289 5912 pop_gimplify_context (bind);
50674e96 5913
726a989a
RB
5914 gimple_bind_append_vars (bind, ctx->block_vars);
5915 BLOCK_VARS (block) = gimple_bind_vars (bind);
5916 gsi_replace (gsi_p, bind, true);
953ff289
DN
5917}
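/* An illustrative sketch, not part of the pass: the lowered shape of an
   ordered construct -- the body is simply bracketed by the two libgomp
   calls built above.  The function name is invented.  */
#if 0
extern void GOMP_ordered_start (void);
extern void GOMP_ordered_end (void);

static void
ordered_lowered (void)
{
  GOMP_ordered_start ();
  /* BODY of "#pragma omp ordered".  */
  GOMP_ordered_end ();
}
#endif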
5918
953ff289 5919
726a989a 5920/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
953ff289
DN
5921 substitution of a couple of function calls.  But the NAMED case
5922 requires that languages coordinate a symbol name.  It is therefore
5923 best put here in common code. */
5924
5925static GTY((param1_is (tree), param2_is (tree)))
5926 splay_tree critical_name_mutexes;
5927
5928static void
726a989a 5929lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 5930{
726a989a
RB
5931 tree block;
5932 tree name, lock, unlock;
5933 gimple stmt = gsi_stmt (*gsi_p), bind;
db3927fb 5934 location_t loc = gimple_location (stmt);
726a989a 5935 gimple_seq tbody;
d406b663 5936 struct gimplify_ctx gctx;
953ff289 5937
726a989a 5938 name = gimple_omp_critical_name (stmt);
953ff289
DN
5939 if (name)
5940 {
5039610b 5941 tree decl;
953ff289
DN
5942 splay_tree_node n;
5943
5944 if (!critical_name_mutexes)
5945 critical_name_mutexes
a9429e29
LB
5946 = splay_tree_new_ggc (splay_tree_compare_pointers,
5947 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
5948 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);
953ff289
DN
5949
5950 n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
5951 if (n == NULL)
5952 {
5953 char *new_str;
5954
5955 decl = create_tmp_var_raw (ptr_type_node, NULL);
5956
5957 new_str = ACONCAT ((".gomp_critical_user_",
5958 IDENTIFIER_POINTER (name), NULL));
5959 DECL_NAME (decl) = get_identifier (new_str);
5960 TREE_PUBLIC (decl) = 1;
5961 TREE_STATIC (decl) = 1;
5962 DECL_COMMON (decl) = 1;
5963 DECL_ARTIFICIAL (decl) = 1;
5964 DECL_IGNORED_P (decl) = 1;
8a4a83ed 5965 varpool_finalize_decl (decl);
953ff289
DN
5966
5967 splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
5968 (splay_tree_value) decl);
5969 }
5970 else
5971 decl = (tree) n->value;
5972
953ff289 5973 lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_START];
db3927fb 5974 lock = build_call_expr_loc (loc, lock, 1, build_fold_addr_expr_loc (loc, decl));
953ff289 5975
953ff289 5976 unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_END];
db3927fb
AH
5977 unlock = build_call_expr_loc (loc, unlock, 1,
5978 build_fold_addr_expr_loc (loc, decl));
953ff289
DN
5979 }
5980 else
5981 {
5982 lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_START];
db3927fb 5983 lock = build_call_expr_loc (loc, lock, 0);
953ff289
DN
5984
5985 unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_END];
db3927fb 5986 unlock = build_call_expr_loc (loc, unlock, 0);
953ff289
DN
5987 }
5988
d406b663 5989 push_gimplify_context (&gctx);
953ff289
DN
5990
5991 block = make_node (BLOCK);
726a989a 5992 bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt), block);
777f7f9a 5993
726a989a
RB
5994 tbody = gimple_bind_body (bind);
5995 gimplify_and_add (lock, &tbody);
5996 gimple_bind_set_body (bind, tbody);
953ff289 5997
726a989a
RB
5998 lower_omp (gimple_omp_body (stmt), ctx);
5999 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6000 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6001 gimple_omp_set_body (stmt, NULL);
953ff289 6002
726a989a
RB
6003 tbody = gimple_bind_body (bind);
6004 gimplify_and_add (unlock, &tbody);
6005 gimple_bind_set_body (bind, tbody);
777f7f9a 6006
726a989a 6007 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
953ff289
DN
6008
6009 pop_gimplify_context (bind);
726a989a
RB
6010 gimple_bind_append_vars (bind, ctx->block_vars);
6011 BLOCK_VARS (block) = gimple_bind_vars (bind);
6012 gsi_replace (gsi_p, bind, true);
50674e96
DN
6013}
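/* An illustrative sketch, not part of the pass: the lowered shape of
   "#pragma omp critical (foo)".  The static variable stands in for the
   ".gomp_critical_user_foo" common decl created above (the leading dot
   in real output keeps it out of the user namespace); the function name
   is invented.  */
#if 0
extern void GOMP_critical_name_start (void **);
extern void GOMP_critical_name_end (void **);

static void *gomp_critical_user_foo;   /* stands in for the common decl */

static void
critical_foo_lowered (void)
{
  GOMP_critical_name_start (&gomp_critical_user_foo);
  /* BODY of the critical region.  */
  GOMP_critical_name_end (&gomp_critical_user_foo);
}
#endif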
6014
6015
6016/* A subroutine of lower_omp_for. Generate code to emit the predicate
6017 for a lastprivate clause. Given a loop control predicate of (V
6018 cond N2), we gate the clause on (!(V cond N2)). The lowered form
3d55c64b
JJ
6019 is appended to *DLIST; iterator initialization is appended to
6020 *BODY_P. */
50674e96
DN
6021
6022static void
726a989a
RB
6023lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6024 gimple_seq *dlist, struct omp_context *ctx)
50674e96 6025{
726a989a 6026 tree clauses, cond, vinit;
50674e96 6027 enum tree_code cond_code;
726a989a 6028 gimple_seq stmts;
b8698a0f 6029
a68ab351 6030 cond_code = fd->loop.cond_code;
50674e96
DN
6031 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6032
6033 /* When possible, use a strict equality expression. This can let VRP
6034 type optimizations deduce the value and remove a copy. */
a68ab351 6035 if (host_integerp (fd->loop.step, 0))
50674e96 6036 {
a68ab351 6037 HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
50674e96
DN
6038 if (step == 1 || step == -1)
6039 cond_code = EQ_EXPR;
6040 }
6041
a68ab351 6042 cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
50674e96 6043
726a989a 6044 clauses = gimple_omp_for_clauses (fd->for_stmt);
3d55c64b
JJ
6045 stmts = NULL;
6046 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
726a989a 6047 if (!gimple_seq_empty_p (stmts))
3d55c64b 6048 {
726a989a 6049 gimple_seq_add_seq (&stmts, *dlist);
a68ab351 6050 *dlist = stmts;
3d55c64b
JJ
6051
6052 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
a68ab351 6053 vinit = fd->loop.n1;
3d55c64b 6054 if (cond_code == EQ_EXPR
a68ab351
JJ
6055 && host_integerp (fd->loop.n2, 0)
6056 && ! integer_zerop (fd->loop.n2))
6057 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
3d55c64b
JJ
6058
6059 /* Initialize the iterator variable, so that threads that don't execute
6060 any iterations don't execute the lastprivate clauses by accident. */
726a989a 6061 gimplify_assign (fd->loop.v, vinit, body_p);
3d55c64b 6062 }
50674e96
DN
6063}
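/* An illustrative sketch, not part of the pass: the gate generated for
   "#pragma omp for lastprivate (x)" over "for (i = 0; i < n; i++)".
   The predicate is the inverted loop condition, narrowed to == when the
   step is 1 or -1, and i is pre-initialized so a thread that receives no
   iterations cannot satisfy the gate by accident.  Names are invented.  */
#if 0
static void
lastprivate_gate_lowered (int n, int *x_shared, int x_priv)
{
  int i = 0;              /* vinit, emitted into *BODY_P */
  /* ... the work-sharing loop runs here, advancing i ... */
  if (i == n)             /* cond_code narrowed from >= to == */
    *x_shared = x_priv;   /* stores from lower_lastprivate_clauses */
}
#endif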
6064
6065
6066/* Lower code for an OpenMP loop directive. */
6067
6068static void
726a989a 6069lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 6070{
726a989a 6071 tree *rhs_p, block;
50674e96 6072 struct omp_for_data fd;
726a989a 6073 gimple stmt = gsi_stmt (*gsi_p), new_stmt;
0f900dfa 6074 gimple_seq omp_for_body, body, dlist;
726a989a 6075 size_t i;
d406b663 6076 struct gimplify_ctx gctx;
50674e96 6077
d406b663 6078 push_gimplify_context (&gctx);
50674e96 6079
726a989a
RB
6080 lower_omp (gimple_omp_for_pre_body (stmt), ctx);
6081 lower_omp (gimple_omp_body (stmt), ctx);
50674e96 6082
b357f682 6083 block = make_node (BLOCK);
726a989a 6084 new_stmt = gimple_build_bind (NULL, NULL, block);
b357f682 6085
50674e96
DN
6086 /* Move declaration of temporaries in the loop body before we make
6087 it go away. */
726a989a
RB
6088 omp_for_body = gimple_omp_body (stmt);
6089 if (!gimple_seq_empty_p (omp_for_body)
6090 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6091 {
6092 tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
6093 gimple_bind_append_vars (new_stmt, vars);
6094 }
50674e96 6095
726a989a 6096 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
50674e96 6097 dlist = NULL;
726a989a
RB
6098 body = NULL;
6099 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx);
6100 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
50674e96
DN
6101
6102 /* Lower the header expressions. At this point, we can assume that
6103 the header is of the form:
6104
6105 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6106
6107 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6108 using the .omp_data_s mapping, if needed. */
726a989a 6109 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 6110 {
726a989a 6111 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
a68ab351 6112 if (!is_gimple_min_invariant (*rhs_p))
726a989a 6113 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 6114
726a989a 6115 rhs_p = gimple_omp_for_final_ptr (stmt, i);
a68ab351 6116 if (!is_gimple_min_invariant (*rhs_p))
726a989a 6117 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 6118
726a989a 6119 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
a68ab351 6120 if (!is_gimple_min_invariant (*rhs_p))
726a989a 6121 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 6122 }
50674e96
DN
6123
6124 /* Once lowered, extract the bounds and clauses. */
a68ab351 6125 extract_omp_for_data (stmt, &fd, NULL);
50674e96 6126
726a989a 6127 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
50674e96 6128
726a989a
RB
6129 gimple_seq_add_stmt (&body, stmt);
6130 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
777f7f9a 6131
726a989a
RB
6132 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6133 fd.loop.v));
777f7f9a 6134
50674e96 6135 /* After the loop, add exit clauses. */
726a989a
RB
6136 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6137 gimple_seq_add_seq (&body, dlist);
50674e96 6138
726a989a 6139 body = maybe_catch_exception (body);
4a31b7ee 6140
777f7f9a 6141 /* Region exit marker goes at the end of the loop body. */
726a989a 6142 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
50674e96 6143
b357f682 6144 pop_gimplify_context (new_stmt);
726a989a
RB
6145
6146 gimple_bind_append_vars (new_stmt, ctx->block_vars);
6147 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
b357f682
JJ
6148 if (BLOCK_VARS (block))
6149 TREE_USED (block) = 1;
50674e96 6150
726a989a
RB
6151 gimple_bind_set_body (new_stmt, body);
6152 gimple_omp_set_body (stmt, NULL);
6153 gimple_omp_for_set_pre_body (stmt, NULL);
6154 gsi_replace (gsi_p, new_stmt, true);
953ff289
DN
6155}
6156
b8698a0f 6157/* Callback for walk_stmts. Check if the current statement only contains
726a989a 6158 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
69f1837b
JJ
6159
6160static tree
726a989a
RB
6161check_combined_parallel (gimple_stmt_iterator *gsi_p,
6162 bool *handled_ops_p,
6163 struct walk_stmt_info *wi)
69f1837b 6164{
d3bfe4de 6165 int *info = (int *) wi->info;
726a989a 6166 gimple stmt = gsi_stmt (*gsi_p);
69f1837b 6167
726a989a
RB
6168 *handled_ops_p = true;
6169 switch (gimple_code (stmt))
69f1837b 6170 {
726a989a
RB
6171 WALK_SUBSTMTS;
6172
6173 case GIMPLE_OMP_FOR:
6174 case GIMPLE_OMP_SECTIONS:
69f1837b
JJ
6175 *info = *info == 0 ? 1 : -1;
6176 break;
6177 default:
6178 *info = -1;
6179 break;
6180 }
6181 return NULL;
6182}
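/* An illustrative sketch, not part of the pass: the two shapes the
   walker distinguishes.  do_other_work is a placeholder.  In the first
   parallel the body is nothing but one work-sharing loop, so ws_num ends
   up 1 and the region is marked combined; the extra call in the second
   drives ws_num to -1.  */
#if 0
extern void do_other_work (void);

void
combined_parallel_example (int n)
{
#pragma omp parallel       /* combined: lone "omp for" inside */
  {
#pragma omp for
    for (int i = 0; i < n; i++)
      ;
  }

#pragma omp parallel       /* not combined: extra statement in body */
  {
    do_other_work ();
#pragma omp for
    for (int i = 0; i < n; i++)
      ;
  }
}
#endif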
50674e96 6183
a68ab351
JJ
6184struct omp_taskcopy_context
6185{
6186 /* This field must be at the beginning, as we do "inheritance": Some
6187 callback functions for tree-inline.c (e.g., omp_copy_decl)
6188 receive a copy_body_data pointer that is up-casted to an
6189 omp_context pointer. */
6190 copy_body_data cb;
6191 omp_context *ctx;
6192};
6193
6194static tree
6195task_copyfn_copy_decl (tree var, copy_body_data *cb)
6196{
6197 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6198
6199 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6200 return create_tmp_var (TREE_TYPE (var), NULL);
6201
6202 return var;
6203}
6204
6205static tree
6206task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6207{
6208 tree name, new_fields = NULL, type, f;
6209
6210 type = lang_hooks.types.make_type (RECORD_TYPE);
6211 name = DECL_NAME (TYPE_NAME (orig_type));
c2255bc4
AH
6212 name = build_decl (gimple_location (tcctx->ctx->stmt),
6213 TYPE_DECL, name, type);
a68ab351
JJ
6214 TYPE_NAME (type) = name;
6215
6216 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
6217 {
6218 tree new_f = copy_node (f);
6219 DECL_CONTEXT (new_f) = type;
6220 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
6221 TREE_CHAIN (new_f) = new_fields;
726a989a
RB
6222 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6223 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6224 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
6225 &tcctx->cb, NULL);
a68ab351
JJ
6226 new_fields = new_f;
6227 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
6228 }
6229 TYPE_FIELDS (type) = nreverse (new_fields);
6230 layout_type (type);
6231 return type;
6232}
6233
6234/* Create the task copyfn, which initializes the task's record data from the sender's record. */
6235
6236static void
726a989a 6237create_task_copyfn (gimple task_stmt, omp_context *ctx)
a68ab351
JJ
6238{
6239 struct function *child_cfun;
6240 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
6241 tree record_type, srecord_type, bind, list;
6242 bool record_needs_remap = false, srecord_needs_remap = false;
6243 splay_tree_node n;
6244 struct omp_taskcopy_context tcctx;
d406b663 6245 struct gimplify_ctx gctx;
db3927fb 6246 location_t loc = gimple_location (task_stmt);
a68ab351 6247
726a989a 6248 child_fn = gimple_omp_task_copy_fn (task_stmt);
a68ab351
JJ
6249 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
6250 gcc_assert (child_cfun->cfg == NULL);
a68ab351
JJ
6251 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
6252
6253 /* Reset DECL_CONTEXT on function arguments. */
910ad8de 6254 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
a68ab351
JJ
6255 DECL_CONTEXT (t) = child_fn;
6256
6257 /* Populate the function. */
d406b663 6258 push_gimplify_context (&gctx);
a68ab351
JJ
6259 current_function_decl = child_fn;
6260
6261 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6262 TREE_SIDE_EFFECTS (bind) = 1;
6263 list = NULL;
6264 DECL_SAVED_TREE (child_fn) = bind;
726a989a 6265 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
a68ab351
JJ
6266
6267 /* Remap src and dst argument types if needed. */
6268 record_type = ctx->record_type;
6269 srecord_type = ctx->srecord_type;
910ad8de 6270 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
6271 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6272 {
6273 record_needs_remap = true;
6274 break;
6275 }
910ad8de 6276 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
6277 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6278 {
6279 srecord_needs_remap = true;
6280 break;
6281 }
6282
6283 if (record_needs_remap || srecord_needs_remap)
6284 {
6285 memset (&tcctx, '\0', sizeof (tcctx));
6286 tcctx.cb.src_fn = ctx->cb.src_fn;
6287 tcctx.cb.dst_fn = child_fn;
fe660d7b
MJ
6288 tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
6289 gcc_checking_assert (tcctx.cb.src_node);
a68ab351
JJ
6290 tcctx.cb.dst_node = tcctx.cb.src_node;
6291 tcctx.cb.src_cfun = ctx->cb.src_cfun;
6292 tcctx.cb.copy_decl = task_copyfn_copy_decl;
1d65f45c 6293 tcctx.cb.eh_lp_nr = 0;
a68ab351
JJ
6294 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
6295 tcctx.cb.decl_map = pointer_map_create ();
6296 tcctx.ctx = ctx;
6297
6298 if (record_needs_remap)
6299 record_type = task_copyfn_remap_type (&tcctx, record_type);
6300 if (srecord_needs_remap)
6301 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
6302 }
6303 else
6304 tcctx.cb.decl_map = NULL;
6305
6306 push_cfun (child_cfun);
6307
6308 arg = DECL_ARGUMENTS (child_fn);
6309 TREE_TYPE (arg) = build_pointer_type (record_type);
910ad8de 6310 sarg = DECL_CHAIN (arg);
a68ab351
JJ
6311 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
6312
6313 /* First pass: initialize temporaries used in record_type and srecord_type
6314 sizes and field offsets. */
6315 if (tcctx.cb.decl_map)
726a989a 6316 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
6317 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
6318 {
6319 tree *p;
6320
6321 decl = OMP_CLAUSE_DECL (c);
6322 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
6323 if (p == NULL)
6324 continue;
6325 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6326 sf = (tree) n->value;
6327 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 6328 src = build_simple_mem_ref_loc (loc, sarg);
a68ab351 6329 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
726a989a 6330 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
a68ab351
JJ
6331 append_to_statement_list (t, &list);
6332 }
6333
6334 /* Second pass: copy shared var pointers and copy construct non-VLA
6335 firstprivate vars. */
726a989a 6336 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
6337 switch (OMP_CLAUSE_CODE (c))
6338 {
6339 case OMP_CLAUSE_SHARED:
6340 decl = OMP_CLAUSE_DECL (c);
6341 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6342 if (n == NULL)
6343 break;
6344 f = (tree) n->value;
6345 if (tcctx.cb.decl_map)
6346 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6347 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6348 sf = (tree) n->value;
6349 if (tcctx.cb.decl_map)
6350 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 6351 src = build_simple_mem_ref_loc (loc, sarg);
a68ab351 6352 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
70f34814 6353 dst = build_simple_mem_ref_loc (loc, arg);
a68ab351 6354 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
726a989a 6355 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
6356 append_to_statement_list (t, &list);
6357 break;
6358 case OMP_CLAUSE_FIRSTPRIVATE:
6359 decl = OMP_CLAUSE_DECL (c);
6360 if (is_variable_sized (decl))
6361 break;
6362 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6363 if (n == NULL)
6364 break;
6365 f = (tree) n->value;
6366 if (tcctx.cb.decl_map)
6367 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6368 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6369 if (n != NULL)
6370 {
6371 sf = (tree) n->value;
6372 if (tcctx.cb.decl_map)
6373 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 6374 src = build_simple_mem_ref_loc (loc, sarg);
a68ab351
JJ
6375 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6376 if (use_pointer_for_field (decl, NULL) || is_reference (decl))
70f34814 6377 src = build_simple_mem_ref_loc (loc, src);
a68ab351
JJ
6378 }
6379 else
6380 src = decl;
70f34814 6381 dst = build_simple_mem_ref_loc (loc, arg);
a68ab351
JJ
6382 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
6383 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
6384 append_to_statement_list (t, &list);
6385 break;
6386 case OMP_CLAUSE_PRIVATE:
6387 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6388 break;
6389 decl = OMP_CLAUSE_DECL (c);
6390 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6391 f = (tree) n->value;
6392 if (tcctx.cb.decl_map)
6393 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6394 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6395 if (n != NULL)
6396 {
6397 sf = (tree) n->value;
6398 if (tcctx.cb.decl_map)
6399 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 6400 src = build_simple_mem_ref_loc (loc, sarg);
a68ab351
JJ
6401 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
6402 if (use_pointer_for_field (decl, NULL))
70f34814 6403 src = build_simple_mem_ref_loc (loc, src);
a68ab351
JJ
6404 }
6405 else
6406 src = decl;
70f34814 6407 dst = build_simple_mem_ref_loc (loc, arg);
a68ab351 6408 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
726a989a 6409 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
6410 append_to_statement_list (t, &list);
6411 break;
6412 default:
6413 break;
6414 }
6415
  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    pointer_map_destroy (tcctx.cb.decl_map);
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
  current_function_decl = ctx->cb.src_fn;
}
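
/* Illustrative sketch, not code this pass emits verbatim: for a task like

     #pragma omp task firstprivate (x)
       use (x);

   the copy function built above behaves roughly as

     void task_copyfn (struct omp_data_t *dst, struct omp_data_s *src)
     {
       dst->x = src->x;
     }

   i.e. it copy-constructs each captured field from the sender record
   (srecord_type, filled in by the encountering thread) into the record
   that GOMP_task allocates for the task body; for VLA firstprivates the
   last pass above additionally copies the data and re-points the pointer
   field at the destination copy.  The struct and parameter names here
   are assumptions for exposition only.  */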

/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple stmt = gsi_stmt (*gsi_p);
  gimple par_bind, bind;
  gimple_seq par_body, olist, ilist, par_olist, par_ilist, new_body;
  struct gimplify_ctx gctx;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
  par_body = gimple_bind_body (par_bind);
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
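
  /* Illustrative example: in

       #pragma omp parallel
       #pragma omp for
       for (i = 0; i < n; i++)
	 body;

     the parallel body contains exactly one workshare region, so ws_num
     reaches 1 and the statement is marked combined, which lets later
     expansion use libgomp's combined parallel-loop entry points instead
     of a separate workshare setup.  */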
  if (ctx->srecord_type)
    create_task_copyfn (stmt, ctx);

  push_gimplify_context (&gctx);

  par_olist = NULL;
  par_ilist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
  lower_omp (par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_olist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }
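
  /* Illustrative sketch: for

       int x;
       #pragma omp parallel shared (x)
	 ...

     this lowering and the send-clause lowering just below materialize
     roughly

       struct .omp_data_s { int *x; } .omp_data_o;
       .omp_data_o.x = &x;

     in the encountering thread, with the child reading x back through
     the matching receiver (.omp_data_i).  The exact field layout is an
     assumption for exposition; use_pointer_for_field decides whether a
     field holds a pointer or a copy.  */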

  olist = NULL;
  ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gimple_bind_add_stmt (bind, stmt);
  if (ilist || olist)
    {
      gimple_seq_add_stmt (&ilist, bind);
      gimple_seq_add_seq (&ilist, olist);
      bind = gimple_build_bind (NULL, ilist, NULL);
    }

  gsi_replace (gsi_p, bind, true);

  pop_gimplify_context (NULL);
}
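
/* Illustrative sketch of the result of this lowering (names as in the
   sketches above):

     .omp_data_o.x = &x;
     #pragma omp parallel [data arg .omp_data_o]
       {
	 .omp_data_i = &.omp_data_o;
	 ... lowered body ...
	 OMP_RETURN
       }

   The directive survives as a GIMPLE_OMP_PARALLEL statement; outlining
   its body into the child function is left to pass_expand_omp.  */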

/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OpenMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
  return NULL_TREE;
}

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OpenMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      if ((ctx || task_shared_vars)
	  && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
			 ctx ? NULL : &wi, NULL)
	      || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
			    ctx ? NULL : &wi, NULL)))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler (stmt), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval (stmt), ctx);
      lower_omp (gimple_try_cleanup (stmt), ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body (stmt), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    }
}
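
/* Illustrative example of the regimplification above: if a variable x
   shared within a task was given a DECL_VALUE_EXPR such as
   .omp_data_i->x, then after substitution a statement like

     if (x > 42) ...

   no longer has gimple-value operands, so lower_omp_regimplify_p flags
   it and the operands are re-gimplified in place.  */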

static void
lower_omp (gimple_seq body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  input_location = saved_location;
}
\f
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But there is nothing to do unless -fopenmp is given.  */
  if (flag_openmp == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);
  scan_omp (body, NULL);
  gcc_assert (taskreg_nesting_level == 0);

  if (all_contexts->root)
    {
      struct gimplify_ctx gctx;

      if (task_shared_vars)
	push_gimplify_context (&gctx);
      lower_omp (body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}

struct gimple_opt_pass pass_lower_omp =
{
 {
  GIMPLE_PASS,
  "omplower",				/* name */
  NULL,					/* gate */
  execute_lower_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lomp,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
\f
/* The following is a utility to diagnose OpenMP structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
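
/* Illustrative example (rejected with -fopenmp): the branch below enters
   the structured block without passing through its start:

     goto l;
     #pragma omp parallel
     {
     l: ;
     }

   Pass 1 below records the enclosing OMP context of every label; pass 2
   then compares each branch's context against its target label's.  */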

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple branch_ctx, gimple label_ctx)
{
  if (label_ctx == branch_ctx)
    return false;

  /*
     Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.
   */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from OpenMP structured block");
  else
    error ("invalid entry to OpenMP structured block");
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to OpenMP structured block");
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from an OpenMP structured block");

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple context = (gimple) wi->info;
  gimple inner_context;
  gimple stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple context = (gimple) wi->info;
  splay_tree_node n;
  gimple stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      wi->info = stmt;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_2, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	tree lab = gimple_cond_true_label (stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
	  }
	lab = gimple_cond_false_label (stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq (body, diagnose_sb_2, NULL, &wi);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

static bool
gate_diagnose_omp_blocks (void)
{
  return flag_openmp != 0;
}

struct gimple_opt_pass pass_diagnose_omp_blocks =
{
 {
  GIMPLE_PASS,
  "*diagnose_omp_blocks",		/* name */
  gate_diagnose_omp_blocks,		/* gate */
  diagnose_omp_structured_block_errors,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
 }
};

#include "gt-omp-low.h"