/* Lowering pass for OpenMP directives.  Converts OpenMP directives
   into explicit calls to the runtime library (libgomp) and data
   marshalling to implement data sharing and copying clauses.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "tree-flow.h"
#include "timevar.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "tree-pass.h"
#include "ggc.h"
#include "except.h"
#include "splay-tree.h"
#include "optabs.h"
#include "cfgloop.h"

/* Lowering of OpenMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for parallel regions which are then moved to a new
   function, to be invoked by the thread library.  */
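
/* As a rough illustration (the exact form varies with clauses and
   later passes), a directive such as:

	#pragma omp parallel shared (a)
	  bar (a);

   is outlined into a child function and lowered into something like:

	.omp_data_o.a = a;
	__builtin_GOMP_parallel_start (foo._omp_fn.0, &.omp_data_o, 0);
	foo._omp_fn.0 (&.omp_data_o);
	__builtin_GOMP_parallel_end ();

   with the child function reading A back through the received
   .omp_data_i pointer.  */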

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

typedef struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;
} omp_context;


struct omp_for_data_loop
{
  tree v, n1, n2, step;
  enum tree_code cond_code;
};

/* A structure describing the main elements of a parallel loop.  */
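/* For example (informally), for

	for (i = 0; i < n; i += 2)

   with schedule (dynamic, 4), extract_omp_for_data below records
   loop = { v = i, n1 = 0, n2 = n, step = 2, cond_code = LT_EXPR },
   sched_kind = OMP_CLAUSE_SCHEDULE_DYNAMIC and chunk_size = 4.  */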

struct omp_for_data
{
  struct omp_for_data_loop loop;
  tree chunk_size;
  gimple for_stmt;
  tree pre, iter_type;
  int collapse;
  bool have_nowait, have_ordered;
  enum omp_clause_schedule_kind sched_kind;
  struct omp_for_data_loop *loops;
};


static splay_tree all_contexts;
static int taskreg_nesting_level;
struct omp_region *root_omp_region;
static bitmap task_shared_vars;

static void scan_omp (gimple_seq, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Find an OpenMP clause of type KIND within CLAUSES.  */

tree
find_omp_clause (tree clauses, enum omp_clause_code kind)
{
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == kind)
      return clauses;

  return NULL_TREE;
}

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
	 || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if REGION is a combined parallel+workshare region.  */

static inline bool
is_combined_parallel (struct omp_region *region)
{
  return region->is_combined_parallel;
}

/* Extract the header elements of parallel loop FOR_STMT and store
   them into *FD.  */
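
/* A note on the normalization performed below: only LT_EXPR and
   GT_EXPR conditions survive.  E.g. a source loop

	for (i = 0; i <= n; i += 4)

   is recorded as n1 = 0, n2 = n + 1, cond_code = LT_EXPR, step = 4,
   and a MINUS_EXPR increment is recorded as a negated step.  */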

static void
extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
		      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;
  location_t loc = gimple_location (for_stmt);

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  fd->collapse = gimple_omp_for_collapse (for_stmt);
  if (fd->collapse > 1)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  fd->have_nowait = fd->have_ordered = false;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->chunk_size = NULL_TREE;
  collapse_iter = NULL;
  collapse_count = NULL;

  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
	fd->have_nowait = true;
	break;
      case OMP_CLAUSE_ORDERED:
	fd->have_ordered = true;
	break;
      case OMP_CLAUSE_SCHEDULE:
	fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
	fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
	break;
      case OMP_CLAUSE_COLLAPSE:
	if (fd->collapse > 1)
	  {
	    collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
	    collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
	  }
      default:
	break;
      }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
	 static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered
	  || fd->collapse > 1)
	fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
			 ? integer_zero_node : integer_one_node;
    }

  for (i = 0; i < fd->collapse; i++)
    {
      if (fd->collapse == 1)
	loop = &fd->loop;
      else if (loops != NULL)
	loop = loops + i;
      else
	loop = &dummy_loop;


      loop->v = gimple_omp_for_index (for_stmt, i);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = gimple_omp_for_initial (for_stmt, i);

      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
      loop->n2 = gimple_omp_for_final (for_stmt, i);
      switch (loop->cond_code)
	{
	case LT_EXPR:
	case GT_EXPR:
	  break;
	case LE_EXPR:
	  if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
	    loop->n2 = fold_build2_loc (loc,
					POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
					loop->n2, size_one_node);
	  else
	    loop->n2 = fold_build2_loc (loc,
					PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
					build_int_cst (TREE_TYPE (loop->n2), 1));
	  loop->cond_code = LT_EXPR;
	  break;
	case GE_EXPR:
	  if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
	    loop->n2 = fold_build2_loc (loc,
					POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
					loop->n2, size_int (-1));
	  else
	    loop->n2 = fold_build2_loc (loc,
					MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
					build_int_cst (TREE_TYPE (loop->n2), 1));
	  loop->cond_code = GT_EXPR;
	  break;
	default:
	  gcc_unreachable ();
	}

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      switch (TREE_CODE (t))
	{
	case PLUS_EXPR:
	case POINTER_PLUS_EXPR:
	  loop->step = TREE_OPERAND (t, 1);
	  break;
	case MINUS_EXPR:
	  loop->step = TREE_OPERAND (t, 1);
	  loop->step = fold_build1_loc (loc,
					NEGATE_EXPR, TREE_TYPE (loop->step),
					loop->step);
	  break;
	default:
	  gcc_unreachable ();
	}

      if (iter_type != long_long_unsigned_type_node)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
	    iter_type = long_long_unsigned_type_node;
	  else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
		   && TYPE_PRECISION (TREE_TYPE (loop->v))
		      >= TYPE_PRECISION (iter_type))
	    {
	      tree n;

	      if (loop->cond_code == LT_EXPR)
		n = fold_build2_loc (loc,
				     PLUS_EXPR, TREE_TYPE (loop->v),
				     loop->n2, loop->step);
	      else
		n = loop->n1;
	      if (TREE_CODE (n) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
		iter_type = long_long_unsigned_type_node;
	    }
	  else if (TYPE_PRECISION (TREE_TYPE (loop->v))
		   > TYPE_PRECISION (iter_type))
	    {
	      tree n1, n2;

	      if (loop->cond_code == LT_EXPR)
		{
		  n1 = loop->n1;
		  n2 = fold_build2_loc (loc,
					PLUS_EXPR, TREE_TYPE (loop->v),
					loop->n2, loop->step);
		}
	      else
		{
		  n1 = fold_build2_loc (loc,
					MINUS_EXPR, TREE_TYPE (loop->v),
					loop->n2, loop->step);
		  n2 = loop->n1;
		}
	      if (TREE_CODE (n1) != INTEGER_CST
		  || TREE_CODE (n2) != INTEGER_CST
		  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
		  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
		iter_type = long_long_unsigned_type_node;
	    }
	}

      if (collapse_count && *collapse_count == NULL)
	{
	  if ((i == 0 || count != NULL_TREE)
	      && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
	      && TREE_CONSTANT (loop->n1)
	      && TREE_CONSTANT (loop->n2)
	      && TREE_CODE (loop->step) == INTEGER_CST)
	    {
	      tree itype = TREE_TYPE (loop->v);

	      if (POINTER_TYPE_P (itype))
		itype
		  = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
	      t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
	      t = fold_build2_loc (loc,
				   PLUS_EXPR, itype,
				   fold_convert_loc (loc, itype, loop->step), t);
	      t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
				   fold_convert_loc (loc, itype, loop->n2));
	      t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
				   fold_convert_loc (loc, itype, loop->n1));
	      if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
				     fold_build1_loc (loc, NEGATE_EXPR, itype, t),
				     fold_build1_loc (loc, NEGATE_EXPR, itype,
						      fold_convert_loc (loc, itype,
									loop->step)));
	      else
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
				     fold_convert_loc (loc, itype, loop->step));
	      t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
	      if (count != NULL_TREE)
		count = fold_build2_loc (loc,
					 MULT_EXPR, long_long_unsigned_type_node,
					 count, t);
	      else
		count = t;
	      if (TREE_CODE (count) != INTEGER_CST)
		count = NULL_TREE;
	    }
	  else
	    count = NULL_TREE;
	}
    }

  if (count)
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
	iter_type = long_long_unsigned_type_node;
      else
	iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
	*collapse_count = fold_convert_loc (loc, iter_type, count);
      else
	*collapse_count = create_tmp_var (iter_type, ".count");
    }

  if (fd->collapse > 1)
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
}


/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
   is the immediate dominator of PAR_ENTRY_BB, return true if there
   are no data dependencies that would prevent expanding the parallel
   directive at PAR_ENTRY_BB as a combined parallel+workshare region.

   When expanding a combined parallel+workshare region, the call to
   the child function may need additional arguments in the case of
   GIMPLE_OMP_FOR regions.  In some cases, these arguments are
   computed out of variables passed in from the parent to the child
   via 'struct .omp_data_s'.  For instance:

	#pragma omp parallel for schedule (guided, i * 4)
	for (j ...)

   Is lowered into:

	# BLOCK 2 (PAR_ENTRY_BB)
	.omp_data_o.i = i;
	#pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)

	# BLOCK 3 (WS_ENTRY_BB)
	.omp_data_i = &.omp_data_o;
	D.1667 = .omp_data_i->i;
	D.1598 = D.1667 * 4;
	#pragma omp for schedule (guided, D.1598)

   When we outline the parallel region, the call to the child function
   'bar.omp_fn.0' will need the value D.1598 in its argument list, but
   that value is computed *after* the call site.  So, in principle we
   cannot do the transformation.

   To see whether the code in WS_ENTRY_BB blocks the combined
   parallel+workshare call, we collect all the variables used in the
   GIMPLE_OMP_FOR header and check whether they appear on the LHS of
   any statement in WS_ENTRY_BB.  If so, then we cannot emit the
   combined call.

   FIXME.  If we had the SSA form built at this point, we could merely
   hoist the code in block 3 into block 2 and be done with it.  But at
   this point we don't have dataflow information and though we could
   hack something up here, it is really not worth the aggravation.  */

static bool
workshare_safe_to_combine_p (basic_block ws_entry_bb)
{
  struct omp_for_data fd;
  gimple ws_stmt = last_stmt (ws_entry_bb);

  if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    return true;

  gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);

  extract_omp_for_data (ws_stmt, &fd, NULL);

  if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
    return false;
  if (fd.iter_type != long_integer_type_node)
    return false;

  /* FIXME.  We give up too easily here.  If any of these arguments
     are not constants, they will likely involve variables that have
     been mapped into fields of .omp_data_s for sharing with the child
     function.  With appropriate data flow, it would be possible to
     see through this.  */
  if (!is_gimple_min_invariant (fd.loop.n1)
      || !is_gimple_min_invariant (fd.loop.n2)
      || !is_gimple_min_invariant (fd.loop.step)
      || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
    return false;

  return true;
}


/* Collect additional arguments needed to emit a combined
   parallel+workshare call.  WS_STMT is the workshare directive being
   expanded.  */
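
/* For instance (a sketch of the code below): for

	#pragma omp parallel for schedule (dynamic, 4)
	for (j = 0; j < n; j += 2)

   the returned vector holds { (long) 0, (long) n, (long) 2, (long) 4 },
   i.e. the extra arguments taken by the combined library entry points
   (GOMP_parallel_loop_* and friends); for parallel sections it holds
   just the number of sections.  */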

static VEC(tree,gc) *
get_ws_args_for (gimple ws_stmt)
{
  tree t;
  location_t loc = gimple_location (ws_stmt);
  VEC(tree,gc) *ws_args;

  if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
    {
      struct omp_for_data fd;

      extract_omp_for_data (ws_stmt, &fd, NULL);

      ws_args = VEC_alloc (tree, gc, 3 + (fd.chunk_size != 0));

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n1);
      VEC_quick_push (tree, ws_args, t);

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n2);
      VEC_quick_push (tree, ws_args, t);

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
      VEC_quick_push (tree, ws_args, t);

      if (fd.chunk_size)
	{
	  t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
	  VEC_quick_push (tree, ws_args, t);
	}

      return ws_args;
    }
  else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    {
      /* Number of sections is equal to the number of edges from the
	 GIMPLE_OMP_SECTIONS_SWITCH statement, except for the one to
	 the exit of the sections region.  */
      basic_block bb = single_succ (gimple_bb (ws_stmt));
      t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
      ws_args = VEC_alloc (tree, gc, 1);
      VEC_quick_push (tree, ws_args, t);
      return ws_args;
    }

  gcc_unreachable ();
}


/* Discover whether REGION is a combined parallel+workshare region.  */

static void
determine_parallel_type (struct omp_region *region)
{
  basic_block par_entry_bb, par_exit_bb;
  basic_block ws_entry_bb, ws_exit_bb;

  if (region == NULL || region->inner == NULL
      || region->exit == NULL || region->inner->exit == NULL
      || region->inner->cont == NULL)
    return;

  /* We only support parallel+for and parallel+sections.  */
  if (region->type != GIMPLE_OMP_PARALLEL
      || (region->inner->type != GIMPLE_OMP_FOR
	  && region->inner->type != GIMPLE_OMP_SECTIONS))
    return;

  /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
     WS_EXIT_BB -> PAR_EXIT_BB.  */
  par_entry_bb = region->entry;
  par_exit_bb = region->exit;
  ws_entry_bb = region->inner->entry;
  ws_exit_bb = region->inner->exit;

  if (single_succ (par_entry_bb) == ws_entry_bb
      && single_succ (ws_exit_bb) == par_exit_bb
      && workshare_safe_to_combine_p (ws_entry_bb)
      && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
	  || (last_and_only_stmt (ws_entry_bb)
	      && last_and_only_stmt (par_exit_bb))))
    {
      gimple ws_stmt = last_stmt (ws_entry_bb);

      if (region->inner->type == GIMPLE_OMP_FOR)
	{
	  /* If this is a combined parallel loop, we need to determine
	     whether or not to use the combined library calls.  There
	     are two cases where we do not apply the transformation:
	     static loops and any kind of ordered loop.  In the first
	     case, we already open code the loop so there is no need
	     to do anything else.  In the latter case, the combined
	     parallel loop call would still need extra synchronization
	     to implement ordered semantics, so there would not be any
	     gain in using the combined call.  */
	  tree clauses = gimple_omp_for_clauses (ws_stmt);
	  tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
	  if (c == NULL
	      || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
	      || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
	    {
	      region->is_combined_parallel = false;
	      region->inner->is_combined_parallel = false;
	      return;
	    }
	}

      region->is_combined_parallel = true;
      region->inner->is_combined_parallel = true;
      region->ws_args = get_ws_args_for (ws_stmt);
    }
}


/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Return true if DECL is a reference type.  */

static inline bool
is_reference (tree decl)
{
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}

/* Lookup variables in the decl or field splay trees.  The "maybe" form
   allows the variable not to have been entered; otherwise we assert
   that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map,
			 (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return n ? (tree) n->value : NULL_TREE;
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */
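
/* Informally: a small read-only scalar can be copied into the
   .omp_data_s record and copied back out (copy-in/copy-out), whereas
   aggregates, addressable variables, and anything whose location may
   be visible to another scope must be referenced through a pointer
   field instead -- see the individual cases below.  */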

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (!TREE_READONLY (decl) && shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		return true;
	    }
	}

      /* For tasks avoid using copy-in/out, unless they are readonly
	 (in which case just copy-in is used).  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (!TREE_READONLY (decl) && is_task_ctx (shared_ctx))
	{
	  tree outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;

  return copy;
}

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build tree nodes to access the field for VAR on the receiver side.  */
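/* The reference built here has the shape .omp_data_i->var, or
   *(.omp_data_i->var) when the field holds the address of the real
   object (BY_REF) -- cf. the build_simple_mem_ref calls below.  */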

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL);
  if (by_ref)
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if (ctx->outer)
    x = lookup_decl (var, ctx->outer);
  else if (is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else
    gcc_unreachable ();

  if (is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  tree field = lookup_sfield (var, ctx);
  return build3 (COMPONENT_REF, TREE_TYPE (field),
		 ctx->sender_decl, field, NULL);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
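/* MASK is a bitmask (per the code below): bit 0 requests a field in
   CTX->RECORD_TYPE, recorded in FIELD_MAP; bit 1 requests one in the
   task sender record CTX->SRECORD_TYPE, recorded in SFIELD_MAP.  With
   both bits set, a parallel sfield is created whenever srecord_type
   already exists.  */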

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;

  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));

  type = TREE_TYPE (var);
  if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      DECL_ALIGN (field) = DECL_ALIGN (var);
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    DECL_ALIGN (field) = TYPE_ALIGN (type);

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  DECL_ALIGN (sfield) = DECL_ALIGN (field);
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (var),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, (splay_tree_key) var,
		       (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
		       (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}


/* Return the parallel region associated with STMT.  */

/* Debugging dumps for parallel regions.  */
void dump_omp_region (FILE *, struct omp_region *, int);
void debug_omp_region (struct omp_region *);
void debug_all_omp_regions (void);

/* Dump the parallel region tree rooted at REGION.  */

void
dump_omp_region (FILE *file, struct omp_region *region, int indent)
{
  fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
	   gimple_code_name[region->type]);

  if (region->inner)
    dump_omp_region (file, region->inner, indent + 4);

  if (region->cont)
    {
      fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
	       region->cont->index);
    }

  if (region->exit)
    fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
	     region->exit->index);
  else
    fprintf (file, "%*s[no exit marker]\n", indent, "");

  if (region->next)
    dump_omp_region (file, region->next, indent);
}

DEBUG_FUNCTION void
debug_omp_region (struct omp_region *region)
{
  dump_omp_region (stderr, region, 0);
}

DEBUG_FUNCTION void
debug_all_omp_regions (void)
{
  dump_omp_region (stderr, root_omp_region, 0);
}

/* Create a new parallel region starting at STMT inside region PARENT.  */

struct omp_region *
new_omp_region (basic_block bb, enum gimple_code type,
		struct omp_region *parent)
{
  struct omp_region *region = XCNEW (struct omp_region);

  region->outer = parent;
  region->entry = bb;
  region->type = type;

  if (parent)
    {
      /* This is a nested region.  Add it to the list of inner
	 regions in PARENT.  */
      region->next = parent->inner;
      parent->inner = region;
    }
  else
    {
      /* This is a toplevel region.  Add it to the list of toplevel
	 regions in ROOT_OMP_REGION.  */
      region->next = root_omp_region;
      root_omp_region = region;
    }

  return region;
}

/* Release the memory associated with the region tree rooted at REGION.  */

static void
free_omp_region_1 (struct omp_region *region)
{
  struct omp_region *i, *n;

  for (i = region->inner; i ; i = n)
    {
      n = i->next;
      free_omp_region_1 (i);
    }

  free (region);
}

/* Release the memory for the entire omp region tree.  */

void
free_omp_regions (void)
{
  struct omp_region *r, *n;
  for (r = root_omp_region; r ; r = n)
    {
      n = r->next;
      free_omp_region_1 (r);
    }
  root_omp_region = NULL;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_get_node (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = pointer_map_create ();

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gimple task_stmt)
{
  struct function *child_cfun;
  tree child_fn, old_fn;
  gimple_seq seq, new_seq;
  gimple bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);

  /* Inform the callgraph about the new function.  */
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties
    = cfun->curr_properties;

  old_fn = current_function_decl;
  push_cfun (child_cfun);
  current_function_decl = child_fn;
  bind = gimplify_body (&DECL_SAVED_TREE (child_fn), child_fn, false);
  seq = gimple_seq_alloc ();
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();
  current_function_decl = old_fn;

  cgraph_add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  pointer_map_destroy (ctx->cb.decl_map);

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (ctx->stmt);

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */
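
/* E.g. (a sketch of the dispatch below): "shared (x)" on a parallel
   installs a field for X in .omp_data_s plus a local replacement decl,
   unless X is a read-only scalar (which is demoted to firstprivate) or
   a global variable (which the receiver side can use directly).  */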

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  by_ref = use_pointer_for_field (decl, ctx);
	  if (! TREE_READONLY (decl)
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || is_reference (decl))
	    {
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (scan_array_reductions)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  scan_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	  scan_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	}
      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	scan_omp (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
}

/* Create a new name for the omp child function.  Returns an identifier.  */

static GTY(()) unsigned int tmp_ompfn_id_num;

static tree
create_omp_child_function_name (bool task_copy)
{
  return (clone_function_name (current_function_decl,
			       task_copy ? "_omp_cpyfn" : "_omp_fn"));
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */
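
/* The resulting decl has the signature void fn (void *.omp_data_i),
   or void fn (void *, void *) for a task copy function -- cf. the
   build_function_type_list calls below.  */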

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt),
		     FUNCTION_DECL, name, type);

  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NAMELESS (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  pop_cfun ();
}


/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gimple stmt = gsi_stmt (*gsi);

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && find_omp_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  ctx = new_omp_context (stmt, outer_ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
}

a68ab351
JJ
1662/* Scan an OpenMP task directive. */
1663
1664static void
726a989a 1665scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
a68ab351
JJ
1666{
1667 omp_context *ctx;
726a989a
RB
1668 tree name, t;
1669 gimple stmt = gsi_stmt (*gsi);
db3927fb 1670 location_t loc = gimple_location (stmt);
a68ab351
JJ
1671
1672 /* Ignore task directives with empty bodies. */
1673 if (optimize > 0
726a989a 1674 && empty_body_p (gimple_omp_body (stmt)))
a68ab351 1675 {
726a989a 1676 gsi_replace (gsi, gimple_build_nop (), false);
a68ab351
JJ
1677 return;
1678 }
1679
726a989a 1680 ctx = new_omp_context (stmt, outer_ctx);
a68ab351
JJ
1681 if (taskreg_nesting_level > 1)
1682 ctx->is_nested = true;
1683 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1684 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1685 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1686 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1687 name = build_decl (gimple_location (stmt),
1688 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1689 DECL_ARTIFICIAL (name) = 1;
1690 DECL_NAMELESS (name) = 1;
a68ab351
JJ
1691 TYPE_NAME (ctx->record_type) = name;
1692 create_omp_child_function (ctx, false);
726a989a 1693 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
a68ab351 1694
726a989a 1695 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
a68ab351
JJ
1696
1697 if (ctx->srecord_type)
1698 {
1699 name = create_tmp_var_name (".omp_data_a");
c2255bc4
AH
1700 name = build_decl (gimple_location (stmt),
1701 TYPE_DECL, name, ctx->srecord_type);
cd3f04c8
JJ
1702 DECL_ARTIFICIAL (name) = 1;
1703 DECL_NAMELESS (name) = 1;
a68ab351
JJ
1704 TYPE_NAME (ctx->srecord_type) = name;
1705 create_omp_child_function (ctx, true);
1706 }
1707
726a989a 1708 scan_omp (gimple_omp_body (stmt), ctx);
a68ab351
JJ
1709
1710 if (TYPE_FIELDS (ctx->record_type) == NULL)
1711 {
1712 ctx->record_type = ctx->receiver_decl = NULL;
726a989a
RB
1713 t = build_int_cst (long_integer_type_node, 0);
1714 gimple_omp_task_set_arg_size (stmt, t);
1715 t = build_int_cst (long_integer_type_node, 1);
1716 gimple_omp_task_set_arg_align (stmt, t);
a68ab351
JJ
1717 }
1718 else
1719 {
1720 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1721 /* Move VLA fields to the end. */
1722 p = &TYPE_FIELDS (ctx->record_type);
1723 while (*p)
1724 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1725 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1726 {
1727 *q = *p;
1728 *p = TREE_CHAIN (*p);
1729 TREE_CHAIN (*q) = NULL_TREE;
1730 q = &TREE_CHAIN (*q);
1731 }
1732 else
910ad8de 1733 p = &DECL_CHAIN (*p);
a68ab351
JJ
1734 *p = vla_fields;
1735 layout_type (ctx->record_type);
1736 fixup_child_record_type (ctx);
1737 if (ctx->srecord_type)
1738 layout_type (ctx->srecord_type);
db3927fb 1739 t = fold_convert_loc (loc, long_integer_type_node,
a68ab351 1740 TYPE_SIZE_UNIT (ctx->record_type));
726a989a
RB
1741 gimple_omp_task_set_arg_size (stmt, t);
1742 t = build_int_cst (long_integer_type_node,
a68ab351 1743 TYPE_ALIGN_UNIT (ctx->record_type));
726a989a 1744 gimple_omp_task_set_arg_align (stmt, t);
a68ab351
JJ
1745 }
1746}
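
/* Illustration (not part of the pass, an editor's sketch): for a task
   that needs nothing passed to it, e.g.

     #pragma omp task
       foo ();

   the record type ends up with no fields, so ARG_SIZE and ARG_ALIGN
   are set to 0 and 1 above and GOMP_task need not allocate an
   argument block at all.  When there are fields, the constant-sized
   ones are laid out first and the VLA fields are moved to the end,
   so the fixed part of the task argument block has a layout known at
   compile time.  */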

/* Scan an OpenMP loop directive.  */

static void
scan_omp_for (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;

  ctx = new_omp_context (stmt, outer_ctx);

  scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);

  scan_omp (gimple_omp_for_pre_body (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body (stmt), ctx);
}

/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;

  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
  scan_omp (gimple_omp_body (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (gimple_location (stmt),
                     TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
  scan_omp (gimple_omp_body (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}

/* Check OpenMP nesting restrictions.  */
static void
check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_CALL:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_FOR:
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_ORDERED:
          case GIMPLE_OMP_MASTER:
          case GIMPLE_OMP_TASK:
            if (is_gimple_call (stmt))
              {
                warning (0, "barrier region may not be closely nested inside "
                            "of work-sharing, critical, ordered, master or "
                            "explicit task region");
                return;
              }
            warning (0, "work-sharing region may not be closely nested inside "
                        "of work-sharing, critical, ordered, master or "
                        "explicit task region");
            return;
          case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_FOR:
          case GIMPLE_OMP_SECTIONS:
          case GIMPLE_OMP_SINGLE:
          case GIMPLE_OMP_TASK:
            warning (0, "master region may not be closely nested inside "
                        "of work-sharing or explicit task region");
            return;
          case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_ORDERED:
      for (; ctx != NULL; ctx = ctx->outer)
        switch (gimple_code (ctx->stmt))
          {
          case GIMPLE_OMP_CRITICAL:
          case GIMPLE_OMP_TASK:
            warning (0, "ordered region may not be closely nested inside "
                        "of critical or explicit task region");
            return;
          case GIMPLE_OMP_FOR:
            if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
                                 OMP_CLAUSE_ORDERED) == NULL)
              warning (0, "ordered region must be closely nested inside "
                          "a loop region with an ordered clause");
            return;
          case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
      break;
    case GIMPLE_OMP_CRITICAL:
      for (; ctx != NULL; ctx = ctx->outer)
        if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
            && (gimple_omp_critical_name (stmt)
                == gimple_omp_critical_name (ctx->stmt)))
          {
            warning (0, "critical region may not be nested inside a critical "
                        "region with the same name");
            return;
          }
      break;
    default:
      break;
    }
}
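
/* Illustration (not part of the pass, an editor's sketch): this input
   triggers the work-sharing diagnostic above, because the inner loop
   is closely nested inside another work-sharing region with no
   intervening parallel:

     #pragma omp for
     for (i = 0; i < n; i++)
       {
         #pragma omp for          <-- warned about
         for (j = 0; j < n; j++)
           body (i, j);
       }

   Wrapping the inner loop in its own #pragma omp parallel makes the
   walk above stop at GIMPLE_OMP_PARALLEL and no warning is given.  */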

/* Helper function for scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OpenMP directives in TP.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
        *tp = remap_decl (t, &ctx->cb);
      break;

    default:
      if (ctx && TYPE_P (t))
        *tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
        {
          *walk_subtrees = 1;
          if (ctx)
            {
              tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
              if (tem != TREE_TYPE (t))
                {
                  if (TREE_CODE (t) == INTEGER_CST)
                    *tp = build_int_cst_wide (tem,
                                              TREE_INT_CST_LOW (t),
                                              TREE_INT_CST_HIGH (t));
                  else
                    TREE_TYPE (t) = tem;
                }
            }
        }
      break;
    }

  return NULL_TREE;
}


/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OpenMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                 struct walk_stmt_info *wi)
{
  gimple stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the OpenMP nesting restrictions.  */
  if (ctx != NULL)
    {
      if (is_gimple_omp (stmt))
        check_omp_nesting_restrictions (stmt, ctx);
      else if (is_gimple_call (stmt))
        {
          tree fndecl = gimple_call_fndecl (stmt);
          if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
            check_omp_nesting_restrictions (stmt, ctx);
        }
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      scan_omp_for (stmt, ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (stmt, ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (stmt, ctx);
      break;

    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
        tree var;

        *handled_ops_p = false;
        if (ctx)
          for (var = gimple_bind_vars (stmt); var ; var = DECL_CHAIN (var))
            insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}


/* Scan all the statements starting at the current statement.  CTX
   contains context information about the OpenMP directives and
   clauses found during the scan.  */

static void
scan_omp (gimple_seq body, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  saved_location = input_location;
  walk_gimple_seq (body, scan_omp_1_stmt, scan_omp_1_op, &wi);
  input_location = saved_location;
}

/* Re-gimplification and code generation routines.  */

/* Build a call to GOMP_barrier.  */

static tree
build_omp_barrier (void)
{
  return build_call_expr (built_in_decls[BUILT_IN_GOMP_BARRIER], 0);
}

/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (gimple stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}

/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs shown for clarity):

     iD.1562 = 0;
     #omp parallel shared(iD.1562)          -> outer parallel
       iD.1562 = iD.1562 + 1;

       #omp parallel shared (iD.1562)       -> inner parallel
         iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

     outer parallel      .omp_data_s.1.i -> iD.1562
     inner parallel      .omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

     iD.1562 = 0;
     .omp_data_o.1.i = iD.1562;
     #omp parallel shared(iD.1562)          -> outer parallel
       .omp_data_i.1 = &.omp_data_o.1
       .omp_data_i.1->i = .omp_data_i.1->i + 1;

       .omp_data_o.2.i = iD.1562;           -> **
       #omp parallel shared(iD.1562)        -> inner parallel
         .omp_data_i.2 = &.omp_data_o.2
         .omp_data_i.2->i = .omp_data_i.2->i - 1;

   ** This is a problem.  The symbol iD.1562 cannot be referenced
      inside the body of the outer parallel region.  But since we are
      emitting this copy operation while expanding the inner parallel
      directive, we need to access the CTX structure of the outer
      parallel directive to get the correct mapping:

        .omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (!ctx->is_nested || t || is_global_var (decl));

  return t ? t : decl;
}


/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
   in outer contexts.  */

static tree
maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t = NULL;
  omp_context *up;

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  return t ? t : decl;
}

/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  location_t loc = OMP_CLAUSE_LOCATION (clause);
  switch (OMP_CLAUSE_REDUCTION_CODE (clause))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          REAL_VALUE_TYPE max, min;
          if (HONOR_INFINITIES (TYPE_MODE (type)))
            {
              real_inf (&max);
              real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
            }
          else
            real_maxval (&min, 1, TYPE_MODE (type));
          return build_real (type, min);
        }
      else
        {
          gcc_assert (INTEGRAL_TYPE_P (type));
          return TYPE_MIN_VALUE (type);
        }

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
        {
          REAL_VALUE_TYPE max;
          if (HONOR_INFINITIES (TYPE_MODE (type)))
            real_inf (&max);
          else
            real_maxval (&max, 0, TYPE_MODE (type));
          return build_real (type, max);
        }
      else
        {
          gcc_assert (INTEGRAL_TYPE_P (type));
          return TYPE_MAX_VALUE (type);
        }

    default:
      gcc_unreachable ();
    }
}
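
/* Illustration (not part of the pass, an editor's sketch): for

     #pragma omp parallel for reduction (+:sum)

   each thread's private SUM starts at the identity value built above:
   0 for +, -, |, ^ and ||, 1 for * and &&, ~0 for &, and the most
   negative (resp. most positive) representable value of the type for
   max (resp. min), so that combining the partial results leaves the
   final answer unchanged.  */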

/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
                         omp_context *ctx)
{
  gimple_stmt_iterator diter;
  tree c, dtor, copyin_seq, x, ptr;
  bool copyin_by_ref = false;
  bool lastprivate_firstprivate = false;
  int pass;

  *dlist = gimple_seq_alloc ();
  diter = gsi_start (*dlist);
  copyin_seq = NULL;

  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  */
  for (pass = 0; pass < 2; ++pass)
    {
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
        {
          enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
          tree var, new_var;
          bool by_ref;
          location_t clause_loc = OMP_CLAUSE_LOCATION (c);

          switch (c_kind)
            {
            case OMP_CLAUSE_PRIVATE:
              if (OMP_CLAUSE_PRIVATE_DEBUG (c))
                continue;
              break;
            case OMP_CLAUSE_SHARED:
              if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
                {
                  gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
                  continue;
                }
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_COPYIN:
            case OMP_CLAUSE_REDUCTION:
              break;
            case OMP_CLAUSE_LASTPRIVATE:
              if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
                {
                  lastprivate_firstprivate = true;
                  if (pass != 0)
                    continue;
                }
              break;
            default:
              continue;
            }

          new_var = var = OMP_CLAUSE_DECL (c);
          if (c_kind != OMP_CLAUSE_COPYIN)
            new_var = lookup_decl (var, ctx);

          if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
            {
              if (pass != 0)
                continue;
            }
          else if (is_variable_sized (var))
            {
              /* For variable sized types, we need to allocate the
                 actual storage here.  Call alloca and store the
                 result in the pointer decl that we created elsewhere.  */
              if (pass == 0)
                continue;

              if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
                {
                  gimple stmt;
                  tree tmp;

                  ptr = DECL_VALUE_EXPR (new_var);
                  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
                  ptr = TREE_OPERAND (ptr, 0);
                  gcc_assert (DECL_P (ptr));
                  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));

                  /* void *tmp = __builtin_alloca */
                  stmt
                    = gimple_build_call (built_in_decls[BUILT_IN_ALLOCA], 1, x);
                  tmp = create_tmp_var_raw (ptr_type_node, NULL);
                  gimple_add_tmp_var (tmp);
                  gimple_call_set_lhs (stmt, tmp);

                  gimple_seq_add_stmt (ilist, stmt);

                  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
                  gimplify_assign (ptr, x, ilist);
                }
            }
          else if (is_reference (var))
            {
              /* For references that are being privatized for Fortran,
                 allocate new backing storage for the new pointer
                 variable.  This allows us to avoid changing all the
                 code that expects a pointer to something that expects
                 a direct variable.  Note that this doesn't apply to
                 C++, since reference types are disallowed in data
                 sharing clauses there, except for NRV optimized
                 return values.  */
              if (pass == 0)
                continue;

              x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
              if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
                {
                  x = build_receiver_ref (var, false, ctx);
                  x = build_fold_addr_expr_loc (clause_loc, x);
                }
              else if (TREE_CONSTANT (x))
                {
                  const char *name = NULL;
                  if (DECL_NAME (var))
                    name = IDENTIFIER_POINTER (DECL_NAME (new_var));

                  x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
                                          name);
                  gimple_add_tmp_var (x);
                  TREE_ADDRESSABLE (x) = 1;
                  x = build_fold_addr_expr_loc (clause_loc, x);
                }
              else
                {
                  x = build_call_expr_loc (clause_loc,
                                           built_in_decls[BUILT_IN_ALLOCA],
                                           1, x);
                }

              x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
              gimplify_assign (new_var, x, ilist);

              new_var = build_simple_mem_ref_loc (clause_loc, new_var);
            }
          else if (c_kind == OMP_CLAUSE_REDUCTION
                   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
            {
              if (pass == 0)
                continue;
            }
          else if (pass != 0)
            continue;

          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
              /* Shared global vars are just accessed directly.  */
              if (is_global_var (new_var))
                break;
              /* Set up the DECL_VALUE_EXPR for shared variables now.  This
                 needs to be delayed until after fixup_child_record_type so
                 that we get the correct type during the dereference.  */
              by_ref = use_pointer_for_field (var, ctx);
              x = build_receiver_ref (var, by_ref, ctx);
              SET_DECL_VALUE_EXPR (new_var, x);
              DECL_HAS_VALUE_EXPR_P (new_var) = 1;

              /* ??? If VAR is not passed by reference, and the variable
                 hasn't been initialized yet, then we'll get a warning for
                 the store into the omp_data_s structure.  Ideally, we'd be
                 able to notice this and not store anything at all, but
                 we're generating code too early.  Suppress the warning.  */
              if (!by_ref)
                TREE_NO_WARNING (var) = 1;
              break;

            case OMP_CLAUSE_LASTPRIVATE:
              if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
                break;
              /* FALLTHRU */

            case OMP_CLAUSE_PRIVATE:
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
                x = build_outer_var_ref (var, ctx);
              else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
                {
                  if (is_task_ctx (ctx))
                    x = build_receiver_ref (var, false, ctx);
                  else
                    x = build_outer_var_ref (var, ctx);
                }
              else
                x = NULL;
              x = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
              if (x)
                gimplify_and_add (x, ilist);
              /* FALLTHRU */

            do_dtor:
              x = lang_hooks.decls.omp_clause_dtor (c, new_var);
              if (x)
                {
                  gimple_seq tseq = NULL;

                  dtor = x;
                  gimplify_stmt (&dtor, &tseq);
                  gsi_insert_seq_before (&diter, tseq, GSI_SAME_STMT);
                }
              break;

            case OMP_CLAUSE_FIRSTPRIVATE:
              if (is_task_ctx (ctx))
                {
                  if (is_reference (var) || is_variable_sized (var))
                    goto do_dtor;
                  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
                                                                          ctx))
                           || use_pointer_for_field (var, NULL))
                    {
                      x = build_receiver_ref (var, false, ctx);
                      SET_DECL_VALUE_EXPR (new_var, x);
                      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
                      goto do_dtor;
                    }
                }
              x = build_outer_var_ref (var, ctx);
              x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
              gimplify_and_add (x, ilist);
              goto do_dtor;
              break;

            case OMP_CLAUSE_COPYIN:
              by_ref = use_pointer_for_field (var, NULL);
              x = build_receiver_ref (var, by_ref, ctx);
              x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
              append_to_statement_list (x, &copyin_seq);
              copyin_by_ref |= by_ref;
              break;

            case OMP_CLAUSE_REDUCTION:
              if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
                {
                  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
                  x = build_outer_var_ref (var, ctx);

                  if (is_reference (var))
                    x = build_fold_addr_expr_loc (clause_loc, x);
                  SET_DECL_VALUE_EXPR (placeholder, x);
                  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
                  lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
                  gimple_seq_add_seq (ilist,
                                      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
                  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
                  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
                }
              else
                {
                  x = omp_reduction_init (c, TREE_TYPE (new_var));
                  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
                  gimplify_assign (new_var, x, ilist);
                }
              break;

            default:
              gcc_unreachable ();
            }
        }
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
      x = build2 (NE_EXPR, boolean_type_node, x,
                  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  Similarly for variables in both firstprivate and
     lastprivate clauses we need to ensure the lastprivate copying
     happens after firstprivate copying in all threads.  */
  if (copyin_by_ref || lastprivate_firstprivate)
    gimplify_and_add (build_omp_barrier (), ilist);
}
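
/* Illustration (not part of the pass, an editor's sketch with made-up
   temporary names): for

     #pragma omp parallel firstprivate (x)

   the receiver side starts with roughly

     x.1 = .omp_data_i->x;     <- copy-constructed from the parent's value
     ... body using x.1 ...

   while a plain private (x) only gets a default-constructed x.1.  The
   copyin sequence is guarded above so that only non-master threads
   execute it (the master would self-copy), and a trailing barrier is
   emitted when any copyin is passed by reference.  */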

/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
                           omp_context *ctx)
{
  tree x, c, label = NULL;
  bool par_clauses = false;

  /* Early exit if there are no lastprivate clauses.  */
  clauses = find_omp_clause (clauses, OMP_CLAUSE_LASTPRIVATE);
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
         with its parallel.  In that case, look for the clauses on the
         parallel statement itself.  */
      if (is_parallel_ctx (ctx))
        return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
        return;

      clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
                                 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
        return;
      par_clauses = true;
    }

  if (predicate)
    {
      gimple stmt;
      tree label_true, arm1, arm2;

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      arm1 = TREE_OPERAND (predicate, 0);
      arm2 = TREE_OPERAND (predicate, 1);
      gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
      gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
      stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
                                label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
        {
          var = OMP_CLAUSE_DECL (c);
          new_var = lookup_decl (var, ctx);

          if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            {
              lower_omp (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
              gimple_seq_add_seq (stmt_list,
                                  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
            }
          OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;

          x = build_outer_var_ref (var, ctx);
          if (is_reference (var))
            new_var = build_simple_mem_ref_loc (clause_loc, new_var);
          x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
          gimplify_and_add (x, stmt_list);
        }
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
        {
          /* If this was a workshare clause, see if it had been combined
             with its parallel.  In that case, continue looking for the
             clauses also on the parallel statement itself.  */
          if (is_parallel_ctx (ctx))
            break;

          ctx = ctx->outer;
          if (ctx == NULL || !is_parallel_ctx (ctx))
            break;

          c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
                               OMP_CLAUSE_LASTPRIVATE);
          par_clauses = true;
        }
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}
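
/* Illustration (not part of the pass, an editor's sketch): for

     #pragma omp for lastprivate (x)

   the copy-back built above is schematically

     if (<this thread executed the sequentially last iteration>)
       x = x.1;

   i.e. PREDICATE guards the stores so that only one thread writes
   back the value the variable would have after a sequential
   execution of the loop.  */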

/* Generate code to implement the REDUCTION clauses.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple stmt;
  tree x, c;
  int count = 0;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
        if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
          {
            /* Never use OMP_ATOMIC for array reductions.  */
            count = -1;
            break;
          }
        count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
        continue;

      var = OMP_CLAUSE_DECL (c);
      new_var = lookup_decl (var, ctx);
      if (is_reference (var))
        new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
         identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
        code = PLUS_EXPR;

      if (count == 1)
        {
          tree addr = build_fold_addr_expr_loc (clause_loc, ref);

          addr = save_expr (addr);
          ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
          x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref,
                               new_var);
          x = build2 (OMP_ATOMIC, void_type_node, addr, x);
          gimplify_and_add (x, stmt_seqp);
          return;
        }

      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
        {
          tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

          if (is_reference (var))
            ref = build_fold_addr_expr_loc (clause_loc, ref);
          SET_DECL_VALUE_EXPR (placeholder, ref);
          DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
          lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
          gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
          OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
          OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
        }
      else
        {
          x = build2 (code, TREE_TYPE (ref), ref, new_var);
          ref = build_outer_var_ref (var, ctx);
          gimplify_assign (ref, x, &sub_seq);
        }
    }

  stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ATOMIC_START], 0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ATOMIC_END], 0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
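
/* Illustration (not part of the pass, an editor's sketch): with a
   single scalar clause, say reduction (+:sum), the merge emitted
   above is one atomic update, roughly

     #pragma omp atomic
     sum = sum + sum.1;

   whereas several clauses (or an array reduction) are merged inside
   one GOMP_atomic_start ()/GOMP_atomic_end () critical region.  */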

/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
                           omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
        continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, NULL);

      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
        {
          x = build_fold_addr_expr_loc (clause_loc, new_var);
          x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
        }
      gimplify_assign (ref, x, slist);

      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
        {
          ref = fold_convert_loc (clause_loc,
                                  build_pointer_type (TREE_TYPE (new_var)),
                                  ref);
          ref = build_fold_indirect_ref_loc (clause_loc, ref);
        }
      if (is_reference (var))
        {
          ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
          ref = build_simple_mem_ref_loc (clause_loc, ref);
          new_var = build_simple_mem_ref_loc (clause_loc, new_var);
        }
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
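
/* Illustration (not part of the pass, an editor's sketch): for

     #pragma omp single copyprivate (x)

   the thread that executes the single body stores X (or its address,
   when passed by reference) into the .omp_copy_s record built by
   scan_omp_single, and every other thread assigns it back out of that
   record, so all threads leave the construct with the same value.  */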

/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
                    omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
            break;
          continue;
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_LASTPRIVATE:
        case OMP_CLAUSE_REDUCTION:
          break;
        default:
          continue;
        }

      val = OMP_CLAUSE_DECL (c);
      var = lookup_decl_in_outer_ctx (val, ctx);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
          && is_global_var (var))
        continue;
      if (is_variable_sized (val))
        continue;
      by_ref = use_pointer_for_field (val, NULL);

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
          do_in = true;
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          if (by_ref || is_reference (val))
            {
              if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
                continue;
              do_in = true;
            }
          else
            {
              do_out = true;
              if (lang_hooks.decls.omp_private_outer_ref (val))
                do_in = true;
            }
          break;

        case OMP_CLAUSE_REDUCTION:
          do_in = true;
          do_out = !(by_ref || is_reference (val));
          break;

        default:
          gcc_unreachable ();
        }

      if (do_in)
        {
          ref = build_sender_ref (val, ctx);
          x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
          gimplify_assign (ref, x, ilist);
          if (is_task_ctx (ctx))
            DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
        }

      if (do_out)
        {
          ref = build_sender_ref (val, ctx);
          gimplify_assign (var, ref, olist);
        }
    }
}

/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
        continue;

      /* If CTX is a nested parallel directive, find the immediately
         enclosing parallel or workshare construct that contains a
         mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      if (use_pointer_for_field (ovar, ctx))
        {
          x = build_sender_ref (ovar, ctx);
          var = build_fold_addr_expr (var);
          gimplify_assign (x, var, ilist);
        }
      else
        {
          x = build_sender_ref (ovar, ctx);
          gimplify_assign (x, var, ilist);

          if (!TREE_READONLY (var)
              /* We don't need to receive a new reference to a result
                 or parm decl.  In fact we may not store to it as we will
                 invalidate any pending RSO and generate wrong gimple
                 during inlining.  */
              && !((TREE_CODE (var) == RESULT_DECL
                    || TREE_CODE (var) == PARM_DECL)
                   && DECL_BY_REFERENCE (var)))
            {
              x = build_sender_ref (ovar, ctx);
              gimplify_assign (var, x, olist);
            }
        }
    }
}

/* A convenience function to build an empty GIMPLE_COND with just the
   condition.  */

static gimple
gimple_build_cond_empty (tree cond)
{
  enum tree_code pred_code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
  return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
}

/* Build the function calls to GOMP_parallel_start etc to actually
   generate the parallel operation.  REGION is the parallel region
   being expanded.  BB is the block where to insert the code.  WS_ARGS
   will be set if this is a call to a combined parallel+workshare
   construct; it contains the list of additional arguments needed by
   the workshare construct.  */

static void
expand_parallel_call (struct omp_region *region, basic_block bb,
                      gimple entry_stmt, VEC(tree,gc) *ws_args)
{
  tree t, t1, t2, val, cond, c, clauses;
  gimple_stmt_iterator gsi;
  gimple stmt;
  int start_ix;
  location_t clause_loc;
  VEC(tree,gc) *args;

  clauses = gimple_omp_parallel_clauses (entry_stmt);

  /* Determine what flavor of GOMP_parallel_start we will be
     emitting.  */
  start_ix = BUILT_IN_GOMP_PARALLEL_START;
  if (is_combined_parallel (region))
    {
      switch (region->inner->type)
        {
        case GIMPLE_OMP_FOR:
          gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
          start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
                     + (region->inner->sched_kind
                        == OMP_CLAUSE_SCHEDULE_RUNTIME
                        ? 3 : region->inner->sched_kind);
          break;
        case GIMPLE_OMP_SECTIONS:
          start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
          break;
        default:
          gcc_unreachable ();
        }
    }

  /* By default, the value of NUM_THREADS is zero (selected at run time)
     and there is no conditional.  */
  cond = NULL_TREE;
  val = build_int_cst (unsigned_type_node, 0);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = OMP_CLAUSE_IF_EXPR (c);

  c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
  if (c)
    {
      val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
      clause_loc = OMP_CLAUSE_LOCATION (c);
    }
  else
    clause_loc = gimple_location (entry_stmt);

  /* Ensure 'val' is of the correct type.  */
  val = fold_convert_loc (clause_loc, unsigned_type_node, val);

  /* If we found the clause 'if (cond)', build either
     (cond != 0) or (cond ? val : 1u).  */
  if (cond)
    {
      gimple_stmt_iterator gsi;

      cond = gimple_boolify (cond);

      if (integer_zerop (val))
        val = fold_build2_loc (clause_loc,
                               EQ_EXPR, unsigned_type_node, cond,
                               build_int_cst (TREE_TYPE (cond), 0));
      else
        {
          basic_block cond_bb, then_bb, else_bb;
          edge e, e_then, e_else;
          tree tmp_then, tmp_else, tmp_join, tmp_var;

          tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
          if (gimple_in_ssa_p (cfun))
            {
              tmp_then = make_ssa_name (tmp_var, NULL);
              tmp_else = make_ssa_name (tmp_var, NULL);
              tmp_join = make_ssa_name (tmp_var, NULL);
            }
          else
            {
              tmp_then = tmp_var;
              tmp_else = tmp_var;
              tmp_join = tmp_var;
            }

          e = split_block (bb, NULL);
          cond_bb = e->src;
          bb = e->dest;
          remove_edge (e);

          then_bb = create_empty_bb (cond_bb);
          else_bb = create_empty_bb (then_bb);
          set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
          set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);

          stmt = gimple_build_cond_empty (cond);
          gsi = gsi_start_bb (cond_bb);
          gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

          gsi = gsi_start_bb (then_bb);
          stmt = gimple_build_assign (tmp_then, val);
          gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

          gsi = gsi_start_bb (else_bb);
          stmt = gimple_build_assign
                   (tmp_else, build_int_cst (unsigned_type_node, 1));
          gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

          make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
          make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
          e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
          e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);

          if (gimple_in_ssa_p (cfun))
            {
              gimple phi = create_phi_node (tmp_join, bb);
              SSA_NAME_DEF_STMT (tmp_join) = phi;
              add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
              add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
            }

          val = tmp_join;
        }

      gsi = gsi_start_bb (bb);
      val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
                                      false, GSI_CONTINUE_LINKING);
    }

  gsi = gsi_last_bb (bb);
  t = gimple_omp_parallel_data_arg (entry_stmt);
  if (t == NULL)
    t1 = null_pointer_node;
  else
    t1 = build_fold_addr_expr (t);
  t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));

  args = VEC_alloc (tree, gc, 3 + VEC_length (tree, ws_args));
  VEC_quick_push (tree, args, t2);
  VEC_quick_push (tree, args, t1);
  VEC_quick_push (tree, args, val);
  VEC_splice (tree, args, ws_args);

  t = build_call_expr_loc_vec (UNKNOWN_LOCATION,
                               built_in_decls[start_ix], args);

  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                            false, GSI_CONTINUE_LINKING);

  t = gimple_omp_parallel_data_arg (entry_stmt);
  if (t == NULL)
    t = null_pointer_node;
  else
    t = build_fold_addr_expr (t);
  t = build_call_expr_loc (gimple_location (entry_stmt),
                           gimple_omp_parallel_child_fn (entry_stmt), 1, t);
  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                            false, GSI_CONTINUE_LINKING);

  t = build_call_expr_loc (gimple_location (entry_stmt),
                           built_in_decls[BUILT_IN_GOMP_PARALLEL_END], 0);
  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                            false, GSI_CONTINUE_LINKING);
}
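
/* Illustration (not part of the pass, an editor's sketch): ignoring
   the optional IF and NUM_THREADS handling, the code emitted above
   amounts to

     __builtin_GOMP_parallel_start (child_fn, &.omp_data_o, 0);
     child_fn (&.omp_data_o);
     __builtin_GOMP_parallel_end ();

   with GOMP_parallel_loop_*_start or GOMP_parallel_sections_start
   substituted (plus WS_ARGS appended) when the parallel is combined
   with a workshare construct.  */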

/* Build the function call to GOMP_task to actually
   generate the task operation.  BB is the block where to insert the code.  */

static void
expand_task_call (basic_block bb, gimple entry_stmt)
{
  tree t, t1, t2, t3, flags, cond, c, clauses;
  gimple_stmt_iterator gsi;
  location_t loc = gimple_location (entry_stmt);

  clauses = gimple_omp_task_clauses (entry_stmt);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
  else
    cond = boolean_true_node;

  c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
  flags = build_int_cst (unsigned_type_node, (c ? 1 : 0));

  gsi = gsi_last_bb (bb);
  t = gimple_omp_task_data_arg (entry_stmt);
  if (t == NULL)
    t2 = null_pointer_node;
  else
    t2 = build_fold_addr_expr_loc (loc, t);
  t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
  t = gimple_omp_task_copy_fn (entry_stmt);
  if (t == NULL)
    t3 = null_pointer_node;
  else
    t3 = build_fold_addr_expr_loc (loc, t);

  t = build_call_expr (built_in_decls[BUILT_IN_GOMP_TASK], 7, t1, t2, t3,
                       gimple_omp_task_arg_size (entry_stmt),
                       gimple_omp_task_arg_align (entry_stmt), cond, flags);

  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                            false, GSI_CONTINUE_LINKING);
}
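
/* Illustration (not part of the pass, an editor's sketch): the call
   built above has the shape

     GOMP_task (child_fn, &.omp_data_o, cpyfn, arg_size, arg_align,
                if_cond, flags);

   where CPYFN is a null pointer unless a task firstprivate function
   is needed, and FLAGS currently only carries the untied bit.  */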

/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
   catch handler and return it.  This prevents programs from violating the
   structured block semantics with throws.  */

static gimple_seq
maybe_catch_exception (gimple_seq body)
{
  gimple g;
  tree decl;

  if (!flag_exceptions)
    return body;

  if (lang_hooks.eh_protect_cleanup_actions != NULL)
    decl = lang_hooks.eh_protect_cleanup_actions ();
  else
    decl = built_in_decls[BUILT_IN_TRAP];

  g = gimple_build_eh_must_not_throw (decl);
  g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
                        GIMPLE_TRY_CATCH);

  return gimple_seq_alloc_with_stmt (g);
}

/* Chain all the DECLs in V into a list linked by their DECL_CHAIN
   fields, and return the head of the chain.  */

static tree
vec2chain (VEC(tree,gc) *v)
{
  tree chain = NULL_TREE, t;
  unsigned ix;

  FOR_EACH_VEC_ELT_REVERSE (tree, v, ix, t)
    {
      DECL_CHAIN (t) = chain;
      chain = t;
    }

  return chain;
}

/* Remove barriers in REGION->EXIT's block.  Note that this is only
   valid for GIMPLE_OMP_PARALLEL regions.  Since the end of a parallel region
   is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
   left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
   removed.  */

static void
remove_exit_barrier (struct omp_region *region)
{
  gimple_stmt_iterator gsi;
  basic_block exit_bb;
  edge_iterator ei;
  edge e;
  gimple stmt;
  int any_addressable_vars = -1;

  exit_bb = region->exit;

  /* If the parallel region doesn't return, we don't have REGION->EXIT
     block at all.  */
  if (! exit_bb)
    return;

  /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN.  The
     workshare's GIMPLE_OMP_RETURN will be in a preceding block.  The kinds of
     statements that can appear in between are extremely limited -- no
     memory operations at all.  Here, we allow nothing at all, so the
     only thing we allow to precede this GIMPLE_OMP_RETURN is a label.  */
  gsi = gsi_last_bb (exit_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
  gsi_prev (&gsi);
  if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
    return;

  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      if (gsi_end_p (gsi))
        continue;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_OMP_RETURN
          && !gimple_omp_return_nowait_p (stmt))
        {
          /* OpenMP 3.0 tasks unfortunately prevent this optimization
             in many cases.  If there could be tasks queued, the barrier
             might be needed to let the tasks run before some local
             variable of the parallel that the task uses as shared
             runs out of scope.  The task can be spawned either
             from within current function (this would be easy to check)
             or from some function it calls and gets passed an address
             of such a variable.  */
          if (any_addressable_vars < 0)
            {
              gimple parallel_stmt = last_stmt (region->entry);
              tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
              tree local_decls, block, decl;
              unsigned ix;

              any_addressable_vars = 0;
              FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (child_fun), ix, decl)
                if (TREE_ADDRESSABLE (decl))
                  {
                    any_addressable_vars = 1;
                    break;
                  }
              for (block = gimple_block (stmt);
                   !any_addressable_vars
                   && block
                   && TREE_CODE (block) == BLOCK;
                   block = BLOCK_SUPERCONTEXT (block))
                {
                  for (local_decls = BLOCK_VARS (block);
                       local_decls;
                       local_decls = DECL_CHAIN (local_decls))
                    if (TREE_ADDRESSABLE (local_decls))
                      {
                        any_addressable_vars = 1;
                        break;
                      }
                  if (block == gimple_block (parallel_stmt))
                    break;
                }
            }
          if (!any_addressable_vars)
            gimple_omp_return_set_nowait (stmt);
        }
    }
}

static void
remove_exit_barriers (struct omp_region *region)
{
  if (region->type == GIMPLE_OMP_PARALLEL)
    remove_exit_barrier (region);

  if (region->inner)
    {
      region = region->inner;
      remove_exit_barriers (region);
      while (region->next)
        {
          region = region->next;
          remove_exit_barriers (region);
        }
    }
}
50674e96 3272
2b4cf991
JJ
3273/* Optimize omp_get_thread_num () and omp_get_num_threads ()
3274 calls. These can't be declared as const functions, but
3275 within one parallel body they are constant, so they can be
3276 transformed there into __builtin_omp_get_{thread_num,num_threads} ()
a68ab351
JJ
3277 which are declared const. Similarly for task body, except
3278 that in untied task omp_get_thread_num () can change at any task
3279 scheduling point. */
2b4cf991
JJ
3280
3281static void
726a989a 3282optimize_omp_library_calls (gimple entry_stmt)
2b4cf991
JJ
3283{
3284 basic_block bb;
726a989a 3285 gimple_stmt_iterator gsi;
2b4cf991
JJ
3286 tree thr_num_id
3287 = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM]);
3288 tree num_thr_id
3289 = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS]);
726a989a
RB
3290 bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
3291 && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
a68ab351 3292 OMP_CLAUSE_UNTIED) != NULL);
2b4cf991
JJ
3293
3294 FOR_EACH_BB (bb)
726a989a 3295 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2b4cf991 3296 {
726a989a 3297 gimple call = gsi_stmt (gsi);
2b4cf991
JJ
3298 tree decl;
3299
726a989a
RB
3300 if (is_gimple_call (call)
3301 && (decl = gimple_call_fndecl (call))
2b4cf991
JJ
3302 && DECL_EXTERNAL (decl)
3303 && TREE_PUBLIC (decl)
3304 && DECL_INITIAL (decl) == NULL)
3305 {
3306 tree built_in;
3307
3308 if (DECL_NAME (decl) == thr_num_id)
a68ab351
JJ
3309 {
3310 /* In #pragma omp task untied omp_get_thread_num () can change
3311 during the execution of the task region. */
3312 if (untied_task)
3313 continue;
3314 built_in = built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM];
3315 }
2b4cf991
JJ
3316 else if (DECL_NAME (decl) == num_thr_id)
3317 built_in = built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS];
3318 else
3319 continue;
3320
3321 if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
726a989a 3322 || gimple_call_num_args (call) != 0)
2b4cf991
JJ
3323 continue;
3324
3325 if (flag_exceptions && !TREE_NOTHROW (decl))
3326 continue;
3327
3328 if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
9600efe1
MM
3329 || !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
3330 TREE_TYPE (TREE_TYPE (built_in))))
2b4cf991
JJ
3331 continue;
3332
7c9577be 3333 gimple_call_set_fndecl (call, built_in);
2b4cf991
JJ
3334 }
3335 }
3336}
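
/* Illustration (not part of the pass, an editor's sketch): inside a
   parallel body,

     n = omp_get_num_threads ();

   calls the external, non-const library routine; the loop above
   rewrites it to the const __builtin_omp_get_num_threads (), which
   later passes can CSE because the thread count cannot change within
   the region.  */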

/* Expand the OpenMP parallel or task directive starting at REGION.  */

static void
expand_omp_taskreg (struct omp_region *region)
{
  basic_block entry_bb, exit_bb, new_bb;
  struct function *child_cfun;
  tree child_fn, block, t;
  tree save_current;
  gimple_stmt_iterator gsi;
  gimple entry_stmt, stmt;
  edge e;
  VEC(tree,gc) *ws_args;

  entry_stmt = last_stmt (region->entry);
  child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  /* If this function has already been instrumented, make sure
     the child function isn't instrumented again.  */
  child_cfun->after_tree_profile = cfun->after_tree_profile;

  entry_bb = region->entry;
  exit_bb = region->exit;

  if (is_combined_parallel (region))
    ws_args = region->ws_args;
  else
    ws_args = NULL;

  if (child_cfun->cfg)
    {
      /* Due to inlining, it may happen that we have already outlined
	 the region, in which case all we need to do is make the
	 sub-graph unreachable and emit the parallel call.  */
      edge entry_succ_e, exit_succ_e;
      gimple_stmt_iterator gsi;

      entry_succ_e = single_succ_edge (entry_bb);

      gsi = gsi_last_bb (entry_bb);
      gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
		  || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
      gsi_remove (&gsi, true);

      new_bb = entry_bb;
      if (exit_bb)
	{
	  exit_succ_e = single_succ_edge (exit_bb);
	  make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
	}
      remove_edge_and_dominated_blocks (entry_succ_e);
    }
  else
    {
      unsigned srcidx, dstidx, num;

      /* If the parallel region needs data sent from the parent
	 function, then the very first statement (except possible
	 tree profile counter updates) of the parallel body
	 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
	 &.OMP_DATA_O is passed as an argument to the child function,
	 we need to replace it with the argument as seen by the child
	 function.

	 In most cases, this will end up being the identity assignment
	 .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
	 a function call that has been inlined, the original PARM_DECL
	 .OMP_DATA_I may have been converted into a different local
	 variable.  In which case, we need to keep the assignment.  */
      if (gimple_omp_taskreg_data_arg (entry_stmt))
	{
	  basic_block entry_succ_bb = single_succ (entry_bb);
	  gimple_stmt_iterator gsi;
	  tree arg, narg;
	  gimple parcopy_stmt = NULL;

	  for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
	    {
	      gimple stmt;

	      gcc_assert (!gsi_end_p (gsi));
	      stmt = gsi_stmt (gsi);
	      if (gimple_code (stmt) != GIMPLE_ASSIGN)
		continue;

	      if (gimple_num_ops (stmt) == 2)
		{
		  tree arg = gimple_assign_rhs1 (stmt);

		  /* We're ignoring the subcode because we're
		     effectively doing a STRIP_NOPS.  */

		  if (TREE_CODE (arg) == ADDR_EXPR
		      && TREE_OPERAND (arg, 0)
			 == gimple_omp_taskreg_data_arg (entry_stmt))
		    {
		      parcopy_stmt = stmt;
		      break;
		    }
		}
	    }

	  gcc_assert (parcopy_stmt != NULL);
	  arg = DECL_ARGUMENTS (child_fn);

	  if (!gimple_in_ssa_p (cfun))
	    {
	      if (gimple_assign_lhs (parcopy_stmt) == arg)
		gsi_remove (&gsi, true);
	      else
		{
		  /* ?? Is setting the subcode really necessary ??  */
		  gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
		  gimple_assign_set_rhs1 (parcopy_stmt, arg);
		}
	    }
	  else
	    {
	      /* If we are in ssa form, we must load the value from the default
		 definition of the argument.  That should not be defined now,
		 since the argument is not used uninitialized.  */
	      gcc_assert (gimple_default_def (cfun, arg) == NULL);
	      narg = make_ssa_name (arg, gimple_build_nop ());
	      set_default_def (arg, narg);
	      /* ?? Is setting the subcode really necessary ??  */
	      gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
	      gimple_assign_set_rhs1 (parcopy_stmt, narg);
	      update_stmt (parcopy_stmt);
	    }
	}

      /* Declare local variables needed in CHILD_CFUN.  */
      block = DECL_INITIAL (child_fn);
      BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
      /* The gimplifier could record temporaries in parallel/task block
	 rather than in containing function's local_decls chain,
	 which would mean cgraph missed finalizing them.  Do it now.  */
      for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == VAR_DECL
	    && TREE_STATIC (t)
	    && !DECL_EXTERNAL (t))
	  varpool_finalize_decl (t);
      DECL_SAVED_TREE (child_fn) = NULL;
      gimple_set_body (child_fn, bb_seq (single_succ (entry_bb)));
      TREE_USED (block) = 1;

      /* Reset DECL_CONTEXT on function arguments.  */
      for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
	 so that it can be moved to the child function.  */
      gsi = gsi_last_bb (entry_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
			   || gimple_code (stmt) == GIMPLE_OMP_TASK));
      gsi_remove (&gsi, true);
      e = split_block (entry_bb, stmt);
      entry_bb = e->dest;
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR.  */
      if (exit_bb)
	{
	  gsi = gsi_last_bb (exit_bb);
	  gcc_assert (!gsi_end_p (gsi)
		      && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
	  stmt = gimple_build_return (NULL);
	  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
	  gsi_remove (&gsi, true);
	}

      /* Move the parallel region into CHILD_CFUN.  */

      if (gimple_in_ssa_p (cfun))
	{
	  push_cfun (child_cfun);
	  init_tree_ssa (child_cfun);
	  init_ssa_operands ();
	  cfun->gimple_df->in_ssa_p = true;
	  pop_cfun ();
	  block = NULL_TREE;
	}
      else
	block = gimple_block (entry_stmt);

      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
      if (exit_bb)
	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;

      /* Remove non-local VAR_DECLs from child_cfun->local_decls list.  */
      num = VEC_length (tree, child_cfun->local_decls);
      for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
	{
	  t = VEC_index (tree, child_cfun->local_decls, srcidx);
	  if (DECL_CONTEXT (t) == cfun->decl)
	    continue;
	  if (srcidx != dstidx)
	    VEC_replace (tree, child_cfun->local_decls, dstidx, t);
	  dstidx++;
	}
      if (dstidx != num)
	VEC_truncate (tree, child_cfun->local_decls, dstidx);

      /* Inform the callgraph about the new function.  */
      DECL_STRUCT_FUNCTION (child_fn)->curr_properties
	= cfun->curr_properties;
      cgraph_add_new_function (child_fn, true);

      /* Fix the callgraph edges for child_cfun.  Those for cfun will be
	 fixed in a following pass.  */
      push_cfun (child_cfun);
      save_current = current_function_decl;
      current_function_decl = child_fn;
      if (optimize)
	optimize_omp_library_calls (entry_stmt);
      rebuild_cgraph_edges ();

      /* Some EH regions might become dead, see PR34608.  If
	 pass_cleanup_cfg isn't the first pass to happen with the
	 new child, these dead EH edges might cause problems.
	 Clean them up now.  */
      if (flag_exceptions)
	{
	  basic_block bb;
	  bool changed = false;

	  FOR_EACH_BB (bb)
	    changed |= gimple_purge_dead_eh_edges (bb);
	  if (changed)
	    cleanup_tree_cfg ();
	}
      if (gimple_in_ssa_p (cfun))
	update_ssa (TODO_update_ssa);
      current_function_decl = save_current;
      pop_cfun ();
    }

  /* Emit a library call to launch the children threads.  */
  if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
    expand_parallel_call (region, new_bb, entry_stmt, ws_args);
  else
    expand_task_call (new_bb, entry_stmt);
  update_ssa (TODO_update_ssa_only_virtuals);
}


/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with any schedule.  Given parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
	if (more) goto L0; else goto L3;
    L0:
	V = istart0;
	iend = iend0;
    L1:
	BODY;
	V += STEP;
	if (V cond iend) goto L1; else goto L2;
    L2:
	if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
    L3:

   If this is a combined omp parallel loop, instead of the call to
   GOMP_loop_foo_start, we call GOMP_loop_foo_next.

   For collapsed loops, given parameters:
     collapse(3)
     for (V1 = N11; V1 cond1 N12; V1 += STEP1)
       for (V2 = N21; V2 cond2 N22; V2 += STEP2)
	 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
	   BODY;

   we generate pseudocode

	if (cond3 is <)
	  adj = STEP3 - 1;
	else
	  adj = STEP3 + 1;
	count3 = (adj + N32 - N31) / STEP3;
	if (cond2 is <)
	  adj = STEP2 - 1;
	else
	  adj = STEP2 + 1;
	count2 = (adj + N22 - N21) / STEP2;
	if (cond1 is <)
	  adj = STEP1 - 1;
	else
	  adj = STEP1 + 1;
	count1 = (adj + N12 - N11) / STEP1;
	count = count1 * count2 * count3;
	more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
	if (more) goto L0; else goto L3;
    L0:
	V = istart0;
	T = V;
	V3 = N31 + (T % count3) * STEP3;
	T = T / count3;
	V2 = N21 + (T % count2) * STEP2;
	T = T / count2;
	V1 = N11 + T * STEP1;
	iend = iend0;
    L1:
	BODY;
	V += 1;
	if (V < iend) goto L10; else goto L2;
    L10:
	V3 += STEP3;
	if (V3 cond3 N32) goto L1; else goto L11;
    L11:
	V3 = N31;
	V2 += STEP2;
	if (V2 cond2 N22) goto L1; else goto L12;
    L12:
	V2 = N21;
	V1 += STEP1;
	goto L1;
    L2:
	if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
    L3:

 */
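/* Added worked example (an illustration, not from the original comment):
   for collapse(2) with

	for (V1 = 0; V1 < 3; V1 += 1)
	  for (V2 = 0; V2 < 4; V2 += 2)

   the counts are count2 = (2 - 1 + 4 - 0) / 2 = 2 and
   count1 = (1 - 1 + 3 - 0) / 1 = 3, so count = 3 * 2 = 6 logical
   iterations.  A logical index T then decomposes as
   V2 = 0 + (T % 2) * 2 and V1 = 0 + (T / 2) * 1, matching the
   pseudocode above.  */
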
static void
expand_omp_for_generic (struct omp_region *region,
			struct omp_for_data *fd,
			enum built_in_function start_fn,
			enum built_in_function next_fn)
{
  tree type, istart0, iend0, iend;
  tree t, vmain, vback, bias = NULL_TREE;
  basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
  basic_block l2_bb = NULL, l3_bb = NULL;
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool in_combined_parallel = is_combined_parallel (region);
  bool broken_loop = region->cont == NULL;
  edge e, ne;
  tree *counts = NULL;
  int i;

  gcc_assert (!broken_loop || !in_combined_parallel);
  gcc_assert (fd->iter_type == long_integer_type_node
	      || !in_combined_parallel);

  type = TREE_TYPE (fd->loop.v);
  istart0 = create_tmp_var (fd->iter_type, ".istart0");
  iend0 = create_tmp_var (fd->iter_type, ".iend0");
  TREE_ADDRESSABLE (istart0) = 1;
  TREE_ADDRESSABLE (iend0) = 1;
  if (gimple_in_ssa_p (cfun))
    {
      add_referenced_var (istart0);
      add_referenced_var (iend0);
    }

  /* See if we need to bias by LLONG_MIN.  */
  if (fd->iter_type == long_long_unsigned_type_node
      && TREE_CODE (type) == INTEGER_TYPE
      && !TYPE_UNSIGNED (type))
    {
      tree n1, n2;

      if (fd->loop.cond_code == LT_EXPR)
	{
	  n1 = fd->loop.n1;
	  n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
	}
      else
	{
	  n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
	  n2 = fd->loop.n1;
	}
      if (TREE_CODE (n1) != INTEGER_CST
	  || TREE_CODE (n2) != INTEGER_CST
	  || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
	bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
    }
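
  /* Added explanatory note (hedged): the ull runtime entry points work on
     unsigned long long, so a signed iteration space is shifted by the
     type's minimum value (LLONG_MIN in the comment above).  Adding the
     bias maps the signed range monotonically onto the unsigned range,
     and the bias is subtracted again when istart0/iend0 come back from
     the runtime.  */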

  entry_bb = region->entry;
  cont_bb = region->cont;
  collapse_bb = NULL;
  gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
  gcc_assert (broken_loop
	      || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
  l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
  l1_bb = single_succ (l0_bb);
  if (!broken_loop)
    {
      l2_bb = create_empty_bb (cont_bb);
      gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
      gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
    }
  else
    l2_bb = NULL;
  l3_bb = BRANCH_EDGE (entry_bb)->dest;
  exit_bb = region->exit;

  gsi = gsi_last_bb (entry_bb);

  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
  if (fd->collapse > 1)
    {
      /* collapsed loops need work for expansion in SSA form.  */
      gcc_assert (!gimple_in_ssa_p (cfun));
      counts = (tree *) alloca (fd->collapse * sizeof (tree));
      for (i = 0; i < fd->collapse; i++)
	{
	  tree itype = TREE_TYPE (fd->loops[i].v);

	  if (POINTER_TYPE_P (itype))
	    itype = lang_hooks.types.type_for_size (TYPE_PRECISION (itype), 0);
	  t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
				     ? -1 : 1));
	  t = fold_build2 (PLUS_EXPR, itype,
			   fold_convert (itype, fd->loops[i].step), t);
	  t = fold_build2 (PLUS_EXPR, itype, t,
			   fold_convert (itype, fd->loops[i].n2));
	  t = fold_build2 (MINUS_EXPR, itype, t,
			   fold_convert (itype, fd->loops[i].n1));
	  if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
	    t = fold_build2 (TRUNC_DIV_EXPR, itype,
			     fold_build1 (NEGATE_EXPR, itype, t),
			     fold_build1 (NEGATE_EXPR, itype,
					  fold_convert (itype,
							fd->loops[i].step)));
	  else
	    t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
			     fold_convert (itype, fd->loops[i].step));
	  t = fold_convert (type, t);
	  if (TREE_CODE (t) == INTEGER_CST)
	    counts[i] = t;
	  else
	    {
	      counts[i] = create_tmp_var (type, ".count");
	      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
					    true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (counts[i], t);
	      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
	    }
	  if (SSA_VAR_P (fd->loop.n2))
	    {
	      if (i == 0)
		t = counts[0];
	      else
		{
		  t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
		  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
						true, GSI_SAME_STMT);
		}
	      stmt = gimple_build_assign (fd->loop.n2, t);
	      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
	    }
	}
    }
  if (in_combined_parallel)
    {
      /* In a combined parallel loop, emit a call to
	 GOMP_loop_foo_next.  */
      t = build_call_expr (built_in_decls[next_fn], 2,
			   build_fold_addr_expr (istart0),
			   build_fold_addr_expr (iend0));
    }
  else
    {
      tree t0, t1, t2, t3, t4;
      /* If this is not a combined parallel loop, emit a call to
	 GOMP_loop_foo_start in ENTRY_BB.  */
      t4 = build_fold_addr_expr (iend0);
      t3 = build_fold_addr_expr (istart0);
      t2 = fold_convert (fd->iter_type, fd->loop.step);
      if (POINTER_TYPE_P (type)
	  && TYPE_PRECISION (type) != TYPE_PRECISION (fd->iter_type))
	{
	  /* Avoid casting pointers to integer of a different size.  */
	  tree itype
	    = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);
	  t1 = fold_convert (fd->iter_type, fold_convert (itype, fd->loop.n2));
	  t0 = fold_convert (fd->iter_type, fold_convert (itype, fd->loop.n1));
	}
      else
	{
	  t1 = fold_convert (fd->iter_type, fd->loop.n2);
	  t0 = fold_convert (fd->iter_type, fd->loop.n1);
	}
      if (bias)
	{
	  t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
	  t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
	}
      if (fd->iter_type == long_integer_type_node)
	{
	  if (fd->chunk_size)
	    {
	      t = fold_convert (fd->iter_type, fd->chunk_size);
	      t = build_call_expr (built_in_decls[start_fn], 6,
				   t0, t1, t2, t, t3, t4);
	    }
	  else
	    t = build_call_expr (built_in_decls[start_fn], 5,
				 t0, t1, t2, t3, t4);
	}
      else
	{
	  tree t5;
	  tree c_bool_type;

	  /* The GOMP_loop_ull_*start functions have an additional boolean
	     argument, true for < loops and false for > loops.
	     In Fortran, the C bool type can be different from
	     boolean_type_node.  */
	  c_bool_type = TREE_TYPE (TREE_TYPE (built_in_decls[start_fn]));
	  t5 = build_int_cst (c_bool_type,
			      fd->loop.cond_code == LT_EXPR ? 1 : 0);
	  if (fd->chunk_size)
	    {
	      t = fold_convert (fd->iter_type, fd->chunk_size);
	      t = build_call_expr (built_in_decls[start_fn], 7,
				   t5, t0, t1, t2, t, t3, t4);
	    }
	  else
	    t = build_call_expr (built_in_decls[start_fn], 6,
				 t5, t0, t1, t2, t3, t4);
	}
    }
  if (TREE_TYPE (t) != boolean_type_node)
    t = fold_build2 (NE_EXPR, boolean_type_node,
		     t, build_int_cst (TREE_TYPE (t), 0));
  t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				true, GSI_SAME_STMT);
  gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_FOR statement.  */
  gsi_remove (&gsi, true);

  /* Iteration setup for sequential loop goes in L0_BB.  */
  gsi = gsi_start_bb (l0_bb);
  t = istart0;
  if (bias)
    t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
  if (POINTER_TYPE_P (type))
    t = fold_convert (lang_hooks.types.type_for_size (TYPE_PRECISION (type),
						      0), t);
  t = fold_convert (type, t);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
				false, GSI_CONTINUE_LINKING);
  stmt = gimple_build_assign (fd->loop.v, t);
  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

  t = iend0;
  if (bias)
    t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
  if (POINTER_TYPE_P (type))
    t = fold_convert (lang_hooks.types.type_for_size (TYPE_PRECISION (type),
						      0), t);
  t = fold_convert (type, t);
  iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				   false, GSI_CONTINUE_LINKING);
  if (fd->collapse > 1)
    {
      tree tem = create_tmp_var (type, ".tem");

      stmt = gimple_build_assign (tem, fd->loop.v);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
      for (i = fd->collapse - 1; i >= 0; i--)
	{
	  tree vtype = TREE_TYPE (fd->loops[i].v), itype;
	  itype = vtype;
	  if (POINTER_TYPE_P (vtype))
	    itype = lang_hooks.types.type_for_size (TYPE_PRECISION (vtype), 0);
	  t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
	  t = fold_convert (itype, t);
	  t = fold_build2 (MULT_EXPR, itype, t,
			   fold_convert (itype, fd->loops[i].step));
	  if (POINTER_TYPE_P (vtype))
	    t = fold_build2 (POINTER_PLUS_EXPR, vtype,
			     fd->loops[i].n1, fold_convert (sizetype, t));
	  else
	    t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
	  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
					false, GSI_CONTINUE_LINKING);
	  stmt = gimple_build_assign (fd->loops[i].v, t);
	  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
	  if (i != 0)
	    {
	      t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
	      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
					    false, GSI_CONTINUE_LINKING);
	      stmt = gimple_build_assign (tem, t);
	      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
	    }
	}
    }

  if (!broken_loop)
    {
      /* Code to control the increment and predicate for the sequential
	 loop goes in the CONT_BB.  */
      gsi = gsi_last_bb (cont_bb);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
      vmain = gimple_omp_continue_control_use (stmt);
      vback = gimple_omp_continue_control_def (stmt);

      if (POINTER_TYPE_P (type))
	t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
			 fold_convert (sizetype, fd->loop.step));
      else
	t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
				    true, GSI_SAME_STMT);
      stmt = gimple_build_assign (vback, t);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

      t = build2 (fd->loop.cond_code, boolean_type_node, vback, iend);
      stmt = gimple_build_cond_empty (t);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

      /* Remove GIMPLE_OMP_CONTINUE.  */
      gsi_remove (&gsi, true);

      if (fd->collapse > 1)
	{
	  basic_block last_bb, bb;

	  last_bb = cont_bb;
	  for (i = fd->collapse - 1; i >= 0; i--)
	    {
	      tree vtype = TREE_TYPE (fd->loops[i].v);

	      bb = create_empty_bb (last_bb);
	      gsi = gsi_start_bb (bb);

	      if (i < fd->collapse - 1)
		{
		  e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
		  e->probability = REG_BR_PROB_BASE / 8;

		  t = fd->loops[i + 1].n1;
		  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
						false, GSI_CONTINUE_LINKING);
		  stmt = gimple_build_assign (fd->loops[i + 1].v, t);
		  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
		}
	      else
		collapse_bb = bb;

	      set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);

	      if (POINTER_TYPE_P (vtype))
		t = fold_build2 (POINTER_PLUS_EXPR, vtype,
				 fd->loops[i].v,
				 fold_convert (sizetype, fd->loops[i].step));
	      else
		t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v,
				 fd->loops[i].step);
	      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
					    false, GSI_CONTINUE_LINKING);
	      stmt = gimple_build_assign (fd->loops[i].v, t);
	      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

	      if (i > 0)
		{
		  t = fd->loops[i].n2;
		  t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
						false, GSI_CONTINUE_LINKING);
		  t = fold_build2 (fd->loops[i].cond_code, boolean_type_node,
				   fd->loops[i].v, t);
		  stmt = gimple_build_cond_empty (t);
		  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
		  e = make_edge (bb, l1_bb, EDGE_TRUE_VALUE);
		  e->probability = REG_BR_PROB_BASE * 7 / 8;
		}
	      else
		make_edge (bb, l1_bb, EDGE_FALLTHRU);
	      last_bb = bb;
	    }
	}

      /* Emit code to get the next parallel iteration in L2_BB.  */
      gsi = gsi_start_bb (l2_bb);

      t = build_call_expr (built_in_decls[next_fn], 2,
			   build_fold_addr_expr (istart0),
			   build_fold_addr_expr (iend0));
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    false, GSI_CONTINUE_LINKING);
      if (TREE_TYPE (t) != boolean_type_node)
	t = fold_build2 (NE_EXPR, boolean_type_node,
			 t, build_int_cst (TREE_TYPE (t), 0));
      stmt = gimple_build_cond_empty (t);
      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
    }

  /* Add the loop cleanup function.  */
  gsi = gsi_last_bb (exit_bb);
  if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
    t = built_in_decls[BUILT_IN_GOMP_LOOP_END_NOWAIT];
  else
    t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
  stmt = gimple_build_call (t, 0);
  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
  gsi_remove (&gsi, true);

  /* Connect the new blocks.  */
  find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;

  if (!broken_loop)
    {
      gimple_seq phis;

      e = find_edge (cont_bb, l3_bb);
      ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);

      phis = phi_nodes (l3_bb);
      for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
		   PHI_ARG_DEF_FROM_EDGE (phi, e));
	}
      remove_edge (e);

      make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
      if (fd->collapse > 1)
	{
	  e = find_edge (cont_bb, l1_bb);
	  remove_edge (e);
	  e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
	}
      else
	{
	  e = find_edge (cont_bb, l1_bb);
	  e->flags = EDGE_TRUE_VALUE;
	}
      e->probability = REG_BR_PROB_BASE * 7 / 8;
      find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
      make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);

      set_immediate_dominator (CDI_DOMINATORS, l2_bb,
			       recompute_dominator (CDI_DOMINATORS, l2_bb));
      set_immediate_dominator (CDI_DOMINATORS, l3_bb,
			       recompute_dominator (CDI_DOMINATORS, l3_bb));
      set_immediate_dominator (CDI_DOMINATORS, l0_bb,
			       recompute_dominator (CDI_DOMINATORS, l0_bb));
      set_immediate_dominator (CDI_DOMINATORS, l1_bb,
			       recompute_dominator (CDI_DOMINATORS, l1_bb));
    }
}


/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and no specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	if ((__typeof (V)) -1 > 0 && cond is >)
	  n = -(adj + N2 - N1) / -STEP;
	else
	  n = (adj + N2 - N1) / STEP;
	q = n / nthreads;
	q += (q * nthreads != n);
	s0 = q * threadid;
	e0 = min(s0 + q, n);
	V = s0 * STEP + N1;
	if (s0 >= e0) goto L2; else goto L0;
    L0:
	e = e0 * STEP + N1;
    L1:
	BODY;
	V += STEP;
	if (V cond e) goto L1;
    L2:
*/

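/* Added worked example (an illustration, not from the original comment):
   with n = 10 iterations and nthreads = 4, q = 10 / 4 = 2, and since
   q * nthreads != n, q is bumped to 3.  Thread 0 then runs [0, 3),
   thread 1 [3, 6), thread 2 [6, 9), and thread 3 [9, 10); the min()
   against n clips the final thread's range.  */
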
static void
expand_omp_for_static_nochunk (struct omp_region *region,
			       struct omp_for_data *fd)
{
  tree n, q, s0, e0, e, t, nthreads, threadid;
  tree type, itype, vmain, vback;
  basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
  basic_block fin_bb;
  gimple_stmt_iterator gsi;
  gimple stmt;

  itype = type = TREE_TYPE (fd->loop.v);
  if (POINTER_TYPE_P (type))
    itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);

  entry_bb = region->entry;
  cont_bb = region->cont;
  gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
  gcc_assert (BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
  seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
  body_bb = single_succ (seq_start_bb);
  gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
  gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
  fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
  exit_bb = region->exit;

  /* Iteration space partitioning goes in ENTRY_BB.  */
  gsi = gsi_last_bb (entry_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);

  t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
  t = fold_convert (itype, t);
  nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
  t = fold_convert (itype, t);
  threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  fd->loop.n1
    = force_gimple_operand_gsi (&gsi, fold_convert (type, fd->loop.n1),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->loop.n2
    = force_gimple_operand_gsi (&gsi, fold_convert (itype, fd->loop.n2),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->loop.step
    = force_gimple_operand_gsi (&gsi, fold_convert (itype, fd->loop.step),
				true, NULL_TREE, true, GSI_SAME_STMT);

  t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
  t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
  t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
  if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
    t = fold_build2 (TRUNC_DIV_EXPR, itype,
		     fold_build1 (NEGATE_EXPR, itype, t),
		     fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
  else
    t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
  t = fold_convert (itype, t);
  n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
  q = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = fold_build2 (MULT_EXPR, itype, q, nthreads);
  t = fold_build2 (NE_EXPR, itype, t, n);
  t = fold_build2 (PLUS_EXPR, itype, q, t);
  q = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = build2 (MULT_EXPR, itype, q, threadid);
  s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = fold_build2 (PLUS_EXPR, itype, s0, q);
  t = fold_build2 (MIN_EXPR, itype, t, n);
  e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);

  t = build2 (GE_EXPR, boolean_type_node, s0, e0);
  gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_FOR statement.  */
  gsi_remove (&gsi, true);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  gsi = gsi_start_bb (seq_start_bb);

  t = fold_convert (itype, s0);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
		     fold_convert (sizetype, t));
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
				false, GSI_CONTINUE_LINKING);
  stmt = gimple_build_assign (fd->loop.v, t);
  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);

  t = fold_convert (itype, e0);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
		     fold_convert (sizetype, t));
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				false, GSI_CONTINUE_LINKING);

  /* The code controlling the sequential loop replaces the
     GIMPLE_OMP_CONTINUE.  */
  gsi = gsi_last_bb (cont_bb);
  stmt = gsi_stmt (gsi);
  gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
  vmain = gimple_omp_continue_control_use (stmt);
  vback = gimple_omp_continue_control_def (stmt);

  if (POINTER_TYPE_P (type))
    t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
		     fold_convert (sizetype, fd->loop.step));
  else
    t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
				true, GSI_SAME_STMT);
  stmt = gimple_build_assign (vback, t);
  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);

  t = build2 (fd->loop.cond_code, boolean_type_node, vback, e);
  gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_CONTINUE statement.  */
  gsi_remove (&gsi, true);

  /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing.  */
  gsi = gsi_last_bb (exit_bb);
  if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
    force_gimple_operand_gsi (&gsi, build_omp_barrier (), false, NULL_TREE,
			      false, GSI_SAME_STMT);
  gsi_remove (&gsi, true);

  /* Connect all the blocks.  */
  find_edge (entry_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
  find_edge (entry_bb, fin_bb)->flags = EDGE_TRUE_VALUE;

  find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (cont_bb, fin_bb)->flags = EDGE_FALSE_VALUE;

  set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, entry_bb);
  set_immediate_dominator (CDI_DOMINATORS, body_bb,
			   recompute_dominator (CDI_DOMINATORS, body_bb));
  set_immediate_dominator (CDI_DOMINATORS, fin_bb,
			   recompute_dominator (CDI_DOMINATORS, fin_bb));
}


/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and a specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	if ((__typeof (V)) -1 > 0 && cond is >)
	  n = -(adj + N2 - N1) / -STEP;
	else
	  n = (adj + N2 - N1) / STEP;
	trip = 0;
	V = threadid * CHUNK * STEP + N1;  -- this extra definition of V is
					      here so that V is defined
					      if the loop is not entered
    L0:
	s0 = (trip * nthreads + threadid) * CHUNK;
	e0 = min(s0 + CHUNK, n);
	if (s0 < n) goto L1; else goto L4;
    L1:
	V = s0 * STEP + N1;
	e = e0 * STEP + N1;
    L2:
	BODY;
	V += STEP;
	if (V cond e) goto L2; else goto L3;
    L3:
	trip += 1;
	goto L0;
    L4:
*/

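/* Added worked example (an illustration, not from the original comment):
   with n = 10, CHUNK = 2 and nthreads = 2, trip 0 assigns thread 0 the
   range [0, 2) and thread 1 [2, 4); trip 1 assigns [4, 6) and [6, 8);
   on trip 2 thread 0 takes [8, 10) while thread 1 computes s0 = 10,
   fails the s0 < n test and exits.  This is a cyclic (round-robin)
   distribution of chunks.  */
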
static void
expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
{
  tree n, s0, e0, e, t;
  tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
  tree type, itype, v_main, v_back, v_extra;
  basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
  basic_block trip_update_bb, cont_bb, fin_bb;
  gimple_stmt_iterator si;
  gimple stmt;
  edge se;

  itype = type = TREE_TYPE (fd->loop.v);
  if (POINTER_TYPE_P (type))
    itype = lang_hooks.types.type_for_size (TYPE_PRECISION (type), 0);

  entry_bb = region->entry;
  se = split_block (entry_bb, last_stmt (entry_bb));
  entry_bb = se->src;
  iter_part_bb = se->dest;
  cont_bb = region->cont;
  gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
  gcc_assert (BRANCH_EDGE (iter_part_bb)->dest
	      == FALLTHRU_EDGE (cont_bb)->dest);
  seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
  body_bb = single_succ (seq_start_bb);
  gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
  gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
  fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
  trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
  exit_bb = region->exit;

  /* Trip and adjustment setup goes in ENTRY_BB.  */
  si = gsi_last_bb (entry_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);

  t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
  t = fold_convert (itype, t);
  nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
  t = fold_convert (itype, t);
  threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				       true, GSI_SAME_STMT);

  fd->loop.n1
    = force_gimple_operand_gsi (&si, fold_convert (type, fd->loop.n1),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->loop.n2
    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->loop.n2),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->loop.step
    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->loop.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
  fd->chunk_size
    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
				true, NULL_TREE, true, GSI_SAME_STMT);

  t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
  t = fold_build2 (PLUS_EXPR, itype, t, fd->loop.n2);
  t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, fd->loop.n1));
  if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
    t = fold_build2 (TRUNC_DIV_EXPR, itype,
		     fold_build1 (NEGATE_EXPR, itype, t),
		     fold_build1 (NEGATE_EXPR, itype, fd->loop.step));
  else
    t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
  t = fold_convert (itype, t);
  n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				true, GSI_SAME_STMT);

  trip_var = create_tmp_var (itype, ".trip");
  if (gimple_in_ssa_p (cfun))
    {
      add_referenced_var (trip_var);
      trip_init = make_ssa_name (trip_var, NULL);
      trip_main = make_ssa_name (trip_var, NULL);
      trip_back = make_ssa_name (trip_var, NULL);
    }
  else
    {
      trip_init = trip_var;
      trip_main = trip_var;
      trip_back = trip_var;
    }

  stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);

  t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
		     fold_convert (sizetype, t));
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				      true, GSI_SAME_STMT);

  /* Remove the GIMPLE_OMP_FOR.  */
  gsi_remove (&si, true);

  /* Iteration space partitioning goes in ITER_PART_BB.  */
  si = gsi_last_bb (iter_part_bb);

  t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
  t = fold_build2 (PLUS_EXPR, itype, t, threadid);
  t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
  s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				 false, GSI_CONTINUE_LINKING);

  t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
  t = fold_build2 (MIN_EXPR, itype, t, n);
  e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				 false, GSI_CONTINUE_LINKING);

  t = build2 (LT_EXPR, boolean_type_node, s0, n);
  gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  si = gsi_start_bb (seq_start_bb);

  t = fold_convert (itype, s0);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
		     fold_convert (sizetype, t));
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  t = force_gimple_operand_gsi (&si, t, false, NULL_TREE,
				false, GSI_CONTINUE_LINKING);
  stmt = gimple_build_assign (fd->loop.v, t);
  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);

  t = fold_convert (itype, e0);
  t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
  if (POINTER_TYPE_P (type))
    t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
		     fold_convert (sizetype, t));
  else
    t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
  e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
				false, GSI_CONTINUE_LINKING);

  /* The code controlling the sequential loop goes in CONT_BB,
     replacing the GIMPLE_OMP_CONTINUE.  */
  si = gsi_last_bb (cont_bb);
  stmt = gsi_stmt (si);
  gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
  v_main = gimple_omp_continue_control_use (stmt);
  v_back = gimple_omp_continue_control_def (stmt);

  if (POINTER_TYPE_P (type))
    t = fold_build2 (POINTER_PLUS_EXPR, type, v_main,
		     fold_convert (sizetype, fd->loop.step));
  else
    t = fold_build2 (PLUS_EXPR, type, v_main, fd->loop.step);
  stmt = gimple_build_assign (v_back, t);
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);

  t = build2 (fd->loop.cond_code, boolean_type_node, v_back, e);
  gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);

  /* Remove GIMPLE_OMP_CONTINUE.  */
  gsi_remove (&si, true);

  /* Trip update code goes into TRIP_UPDATE_BB.  */
  si = gsi_start_bb (trip_update_bb);

  t = build_int_cst (itype, 1);
  t = build2 (PLUS_EXPR, itype, trip_main, t);
  stmt = gimple_build_assign (trip_back, t);
  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);

  /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing.  */
  si = gsi_last_bb (exit_bb);
  if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
    force_gimple_operand_gsi (&si, build_omp_barrier (), false, NULL_TREE,
			      false, GSI_SAME_STMT);
  gsi_remove (&si, true);

  /* Connect the new blocks.  */
  find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;

  find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
  find_edge (cont_bb, trip_update_bb)->flags = EDGE_FALSE_VALUE;

  redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);

  if (gimple_in_ssa_p (cfun))
    {
      gimple_stmt_iterator psi;
      gimple phi;
      edge re, ene;
      edge_var_map_vector head;
      edge_var_map *vm;
      size_t i;

      /* When we redirect the edge from trip_update_bb to iter_part_bb, we
	 remove arguments of the phi nodes in fin_bb.  We need to create
	 appropriate phi nodes in iter_part_bb instead.  */
      se = single_pred_edge (fin_bb);
      re = single_succ_edge (trip_update_bb);
      head = redirect_edge_var_map_vector (re);
      ene = single_succ_edge (entry_bb);

      psi = gsi_start_phis (fin_bb);
      for (i = 0; !gsi_end_p (psi) && VEC_iterate (edge_var_map, head, i, vm);
	   gsi_next (&psi), ++i)
	{
	  gimple nphi;
	  source_location locus;

	  phi = gsi_stmt (psi);
	  t = gimple_phi_result (phi);
	  gcc_assert (t == redirect_edge_var_map_result (vm));
	  nphi = create_phi_node (t, iter_part_bb);
	  SSA_NAME_DEF_STMT (t) = nphi;

	  t = PHI_ARG_DEF_FROM_EDGE (phi, se);
	  locus = gimple_phi_arg_location_from_edge (phi, se);

	  /* A special case -- fd->loop.v is not yet computed in
	     iter_part_bb, we need to use v_extra instead.  */
	  if (t == fd->loop.v)
	    t = v_extra;
	  add_phi_arg (nphi, t, ene, locus);
	  locus = redirect_edge_var_map_location (vm);
	  add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
	}
      gcc_assert (!gsi_end_p (psi) && i == VEC_length (edge_var_map, head));
      redirect_edge_var_map_clear (re);
      while (1)
	{
	  psi = gsi_start_phis (fin_bb);
	  if (gsi_end_p (psi))
	    break;
	  remove_phi_node (&psi, false);
	}

      /* Make phi node for trip.  */
      phi = create_phi_node (trip_main, iter_part_bb);
      SSA_NAME_DEF_STMT (trip_main) = phi;
      add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb),
		   UNKNOWN_LOCATION);
      add_phi_arg (phi, trip_init, single_succ_edge (entry_bb),
		   UNKNOWN_LOCATION);
    }

  set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
  set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
			   recompute_dominator (CDI_DOMINATORS, iter_part_bb));
  set_immediate_dominator (CDI_DOMINATORS, fin_bb,
			   recompute_dominator (CDI_DOMINATORS, fin_bb));
  set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
			   recompute_dominator (CDI_DOMINATORS, seq_start_bb));
  set_immediate_dominator (CDI_DOMINATORS, body_bb,
			   recompute_dominator (CDI_DOMINATORS, body_bb));
}


/* Expand the OpenMP loop defined by REGION.  */

static void
expand_omp_for (struct omp_region *region)
{
  struct omp_for_data fd;
  struct omp_for_data_loop *loops;

  loops
    = (struct omp_for_data_loop *)
      alloca (gimple_omp_for_collapse (last_stmt (region->entry))
	      * sizeof (struct omp_for_data_loop));
  extract_omp_for_data (last_stmt (region->entry), &fd, loops);
  region->sched_kind = fd.sched_kind;

  gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
  BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
  FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
  if (region->cont)
    {
      gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
      BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
      FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
    }

  if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
      && !fd.have_ordered
      && fd.collapse == 1
      && region->cont != NULL)
    {
      if (fd.chunk_size == NULL)
	expand_omp_for_static_nochunk (region, &fd);
      else
	expand_omp_for_static_chunk (region, &fd);
    }
  else
    {
      int fn_index, start_ix, next_ix;

      gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
      fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
		 ? 3 : fd.sched_kind;
      fn_index += fd.have_ordered * 4;
      start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
      next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
      if (fd.iter_type == long_long_unsigned_type_node)
	{
	  start_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_START
		      - BUILT_IN_GOMP_LOOP_STATIC_START;
	  next_ix += BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
		     - BUILT_IN_GOMP_LOOP_STATIC_NEXT;
	}
      expand_omp_for_generic (region, &fd, (enum built_in_function) start_ix,
			      (enum built_in_function) next_ix);
    }

  update_ssa (TODO_update_ssa_only_virtuals);
}
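
/* Added worked illustration (hedged) of the index arithmetic above:
   with schedule(dynamic) and no ordered clause, fn_index is 1, so
   start_ix/next_ix select GOMP_loop_dynamic_start/next; an ordered
   clause adds 4 to reach the *_ordered_* variants, and the final
   adjustment switches to the GOMP_loop_ull_* entry points for
   unsigned long long iteration spaces.  */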


/* Expand code for an OpenMP sections directive.  In pseudo code, we generate

	v = GOMP_sections_start (n);
    L0:
	switch (v)
	  {
	  case 0:
	    goto L2;
	  case 1:
	    section 1;
	    goto L1;
	  case 2:
	    ...
	  case n:
	    ...
	  default:
	    abort ();
	  }
    L1:
	v = GOMP_sections_next ();
	goto L0;
    L2:
	reduction;

   If this is a combined parallel sections, replace the call to
   GOMP_sections_start with a call to GOMP_sections_next.  */

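/* Added illustrative mapping (hedged, not from the original comment):
   for a directive such as

	#pragma omp sections
	{
	  #pragma omp section
	  foo ();
	  #pragma omp section
	  bar ();
	}

   GOMP_sections_start (2) hands each thread a section number in [1, 2],
   with 0 meaning no more work, and the switch dispatches to the
   corresponding section body.  */
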
static void
expand_omp_sections (struct omp_region *region)
{
  tree t, u, vin = NULL, vmain, vnext, l2;
  VEC (tree,heap) *label_vec;
  unsigned len;
  basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
  gimple_stmt_iterator si, switch_si;
  gimple sections_stmt, stmt, cont;
  edge_iterator ei;
  edge e;
  struct omp_region *inner;
  unsigned i, casei;
  bool exit_reachable = region->cont != NULL;

  gcc_assert (exit_reachable == (region->exit != NULL));
  entry_bb = region->entry;
  l0_bb = single_succ (entry_bb);
  l1_bb = region->cont;
  l2_bb = region->exit;
  if (exit_reachable)
    {
      if (single_pred_p (l2_bb) && single_pred (l2_bb) == l0_bb)
	l2 = gimple_block_label (l2_bb);
      else
	{
	  /* This can happen if there are reductions.  */
	  len = EDGE_COUNT (l0_bb->succs);
	  gcc_assert (len > 0);
	  e = EDGE_SUCC (l0_bb, len - 1);
	  si = gsi_last_bb (e->dest);
	  l2 = NULL_TREE;
	  if (gsi_end_p (si)
	      || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
	    l2 = gimple_block_label (e->dest);
	  else
	    FOR_EACH_EDGE (e, ei, l0_bb->succs)
	      {
		si = gsi_last_bb (e->dest);
		if (gsi_end_p (si)
		    || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
		  {
		    l2 = gimple_block_label (e->dest);
		    break;
		  }
	      }
	}
      default_bb = create_empty_bb (l1_bb->prev_bb);
    }
  else
    {
      default_bb = create_empty_bb (l0_bb);
      l2 = gimple_block_label (default_bb);
    }

  /* We will build a switch() with enough cases for all the
     GIMPLE_OMP_SECTION regions, a '0' case to handle the end of more work
     and a default case to abort if something goes wrong.  */
  len = EDGE_COUNT (l0_bb->succs);

  /* Use VEC_quick_push on label_vec throughout, since we know the size
     in advance.  */
  label_vec = VEC_alloc (tree, heap, len);

  /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
     GIMPLE_OMP_SECTIONS statement.  */
  si = gsi_last_bb (entry_bb);
  sections_stmt = gsi_stmt (si);
  gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
  vin = gimple_omp_sections_control (sections_stmt);
  if (!is_combined_parallel (region))
    {
      /* If we are not inside a combined parallel+sections region,
	 call GOMP_sections_start.  */
      t = build_int_cst (unsigned_type_node,
			 exit_reachable ? len - 1 : len);
      u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
      stmt = gimple_build_call (u, 1, t);
    }
  else
    {
      /* Otherwise, call GOMP_sections_next.  */
      u = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
      stmt = gimple_build_call (u, 0);
    }
  gimple_call_set_lhs (stmt, vin);
  gsi_insert_after (&si, stmt, GSI_SAME_STMT);
  gsi_remove (&si, true);

  /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
     L0_BB.  */
  switch_si = gsi_last_bb (l0_bb);
  gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
  if (exit_reachable)
    {
      cont = last_stmt (l1_bb);
      gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
      vmain = gimple_omp_continue_control_use (cont);
      vnext = gimple_omp_continue_control_def (cont);
    }
  else
    {
      vmain = vin;
      vnext = NULL_TREE;
    }

  i = 0;
  if (exit_reachable)
    {
      t = build3 (CASE_LABEL_EXPR, void_type_node,
		  build_int_cst (unsigned_type_node, 0), NULL, l2);
      VEC_quick_push (tree, label_vec, t);
      i++;
    }

  /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR.  */
  for (inner = region->inner, casei = 1;
       inner;
       inner = inner->next, i++, casei++)
    {
      basic_block s_entry_bb, s_exit_bb;

      /* Skip optional reduction region.  */
      if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
	{
	  --i;
	  --casei;
	  continue;
	}

      s_entry_bb = inner->entry;
      s_exit_bb = inner->exit;

      t = gimple_block_label (s_entry_bb);
      u = build_int_cst (unsigned_type_node, casei);
      u = build3 (CASE_LABEL_EXPR, void_type_node, u, NULL, t);
      VEC_quick_push (tree, label_vec, u);

      si = gsi_last_bb (s_entry_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
      gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
      gsi_remove (&si, true);
      single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;

      if (s_exit_bb == NULL)
	continue;

      si = gsi_last_bb (s_exit_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
      gsi_remove (&si, true);

      single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
    }

  /* Error handling code goes in DEFAULT_BB.  */
  t = gimple_block_label (default_bb);
  u = build3 (CASE_LABEL_EXPR, void_type_node, NULL, NULL, t);
  make_edge (l0_bb, default_bb, 0);

  stmt = gimple_build_switch_vec (vmain, u, label_vec);
  gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
  gsi_remove (&switch_si, true);
  VEC_free (tree, heap, label_vec);

  si = gsi_start_bb (default_bb);
  stmt = gimple_build_call (built_in_decls[BUILT_IN_TRAP], 0);
  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);

  if (exit_reachable)
    {
      /* Code to get the next section goes in L1_BB.  */
      si = gsi_last_bb (l1_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);

      stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT], 0);
      gimple_call_set_lhs (stmt, vnext);
      gsi_insert_after (&si, stmt, GSI_SAME_STMT);
      gsi_remove (&si, true);

      single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;

      /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB.  */
      si = gsi_last_bb (l2_bb);
      if (gimple_omp_return_nowait_p (gsi_stmt (si)))
	t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
      else
	t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
      stmt = gimple_build_call (t, 0);
      gsi_insert_after (&si, stmt, GSI_SAME_STMT);
      gsi_remove (&si, true);
    }

  set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
}


/* Expand code for an OpenMP single directive.  We've already expanded
   much of the code, here we simply place the GOMP_barrier call.  */

static void
expand_omp_single (struct omp_region *region)
{
  basic_block entry_bb, exit_bb;
  gimple_stmt_iterator si;
  bool need_barrier = false;

  entry_bb = region->entry;
  exit_bb = region->exit;

  si = gsi_last_bb (entry_bb);
  /* The terminal barrier at the end of a GOMP_single_copy sequence cannot
     be removed.  We need to ensure that the thread that entered the single
     does not exit before the data is copied out by the other threads.  */
  if (find_omp_clause (gimple_omp_single_clauses (gsi_stmt (si)),
		       OMP_CLAUSE_COPYPRIVATE))
    need_barrier = true;
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
  gsi_remove (&si, true);
  single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

  si = gsi_last_bb (exit_bb);
  if (!gimple_omp_return_nowait_p (gsi_stmt (si)) || need_barrier)
    force_gimple_operand_gsi (&si, build_omp_barrier (), false, NULL_TREE,
			      false, GSI_SAME_STMT);
  gsi_remove (&si, true);
  single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
}

/* Generic expansion for OpenMP synchronization directives: master,
   ordered and critical.  All we need to do here is remove the entry
   and exit markers for REGION.  */

static void
expand_omp_synch (struct omp_region *region)
{
  basic_block entry_bb, exit_bb;
  gimple_stmt_iterator si;

  entry_bb = region->entry;
  exit_bb = region->exit;

  si = gsi_last_bb (entry_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
	      || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
	      || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
	      || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL);
  gsi_remove (&si, true);
  single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

  if (exit_bb)
    {
      si = gsi_last_bb (exit_bb);
      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
      gsi_remove (&si, true);
      single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
    }
}

/* A subroutine of expand_omp_atomic.  Attempt to implement the atomic
   operation as a __sync_fetch_and_op builtin.  INDEX is log2 of the
   size of the data type, and thus usable to find the index of the builtin
   decl.  Returns false if the expression is not of the proper form.  */

static bool
expand_omp_atomic_fetch_op (basic_block load_bb,
			    tree addr, tree loaded_val,
			    tree stored_val, int index)
{
  enum built_in_function base;
  tree decl, itype, call;
  direct_optab optab;
  tree rhs;
  basic_block store_bb = single_succ (load_bb);
  gimple_stmt_iterator gsi;
  gimple stmt;
  location_t loc;

  /* We expect to find the following sequences:

   load_bb:
       GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)

   store_bb:
       val = tmp OP something; (or: something OP tmp)
       GIMPLE_OMP_ATOMIC_STORE (val)

   ??? FIXME: Allow a more flexible sequence.
   Perhaps use data flow to pick the statements.  */

  gsi = gsi_after_labels (store_bb);
  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  if (!is_gimple_assign (stmt))
    return false;
  gsi_next (&gsi);
  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
    return false;

  if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
    return false;

  /* Check for one of the supported fetch-op operations.  */
  switch (gimple_assign_rhs_code (stmt))
    {
    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      base = BUILT_IN_FETCH_AND_ADD_N;
      optab = sync_add_optab;
      break;
    case MINUS_EXPR:
      base = BUILT_IN_FETCH_AND_SUB_N;
      optab = sync_add_optab;
      break;
    case BIT_AND_EXPR:
      base = BUILT_IN_FETCH_AND_AND_N;
      optab = sync_and_optab;
      break;
    case BIT_IOR_EXPR:
      base = BUILT_IN_FETCH_AND_OR_N;
      optab = sync_ior_optab;
      break;
    case BIT_XOR_EXPR:
      base = BUILT_IN_FETCH_AND_XOR_N;
      optab = sync_xor_optab;
      break;
    default:
      return false;
    }

  /* Make sure the expression is of the proper form.  */
  if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
    rhs = gimple_assign_rhs2 (stmt);
  else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
	   && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
    rhs = gimple_assign_rhs1 (stmt);
  else
    return false;

  decl = built_in_decls[base + index + 1];
  if (decl == NULL_TREE)
    return false;
  itype = TREE_TYPE (TREE_TYPE (decl));

  if (direct_optab_handler (optab, TYPE_MODE (itype)) == CODE_FOR_nothing)
    return false;

  gsi = gsi_last_bb (load_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
  call = build_call_expr_loc (loc,
			      decl, 2, addr,
			      fold_convert_loc (loc, itype, rhs));
  call = fold_convert_loc (loc, void_type_node, call);
  force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
  gsi_remove (&gsi, true);

  gsi = gsi_last_bb (store_bb);
  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
  gsi_remove (&gsi, true);
  gsi = gsi_last_bb (store_bb);
  gsi_remove (&gsi, true);

  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_no_phi);

  return true;
}

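/* Illustrative only: when the pattern match above succeeds for, say,

     #pragma omp atomic
     x += y;

   the GIMPLE_OMP_ATOMIC_LOAD/STORE pair collapses into a single builtin
   call.  A user-level sketch (hypothetical function name; the builtin
   is GCC's real __sync_fetch_and_add):  */
#if 0
static int x;

static void
atomic_fetch_add_sketch (int y)
{
  (void) __sync_fetch_and_add (&x, y);	/* result value discarded, as above  */
}
#endif
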
/* A subroutine of expand_omp_atomic.  Implement the atomic operation as:

      oldval = *addr;
      repeat:
        newval = rhs;	 // with oldval replacing *addr in rhs
	oldval = __sync_val_compare_and_swap (addr, oldval, newval);
	if (oldval != newval)
	  goto repeat;

   INDEX is log2 of the size of the data type, and thus usable to find the
   index of the builtin decl.  */

static bool
expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
			    tree addr, tree loaded_val, tree stored_val,
			    int index)
{
  tree loadedi, storedi, initial, new_storedi, old_vali;
  tree type, itype, cmpxchg, iaddr;
  gimple_stmt_iterator si;
  basic_block loop_header = single_succ (load_bb);
  gimple phi, stmt;
  edge e;

  cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
  if (cmpxchg == NULL_TREE)
    return false;
  type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  itype = TREE_TYPE (TREE_TYPE (cmpxchg));

  if (direct_optab_handler (sync_compare_and_swap_optab, TYPE_MODE (itype))
      == CODE_FOR_nothing)
    return false;

  /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD.  */
  si = gsi_last_bb (load_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);

  /* For floating-point values, we'll need to view-convert them to integers
     so that we can perform the atomic compare and swap.  Simplify the
     following code by always setting up the "i"ntegral variables.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    {
      tree iaddr_val;

      iaddr = create_tmp_var (build_pointer_type_for_mode (itype, ptr_mode,
							   true), NULL);
      iaddr_val
	= force_gimple_operand_gsi (&si,
				    fold_convert (TREE_TYPE (iaddr), addr),
				    false, NULL_TREE, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (iaddr, iaddr_val);
      gsi_insert_before (&si, stmt, GSI_SAME_STMT);
      loadedi = create_tmp_var (itype, NULL);
      if (gimple_in_ssa_p (cfun))
	{
	  add_referenced_var (iaddr);
	  add_referenced_var (loadedi);
	  loadedi = make_ssa_name (loadedi, NULL);
	}
    }
  else
    {
      iaddr = addr;
      loadedi = loaded_val;
    }

  initial
    = force_gimple_operand_gsi (&si,
				build2 (MEM_REF, TREE_TYPE (TREE_TYPE (iaddr)),
					iaddr,
					build_int_cst (TREE_TYPE (iaddr), 0)),
				true, NULL_TREE, true, GSI_SAME_STMT);

  /* Move the value to the LOADEDI temporary.  */
  if (gimple_in_ssa_p (cfun))
    {
      gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
      phi = create_phi_node (loadedi, loop_header);
      SSA_NAME_DEF_STMT (loadedi) = phi;
      SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
	       initial);
    }
  else
    gsi_insert_before (&si,
		       gimple_build_assign (loadedi, initial),
		       GSI_SAME_STMT);
  if (loadedi != loaded_val)
    {
      gimple_stmt_iterator gsi2;
      tree x;

      x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
      gsi2 = gsi_start_bb (loop_header);
      if (gimple_in_ssa_p (cfun))
	{
	  gimple stmt;
	  x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
					true, GSI_SAME_STMT);
	  stmt = gimple_build_assign (loaded_val, x);
	  gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
	}
      else
	{
	  x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
	  force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
				    true, GSI_SAME_STMT);
	}
    }
  gsi_remove (&si, true);

  si = gsi_last_bb (store_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);

  if (iaddr == addr)
    storedi = stored_val;
  else
    storedi =
      force_gimple_operand_gsi (&si,
				build1 (VIEW_CONVERT_EXPR, itype,
					stored_val), true, NULL_TREE, true,
				GSI_SAME_STMT);

  /* Build the compare&swap statement.  */
  new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
  new_storedi = force_gimple_operand_gsi (&si,
					  fold_convert (TREE_TYPE (loadedi),
							new_storedi),
					  true, NULL_TREE,
					  true, GSI_SAME_STMT);

  if (gimple_in_ssa_p (cfun))
    old_vali = loadedi;
  else
    {
      old_vali = create_tmp_var (TREE_TYPE (loadedi), NULL);
      if (gimple_in_ssa_p (cfun))
	add_referenced_var (old_vali);
      stmt = gimple_build_assign (old_vali, loadedi);
      gsi_insert_before (&si, stmt, GSI_SAME_STMT);

      stmt = gimple_build_assign (loadedi, new_storedi);
      gsi_insert_before (&si, stmt, GSI_SAME_STMT);
    }

  /* Note that we always perform the comparison as an integer, even for
     floating point.  This allows the atomic operation to properly
     succeed even with NaNs and -0.0.  */
  stmt = gimple_build_cond_empty
	   (build2 (NE_EXPR, boolean_type_node,
		    new_storedi, old_vali));
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);

  /* Update cfg.  */
  e = single_succ_edge (store_bb);
  e->flags &= ~EDGE_FALLTHRU;
  e->flags |= EDGE_FALSE_VALUE;

  e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);

  /* Copy the new value to loadedi (we already did that before the condition
     if we are not in SSA).  */
  if (gimple_in_ssa_p (cfun))
    {
      phi = gimple_seq_first_stmt (phi_nodes (loop_header));
      SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
    }

  /* Remove GIMPLE_OMP_ATOMIC_STORE.  */
  gsi_remove (&si, true);

  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_no_phi);

  return true;
}

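/* Illustrative only: for a non-integral type such as float, the loop
   built above behaves like this user-level sketch, which reinterprets
   the value bytes as an unsigned int (the VIEW_CONVERT_EXPRs) so both
   the compare-and-swap and the retry comparison run on integer bits,
   staying correct for NaNs and -0.0.  The function name is
   hypothetical.  */
#if 0
static void
atomic_float_add_sketch (float *addr, float rhs)
{
  unsigned int oldbits, newbits, seen;
  float oldval, newval;

  __builtin_memcpy (&oldbits, addr, sizeof oldbits);	/* initial load  */
  for (;;)
    {
      __builtin_memcpy (&oldval, &oldbits, sizeof oldval);
      newval = oldval + rhs;
      __builtin_memcpy (&newbits, &newval, sizeof newbits);
      seen = __sync_val_compare_and_swap ((unsigned int *) addr,
					  oldbits, newbits);
      if (seen == oldbits)	/* integer comparison, as noted above  */
	break;
      oldbits = seen;		/* retry with the value actually seen  */
    }
}
#endif
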
/* A subroutine of expand_omp_atomic.  Implement the atomic operation as:

      GOMP_atomic_start ();
      *addr = rhs;
      GOMP_atomic_end ();

   The result is not globally atomic, but works so long as all parallel
   references are within #pragma omp atomic directives.  According to
   responses received from omp@openmp.org, this appears to be within spec.
   Which makes sense, since that's how several other compilers handle
   this situation as well.
   LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
   expanding.  STORED_VAL is the operand of the matching
   GIMPLE_OMP_ATOMIC_STORE.

   We replace
     GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with
     loaded_val = *addr;

   and replace
     GIMPLE_OMP_ATOMIC_STORE (stored_val) with
     *addr = stored_val;
*/

static bool
expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
			 tree addr, tree loaded_val, tree stored_val)
{
  gimple_stmt_iterator si;
  gimple stmt;
  tree t;

  si = gsi_last_bb (load_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);

  t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
  t = build_call_expr (t, 0);
  force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);

  stmt = gimple_build_assign (loaded_val, build_simple_mem_ref (addr));
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);
  gsi_remove (&si, true);

  si = gsi_last_bb (store_bb);
  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);

  stmt = gimple_build_assign (build_simple_mem_ref (unshare_expr (addr)),
			      stored_val);
  gsi_insert_before (&si, stmt, GSI_SAME_STMT);

  t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
  t = build_call_expr (t, 0);
  force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
  gsi_remove (&si, true);

  if (gimple_in_ssa_p (cfun))
    update_ssa (TODO_update_ssa_no_phi);
  return true;
}

/* Expand a GIMPLE_OMP_ATOMIC statement.  We try to expand
   using expand_omp_atomic_fetch_op.  If it fails, we try to
   call expand_omp_atomic_pipeline, and if it fails too, the
   ultimate fallback is wrapping the operation in a mutex
   (expand_omp_atomic_mutex).  REGION is the atomic region built
   by build_omp_regions_1 ().  */

static void
expand_omp_atomic (struct omp_region *region)
{
  basic_block load_bb = region->entry, store_bb = region->exit;
  gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
  tree loaded_val = gimple_omp_atomic_load_lhs (load);
  tree addr = gimple_omp_atomic_load_rhs (load);
  tree stored_val = gimple_omp_atomic_store_val (store);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  HOST_WIDE_INT index;

  /* Make sure the type is one of the supported sizes.  */
  index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
  index = exact_log2 (index);
  if (index >= 0 && index <= 4)
    {
      unsigned int align = TYPE_ALIGN_UNIT (type);

      /* __sync builtins require strict data alignment.  */
      if (exact_log2 (align) >= index)
	{
	  /* When possible, use specialized atomic update functions.  */
	  if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
	      && store_bb == single_succ (load_bb))
	    {
	      if (expand_omp_atomic_fetch_op (load_bb, addr,
					      loaded_val, stored_val, index))
		return;
	    }

	  /* If we don't have specialized __sync builtins, try and implement
	     as a compare and swap loop.  */
	  if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
					  loaded_val, stored_val, index))
	    return;
	}
    }

  /* The ultimate fallback is wrapping the operation in a mutex.  */
  expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
}

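/* Illustrative only: how the three strategies line up for a few operand
   types on a typical 64-bit target with __sync support up to 8 bytes
   (outcomes are target- and alignment-dependent):

     int i;    i += 1;	  -> expand_omp_atomic_fetch_op (__sync_fetch_and_add)
     float f;  f += 1.0f; -> expand_omp_atomic_pipeline (CAS loop on the bits)
     long double l; l += 1.0L;
			  -> expand_omp_atomic_mutex (no __sync builtin of
			     that size, or the alignment test fails)  */
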

/* Expand the parallel region tree rooted at REGION.  Expansion
   proceeds in depth-first order.  Innermost regions are expanded
   first.  This way, parallel regions that require a new function to
   be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
   internal dependencies in their body.  */

static void
expand_omp (struct omp_region *region)
{
  while (region)
    {
      location_t saved_location;

      /* First, determine whether this is a combined parallel+workshare
	 region.  */
      if (region->type == GIMPLE_OMP_PARALLEL)
	determine_parallel_type (region);

      if (region->inner)
	expand_omp (region->inner);

      saved_location = input_location;
      if (gimple_has_location (last_stmt (region->entry)))
	input_location = gimple_location (last_stmt (region->entry));

      switch (region->type)
	{
	case GIMPLE_OMP_PARALLEL:
	case GIMPLE_OMP_TASK:
	  expand_omp_taskreg (region);
	  break;

	case GIMPLE_OMP_FOR:
	  expand_omp_for (region);
	  break;

	case GIMPLE_OMP_SECTIONS:
	  expand_omp_sections (region);
	  break;

	case GIMPLE_OMP_SECTION:
	  /* Individual omp sections are handled together with their
	     parent GIMPLE_OMP_SECTIONS region.  */
	  break;

	case GIMPLE_OMP_SINGLE:
	  expand_omp_single (region);
	  break;

	case GIMPLE_OMP_MASTER:
	case GIMPLE_OMP_ORDERED:
	case GIMPLE_OMP_CRITICAL:
	  expand_omp_synch (region);
	  break;

	case GIMPLE_OMP_ATOMIC_LOAD:
	  expand_omp_atomic (region);
	  break;

	default:
	  gcc_unreachable ();
	}

      input_location = saved_location;
      region = region->next;
    }
}


/* Helper for build_omp_regions.  Scan the dominator tree starting at
   block BB.  PARENT is the region that contains BB.  If SINGLE_TREE is
   true, the function ends once a single tree is built (otherwise, whole
   forest of OMP constructs may be built).  */

static void
build_omp_regions_1 (basic_block bb, struct omp_region *parent,
		     bool single_tree)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  basic_block son;

  gsi = gsi_last_bb (bb);
  if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
    {
      struct omp_region *region;
      enum gimple_code code;

      stmt = gsi_stmt (gsi);
      code = gimple_code (stmt);
      if (code == GIMPLE_OMP_RETURN)
	{
	  /* STMT is the return point out of region PARENT.  Mark it
	     as the exit point and make PARENT the immediately
	     enclosing region.  */
	  gcc_assert (parent);
	  region = parent;
	  region->exit = bb;
	  parent = parent->outer;
	}
      else if (code == GIMPLE_OMP_ATOMIC_STORE)
	{
	  /* GIMPLE_OMP_ATOMIC_STORE is analogous to
	     GIMPLE_OMP_RETURN, but matches with
	     GIMPLE_OMP_ATOMIC_LOAD.  */
	  gcc_assert (parent);
	  gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
	  region = parent;
	  region->exit = bb;
	  parent = parent->outer;
	}

      else if (code == GIMPLE_OMP_CONTINUE)
	{
	  gcc_assert (parent);
	  parent->cont = bb;
	}
      else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
	{
	  /* GIMPLE_OMP_SECTIONS_SWITCH is part of
	     GIMPLE_OMP_SECTIONS, and we do nothing for it.  */
	  ;
	}
      else
	{
	  /* Otherwise, this directive becomes the parent for a new
	     region.  */
	  region = new_omp_region (bb, code, parent);
	  parent = region;
	}
    }

  if (single_tree && !parent)
    return;

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    build_omp_regions_1 (son, parent, single_tree);
}

/* Builds the tree of OMP regions rooted at ROOT, storing it to
   root_omp_region.  */

static void
build_omp_regions_root (basic_block root)
{
  gcc_assert (root_omp_region == NULL);
  build_omp_regions_1 (root, NULL, true);
  gcc_assert (root_omp_region != NULL);
}

/* Expands omp construct (and its subconstructs) starting in HEAD.  */

void
omp_expand_local (basic_block head)
{
  build_omp_regions_root (head);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nOMP region tree\n\n");
      dump_omp_region (dump_file, root_omp_region, 0);
      fprintf (dump_file, "\n");
    }

  remove_exit_barriers (root_omp_region);
  expand_omp (root_omp_region);

  free_omp_regions ();
}

/* Scan the CFG and build a tree of OMP regions.  The root of the
   resulting forest is stored in root_omp_region.  */

static void
build_omp_regions (void)
{
  gcc_assert (root_omp_region == NULL);
  calculate_dominance_info (CDI_DOMINATORS);
  build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
}

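/* Illustrative only: for a function body such as

     #pragma omp parallel
     {
       #pragma omp for
       for (i = 0; i < n; i++) ...
       #pragma omp single
       ...
     }

   the scan above produces a region tree along the lines of

     parallel
       +-- for
       +-- single

   where each region records its entry, exit and (for loops) continue
   blocks; siblings are chained through region->next and children hang
   off region->inner.  */
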
/* Main entry point for expanding OMP-GIMPLE into runtime calls.  */

static unsigned int
execute_expand_omp (void)
{
  build_omp_regions ();

  if (!root_omp_region)
    return 0;

  if (dump_file)
    {
      fprintf (dump_file, "\nOMP region tree\n\n");
      dump_omp_region (dump_file, root_omp_region, 0);
      fprintf (dump_file, "\n");
    }

  remove_exit_barriers (root_omp_region);

  expand_omp (root_omp_region);

  cleanup_tree_cfg ();

  free_omp_regions ();

  return 0;
}

/* OMP expansion -- the default pass, run before creation of SSA form.  */

static bool
gate_expand_omp (void)
{
  return (flag_openmp != 0 && !seen_error ());
}

struct gimple_opt_pass pass_expand_omp =
{
 {
  GIMPLE_PASS,
  "ompexp",				/* name */
  gate_expand_omp,			/* gate */
  execute_expand_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
\f
/* Routines to lower OpenMP directives into OMP-GIMPLE.  */

/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  unsigned i, len;
  gimple stmt, new_stmt, bind, t;
  gimple_seq ilist, dlist, olist, new_body, body;
  struct gimplify_ctx gctx;

  stmt = gsi_stmt (*gsi_p);

  push_gimplify_context (&gctx);

  dlist = NULL;
  ilist = NULL;
  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx);

  tgsi = gsi_start (gimple_omp_body (stmt));
  for (len = 0; !gsi_end_p (tgsi); len++, gsi_next (&tgsi))
    continue;

  tgsi = gsi_start (gimple_omp_body (stmt));
  body = NULL;
  for (i = 0; i < len; i++, gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      gimple_seq_add_stmt (&body, sec_start);

      lower_omp (gimple_omp_body (sec_start), sctx);
      gimple_seq_add_seq (&body, gimple_omp_body (sec_start));
      gimple_omp_set_body (sec_start, NULL);

      if (i == len - 1)
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &l, ctx);
	  gimple_seq_add_seq (&body, l);
	  gimple_omp_section_set_last (sec_start);
	}

      gimple_seq_add_stmt (&body, gimple_build_omp_return (false));
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  control = create_tmp_var (unsigned_type_node, ".section");
  t = gimple_build_omp_continue (control, control);
  gimple_omp_sections_set_control (stmt, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  t = gimple_build_omp_return
	(!!find_omp_clause (gimple_omp_sections_clauses (stmt),
			    OMP_CLAUSE_NOWAIT));
  gimple_seq_add_stmt (&new_body, t);

  gimple_bind_set_body (new_stmt, new_body);
  gimple_omp_set_body (stmt, NULL);

  gsi_replace (gsi_p, new_stmt, true);
}

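/* Illustrative only: after this lowering, a two-section construct is
   roughly shaped like (schematic, not actual dump output):

     <ilist: input clause setup>
     GIMPLE_OMP_SECTIONS <clauses, control var .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind {
       GIMPLE_OMP_SECTION      ...body 1...  GIMPLE_OMP_RETURN (nowait)
       GIMPLE_OMP_SECTION last ...body 2...  <lastprivate code>
					     GIMPLE_OMP_RETURN (nowait)
     }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reductions>  <dlist: destructors>
     GIMPLE_OMP_RETURN (nowait iff the clause is present)

   The dispatch switch itself is only materialized later, by
   expand_omp_sections.  */
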
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

   FIXME.  It may be better to delay expanding the logic of this until
   pass_expand_omp.  The expanded logic may make the job more difficult
   for a synchronization analysis pass.  */

static void
lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
{
  location_t loc = gimple_location (single_stmt);
  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple call, cond;
  tree lhs, decl;

  decl = built_in_decls[BUILT_IN_GOMP_SINGLE_START];
  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
  call = gimple_build_call (decl, 0);
  gimple_call_set_lhs (call, lhs);
  gimple_seq_add_stmt (pre_p, call);

  cond = gimple_build_cond (EQ_EXPR, lhs,
			    fold_convert_loc (loc, TREE_TYPE (lhs),
					      boolean_true_node),
			    tlabel, flabel);
  gimple_seq_add_stmt (pre_p, cond);
  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
}


/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

   FIXME.  It may be better to delay expanding the logic of this until
   pass_expand_omp.  The expanded logic may make the job more difficult
   for a synchronization analysis pass.  */

static void
lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  t = build_call_expr_loc (loc,
			   built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START], 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  t = build_call_expr_loc (loc, built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END],
			   1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}


/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
  gimple_seq bind_body, dlist;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);

  bind_body = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx);
  lower_omp (gimple_omp_body (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  t = gimple_build_omp_return
	(!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
			    OMP_CLAUSE_NOWAIT));
  gimple_seq_add_stmt (&bind_body, t);

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, bind_body, block);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  gsi_replace (gsi_p, bind, true);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}

/* Expand code for an OpenMP master directive.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x;
  gimple stmt = gsi_stmt (*gsi_p), bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
			    block);

  x = build_call_expr_loc (loc, built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  gsi_replace (gsi_p, bind, true);
}

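/* Illustrative only: a user-level sketch of the shape produced above.
   The function name is hypothetical; omp_get_thread_num is the real
   OpenMP runtime call the lowering emits.  */
#if 0
extern int omp_get_thread_num (void);

static void
master_lowering_sketch (void)
{
  if (omp_get_thread_num () == 0)
    {
      /* ... lowered body of the master region ...  */
    }
  /* No barrier: the region exit above is built with nowait set.  */
}
#endif
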

/* Expand code for an OpenMP ordered directive.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple stmt = gsi_stmt (*gsi_p), bind, x;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
			    block);

  x = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ORDERED_START], 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp (gimple_omp_body (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  x = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ORDERED_END], 0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  gsi_replace (gsi_p, bind, true);
}


/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But the NAMED case
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

static GTY((param1_is (tree), param2_is (tree)))
  splay_tree critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gimple stmt = gsi_stmt (*gsi_p), bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;
  struct gimplify_ctx gctx;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;
      splay_tree_node n;

      if (!critical_name_mutexes)
	critical_name_mutexes
	  = splay_tree_new_ggc (splay_tree_compare_pointers,
				ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
				ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);

      n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node, NULL);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  varpool_finalize_decl (decl);

	  splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
			     (splay_tree_value) decl);
	}
      else
	decl = (tree) n->value;

      lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_START];
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_END];
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_START];
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_END];
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context (&gctx);

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt), block);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  gsi_replace (gsi_p, bind, true);
}

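/* Illustrative only: two translation units each containing

     #pragma omp critical (io)
     ...

   both reference a common symbol named ".gomp_critical_user_io" (built
   above with ACONCAT), so GOMP_critical_name_start/end serialize across
   the whole program.  An unnamed critical instead uses the single
   global lock behind GOMP_critical_start/end.  */
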

/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (host_integerp (fd->loop.step, 0))
    {
      HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && host_integerp (fd->loop.n2, 0)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}

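/* Illustrative only: for

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++)
       x = f (i);

   cond_code LT_EXPR is inverted to GE_EXPR and, because the step is the
   constant 1, strengthened to EQ_EXPR: after its chunk, each thread
   tests (i == n), which is true exactly for the thread that executed
   the sequentially last iteration, and only that thread copies its
   private x back to the original variable.  */
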
/* Lower code for an OpenMP loop directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd;
  gimple stmt = gsi_stmt (*gsi_p), new_stmt;
  gimple_seq omp_for_body, body, dlist;
  size_t i;
  struct gimplify_ctx gctx;

  push_gimplify_context (&gctx);

  lower_omp (gimple_omp_for_pre_body (stmt), ctx);
  lower_omp (gimple_omp_body (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
      gimple_bind_append_vars (new_stmt, vars);
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx);
  gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
    }

  /* Once lowered, extract the bounds and clauses.  */
  extract_omp_for_data (stmt, &fd, NULL);

  lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);

  gimple_seq_add_stmt (&body, stmt);
  gimple_seq_add_seq (&body, gimple_omp_body (stmt));

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
  gimple_seq_add_seq (&body, dlist);

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
  gsi_replace (gsi_p, new_stmt, true);
}

/* Callback for walk_stmts.  Check if the current statement only contains
   GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */

static tree
check_combined_parallel (gimple_stmt_iterator *gsi_p,
			 bool *handled_ops_p,
			 struct walk_stmt_info *wi)
{
  int *info = (int *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
      *info = *info == 0 ? 1 : -1;
      break;
    default:
      *info = -1;
      break;
    }
  return NULL;
}

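/* Illustrative only: the walk above leaves *INFO == 1 exactly when the
   parallel body is a single workshare, e.g.

     #pragma omp parallel
     #pragma omp for
     for (i = 0; i < n; i++) ...

   which lets the expander use one of libgomp's combined entry points
   (e.g. GOMP_parallel_loop_static_start) instead of a parallel region
   wrapping a separate loop start.  */
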
a68ab351
JJ
6166struct omp_taskcopy_context
6167{
6168 /* This field must be at the beginning, as we do "inheritance": Some
6169 callback functions for tree-inline.c (e.g., omp_copy_decl)
6170 receive a copy_body_data pointer that is up-casted to an
6171 omp_context pointer. */
6172 copy_body_data cb;
6173 omp_context *ctx;
6174};
6175
6176static tree
6177task_copyfn_copy_decl (tree var, copy_body_data *cb)
6178{
6179 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
6180
6181 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
6182 return create_tmp_var (TREE_TYPE (var), NULL);
6183
6184 return var;
6185}
6186
6187static tree
6188task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
6189{
6190 tree name, new_fields = NULL, type, f;
6191
6192 type = lang_hooks.types.make_type (RECORD_TYPE);
6193 name = DECL_NAME (TYPE_NAME (orig_type));
c2255bc4
AH
6194 name = build_decl (gimple_location (tcctx->ctx->stmt),
6195 TYPE_DECL, name, type);
a68ab351
JJ
6196 TYPE_NAME (type) = name;
6197
6198 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
6199 {
6200 tree new_f = copy_node (f);
6201 DECL_CONTEXT (new_f) = type;
6202 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
6203 TREE_CHAIN (new_f) = new_fields;
726a989a
RB
6204 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6205 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
6206 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
6207 &tcctx->cb, NULL);
a68ab351
JJ
6208 new_fields = new_f;
6209 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
6210 }
6211 TYPE_FIELDS (type) = nreverse (new_fields);
6212 layout_type (type);
6213 return type;
6214}
6215
6216/* Create task copyfn. */
6217
6218static void
726a989a 6219create_task_copyfn (gimple task_stmt, omp_context *ctx)
a68ab351
JJ
6220{
6221 struct function *child_cfun;
6222 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
6223 tree record_type, srecord_type, bind, list;
6224 bool record_needs_remap = false, srecord_needs_remap = false;
6225 splay_tree_node n;
6226 struct omp_taskcopy_context tcctx;
d406b663 6227 struct gimplify_ctx gctx;
db3927fb 6228 location_t loc = gimple_location (task_stmt);
a68ab351 6229
726a989a 6230 child_fn = gimple_omp_task_copy_fn (task_stmt);
a68ab351
JJ
6231 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
6232 gcc_assert (child_cfun->cfg == NULL);
a68ab351
JJ
6233 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
6234
6235 /* Reset DECL_CONTEXT on function arguments. */
910ad8de 6236 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
a68ab351
JJ
6237 DECL_CONTEXT (t) = child_fn;
6238
6239 /* Populate the function. */
d406b663 6240 push_gimplify_context (&gctx);
a68ab351
JJ
6241 current_function_decl = child_fn;
6242
6243 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6244 TREE_SIDE_EFFECTS (bind) = 1;
6245 list = NULL;
6246 DECL_SAVED_TREE (child_fn) = bind;
726a989a 6247 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
a68ab351
JJ
6248
6249 /* Remap src and dst argument types if needed. */
6250 record_type = ctx->record_type;
6251 srecord_type = ctx->srecord_type;
910ad8de 6252 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
6253 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6254 {
6255 record_needs_remap = true;
6256 break;
6257 }
910ad8de 6258 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
6259 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
6260 {
6261 srecord_needs_remap = true;
6262 break;
6263 }
6264
6265 if (record_needs_remap || srecord_needs_remap)
6266 {
6267 memset (&tcctx, '\0', sizeof (tcctx));
6268 tcctx.cb.src_fn = ctx->cb.src_fn;
6269 tcctx.cb.dst_fn = child_fn;
fe660d7b
MJ
6270 tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
6271 gcc_checking_assert (tcctx.cb.src_node);
a68ab351
JJ
6272 tcctx.cb.dst_node = tcctx.cb.src_node;
6273 tcctx.cb.src_cfun = ctx->cb.src_cfun;
6274 tcctx.cb.copy_decl = task_copyfn_copy_decl;
1d65f45c 6275 tcctx.cb.eh_lp_nr = 0;
a68ab351
JJ
6276 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
6277 tcctx.cb.decl_map = pointer_map_create ();
6278 tcctx.ctx = ctx;
6279
6280 if (record_needs_remap)
6281 record_type = task_copyfn_remap_type (&tcctx, record_type);
6282 if (srecord_needs_remap)
6283 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
6284 }
6285 else
6286 tcctx.cb.decl_map = NULL;
6287
6288 push_cfun (child_cfun);
6289
6290 arg = DECL_ARGUMENTS (child_fn);
6291 TREE_TYPE (arg) = build_pointer_type (record_type);
910ad8de 6292 sarg = DECL_CHAIN (arg);
a68ab351
JJ
6293 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
6294
6295 /* First pass: initialize temporaries used in record_type and srecord_type
6296 sizes and field offsets. */
6297 if (tcctx.cb.decl_map)
726a989a 6298 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
6299 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
6300 {
6301 tree *p;
6302
6303 decl = OMP_CLAUSE_DECL (c);
6304 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
6305 if (p == NULL)
6306 continue;
6307 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6308 sf = (tree) n->value;
6309 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 6310 src = build_simple_mem_ref_loc (loc, sarg);
a68ab351 6311 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
726a989a 6312 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
a68ab351
JJ
6313 append_to_statement_list (t, &list);
6314 }
6315
6316 /* Second pass: copy shared var pointers and copy construct non-VLA
6317 firstprivate vars. */
726a989a 6318 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
6319 switch (OMP_CLAUSE_CODE (c))
6320 {
6321 case OMP_CLAUSE_SHARED:
6322 decl = OMP_CLAUSE_DECL (c);
6323 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6324 if (n == NULL)
6325 break;
6326 f = (tree) n->value;
6327 if (tcctx.cb.decl_map)
6328 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6329 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6330 sf = (tree) n->value;
6331 if (tcctx.cb.decl_map)
6332 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 6333 src = build_simple_mem_ref_loc (loc, sarg);
a68ab351 6334 src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
70f34814 6335 dst = build_simple_mem_ref_loc (loc, arg);
a68ab351 6336 dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
726a989a 6337 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
6338 append_to_statement_list (t, &list);
6339 break;
6340 case OMP_CLAUSE_FIRSTPRIVATE:
6341 decl = OMP_CLAUSE_DECL (c);
6342 if (is_variable_sized (decl))
6343 break;
6344 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
6345 if (n == NULL)
6346 break;
6347 f = (tree) n->value;
6348 if (tcctx.cb.decl_map)
6349 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
6350 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
6351 if (n != NULL)
6352 {
6353 sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	    if (use_pointer_for_field (decl, NULL) || is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
	t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    pointer_map_destroy (tcctx.cb.decl_map);
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
  current_function_decl = ctx->cb.src_fn;
}

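/* An illustrative sketch (editor's addition; the variable names are made
   up).  For a task such as

       int n = ...;
       char buf[n];
     #pragma omp task firstprivate (buf)
       use (buf);

   the copy function built above receives the parent's outgoing record
   (SARG) and the task's own record (ARG).  It copy-constructs each non-VLA
   firstprivate field, and for a VLA like BUF it additionally stores the
   address of the fresh copy back into the record's pointer field, so that
   the task body's DECL_VALUE_EXPR indirection resolves to the private copy
   rather than to the parent's storage.  */
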
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple stmt = gsi_stmt (*gsi_p);
  gimple par_bind, bind;
  gimple_seq par_body, olist, ilist, par_olist, par_ilist, new_body;
  struct gimplify_ctx gctx;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
  par_body = gimple_bind_body (par_bind);
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  if (ctx->srecord_type)
    create_task_copyfn (stmt, ctx);

  push_gimplify_context (&gctx);

  par_olist = NULL;
  par_ilist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
  lower_omp (par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_olist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  olist = NULL;
  ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gimple_bind_add_stmt (bind, stmt);
  if (ilist || olist)
    {
      gimple_seq_add_stmt (&ilist, bind);
      gimple_seq_add_seq (&ilist, olist);
      bind = gimple_build_bind (NULL, ilist, NULL);
    }

  gsi_replace (gsi_p, bind, true);

  pop_gimplify_context (NULL);
}
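
/* For illustration (editor's addition): the check_combined_parallel walk
   above recognizes parallel bodies consisting of exactly one worksharing
   construct, e.g.

     #pragma omp parallel
     #pragma omp for
       for (i = 0; i < n; i++)
	 ...

   and marks the parallel as combined.  This lets the later expansion pass
   use libgomp's fused entry points (e.g. the GOMP_parallel_loop_* family)
   rather than a generic parallel launch around a separate workshare.  */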

/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OpenMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
  return NULL_TREE;
}
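
/* Editor's illustration: the DECL_VALUE_EXPR case above catches, among
   others, variable-length arrays.  A decl such as "char buf[n]" carries a
   value expression of the form *buf.N (the pointer decl's name is made up
   here), so once that underlying pointer has been remapped, a statement
   mentioning BUF is no longer valid gimple and must be regimplified.  The
   task_shared_vars test does the same for decls that were rewritten to
   live in a task's shared-data record.  */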

/* Lower the statement pointed to by GSI_P in OpenMP context CTX,
   dispatching on the gimple statement code.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OpenMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      if ((ctx || task_shared_vars)
	  && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
			 ctx ? NULL : &wi, NULL)
	      || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
			    ctx ? NULL : &wi, NULL)))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler (stmt), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval (stmt), ctx);
      lower_omp (gimple_try_cleanup (stmt), ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body (stmt), ctx);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	gimple_regimplify_operands (stmt, gsi_p);
      break;
    }
}

/* Lower each statement in the gimple sequence BODY in OpenMP context CTX,
   restoring input_location afterwards.  */

static void
lower_omp (gimple_seq body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  input_location = saved_location;
}
\f
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But there is nothing to do unless -fopenmp is given.  */
  if (flag_openmp == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);
  scan_omp (body, NULL);
  gcc_assert (taskreg_nesting_level == 0);

  if (all_contexts->root)
    {
      struct gimplify_ctx gctx;

      if (task_shared_vars)
	push_gimplify_context (&gctx);
      lower_omp (body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  return 0;
}

struct gimple_opt_pass pass_lower_omp =
{
 {
  GIMPLE_PASS,
  "omplower",				/* name */
  NULL,					/* gate */
  execute_lower_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lomp,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
\f
/* The following is a utility to diagnose OpenMP structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple branch_ctx, gimple label_ctx)
{
  if (label_ctx == branch_ctx)
    return false;

  /* Previously we kept track of the label's entire context in
     diagnose_sb_[12] so we could traverse it and issue a correct "exit" or
     "enter" error message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there
     is no easy counterpart in gimple tuples.  It seems like far too much
     work for issuing exit/enter error messages.  If someone really misses
     the distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from OpenMP structured block");
  else
    error ("invalid entry to OpenMP structured block");
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to OpenMP structured block");
  else
    /* Otherwise, be vague and lazy, but efficient.  */
    error ("invalid branch to/from an OpenMP structured block");

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple context = (gimple) wi->info;
  gimple inner_context;
  gimple stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple context = (gimple) wi->info;
  splay_tree_node n;
  gimple stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      wi->info = stmt;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_2, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	tree lab = gimple_cond_true_label (stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
	  }
	lab = gimple_cond_false_label (stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Diagnose invalid branches into or out of OpenMP structured blocks in
   the body of the current function.  */

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq (body, diagnose_sb_2, NULL, &wi);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}
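
/* Editor's note on the two-walk design above: a single forward walk would
   miss forward branches, because diagnose_sb_2 can only judge a branch
   once the destination label's context is in ALL_LABELS.  E.g. in

       goto fwd;
     #pragma omp single
       { fwd:; }

   the goto is visited before "fwd" would have been recorded; the lookup
   would find nothing and diagnose_sb_0 would compare NULL with NULL and
   report no error.  Hence pass 1 records every label first.  */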

static bool
gate_diagnose_omp_blocks (void)
{
  return flag_openmp != 0;
}

struct gimple_opt_pass pass_diagnose_omp_blocks =
{
 {
  GIMPLE_PASS,
  "*diagnose_omp_blocks",		/* name */
  gate_diagnose_omp_blocks,		/* gate */
  diagnose_omp_structured_block_errors,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
 }
};

#include "gt-omp-low.h"