]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/omp-low.c
c-common.c, [...]: Replace tree_low_cst (..., 0) with tree_to_shwi throughout.
[thirdparty/gcc.git] / gcc / omp-low.c
CommitLineData
953ff289
DN
1/* Lowering pass for OpenMP directives. Converts OpenMP directives
2 into explicit calls to the runtime library (libgomp) and data
3 marshalling to implement data sharing and copying clauses.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
d1e082c2 6 Copyright (C) 2005-2013 Free Software Foundation, Inc.
953ff289
DN
7
8This file is part of GCC.
9
10GCC is free software; you can redistribute it and/or modify it under
11the terms of the GNU General Public License as published by the Free
9dcd6f09 12Software Foundation; either version 3, or (at your option) any later
953ff289
DN
13version.
14
15GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16WARRANTY; without even the implied warranty of MERCHANTABILITY or
17FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18for more details.
19
20You should have received a copy of the GNU General Public License
9dcd6f09
NC
21along with GCC; see the file COPYING3. If not see
22<http://www.gnu.org/licenses/>. */
953ff289
DN
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "tm.h"
28#include "tree.h"
29#include "rtl.h"
18f429e2 30#include "gimple.h"
45b0be94 31#include "gimplify.h"
5be5c238 32#include "gimple-iterator.h"
18f429e2 33#include "gimplify-me.h"
5be5c238 34#include "gimple-walk.h"
726a989a 35#include "tree-iterator.h"
953ff289
DN
36#include "tree-inline.h"
37#include "langhooks.h"
1da2ed5f 38#include "diagnostic-core.h"
442b4905
AM
39#include "gimple-ssa.h"
40#include "cgraph.h"
41#include "tree-cfg.h"
42#include "tree-phinodes.h"
43#include "ssa-iterators.h"
44#include "tree-ssanames.h"
45#include "tree-into-ssa.h"
46#include "tree-dfa.h"
7a300452 47#include "tree-ssa.h"
953ff289
DN
48#include "flags.h"
49#include "function.h"
50#include "expr.h"
953ff289
DN
51#include "tree-pass.h"
52#include "ggc.h"
53#include "except.h"
6be42dd4 54#include "splay-tree.h"
a509ebb5
RL
55#include "optabs.h"
56#include "cfgloop.h"
74bf76ed 57#include "target.h"
0645c1a2 58#include "omp-low.h"
4484a35a
AM
59#include "gimple-low.h"
60#include "tree-cfgcleanup.h"
1fe37220 61#include "tree-nested.h"
953ff289 62
726a989a 63
b8698a0f 64/* Lowering of OpenMP parallel and workshare constructs proceeds in two
953ff289
DN
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
c0220ea4 68 re-gimplifying things when variables have been replaced with complex
953ff289
DN
69 expressions.
70
7ebaeab5
DN
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for parallel regions which are then moved to a new
73 function, to be invoked by the thread library. */
953ff289 74
0645c1a2
AM
75/* Parallel region information. Every parallel and workshare
76 directive is enclosed between two markers, the OMP_* directive
77 and a corresponding OMP_RETURN statement. */
78
79struct omp_region
80{
81 /* The enclosing region. */
82 struct omp_region *outer;
83
84 /* First child region. */
85 struct omp_region *inner;
86
87 /* Next peer region. */
88 struct omp_region *next;
89
90 /* Block containing the omp directive as its last stmt. */
91 basic_block entry;
92
93 /* Block containing the OMP_RETURN as its last stmt. */
94 basic_block exit;
95
96 /* Block containing the OMP_CONTINUE as its last stmt. */
97 basic_block cont;
98
99 /* If this is a combined parallel+workshare region, this is a list
100 of additional arguments needed by the combined parallel+workshare
101 library call. */
102 vec<tree, va_gc> *ws_args;
103
104 /* The code for the omp directive of this region. */
105 enum gimple_code type;
106
107 /* Schedule kind, only used for OMP_FOR type regions. */
108 enum omp_clause_schedule_kind sched_kind;
109
110 /* True if this is a combined parallel+workshare region. */
111 bool is_combined_parallel;
112};
113
953ff289
DN
114/* Context structure. Used to store information about each parallel
115 directive in the code. */
116
117typedef struct omp_context
118{
119 /* This field must be at the beginning, as we do "inheritance": Some
120 callback functions for tree-inline.c (e.g., omp_copy_decl)
121 receive a copy_body_data pointer that is up-casted to an
122 omp_context pointer. */
123 copy_body_data cb;
124
125 /* The tree of contexts corresponding to the encountered constructs. */
126 struct omp_context *outer;
726a989a 127 gimple stmt;
953ff289 128
b8698a0f 129 /* Map variables to fields in a structure that allows communication
953ff289
DN
130 between sending and receiving threads. */
131 splay_tree field_map;
132 tree record_type;
133 tree sender_decl;
134 tree receiver_decl;
135
a68ab351
JJ
136 /* These are used just by task contexts, if task firstprivate fn is
137 needed. srecord_type is used to communicate from the thread
138 that encountered the task construct to task firstprivate fn,
139 record_type is allocated by GOMP_task, initialized by task firstprivate
140 fn and passed to the task body fn. */
141 splay_tree sfield_map;
142 tree srecord_type;
143
953ff289
DN
144 /* A chain of variables to add to the top-level block surrounding the
145 construct. In the case of a parallel, this is in the child function. */
146 tree block_vars;
147
acf0174b
JJ
148 /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
149 barriers should jump to during omplower pass. */
150 tree cancel_label;
151
953ff289
DN
152 /* What to do with variables with implicitly determined sharing
153 attributes. */
154 enum omp_clause_default_kind default_kind;
155
156 /* Nesting depth of this context. Used to beautify error messages re
157 invalid gotos. The outermost ctx is depth 1, with depth 0 being
158 reserved for the main body of the function. */
159 int depth;
160
953ff289
DN
161 /* True if this parallel directive is nested within another. */
162 bool is_nested;
acf0174b
JJ
163
164 /* True if this construct can be cancelled. */
165 bool cancellable;
953ff289
DN
166} omp_context;
167
168
a68ab351
JJ
169struct omp_for_data_loop
170{
171 tree v, n1, n2, step;
172 enum tree_code cond_code;
173};
174
50674e96 175/* A structure describing the main elements of a parallel loop. */
953ff289 176
50674e96 177struct omp_for_data
953ff289 178{
a68ab351 179 struct omp_for_data_loop loop;
726a989a
RB
180 tree chunk_size;
181 gimple for_stmt;
a68ab351
JJ
182 tree pre, iter_type;
183 int collapse;
953ff289
DN
184 bool have_nowait, have_ordered;
185 enum omp_clause_schedule_kind sched_kind;
a68ab351 186 struct omp_for_data_loop *loops;
953ff289
DN
187};
188
50674e96 189
953ff289 190static splay_tree all_contexts;
a68ab351 191static int taskreg_nesting_level;
acf0174b 192static int target_nesting_level;
0645c1a2 193static struct omp_region *root_omp_region;
a68ab351 194static bitmap task_shared_vars;
953ff289 195
26127932 196static void scan_omp (gimple_seq *, omp_context *);
726a989a
RB
197static tree scan_omp_1_op (tree *, int *, void *);
198
199#define WALK_SUBSTMTS \
200 case GIMPLE_BIND: \
201 case GIMPLE_TRY: \
202 case GIMPLE_CATCH: \
203 case GIMPLE_EH_FILTER: \
0a35513e 204 case GIMPLE_TRANSACTION: \
726a989a
RB
205 /* The sub-statements for these should be walked. */ \
206 *handled_ops_p = false; \
207 break;
208
209/* Convenience function for calling scan_omp_1_op on tree operands. */
210
211static inline tree
212scan_omp_op (tree *tp, omp_context *ctx)
213{
214 struct walk_stmt_info wi;
215
216 memset (&wi, 0, sizeof (wi));
217 wi.info = ctx;
218 wi.want_locations = true;
219
220 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
221}
222
355a7673 223static void lower_omp (gimple_seq *, omp_context *);
8ca5b2a2
JJ
224static tree lookup_decl_in_outer_ctx (tree, omp_context *);
225static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
953ff289
DN
226
227/* Find an OpenMP clause of type KIND within CLAUSES. */
228
917948d3 229tree
e0c68ce9 230find_omp_clause (tree clauses, enum omp_clause_code kind)
953ff289
DN
231{
232 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
aaf46ef9 233 if (OMP_CLAUSE_CODE (clauses) == kind)
953ff289
DN
234 return clauses;
235
236 return NULL_TREE;
237}
238
239/* Return true if CTX is for an omp parallel. */
240
241static inline bool
242is_parallel_ctx (omp_context *ctx)
243{
726a989a 244 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
953ff289
DN
245}
246
50674e96 247
a68ab351
JJ
248/* Return true if CTX is for an omp task. */
249
250static inline bool
251is_task_ctx (omp_context *ctx)
252{
726a989a 253 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
a68ab351
JJ
254}
255
256
257/* Return true if CTX is for an omp parallel or omp task. */
258
259static inline bool
260is_taskreg_ctx (omp_context *ctx)
261{
726a989a
RB
262 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
263 || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
a68ab351
JJ
264}
265
266
50674e96 267/* Return true if REGION is a combined parallel+workshare region. */
953ff289
DN
268
269static inline bool
50674e96
DN
270is_combined_parallel (struct omp_region *region)
271{
272 return region->is_combined_parallel;
273}
274
275
276/* Extract the header elements of parallel loop FOR_STMT and store
277 them into *FD. */
278
279static void
726a989a 280extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
a68ab351 281 struct omp_for_data_loop *loops)
50674e96 282{
a68ab351
JJ
283 tree t, var, *collapse_iter, *collapse_count;
284 tree count = NULL_TREE, iter_type = long_integer_type_node;
285 struct omp_for_data_loop *loop;
286 int i;
287 struct omp_for_data_loop dummy_loop;
db3927fb 288 location_t loc = gimple_location (for_stmt);
c02065fc 289 bool simd = gimple_omp_for_kind (for_stmt) & GF_OMP_FOR_KIND_SIMD;
acf0174b
JJ
290 bool distribute = gimple_omp_for_kind (for_stmt)
291 == GF_OMP_FOR_KIND_DISTRIBUTE;
50674e96
DN
292
293 fd->for_stmt = for_stmt;
294 fd->pre = NULL;
726a989a 295 fd->collapse = gimple_omp_for_collapse (for_stmt);
a68ab351
JJ
296 if (fd->collapse > 1)
297 fd->loops = loops;
298 else
299 fd->loops = &fd->loop;
50674e96 300
acf0174b
JJ
301 fd->have_nowait = distribute || simd;
302 fd->have_ordered = false;
50674e96
DN
303 fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
304 fd->chunk_size = NULL_TREE;
a68ab351
JJ
305 collapse_iter = NULL;
306 collapse_count = NULL;
50674e96 307
726a989a 308 for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
aaf46ef9 309 switch (OMP_CLAUSE_CODE (t))
50674e96
DN
310 {
311 case OMP_CLAUSE_NOWAIT:
312 fd->have_nowait = true;
313 break;
314 case OMP_CLAUSE_ORDERED:
315 fd->have_ordered = true;
316 break;
317 case OMP_CLAUSE_SCHEDULE:
acf0174b 318 gcc_assert (!distribute);
50674e96
DN
319 fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
320 fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
321 break;
acf0174b
JJ
322 case OMP_CLAUSE_DIST_SCHEDULE:
323 gcc_assert (distribute);
324 fd->chunk_size = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t);
325 break;
a68ab351
JJ
326 case OMP_CLAUSE_COLLAPSE:
327 if (fd->collapse > 1)
328 {
329 collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
330 collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
331 }
50674e96
DN
332 default:
333 break;
334 }
335
a68ab351
JJ
336 /* FIXME: for now map schedule(auto) to schedule(static).
337 There should be analysis to determine whether all iterations
338 are approximately the same amount of work (then schedule(static)
1cbc62c0 339 is best) or if it varies (then schedule(dynamic,N) is better). */
a68ab351
JJ
340 if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
341 {
342 fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
343 gcc_assert (fd->chunk_size == NULL);
344 }
345 gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
50674e96
DN
346 if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
347 gcc_assert (fd->chunk_size == NULL);
348 else if (fd->chunk_size == NULL)
349 {
350 /* We only need to compute a default chunk size for ordered
351 static loops and dynamic loops. */
a68ab351 352 if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
acf0174b 353 || fd->have_ordered)
50674e96
DN
354 fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
355 ? integer_zero_node : integer_one_node;
356 }
a68ab351
JJ
357
358 for (i = 0; i < fd->collapse; i++)
359 {
360 if (fd->collapse == 1)
361 loop = &fd->loop;
362 else if (loops != NULL)
363 loop = loops + i;
364 else
365 loop = &dummy_loop;
366
726a989a 367 loop->v = gimple_omp_for_index (for_stmt, i);
a68ab351
JJ
368 gcc_assert (SSA_VAR_P (loop->v));
369 gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
370 || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
371 var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
726a989a 372 loop->n1 = gimple_omp_for_initial (for_stmt, i);
a68ab351 373
726a989a
RB
374 loop->cond_code = gimple_omp_for_cond (for_stmt, i);
375 loop->n2 = gimple_omp_for_final (for_stmt, i);
a68ab351
JJ
376 switch (loop->cond_code)
377 {
378 case LT_EXPR:
379 case GT_EXPR:
380 break;
c02065fc
AH
381 case NE_EXPR:
382 gcc_assert (gimple_omp_for_kind (for_stmt)
383 == GF_OMP_FOR_KIND_CILKSIMD);
384 break;
a68ab351
JJ
385 case LE_EXPR:
386 if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
5d49b6a7 387 loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, 1);
a68ab351 388 else
db3927fb
AH
389 loop->n2 = fold_build2_loc (loc,
390 PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
a68ab351
JJ
391 build_int_cst (TREE_TYPE (loop->n2), 1));
392 loop->cond_code = LT_EXPR;
393 break;
394 case GE_EXPR:
395 if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
5d49b6a7 396 loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, -1);
a68ab351 397 else
db3927fb
AH
398 loop->n2 = fold_build2_loc (loc,
399 MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
a68ab351
JJ
400 build_int_cst (TREE_TYPE (loop->n2), 1));
401 loop->cond_code = GT_EXPR;
402 break;
403 default:
404 gcc_unreachable ();
405 }
406
726a989a 407 t = gimple_omp_for_incr (for_stmt, i);
a68ab351
JJ
408 gcc_assert (TREE_OPERAND (t, 0) == var);
409 switch (TREE_CODE (t))
410 {
411 case PLUS_EXPR:
a68ab351
JJ
412 loop->step = TREE_OPERAND (t, 1);
413 break;
56099f00
RG
414 case POINTER_PLUS_EXPR:
415 loop->step = fold_convert (ssizetype, TREE_OPERAND (t, 1));
416 break;
a68ab351
JJ
417 case MINUS_EXPR:
418 loop->step = TREE_OPERAND (t, 1);
db3927fb
AH
419 loop->step = fold_build1_loc (loc,
420 NEGATE_EXPR, TREE_TYPE (loop->step),
a68ab351
JJ
421 loop->step);
422 break;
423 default:
424 gcc_unreachable ();
425 }
426
acf0174b
JJ
427 if (simd
428 || (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
429 && !fd->have_ordered))
74bf76ed
JJ
430 {
431 if (fd->collapse == 1)
432 iter_type = TREE_TYPE (loop->v);
433 else if (i == 0
434 || TYPE_PRECISION (iter_type)
435 < TYPE_PRECISION (TREE_TYPE (loop->v)))
436 iter_type
437 = build_nonstandard_integer_type
acf0174b 438 (TYPE_PRECISION (TREE_TYPE (loop->v)), 1);
74bf76ed
JJ
439 }
440 else if (iter_type != long_long_unsigned_type_node)
a68ab351
JJ
441 {
442 if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
443 iter_type = long_long_unsigned_type_node;
444 else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
445 && TYPE_PRECISION (TREE_TYPE (loop->v))
446 >= TYPE_PRECISION (iter_type))
447 {
448 tree n;
449
450 if (loop->cond_code == LT_EXPR)
db3927fb
AH
451 n = fold_build2_loc (loc,
452 PLUS_EXPR, TREE_TYPE (loop->v),
a68ab351
JJ
453 loop->n2, loop->step);
454 else
455 n = loop->n1;
456 if (TREE_CODE (n) != INTEGER_CST
457 || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
458 iter_type = long_long_unsigned_type_node;
459 }
460 else if (TYPE_PRECISION (TREE_TYPE (loop->v))
461 > TYPE_PRECISION (iter_type))
462 {
463 tree n1, n2;
464
465 if (loop->cond_code == LT_EXPR)
466 {
467 n1 = loop->n1;
db3927fb
AH
468 n2 = fold_build2_loc (loc,
469 PLUS_EXPR, TREE_TYPE (loop->v),
a68ab351
JJ
470 loop->n2, loop->step);
471 }
472 else
473 {
db3927fb
AH
474 n1 = fold_build2_loc (loc,
475 MINUS_EXPR, TREE_TYPE (loop->v),
a68ab351
JJ
476 loop->n2, loop->step);
477 n2 = loop->n1;
478 }
479 if (TREE_CODE (n1) != INTEGER_CST
480 || TREE_CODE (n2) != INTEGER_CST
481 || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
482 || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
483 iter_type = long_long_unsigned_type_node;
484 }
485 }
486
487 if (collapse_count && *collapse_count == NULL)
488 {
5a0f4dd3
JJ
489 t = fold_binary (loop->cond_code, boolean_type_node,
490 fold_convert (TREE_TYPE (loop->v), loop->n1),
491 fold_convert (TREE_TYPE (loop->v), loop->n2));
492 if (t && integer_zerop (t))
493 count = build_zero_cst (long_long_unsigned_type_node);
494 else if ((i == 0 || count != NULL_TREE)
495 && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
496 && TREE_CONSTANT (loop->n1)
497 && TREE_CONSTANT (loop->n2)
498 && TREE_CODE (loop->step) == INTEGER_CST)
a68ab351
JJ
499 {
500 tree itype = TREE_TYPE (loop->v);
501
502 if (POINTER_TYPE_P (itype))
96f9265a 503 itype = signed_type_for (itype);
a68ab351 504 t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
db3927fb
AH
505 t = fold_build2_loc (loc,
506 PLUS_EXPR, itype,
507 fold_convert_loc (loc, itype, loop->step), t);
508 t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
509 fold_convert_loc (loc, itype, loop->n2));
510 t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
511 fold_convert_loc (loc, itype, loop->n1));
a68ab351 512 if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
db3927fb
AH
513 t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
514 fold_build1_loc (loc, NEGATE_EXPR, itype, t),
515 fold_build1_loc (loc, NEGATE_EXPR, itype,
516 fold_convert_loc (loc, itype,
517 loop->step)));
a68ab351 518 else
db3927fb
AH
519 t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
520 fold_convert_loc (loc, itype, loop->step));
521 t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
a68ab351 522 if (count != NULL_TREE)
db3927fb
AH
523 count = fold_build2_loc (loc,
524 MULT_EXPR, long_long_unsigned_type_node,
a68ab351
JJ
525 count, t);
526 else
527 count = t;
528 if (TREE_CODE (count) != INTEGER_CST)
529 count = NULL_TREE;
530 }
5a0f4dd3 531 else if (count && !integer_zerop (count))
a68ab351
JJ
532 count = NULL_TREE;
533 }
534 }
535
74bf76ed 536 if (count
acf0174b
JJ
537 && !simd
538 && (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
539 || fd->have_ordered))
a68ab351
JJ
540 {
541 if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
542 iter_type = long_long_unsigned_type_node;
543 else
544 iter_type = long_integer_type_node;
545 }
546 else if (collapse_iter && *collapse_iter != NULL)
547 iter_type = TREE_TYPE (*collapse_iter);
548 fd->iter_type = iter_type;
549 if (collapse_iter && *collapse_iter == NULL)
550 *collapse_iter = create_tmp_var (iter_type, ".iter");
551 if (collapse_count && *collapse_count == NULL)
552 {
553 if (count)
db3927fb 554 *collapse_count = fold_convert_loc (loc, iter_type, count);
a68ab351
JJ
555 else
556 *collapse_count = create_tmp_var (iter_type, ".count");
557 }
558
559 if (fd->collapse > 1)
560 {
561 fd->loop.v = *collapse_iter;
562 fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
563 fd->loop.n2 = *collapse_count;
564 fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
565 fd->loop.cond_code = LT_EXPR;
566 }
50674e96
DN
567}
568
569
570/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
571 is the immediate dominator of PAR_ENTRY_BB, return true if there
572 are no data dependencies that would prevent expanding the parallel
573 directive at PAR_ENTRY_BB as a combined parallel+workshare region.
574
575 When expanding a combined parallel+workshare region, the call to
576 the child function may need additional arguments in the case of
726a989a
RB
577 GIMPLE_OMP_FOR regions. In some cases, these arguments are
578 computed out of variables passed in from the parent to the child
579 via 'struct .omp_data_s'. For instance:
50674e96
DN
580
581 #pragma omp parallel for schedule (guided, i * 4)
582 for (j ...)
583
584 Is lowered into:
585
586 # BLOCK 2 (PAR_ENTRY_BB)
587 .omp_data_o.i = i;
588 #pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)
b8698a0f 589
50674e96
DN
590 # BLOCK 3 (WS_ENTRY_BB)
591 .omp_data_i = &.omp_data_o;
592 D.1667 = .omp_data_i->i;
593 D.1598 = D.1667 * 4;
594 #pragma omp for schedule (guided, D.1598)
595
596 When we outline the parallel region, the call to the child function
597 'bar.omp_fn.0' will need the value D.1598 in its argument list, but
598 that value is computed *after* the call site. So, in principle we
599 cannot do the transformation.
600
601 To see whether the code in WS_ENTRY_BB blocks the combined
602 parallel+workshare call, we collect all the variables used in the
726a989a 603 GIMPLE_OMP_FOR header check whether they appear on the LHS of any
50674e96
DN
604 statement in WS_ENTRY_BB. If so, then we cannot emit the combined
605 call.
606
607 FIXME. If we had the SSA form built at this point, we could merely
608 hoist the code in block 3 into block 2 and be done with it. But at
609 this point we don't have dataflow information and though we could
610 hack something up here, it is really not worth the aggravation. */
611
612static bool
0f900dfa 613workshare_safe_to_combine_p (basic_block ws_entry_bb)
50674e96
DN
614{
615 struct omp_for_data fd;
0f900dfa 616 gimple ws_stmt = last_stmt (ws_entry_bb);
50674e96 617
726a989a 618 if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
50674e96
DN
619 return true;
620
726a989a 621 gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);
50674e96 622
a68ab351
JJ
623 extract_omp_for_data (ws_stmt, &fd, NULL);
624
625 if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
626 return false;
627 if (fd.iter_type != long_integer_type_node)
628 return false;
50674e96
DN
629
630 /* FIXME. We give up too easily here. If any of these arguments
631 are not constants, they will likely involve variables that have
632 been mapped into fields of .omp_data_s for sharing with the child
633 function. With appropriate data flow, it would be possible to
634 see through this. */
a68ab351
JJ
635 if (!is_gimple_min_invariant (fd.loop.n1)
636 || !is_gimple_min_invariant (fd.loop.n2)
637 || !is_gimple_min_invariant (fd.loop.step)
50674e96
DN
638 || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
639 return false;
640
641 return true;
642}
643
644
645/* Collect additional arguments needed to emit a combined
646 parallel+workshare call. WS_STMT is the workshare directive being
647 expanded. */
648
9771b263 649static vec<tree, va_gc> *
acf0174b 650get_ws_args_for (gimple par_stmt, gimple ws_stmt)
50674e96
DN
651{
652 tree t;
db3927fb 653 location_t loc = gimple_location (ws_stmt);
9771b263 654 vec<tree, va_gc> *ws_args;
50674e96 655
726a989a 656 if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
50674e96
DN
657 {
658 struct omp_for_data fd;
acf0174b 659 tree n1, n2;
50674e96 660
a68ab351 661 extract_omp_for_data (ws_stmt, &fd, NULL);
acf0174b
JJ
662 n1 = fd.loop.n1;
663 n2 = fd.loop.n2;
664
665 if (gimple_omp_for_combined_into_p (ws_stmt))
666 {
667 tree innerc
668 = find_omp_clause (gimple_omp_parallel_clauses (par_stmt),
669 OMP_CLAUSE__LOOPTEMP_);
670 gcc_assert (innerc);
671 n1 = OMP_CLAUSE_DECL (innerc);
672 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
673 OMP_CLAUSE__LOOPTEMP_);
674 gcc_assert (innerc);
675 n2 = OMP_CLAUSE_DECL (innerc);
676 }
50674e96 677
9771b263 678 vec_alloc (ws_args, 3 + (fd.chunk_size != 0));
50674e96 679
acf0174b 680 t = fold_convert_loc (loc, long_integer_type_node, n1);
9771b263 681 ws_args->quick_push (t);
50674e96 682
acf0174b 683 t = fold_convert_loc (loc, long_integer_type_node, n2);
9771b263 684 ws_args->quick_push (t);
50674e96 685
3bb06db4 686 t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
9771b263 687 ws_args->quick_push (t);
3bb06db4
NF
688
689 if (fd.chunk_size)
690 {
691 t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
9771b263 692 ws_args->quick_push (t);
3bb06db4 693 }
50674e96
DN
694
695 return ws_args;
696 }
726a989a 697 else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
50674e96 698 {
e5c95afe 699 /* Number of sections is equal to the number of edges from the
726a989a
RB
700 GIMPLE_OMP_SECTIONS_SWITCH statement, except for the one to
701 the exit of the sections region. */
702 basic_block bb = single_succ (gimple_bb (ws_stmt));
e5c95afe 703 t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
9771b263
DN
704 vec_alloc (ws_args, 1);
705 ws_args->quick_push (t);
3bb06db4 706 return ws_args;
50674e96
DN
707 }
708
709 gcc_unreachable ();
710}
711
712
713/* Discover whether REGION is a combined parallel+workshare region. */
714
715static void
716determine_parallel_type (struct omp_region *region)
953ff289 717{
50674e96
DN
718 basic_block par_entry_bb, par_exit_bb;
719 basic_block ws_entry_bb, ws_exit_bb;
720
d3c673c7 721 if (region == NULL || region->inner == NULL
e5c95afe
ZD
722 || region->exit == NULL || region->inner->exit == NULL
723 || region->inner->cont == NULL)
50674e96
DN
724 return;
725
726 /* We only support parallel+for and parallel+sections. */
726a989a
RB
727 if (region->type != GIMPLE_OMP_PARALLEL
728 || (region->inner->type != GIMPLE_OMP_FOR
729 && region->inner->type != GIMPLE_OMP_SECTIONS))
50674e96
DN
730 return;
731
732 /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
733 WS_EXIT_BB -> PAR_EXIT_BB. */
777f7f9a
RH
734 par_entry_bb = region->entry;
735 par_exit_bb = region->exit;
736 ws_entry_bb = region->inner->entry;
737 ws_exit_bb = region->inner->exit;
50674e96
DN
738
739 if (single_succ (par_entry_bb) == ws_entry_bb
740 && single_succ (ws_exit_bb) == par_exit_bb
0f900dfa 741 && workshare_safe_to_combine_p (ws_entry_bb)
726a989a 742 && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
69f1837b
JJ
743 || (last_and_only_stmt (ws_entry_bb)
744 && last_and_only_stmt (par_exit_bb))))
50674e96 745 {
acf0174b 746 gimple par_stmt = last_stmt (par_entry_bb);
726a989a 747 gimple ws_stmt = last_stmt (ws_entry_bb);
777f7f9a 748
726a989a 749 if (region->inner->type == GIMPLE_OMP_FOR)
50674e96
DN
750 {
751 /* If this is a combined parallel loop, we need to determine
752 whether or not to use the combined library calls. There
753 are two cases where we do not apply the transformation:
754 static loops and any kind of ordered loop. In the first
755 case, we already open code the loop so there is no need
756 to do anything else. In the latter case, the combined
757 parallel loop call would still need extra synchronization
758 to implement ordered semantics, so there would not be any
759 gain in using the combined call. */
726a989a 760 tree clauses = gimple_omp_for_clauses (ws_stmt);
50674e96
DN
761 tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
762 if (c == NULL
763 || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
764 || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
765 {
766 region->is_combined_parallel = false;
767 region->inner->is_combined_parallel = false;
768 return;
769 }
770 }
771
772 region->is_combined_parallel = true;
773 region->inner->is_combined_parallel = true;
acf0174b 774 region->ws_args = get_ws_args_for (par_stmt, ws_stmt);
50674e96 775 }
953ff289
DN
776}
777
50674e96 778
953ff289
DN
779/* Return true if EXPR is variable sized. */
780
781static inline bool
22ea9ec0 782is_variable_sized (const_tree expr)
953ff289
DN
783{
784 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
785}
786
787/* Return true if DECL is a reference type. */
788
789static inline bool
790is_reference (tree decl)
791{
792 return lang_hooks.decls.omp_privatize_by_reference (decl);
793}
794
795/* Lookup variables in the decl or field splay trees. The "maybe" form
796 allows for the variable form to not have been entered, otherwise we
797 assert that the variable must have been entered. */
798
799static inline tree
800lookup_decl (tree var, omp_context *ctx)
801{
6be42dd4
RG
802 tree *n;
803 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
804 return *n;
953ff289
DN
805}
806
807static inline tree
7c8f7639 808maybe_lookup_decl (const_tree var, omp_context *ctx)
953ff289 809{
6be42dd4
RG
810 tree *n;
811 n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
812 return n ? *n : NULL_TREE;
953ff289
DN
813}
814
815static inline tree
816lookup_field (tree var, omp_context *ctx)
817{
818 splay_tree_node n;
819 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
820 return (tree) n->value;
821}
822
a68ab351
JJ
823static inline tree
824lookup_sfield (tree var, omp_context *ctx)
825{
826 splay_tree_node n;
827 n = splay_tree_lookup (ctx->sfield_map
828 ? ctx->sfield_map : ctx->field_map,
829 (splay_tree_key) var);
830 return (tree) n->value;
831}
832
953ff289
DN
833static inline tree
834maybe_lookup_field (tree var, omp_context *ctx)
835{
836 splay_tree_node n;
837 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
838 return n ? (tree) n->value : NULL_TREE;
839}
840
7c8f7639
JJ
841/* Return true if DECL should be copied by pointer. SHARED_CTX is
842 the parallel context if DECL is to be shared. */
953ff289
DN
843
844static bool
a68ab351 845use_pointer_for_field (tree decl, omp_context *shared_ctx)
953ff289
DN
846{
847 if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
848 return true;
849
6fc0bb99 850 /* We can only use copy-in/copy-out semantics for shared variables
953ff289 851 when we know the value is not accessible from an outer scope. */
7c8f7639 852 if (shared_ctx)
953ff289
DN
853 {
854 /* ??? Trivially accessible from anywhere. But why would we even
855 be passing an address in this case? Should we simply assert
856 this to be false, or should we have a cleanup pass that removes
857 these from the list of mappings? */
858 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
859 return true;
860
861 /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
862 without analyzing the expression whether or not its location
863 is accessible to anyone else. In the case of nested parallel
864 regions it certainly may be. */
077b0dfb 865 if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
953ff289
DN
866 return true;
867
868 /* Do not use copy-in/copy-out for variables that have their
869 address taken. */
870 if (TREE_ADDRESSABLE (decl))
871 return true;
7c8f7639 872
6d840d99
JJ
873 /* lower_send_shared_vars only uses copy-in, but not copy-out
874 for these. */
875 if (TREE_READONLY (decl)
876 || ((TREE_CODE (decl) == RESULT_DECL
877 || TREE_CODE (decl) == PARM_DECL)
878 && DECL_BY_REFERENCE (decl)))
879 return false;
880
7c8f7639
JJ
881 /* Disallow copy-in/out in nested parallel if
882 decl is shared in outer parallel, otherwise
883 each thread could store the shared variable
884 in its own copy-in location, making the
885 variable no longer really shared. */
6d840d99 886 if (shared_ctx->is_nested)
7c8f7639
JJ
887 {
888 omp_context *up;
889
890 for (up = shared_ctx->outer; up; up = up->outer)
d9c194cb 891 if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
7c8f7639
JJ
892 break;
893
d9c194cb 894 if (up)
7c8f7639
JJ
895 {
896 tree c;
897
726a989a 898 for (c = gimple_omp_taskreg_clauses (up->stmt);
7c8f7639
JJ
899 c; c = OMP_CLAUSE_CHAIN (c))
900 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
901 && OMP_CLAUSE_DECL (c) == decl)
902 break;
903
904 if (c)
25142650 905 goto maybe_mark_addressable_and_ret;
7c8f7639
JJ
906 }
907 }
a68ab351 908
6d840d99 909 /* For tasks avoid using copy-in/out. As tasks can be
a68ab351
JJ
910 deferred or executed in different thread, when GOMP_task
911 returns, the task hasn't necessarily terminated. */
6d840d99 912 if (is_task_ctx (shared_ctx))
a68ab351 913 {
25142650
JJ
914 tree outer;
915 maybe_mark_addressable_and_ret:
916 outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
a68ab351
JJ
917 if (is_gimple_reg (outer))
918 {
919 /* Taking address of OUTER in lower_send_shared_vars
920 might need regimplification of everything that uses the
921 variable. */
922 if (!task_shared_vars)
923 task_shared_vars = BITMAP_ALLOC (NULL);
924 bitmap_set_bit (task_shared_vars, DECL_UID (outer));
925 TREE_ADDRESSABLE (outer) = 1;
926 }
927 return true;
928 }
953ff289
DN
929 }
930
931 return false;
932}
933
917948d3
ZD
934/* Construct a new automatic decl similar to VAR. */
935
936static tree
937omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
938{
939 tree copy = copy_var_decl (var, name, type);
940
941 DECL_CONTEXT (copy) = current_function_decl;
910ad8de 942 DECL_CHAIN (copy) = ctx->block_vars;
953ff289
DN
943 ctx->block_vars = copy;
944
945 return copy;
946}
947
948static tree
949omp_copy_decl_1 (tree var, omp_context *ctx)
950{
951 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
952}
953
a9a58711
JJ
954/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
955 as appropriate. */
956static tree
957omp_build_component_ref (tree obj, tree field)
958{
959 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
960 if (TREE_THIS_VOLATILE (field))
961 TREE_THIS_VOLATILE (ret) |= 1;
962 if (TREE_READONLY (field))
963 TREE_READONLY (ret) |= 1;
964 return ret;
965}
966
953ff289
DN
967/* Build tree nodes to access the field for VAR on the receiver side. */
968
969static tree
970build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
971{
972 tree x, field = lookup_field (var, ctx);
973
974 /* If the receiver record type was remapped in the child function,
975 remap the field into the new record type. */
976 x = maybe_lookup_field (field, ctx);
977 if (x != NULL)
978 field = x;
979
70f34814 980 x = build_simple_mem_ref (ctx->receiver_decl);
a9a58711 981 x = omp_build_component_ref (x, field);
953ff289 982 if (by_ref)
70f34814 983 x = build_simple_mem_ref (x);
953ff289
DN
984
985 return x;
986}
987
988/* Build tree nodes to access VAR in the scope outer to CTX. In the case
989 of a parallel, this is a component reference; for workshare constructs
990 this is some variable. */
991
992static tree
993build_outer_var_ref (tree var, omp_context *ctx)
994{
995 tree x;
996
8ca5b2a2 997 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
953ff289
DN
998 x = var;
999 else if (is_variable_sized (var))
1000 {
1001 x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
1002 x = build_outer_var_ref (x, ctx);
70f34814 1003 x = build_simple_mem_ref (x);
953ff289 1004 }
a68ab351 1005 else if (is_taskreg_ctx (ctx))
953ff289 1006 {
7c8f7639 1007 bool by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
1008 x = build_receiver_ref (var, by_ref, ctx);
1009 }
74bf76ed 1010 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
c02065fc 1011 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
74bf76ed
JJ
1012 {
1013 /* #pragma omp simd isn't a worksharing construct, and can reference even
1014 private vars in its linear etc. clauses. */
1015 x = NULL_TREE;
1016 if (ctx->outer && is_taskreg_ctx (ctx))
1017 x = lookup_decl (var, ctx->outer);
1018 else if (ctx->outer)
f3b331d1 1019 x = maybe_lookup_decl_in_outer_ctx (var, ctx);
74bf76ed
JJ
1020 if (x == NULL_TREE)
1021 x = var;
1022 }
953ff289
DN
1023 else if (ctx->outer)
1024 x = lookup_decl (var, ctx->outer);
eeb1d9e0
JJ
1025 else if (is_reference (var))
1026 /* This can happen with orphaned constructs. If var is reference, it is
1027 possible it is shared and as such valid. */
1028 x = var;
953ff289
DN
1029 else
1030 gcc_unreachable ();
1031
1032 if (is_reference (var))
70f34814 1033 x = build_simple_mem_ref (x);
953ff289
DN
1034
1035 return x;
1036}
1037
1038/* Build tree nodes to access the field for VAR on the sender side. */
1039
1040static tree
1041build_sender_ref (tree var, omp_context *ctx)
1042{
a68ab351 1043 tree field = lookup_sfield (var, ctx);
a9a58711 1044 return omp_build_component_ref (ctx->sender_decl, field);
953ff289
DN
1045}
1046
1047/* Add a new field for VAR inside the structure CTX->SENDER_DECL. */
1048
1049static void
a68ab351 1050install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
953ff289 1051{
a68ab351 1052 tree field, type, sfield = NULL_TREE;
953ff289 1053
a68ab351
JJ
1054 gcc_assert ((mask & 1) == 0
1055 || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
1056 gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
1057 || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));
953ff289
DN
1058
1059 type = TREE_TYPE (var);
acf0174b
JJ
1060 if (mask & 4)
1061 {
1062 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
1063 type = build_pointer_type (build_pointer_type (type));
1064 }
1065 else if (by_ref)
953ff289 1066 type = build_pointer_type (type);
a68ab351
JJ
1067 else if ((mask & 3) == 1 && is_reference (var))
1068 type = TREE_TYPE (type);
953ff289 1069
c2255bc4
AH
1070 field = build_decl (DECL_SOURCE_LOCATION (var),
1071 FIELD_DECL, DECL_NAME (var), type);
953ff289
DN
1072
1073 /* Remember what variable this field was created for. This does have a
1074 side effect of making dwarf2out ignore this member, so for helpful
1075 debugging we clear it later in delete_omp_context. */
1076 DECL_ABSTRACT_ORIGIN (field) = var;
a68ab351
JJ
1077 if (type == TREE_TYPE (var))
1078 {
1079 DECL_ALIGN (field) = DECL_ALIGN (var);
1080 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
1081 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
1082 }
1083 else
1084 DECL_ALIGN (field) = TYPE_ALIGN (type);
953ff289 1085
a68ab351
JJ
1086 if ((mask & 3) == 3)
1087 {
1088 insert_field_into_struct (ctx->record_type, field);
1089 if (ctx->srecord_type)
1090 {
c2255bc4
AH
1091 sfield = build_decl (DECL_SOURCE_LOCATION (var),
1092 FIELD_DECL, DECL_NAME (var), type);
a68ab351
JJ
1093 DECL_ABSTRACT_ORIGIN (sfield) = var;
1094 DECL_ALIGN (sfield) = DECL_ALIGN (field);
1095 DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
1096 TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
1097 insert_field_into_struct (ctx->srecord_type, sfield);
1098 }
1099 }
1100 else
1101 {
1102 if (ctx->srecord_type == NULL_TREE)
1103 {
1104 tree t;
1105
1106 ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
1107 ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1108 for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
1109 {
c2255bc4
AH
1110 sfield = build_decl (DECL_SOURCE_LOCATION (var),
1111 FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
a68ab351
JJ
1112 DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
1113 insert_field_into_struct (ctx->srecord_type, sfield);
1114 splay_tree_insert (ctx->sfield_map,
1115 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
1116 (splay_tree_value) sfield);
1117 }
1118 }
1119 sfield = field;
1120 insert_field_into_struct ((mask & 1) ? ctx->record_type
1121 : ctx->srecord_type, field);
1122 }
953ff289 1123
a68ab351
JJ
1124 if (mask & 1)
1125 splay_tree_insert (ctx->field_map, (splay_tree_key) var,
1126 (splay_tree_value) field);
1127 if ((mask & 2) && ctx->sfield_map)
1128 splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
1129 (splay_tree_value) sfield);
953ff289
DN
1130}
1131
1132static tree
1133install_var_local (tree var, omp_context *ctx)
1134{
1135 tree new_var = omp_copy_decl_1 (var, ctx);
1136 insert_decl_map (&ctx->cb, var, new_var);
1137 return new_var;
1138}
1139
1140/* Adjust the replacement for DECL in CTX for the new context. This means
1141 copying the DECL_VALUE_EXPR, and fixing up the type. */
1142
1143static void
1144fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
1145{
1146 tree new_decl, size;
1147
1148 new_decl = lookup_decl (decl, ctx);
1149
1150 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
1151
1152 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
1153 && DECL_HAS_VALUE_EXPR_P (decl))
1154 {
1155 tree ve = DECL_VALUE_EXPR (decl);
726a989a 1156 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
953ff289
DN
1157 SET_DECL_VALUE_EXPR (new_decl, ve);
1158 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
1159 }
1160
1161 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
1162 {
1163 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
1164 if (size == error_mark_node)
1165 size = TYPE_SIZE (TREE_TYPE (new_decl));
1166 DECL_SIZE (new_decl) = size;
1167
1168 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
1169 if (size == error_mark_node)
1170 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
1171 DECL_SIZE_UNIT (new_decl) = size;
1172 }
1173}
1174
1175/* The callback for remap_decl. Search all containing contexts for a
1176 mapping of the variable; this avoids having to duplicate the splay
1177 tree ahead of time. We know a mapping doesn't already exist in the
1178 given context. Create new mappings to implement default semantics. */
1179
1180static tree
1181omp_copy_decl (tree var, copy_body_data *cb)
1182{
1183 omp_context *ctx = (omp_context *) cb;
1184 tree new_var;
1185
953ff289
DN
1186 if (TREE_CODE (var) == LABEL_DECL)
1187 {
c2255bc4 1188 new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
50674e96 1189 DECL_CONTEXT (new_var) = current_function_decl;
953ff289
DN
1190 insert_decl_map (&ctx->cb, var, new_var);
1191 return new_var;
1192 }
1193
a68ab351 1194 while (!is_taskreg_ctx (ctx))
953ff289
DN
1195 {
1196 ctx = ctx->outer;
1197 if (ctx == NULL)
1198 return var;
1199 new_var = maybe_lookup_decl (var, ctx);
1200 if (new_var)
1201 return new_var;
1202 }
1203
8ca5b2a2
JJ
1204 if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
1205 return var;
1206
953ff289
DN
1207 return error_mark_node;
1208}
1209
50674e96
DN
1210
1211/* Return the parallel region associated with STMT. */
1212
50674e96
DN
1213/* Debugging dumps for parallel regions. */
1214void dump_omp_region (FILE *, struct omp_region *, int);
1215void debug_omp_region (struct omp_region *);
1216void debug_all_omp_regions (void);
1217
1218/* Dump the parallel region tree rooted at REGION. */
1219
1220void
1221dump_omp_region (FILE *file, struct omp_region *region, int indent)
1222{
777f7f9a 1223 fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
726a989a 1224 gimple_code_name[region->type]);
50674e96
DN
1225
1226 if (region->inner)
1227 dump_omp_region (file, region->inner, indent + 4);
1228
777f7f9a
RH
1229 if (region->cont)
1230 {
726a989a 1231 fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
777f7f9a
RH
1232 region->cont->index);
1233 }
b8698a0f 1234
50674e96 1235 if (region->exit)
726a989a 1236 fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
777f7f9a 1237 region->exit->index);
50674e96 1238 else
777f7f9a 1239 fprintf (file, "%*s[no exit marker]\n", indent, "");
50674e96
DN
1240
1241 if (region->next)
777f7f9a 1242 dump_omp_region (file, region->next, indent);
50674e96
DN
1243}
1244
24e47c76 1245DEBUG_FUNCTION void
50674e96
DN
1246debug_omp_region (struct omp_region *region)
1247{
1248 dump_omp_region (stderr, region, 0);
1249}
1250
24e47c76 1251DEBUG_FUNCTION void
50674e96
DN
1252debug_all_omp_regions (void)
1253{
1254 dump_omp_region (stderr, root_omp_region, 0);
1255}
1256
1257
1258/* Create a new parallel region starting at STMT inside region PARENT. */
1259
0645c1a2 1260static struct omp_region *
726a989a
RB
1261new_omp_region (basic_block bb, enum gimple_code type,
1262 struct omp_region *parent)
50674e96 1263{
d3bfe4de 1264 struct omp_region *region = XCNEW (struct omp_region);
50674e96
DN
1265
1266 region->outer = parent;
777f7f9a
RH
1267 region->entry = bb;
1268 region->type = type;
50674e96
DN
1269
1270 if (parent)
1271 {
1272 /* This is a nested region. Add it to the list of inner
1273 regions in PARENT. */
1274 region->next = parent->inner;
1275 parent->inner = region;
1276 }
777f7f9a 1277 else
50674e96
DN
1278 {
1279 /* This is a toplevel region. Add it to the list of toplevel
1280 regions in ROOT_OMP_REGION. */
1281 region->next = root_omp_region;
1282 root_omp_region = region;
1283 }
777f7f9a
RH
1284
1285 return region;
1286}
1287
1288/* Release the memory associated with the region tree rooted at REGION. */
1289
1290static void
1291free_omp_region_1 (struct omp_region *region)
1292{
1293 struct omp_region *i, *n;
1294
1295 for (i = region->inner; i ; i = n)
50674e96 1296 {
777f7f9a
RH
1297 n = i->next;
1298 free_omp_region_1 (i);
50674e96
DN
1299 }
1300
777f7f9a
RH
1301 free (region);
1302}
50674e96 1303
777f7f9a
RH
1304/* Release the memory for the entire omp region tree. */
1305
1306void
1307free_omp_regions (void)
1308{
1309 struct omp_region *r, *n;
1310 for (r = root_omp_region; r ; r = n)
1311 {
1312 n = r->next;
1313 free_omp_region_1 (r);
1314 }
1315 root_omp_region = NULL;
50674e96
DN
1316}
1317
1318
953ff289
DN
1319/* Create a new context, with OUTER_CTX being the surrounding context. */
1320
1321static omp_context *
726a989a 1322new_omp_context (gimple stmt, omp_context *outer_ctx)
953ff289
DN
1323{
1324 omp_context *ctx = XCNEW (omp_context);
1325
1326 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
1327 (splay_tree_value) ctx);
1328 ctx->stmt = stmt;
1329
1330 if (outer_ctx)
1331 {
1332 ctx->outer = outer_ctx;
1333 ctx->cb = outer_ctx->cb;
1334 ctx->cb.block = NULL;
1335 ctx->depth = outer_ctx->depth + 1;
1336 }
1337 else
1338 {
1339 ctx->cb.src_fn = current_function_decl;
1340 ctx->cb.dst_fn = current_function_decl;
fe660d7b
MJ
1341 ctx->cb.src_node = cgraph_get_node (current_function_decl);
1342 gcc_checking_assert (ctx->cb.src_node);
953ff289
DN
1343 ctx->cb.dst_node = ctx->cb.src_node;
1344 ctx->cb.src_cfun = cfun;
1345 ctx->cb.copy_decl = omp_copy_decl;
1d65f45c 1346 ctx->cb.eh_lp_nr = 0;
953ff289
DN
1347 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
1348 ctx->depth = 1;
1349 }
1350
6be42dd4 1351 ctx->cb.decl_map = pointer_map_create ();
953ff289
DN
1352
1353 return ctx;
1354}
1355
726a989a 1356static gimple_seq maybe_catch_exception (gimple_seq);
2368a460
JJ
1357
1358/* Finalize task copyfn. */
1359
1360static void
726a989a 1361finalize_task_copyfn (gimple task_stmt)
2368a460
JJ
1362{
1363 struct function *child_cfun;
af16bc76 1364 tree child_fn;
355a7673 1365 gimple_seq seq = NULL, new_seq;
726a989a 1366 gimple bind;
2368a460 1367
726a989a 1368 child_fn = gimple_omp_task_copy_fn (task_stmt);
2368a460
JJ
1369 if (child_fn == NULL_TREE)
1370 return;
1371
1372 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
d7ed20db 1373 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
2368a460 1374
2368a460 1375 push_cfun (child_cfun);
3ad065ef 1376 bind = gimplify_body (child_fn, false);
726a989a
RB
1377 gimple_seq_add_stmt (&seq, bind);
1378 new_seq = maybe_catch_exception (seq);
1379 if (new_seq != seq)
1380 {
1381 bind = gimple_build_bind (NULL, new_seq, NULL);
355a7673 1382 seq = NULL;
726a989a
RB
1383 gimple_seq_add_stmt (&seq, bind);
1384 }
1385 gimple_set_body (child_fn, seq);
2368a460 1386 pop_cfun ();
2368a460 1387
d7ed20db 1388 /* Inform the callgraph about the new function. */
2368a460
JJ
1389 cgraph_add_new_function (child_fn, false);
1390}
1391
953ff289
DN
1392/* Destroy a omp_context data structures. Called through the splay tree
1393 value delete callback. */
1394
1395static void
1396delete_omp_context (splay_tree_value value)
1397{
1398 omp_context *ctx = (omp_context *) value;
1399
6be42dd4 1400 pointer_map_destroy (ctx->cb.decl_map);
953ff289
DN
1401
1402 if (ctx->field_map)
1403 splay_tree_delete (ctx->field_map);
a68ab351
JJ
1404 if (ctx->sfield_map)
1405 splay_tree_delete (ctx->sfield_map);
953ff289
DN
1406
1407 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1408 it produces corrupt debug information. */
1409 if (ctx->record_type)
1410 {
1411 tree t;
910ad8de 1412 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
953ff289
DN
1413 DECL_ABSTRACT_ORIGIN (t) = NULL;
1414 }
a68ab351
JJ
1415 if (ctx->srecord_type)
1416 {
1417 tree t;
910ad8de 1418 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
a68ab351
JJ
1419 DECL_ABSTRACT_ORIGIN (t) = NULL;
1420 }
953ff289 1421
2368a460
JJ
1422 if (is_task_ctx (ctx))
1423 finalize_task_copyfn (ctx->stmt);
1424
953ff289
DN
1425 XDELETE (ctx);
1426}
1427
1428/* Fix up RECEIVER_DECL with a type that has been remapped to the child
1429 context. */
1430
1431static void
1432fixup_child_record_type (omp_context *ctx)
1433{
1434 tree f, type = ctx->record_type;
1435
1436 /* ??? It isn't sufficient to just call remap_type here, because
1437 variably_modified_type_p doesn't work the way we expect for
1438 record types. Testing each field for whether it needs remapping
1439 and creating a new record by hand works, however. */
910ad8de 1440 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
953ff289
DN
1441 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1442 break;
1443 if (f)
1444 {
1445 tree name, new_fields = NULL;
1446
1447 type = lang_hooks.types.make_type (RECORD_TYPE);
1448 name = DECL_NAME (TYPE_NAME (ctx->record_type));
c2255bc4
AH
1449 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1450 TYPE_DECL, name, type);
953ff289
DN
1451 TYPE_NAME (type) = name;
1452
910ad8de 1453 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
1454 {
1455 tree new_f = copy_node (f);
1456 DECL_CONTEXT (new_f) = type;
1457 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
910ad8de 1458 DECL_CHAIN (new_f) = new_fields;
726a989a
RB
1459 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1460 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1461 &ctx->cb, NULL);
1462 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1463 &ctx->cb, NULL);
953ff289
DN
1464 new_fields = new_f;
1465
1466 /* Arrange to be able to look up the receiver field
1467 given the sender field. */
1468 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1469 (splay_tree_value) new_f);
1470 }
1471 TYPE_FIELDS (type) = nreverse (new_fields);
1472 layout_type (type);
1473 }
1474
1475 TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
1476}
1477
1478/* Instantiate decls as necessary in CTX to satisfy the data sharing
1479 specified by CLAUSES. */
1480
1481static void
1482scan_sharing_clauses (tree clauses, omp_context *ctx)
1483{
1484 tree c, decl;
1485 bool scan_array_reductions = false;
1486
1487 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1488 {
1489 bool by_ref;
1490
aaf46ef9 1491 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1492 {
1493 case OMP_CLAUSE_PRIVATE:
1494 decl = OMP_CLAUSE_DECL (c);
a68ab351
JJ
1495 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1496 goto do_private;
1497 else if (!is_variable_sized (decl))
953ff289
DN
1498 install_var_local (decl, ctx);
1499 break;
1500
1501 case OMP_CLAUSE_SHARED:
acf0174b
JJ
1502 /* Ignore shared directives in teams construct. */
1503 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1504 break;
a68ab351 1505 gcc_assert (is_taskreg_ctx (ctx));
953ff289 1506 decl = OMP_CLAUSE_DECL (c);
5da250fc
JJ
1507 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1508 || !is_variable_sized (decl));
8ca5b2a2
JJ
1509 /* Global variables don't need to be copied,
1510 the receiver side will use them directly. */
1511 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1512 break;
a68ab351 1513 by_ref = use_pointer_for_field (decl, ctx);
953ff289
DN
1514 if (! TREE_READONLY (decl)
1515 || TREE_ADDRESSABLE (decl)
1516 || by_ref
1517 || is_reference (decl))
1518 {
a68ab351 1519 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1520 install_var_local (decl, ctx);
1521 break;
1522 }
1523 /* We don't need to copy const scalar vars back. */
aaf46ef9 1524 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
953ff289
DN
1525 goto do_private;
1526
1527 case OMP_CLAUSE_LASTPRIVATE:
1528 /* Let the corresponding firstprivate clause create
1529 the variable. */
1530 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1531 break;
1532 /* FALLTHRU */
1533
1534 case OMP_CLAUSE_FIRSTPRIVATE:
1535 case OMP_CLAUSE_REDUCTION:
74bf76ed 1536 case OMP_CLAUSE_LINEAR:
953ff289
DN
1537 decl = OMP_CLAUSE_DECL (c);
1538 do_private:
1539 if (is_variable_sized (decl))
953ff289 1540 {
a68ab351
JJ
1541 if (is_task_ctx (ctx))
1542 install_var_field (decl, false, 1, ctx);
1543 break;
1544 }
1545 else if (is_taskreg_ctx (ctx))
1546 {
1547 bool global
1548 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
7c8f7639 1549 by_ref = use_pointer_for_field (decl, NULL);
a68ab351
JJ
1550
1551 if (is_task_ctx (ctx)
1552 && (global || by_ref || is_reference (decl)))
1553 {
1554 install_var_field (decl, false, 1, ctx);
1555 if (!global)
1556 install_var_field (decl, by_ref, 2, ctx);
1557 }
1558 else if (!global)
1559 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1560 }
1561 install_var_local (decl, ctx);
1562 break;
1563
acf0174b
JJ
1564 case OMP_CLAUSE__LOOPTEMP_:
1565 gcc_assert (is_parallel_ctx (ctx));
1566 decl = OMP_CLAUSE_DECL (c);
1567 install_var_field (decl, false, 3, ctx);
1568 install_var_local (decl, ctx);
1569 break;
1570
953ff289 1571 case OMP_CLAUSE_COPYPRIVATE:
953ff289
DN
1572 case OMP_CLAUSE_COPYIN:
1573 decl = OMP_CLAUSE_DECL (c);
7c8f7639 1574 by_ref = use_pointer_for_field (decl, NULL);
a68ab351 1575 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1576 break;
1577
1578 case OMP_CLAUSE_DEFAULT:
1579 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1580 break;
1581
20906c66 1582 case OMP_CLAUSE_FINAL:
953ff289
DN
1583 case OMP_CLAUSE_IF:
1584 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1585 case OMP_CLAUSE_NUM_TEAMS:
1586 case OMP_CLAUSE_THREAD_LIMIT:
1587 case OMP_CLAUSE_DEVICE:
953ff289 1588 case OMP_CLAUSE_SCHEDULE:
acf0174b
JJ
1589 case OMP_CLAUSE_DIST_SCHEDULE:
1590 case OMP_CLAUSE_DEPEND:
953ff289 1591 if (ctx->outer)
726a989a 1592 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
953ff289
DN
1593 break;
1594
acf0174b
JJ
1595 case OMP_CLAUSE_TO:
1596 case OMP_CLAUSE_FROM:
1597 case OMP_CLAUSE_MAP:
1598 if (ctx->outer)
1599 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1600 decl = OMP_CLAUSE_DECL (c);
1601 /* Global variables with "omp declare target" attribute
1602 don't need to be copied, the receiver side will use them
1603 directly. */
1604 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1605 && DECL_P (decl)
1606 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1607 && lookup_attribute ("omp declare target",
1608 DECL_ATTRIBUTES (decl)))
1609 break;
1610 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1611 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER)
1612 {
1613 /* Ignore OMP_CLAUSE_MAP_POINTER kind for arrays in
1614 #pragma omp target data, there is nothing to map for
1615 those. */
1616 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA
1617 && !POINTER_TYPE_P (TREE_TYPE (decl)))
1618 break;
1619 }
1620 if (DECL_P (decl))
1621 {
1622 if (DECL_SIZE (decl)
1623 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1624 {
1625 tree decl2 = DECL_VALUE_EXPR (decl);
1626 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1627 decl2 = TREE_OPERAND (decl2, 0);
1628 gcc_assert (DECL_P (decl2));
1629 install_var_field (decl2, true, 3, ctx);
1630 install_var_local (decl2, ctx);
1631 install_var_local (decl, ctx);
1632 }
1633 else
1634 {
1635 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1636 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1637 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1638 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1639 install_var_field (decl, true, 7, ctx);
1640 else
1641 install_var_field (decl, true, 3, ctx);
1642 if (gimple_omp_target_kind (ctx->stmt)
1643 == GF_OMP_TARGET_KIND_REGION)
1644 install_var_local (decl, ctx);
1645 }
1646 }
1647 else
1648 {
1649 tree base = get_base_address (decl);
1650 tree nc = OMP_CLAUSE_CHAIN (c);
1651 if (DECL_P (base)
1652 && nc != NULL_TREE
1653 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1654 && OMP_CLAUSE_DECL (nc) == base
1655 && OMP_CLAUSE_MAP_KIND (nc) == OMP_CLAUSE_MAP_POINTER
1656 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1657 {
1658 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1659 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1660 }
1661 else
1662 {
1663 gcc_assert (!splay_tree_lookup (ctx->field_map,
1664 (splay_tree_key) decl));
1665 tree field
1666 = build_decl (OMP_CLAUSE_LOCATION (c),
1667 FIELD_DECL, NULL_TREE, ptr_type_node);
1668 DECL_ALIGN (field) = TYPE_ALIGN (ptr_type_node);
1669 insert_field_into_struct (ctx->record_type, field);
1670 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1671 (splay_tree_value) field);
1672 }
1673 }
1674 break;
1675
953ff289
DN
1676 case OMP_CLAUSE_NOWAIT:
1677 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1678 case OMP_CLAUSE_COLLAPSE:
1679 case OMP_CLAUSE_UNTIED:
20906c66 1680 case OMP_CLAUSE_MERGEABLE:
acf0174b 1681 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1682 case OMP_CLAUSE_SAFELEN:
953ff289
DN
1683 break;
1684
acf0174b
JJ
1685 case OMP_CLAUSE_ALIGNED:
1686 decl = OMP_CLAUSE_DECL (c);
1687 if (is_global_var (decl)
1688 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1689 install_var_local (decl, ctx);
1690 break;
1691
953ff289
DN
1692 default:
1693 gcc_unreachable ();
1694 }
1695 }
1696
1697 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1698 {
aaf46ef9 1699 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1700 {
1701 case OMP_CLAUSE_LASTPRIVATE:
1702 /* Let the corresponding firstprivate clause create
1703 the variable. */
726a989a 1704 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
a68ab351 1705 scan_array_reductions = true;
953ff289
DN
1706 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1707 break;
1708 /* FALLTHRU */
1709
1710 case OMP_CLAUSE_PRIVATE:
1711 case OMP_CLAUSE_FIRSTPRIVATE:
1712 case OMP_CLAUSE_REDUCTION:
74bf76ed 1713 case OMP_CLAUSE_LINEAR:
953ff289
DN
1714 decl = OMP_CLAUSE_DECL (c);
1715 if (is_variable_sized (decl))
1716 install_var_local (decl, ctx);
1717 fixup_remapped_decl (decl, ctx,
aaf46ef9 1718 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
953ff289 1719 && OMP_CLAUSE_PRIVATE_DEBUG (c));
aaf46ef9 1720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
1721 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1722 scan_array_reductions = true;
1723 break;
1724
1725 case OMP_CLAUSE_SHARED:
acf0174b
JJ
1726 /* Ignore shared directives in teams construct. */
1727 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1728 break;
953ff289 1729 decl = OMP_CLAUSE_DECL (c);
8ca5b2a2
JJ
1730 if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1731 fixup_remapped_decl (decl, ctx, false);
953ff289
DN
1732 break;
1733
acf0174b
JJ
1734 case OMP_CLAUSE_MAP:
1735 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA)
1736 break;
1737 decl = OMP_CLAUSE_DECL (c);
1738 if (DECL_P (decl)
1739 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1740 && lookup_attribute ("omp declare target",
1741 DECL_ATTRIBUTES (decl)))
1742 break;
1743 if (DECL_P (decl))
1744 {
1745 if (OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1746 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1747 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1748 {
1749 tree new_decl = lookup_decl (decl, ctx);
1750 TREE_TYPE (new_decl)
1751 = remap_type (TREE_TYPE (decl), &ctx->cb);
1752 }
1753 else if (DECL_SIZE (decl)
1754 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1755 {
1756 tree decl2 = DECL_VALUE_EXPR (decl);
1757 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1758 decl2 = TREE_OPERAND (decl2, 0);
1759 gcc_assert (DECL_P (decl2));
1760 fixup_remapped_decl (decl2, ctx, false);
1761 fixup_remapped_decl (decl, ctx, true);
1762 }
1763 else
1764 fixup_remapped_decl (decl, ctx, false);
1765 }
1766 break;
1767
953ff289
DN
1768 case OMP_CLAUSE_COPYPRIVATE:
1769 case OMP_CLAUSE_COPYIN:
1770 case OMP_CLAUSE_DEFAULT:
1771 case OMP_CLAUSE_IF:
1772 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1773 case OMP_CLAUSE_NUM_TEAMS:
1774 case OMP_CLAUSE_THREAD_LIMIT:
1775 case OMP_CLAUSE_DEVICE:
953ff289 1776 case OMP_CLAUSE_SCHEDULE:
acf0174b 1777 case OMP_CLAUSE_DIST_SCHEDULE:
953ff289
DN
1778 case OMP_CLAUSE_NOWAIT:
1779 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1780 case OMP_CLAUSE_COLLAPSE:
1781 case OMP_CLAUSE_UNTIED:
20906c66
JJ
1782 case OMP_CLAUSE_FINAL:
1783 case OMP_CLAUSE_MERGEABLE:
acf0174b 1784 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1785 case OMP_CLAUSE_SAFELEN:
acf0174b
JJ
1786 case OMP_CLAUSE_ALIGNED:
1787 case OMP_CLAUSE_DEPEND:
1788 case OMP_CLAUSE__LOOPTEMP_:
1789 case OMP_CLAUSE_TO:
1790 case OMP_CLAUSE_FROM:
953ff289
DN
1791 break;
1792
1793 default:
1794 gcc_unreachable ();
1795 }
1796 }
1797
1798 if (scan_array_reductions)
1799 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
aaf46ef9 1800 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
1801 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1802 {
26127932
JJ
1803 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1804 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
953ff289 1805 }
a68ab351 1806 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
726a989a 1807 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
26127932 1808 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
953ff289
DN
1809}
1810
1811/* Create a new name for omp child function. Returns an identifier. */
1812
1813static GTY(()) unsigned int tmp_ompfn_id_num;
1814
1815static tree
a68ab351 1816create_omp_child_function_name (bool task_copy)
953ff289 1817{
036546e5
JH
1818 return (clone_function_name (current_function_decl,
1819 task_copy ? "_omp_cpyfn" : "_omp_fn"));
953ff289
DN
1820}
1821
1822/* Build a decl for the omp child function. It'll not contain a body
1823 yet, just the bare decl. */
1824
1825static void
a68ab351 1826create_omp_child_function (omp_context *ctx, bool task_copy)
953ff289
DN
1827{
1828 tree decl, type, name, t;
1829
a68ab351
JJ
1830 name = create_omp_child_function_name (task_copy);
1831 if (task_copy)
1832 type = build_function_type_list (void_type_node, ptr_type_node,
1833 ptr_type_node, NULL_TREE);
1834 else
1835 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
953ff289 1836
c2255bc4
AH
1837 decl = build_decl (gimple_location (ctx->stmt),
1838 FUNCTION_DECL, name, type);
953ff289 1839
a68ab351
JJ
1840 if (!task_copy)
1841 ctx->cb.dst_fn = decl;
1842 else
726a989a 1843 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
953ff289
DN
1844
1845 TREE_STATIC (decl) = 1;
1846 TREE_USED (decl) = 1;
1847 DECL_ARTIFICIAL (decl) = 1;
cd3f04c8 1848 DECL_NAMELESS (decl) = 1;
953ff289
DN
1849 DECL_IGNORED_P (decl) = 0;
1850 TREE_PUBLIC (decl) = 0;
1851 DECL_UNINLINABLE (decl) = 1;
1852 DECL_EXTERNAL (decl) = 0;
1853 DECL_CONTEXT (decl) = NULL_TREE;
50674e96 1854 DECL_INITIAL (decl) = make_node (BLOCK);
acf0174b
JJ
1855 bool target_p = false;
1856 if (lookup_attribute ("omp declare target",
1857 DECL_ATTRIBUTES (current_function_decl)))
1858 target_p = true;
1859 else
1860 {
1861 omp_context *octx;
1862 for (octx = ctx; octx; octx = octx->outer)
1863 if (gimple_code (octx->stmt) == GIMPLE_OMP_TARGET
1864 && gimple_omp_target_kind (octx->stmt)
1865 == GF_OMP_TARGET_KIND_REGION)
1866 {
1867 target_p = true;
1868 break;
1869 }
1870 }
1871 if (target_p)
1872 DECL_ATTRIBUTES (decl)
1873 = tree_cons (get_identifier ("omp declare target"),
1874 NULL_TREE, DECL_ATTRIBUTES (decl));
953ff289 1875
c2255bc4
AH
1876 t = build_decl (DECL_SOURCE_LOCATION (decl),
1877 RESULT_DECL, NULL_TREE, void_type_node);
953ff289
DN
1878 DECL_ARTIFICIAL (t) = 1;
1879 DECL_IGNORED_P (t) = 1;
07485407 1880 DECL_CONTEXT (t) = decl;
953ff289
DN
1881 DECL_RESULT (decl) = t;
1882
c2255bc4
AH
1883 t = build_decl (DECL_SOURCE_LOCATION (decl),
1884 PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
953ff289 1885 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1886 DECL_NAMELESS (t) = 1;
953ff289 1887 DECL_ARG_TYPE (t) = ptr_type_node;
50674e96 1888 DECL_CONTEXT (t) = current_function_decl;
953ff289
DN
1889 TREE_USED (t) = 1;
1890 DECL_ARGUMENTS (decl) = t;
a68ab351
JJ
1891 if (!task_copy)
1892 ctx->receiver_decl = t;
1893 else
1894 {
c2255bc4
AH
1895 t = build_decl (DECL_SOURCE_LOCATION (decl),
1896 PARM_DECL, get_identifier (".omp_data_o"),
a68ab351
JJ
1897 ptr_type_node);
1898 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1899 DECL_NAMELESS (t) = 1;
a68ab351
JJ
1900 DECL_ARG_TYPE (t) = ptr_type_node;
1901 DECL_CONTEXT (t) = current_function_decl;
1902 TREE_USED (t) = 1;
628c189e 1903 TREE_ADDRESSABLE (t) = 1;
910ad8de 1904 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
a68ab351
JJ
1905 DECL_ARGUMENTS (decl) = t;
1906 }
953ff289 1907
b8698a0f 1908 /* Allocate memory for the function structure. The call to
50674e96 1909 allocate_struct_function clobbers CFUN, so we need to restore
953ff289 1910 it afterward. */
db2960f4 1911 push_struct_function (decl);
726a989a 1912 cfun->function_end_locus = gimple_location (ctx->stmt);
db2960f4 1913 pop_cfun ();
953ff289
DN
1914}
1915
acf0174b
JJ
1916/* Callback for walk_gimple_seq. Check if combined parallel
1917 contains gimple_omp_for_combined_into_p OMP_FOR. */
1918
1919static tree
1920find_combined_for (gimple_stmt_iterator *gsi_p,
1921 bool *handled_ops_p,
1922 struct walk_stmt_info *wi)
1923{
1924 gimple stmt = gsi_stmt (*gsi_p);
1925
1926 *handled_ops_p = true;
1927 switch (gimple_code (stmt))
1928 {
1929 WALK_SUBSTMTS;
1930
1931 case GIMPLE_OMP_FOR:
1932 if (gimple_omp_for_combined_into_p (stmt)
1933 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
1934 {
1935 wi->info = stmt;
1936 return integer_zero_node;
1937 }
1938 break;
1939 default:
1940 break;
1941 }
1942 return NULL;
1943}
1944
953ff289
DN
1945/* Scan an OpenMP parallel directive. */
1946
1947static void
726a989a 1948scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
953ff289
DN
1949{
1950 omp_context *ctx;
1951 tree name;
726a989a 1952 gimple stmt = gsi_stmt (*gsi);
953ff289
DN
1953
1954 /* Ignore parallel directives with empty bodies, unless there
1955 are copyin clauses. */
1956 if (optimize > 0
726a989a
RB
1957 && empty_body_p (gimple_omp_body (stmt))
1958 && find_omp_clause (gimple_omp_parallel_clauses (stmt),
1959 OMP_CLAUSE_COPYIN) == NULL)
953ff289 1960 {
726a989a 1961 gsi_replace (gsi, gimple_build_nop (), false);
953ff289
DN
1962 return;
1963 }
1964
acf0174b
JJ
1965 if (gimple_omp_parallel_combined_p (stmt))
1966 {
1967 gimple for_stmt;
1968 struct walk_stmt_info wi;
1969
1970 memset (&wi, 0, sizeof (wi));
1971 wi.val_only = true;
1972 walk_gimple_seq (gimple_omp_body (stmt),
1973 find_combined_for, NULL, &wi);
1974 for_stmt = (gimple) wi.info;
1975 if (for_stmt)
1976 {
1977 struct omp_for_data fd;
1978 extract_omp_for_data (for_stmt, &fd, NULL);
1979 /* We need two temporaries with fd.loop.v type (istart/iend)
1980 and then (fd.collapse - 1) temporaries with the same
1981 type for count2 ... countN-1 vars if not constant. */
1982 size_t count = 2, i;
1983 tree type = fd.iter_type;
1984 if (fd.collapse > 1
1985 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1986 count += fd.collapse - 1;
1987 for (i = 0; i < count; i++)
1988 {
1989 tree temp = create_tmp_var (type, NULL);
1990 tree c = build_omp_clause (UNKNOWN_LOCATION,
1991 OMP_CLAUSE__LOOPTEMP_);
1992 OMP_CLAUSE_DECL (c) = temp;
1993 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1994 gimple_omp_parallel_set_clauses (stmt, c);
1995 }
1996 }
1997 }
1998
726a989a 1999 ctx = new_omp_context (stmt, outer_ctx);
a68ab351 2000 if (taskreg_nesting_level > 1)
50674e96 2001 ctx->is_nested = true;
953ff289 2002 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
953ff289
DN
2003 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2004 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
953ff289 2005 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
2006 name = build_decl (gimple_location (stmt),
2007 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
2008 DECL_ARTIFICIAL (name) = 1;
2009 DECL_NAMELESS (name) = 1;
953ff289 2010 TYPE_NAME (ctx->record_type) = name;
a68ab351 2011 create_omp_child_function (ctx, false);
726a989a 2012 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
953ff289 2013
726a989a 2014 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
26127932 2015 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2016
2017 if (TYPE_FIELDS (ctx->record_type) == NULL)
2018 ctx->record_type = ctx->receiver_decl = NULL;
2019 else
2020 {
2021 layout_type (ctx->record_type);
2022 fixup_child_record_type (ctx);
2023 }
2024}
2025
a68ab351
JJ
2026/* Scan an OpenMP task directive. */
2027
2028static void
726a989a 2029scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
a68ab351
JJ
2030{
2031 omp_context *ctx;
726a989a
RB
2032 tree name, t;
2033 gimple stmt = gsi_stmt (*gsi);
db3927fb 2034 location_t loc = gimple_location (stmt);
a68ab351
JJ
2035
2036 /* Ignore task directives with empty bodies. */
2037 if (optimize > 0
726a989a 2038 && empty_body_p (gimple_omp_body (stmt)))
a68ab351 2039 {
726a989a 2040 gsi_replace (gsi, gimple_build_nop (), false);
a68ab351
JJ
2041 return;
2042 }
2043
726a989a 2044 ctx = new_omp_context (stmt, outer_ctx);
a68ab351
JJ
2045 if (taskreg_nesting_level > 1)
2046 ctx->is_nested = true;
2047 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2048 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2049 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2050 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
2051 name = build_decl (gimple_location (stmt),
2052 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
2053 DECL_ARTIFICIAL (name) = 1;
2054 DECL_NAMELESS (name) = 1;
a68ab351
JJ
2055 TYPE_NAME (ctx->record_type) = name;
2056 create_omp_child_function (ctx, false);
726a989a 2057 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
a68ab351 2058
726a989a 2059 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
a68ab351
JJ
2060
2061 if (ctx->srecord_type)
2062 {
2063 name = create_tmp_var_name (".omp_data_a");
c2255bc4
AH
2064 name = build_decl (gimple_location (stmt),
2065 TYPE_DECL, name, ctx->srecord_type);
cd3f04c8
JJ
2066 DECL_ARTIFICIAL (name) = 1;
2067 DECL_NAMELESS (name) = 1;
a68ab351
JJ
2068 TYPE_NAME (ctx->srecord_type) = name;
2069 create_omp_child_function (ctx, true);
2070 }
2071
26127932 2072 scan_omp (gimple_omp_body_ptr (stmt), ctx);
a68ab351
JJ
2073
2074 if (TYPE_FIELDS (ctx->record_type) == NULL)
2075 {
2076 ctx->record_type = ctx->receiver_decl = NULL;
726a989a
RB
2077 t = build_int_cst (long_integer_type_node, 0);
2078 gimple_omp_task_set_arg_size (stmt, t);
2079 t = build_int_cst (long_integer_type_node, 1);
2080 gimple_omp_task_set_arg_align (stmt, t);
a68ab351
JJ
2081 }
2082 else
2083 {
2084 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2085 /* Move VLA fields to the end. */
2086 p = &TYPE_FIELDS (ctx->record_type);
2087 while (*p)
2088 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2089 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2090 {
2091 *q = *p;
2092 *p = TREE_CHAIN (*p);
2093 TREE_CHAIN (*q) = NULL_TREE;
2094 q = &TREE_CHAIN (*q);
2095 }
2096 else
910ad8de 2097 p = &DECL_CHAIN (*p);
a68ab351
JJ
2098 *p = vla_fields;
2099 layout_type (ctx->record_type);
2100 fixup_child_record_type (ctx);
2101 if (ctx->srecord_type)
2102 layout_type (ctx->srecord_type);
db3927fb 2103 t = fold_convert_loc (loc, long_integer_type_node,
a68ab351 2104 TYPE_SIZE_UNIT (ctx->record_type));
726a989a
RB
2105 gimple_omp_task_set_arg_size (stmt, t);
2106 t = build_int_cst (long_integer_type_node,
a68ab351 2107 TYPE_ALIGN_UNIT (ctx->record_type));
726a989a 2108 gimple_omp_task_set_arg_align (stmt, t);
a68ab351
JJ
2109 }
2110}
2111
953ff289 2112
50674e96 2113/* Scan an OpenMP loop directive. */
953ff289
DN
2114
2115static void
726a989a 2116scan_omp_for (gimple stmt, omp_context *outer_ctx)
953ff289 2117{
50674e96 2118 omp_context *ctx;
726a989a 2119 size_t i;
953ff289 2120
50674e96 2121 ctx = new_omp_context (stmt, outer_ctx);
953ff289 2122
726a989a 2123 scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);
953ff289 2124
26127932 2125 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
726a989a 2126 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 2127 {
726a989a
RB
2128 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2129 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2130 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2131 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
a68ab351 2132 }
26127932 2133 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2134}
2135
2136/* Scan an OpenMP sections directive. */
2137
2138static void
726a989a 2139scan_omp_sections (gimple stmt, omp_context *outer_ctx)
953ff289 2140{
953ff289
DN
2141 omp_context *ctx;
2142
2143 ctx = new_omp_context (stmt, outer_ctx);
726a989a 2144 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
26127932 2145 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2146}
2147
2148/* Scan an OpenMP single directive. */
2149
2150static void
726a989a 2151scan_omp_single (gimple stmt, omp_context *outer_ctx)
953ff289 2152{
953ff289
DN
2153 omp_context *ctx;
2154 tree name;
2155
2156 ctx = new_omp_context (stmt, outer_ctx);
2157 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2158 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2159 name = create_tmp_var_name (".omp_copy_s");
c2255bc4
AH
2160 name = build_decl (gimple_location (stmt),
2161 TYPE_DECL, name, ctx->record_type);
953ff289
DN
2162 TYPE_NAME (ctx->record_type) = name;
2163
726a989a 2164 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
26127932 2165 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2166
2167 if (TYPE_FIELDS (ctx->record_type) == NULL)
2168 ctx->record_type = NULL;
2169 else
2170 layout_type (ctx->record_type);
2171}
2172
acf0174b
JJ
2173/* Scan an OpenMP target{, data, update} directive. */
2174
2175static void
2176scan_omp_target (gimple stmt, omp_context *outer_ctx)
2177{
2178 omp_context *ctx;
2179 tree name;
2180 int kind = gimple_omp_target_kind (stmt);
2181
2182 ctx = new_omp_context (stmt, outer_ctx);
2183 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2184 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2185 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2186 name = create_tmp_var_name (".omp_data_t");
2187 name = build_decl (gimple_location (stmt),
2188 TYPE_DECL, name, ctx->record_type);
2189 DECL_ARTIFICIAL (name) = 1;
2190 DECL_NAMELESS (name) = 1;
2191 TYPE_NAME (ctx->record_type) = name;
2192 if (kind == GF_OMP_TARGET_KIND_REGION)
2193 {
2194 create_omp_child_function (ctx, false);
2195 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2196 }
2197
2198 scan_sharing_clauses (gimple_omp_target_clauses (stmt), ctx);
2199 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2200
2201 if (TYPE_FIELDS (ctx->record_type) == NULL)
2202 ctx->record_type = ctx->receiver_decl = NULL;
2203 else
2204 {
2205 TYPE_FIELDS (ctx->record_type)
2206 = nreverse (TYPE_FIELDS (ctx->record_type));
2207#ifdef ENABLE_CHECKING
2208 tree field;
2209 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2210 for (field = TYPE_FIELDS (ctx->record_type);
2211 field;
2212 field = DECL_CHAIN (field))
2213 gcc_assert (DECL_ALIGN (field) == align);
2214#endif
2215 layout_type (ctx->record_type);
2216 if (kind == GF_OMP_TARGET_KIND_REGION)
2217 fixup_child_record_type (ctx);
2218 }
2219}
2220
2221/* Scan an OpenMP teams directive. */
2222
2223static void
2224scan_omp_teams (gimple stmt, omp_context *outer_ctx)
2225{
2226 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2227 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2228 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2229}
953ff289 2230
a6fc8e21 2231/* Check OpenMP nesting restrictions. */
26127932
JJ
2232static bool
2233check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
a6fc8e21 2234{
74bf76ed
JJ
2235 if (ctx != NULL)
2236 {
2237 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
c02065fc 2238 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
74bf76ed
JJ
2239 {
2240 error_at (gimple_location (stmt),
2241 "OpenMP constructs may not be nested inside simd region");
2242 return false;
2243 }
acf0174b
JJ
2244 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2245 {
2246 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2247 || (gimple_omp_for_kind (stmt)
2248 != GF_OMP_FOR_KIND_DISTRIBUTE))
2249 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2250 {
2251 error_at (gimple_location (stmt),
2252 "only distribute or parallel constructs are allowed to "
2253 "be closely nested inside teams construct");
2254 return false;
2255 }
2256 }
74bf76ed 2257 }
726a989a 2258 switch (gimple_code (stmt))
a6fc8e21 2259 {
726a989a 2260 case GIMPLE_OMP_FOR:
c02065fc 2261 if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_KIND_SIMD)
74bf76ed 2262 return true;
acf0174b
JJ
2263 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2264 {
2265 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2266 {
2267 error_at (gimple_location (stmt),
2268 "distribute construct must be closely nested inside "
2269 "teams construct");
2270 return false;
2271 }
2272 return true;
2273 }
2274 /* FALLTHRU */
2275 case GIMPLE_CALL:
2276 if (is_gimple_call (stmt)
2277 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2278 == BUILT_IN_GOMP_CANCEL
2279 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2280 == BUILT_IN_GOMP_CANCELLATION_POINT))
2281 {
2282 const char *bad = NULL;
2283 const char *kind = NULL;
2284 if (ctx == NULL)
2285 {
2286 error_at (gimple_location (stmt), "orphaned %qs construct",
2287 DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2288 == BUILT_IN_GOMP_CANCEL
2289 ? "#pragma omp cancel"
2290 : "#pragma omp cancellation point");
2291 return false;
2292 }
9541ffee 2293 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
9439e9a1 2294 ? tree_to_shwi (gimple_call_arg (stmt, 0))
acf0174b
JJ
2295 : 0)
2296 {
2297 case 1:
2298 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2299 bad = "#pragma omp parallel";
2300 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2301 == BUILT_IN_GOMP_CANCEL
2302 && !integer_zerop (gimple_call_arg (stmt, 1)))
2303 ctx->cancellable = true;
2304 kind = "parallel";
2305 break;
2306 case 2:
2307 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2308 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2309 bad = "#pragma omp for";
2310 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2311 == BUILT_IN_GOMP_CANCEL
2312 && !integer_zerop (gimple_call_arg (stmt, 1)))
2313 {
2314 ctx->cancellable = true;
2315 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2316 OMP_CLAUSE_NOWAIT))
2317 warning_at (gimple_location (stmt), 0,
2318 "%<#pragma omp cancel for%> inside "
2319 "%<nowait%> for construct");
2320 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2321 OMP_CLAUSE_ORDERED))
2322 warning_at (gimple_location (stmt), 0,
2323 "%<#pragma omp cancel for%> inside "
2324 "%<ordered%> for construct");
2325 }
2326 kind = "for";
2327 break;
2328 case 4:
2329 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2330 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2331 bad = "#pragma omp sections";
2332 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2333 == BUILT_IN_GOMP_CANCEL
2334 && !integer_zerop (gimple_call_arg (stmt, 1)))
2335 {
2336 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2337 {
2338 ctx->cancellable = true;
2339 if (find_omp_clause (gimple_omp_sections_clauses
2340 (ctx->stmt),
2341 OMP_CLAUSE_NOWAIT))
2342 warning_at (gimple_location (stmt), 0,
2343 "%<#pragma omp cancel sections%> inside "
2344 "%<nowait%> sections construct");
2345 }
2346 else
2347 {
2348 gcc_assert (ctx->outer
2349 && gimple_code (ctx->outer->stmt)
2350 == GIMPLE_OMP_SECTIONS);
2351 ctx->outer->cancellable = true;
2352 if (find_omp_clause (gimple_omp_sections_clauses
2353 (ctx->outer->stmt),
2354 OMP_CLAUSE_NOWAIT))
2355 warning_at (gimple_location (stmt), 0,
2356 "%<#pragma omp cancel sections%> inside "
2357 "%<nowait%> sections construct");
2358 }
2359 }
2360 kind = "sections";
2361 break;
2362 case 8:
2363 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2364 bad = "#pragma omp task";
2365 else
2366 ctx->cancellable = true;
2367 kind = "taskgroup";
2368 break;
2369 default:
2370 error_at (gimple_location (stmt), "invalid arguments");
2371 return false;
2372 }
2373 if (bad)
2374 {
2375 error_at (gimple_location (stmt),
2376 "%<%s %s%> construct not closely nested inside of %qs",
2377 DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2378 == BUILT_IN_GOMP_CANCEL
2379 ? "#pragma omp cancel"
2380 : "#pragma omp cancellation point", kind, bad);
2381 return false;
2382 }
2383 }
74bf76ed 2384 /* FALLTHRU */
726a989a
RB
2385 case GIMPLE_OMP_SECTIONS:
2386 case GIMPLE_OMP_SINGLE:
a6fc8e21 2387 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2388 switch (gimple_code (ctx->stmt))
a6fc8e21 2389 {
726a989a
RB
2390 case GIMPLE_OMP_FOR:
2391 case GIMPLE_OMP_SECTIONS:
2392 case GIMPLE_OMP_SINGLE:
2393 case GIMPLE_OMP_ORDERED:
2394 case GIMPLE_OMP_MASTER:
2395 case GIMPLE_OMP_TASK:
acf0174b 2396 case GIMPLE_OMP_CRITICAL:
726a989a 2397 if (is_gimple_call (stmt))
a68ab351 2398 {
acf0174b
JJ
2399 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2400 != BUILT_IN_GOMP_BARRIER)
2401 return true;
26127932
JJ
2402 error_at (gimple_location (stmt),
2403 "barrier region may not be closely nested inside "
2404 "of work-sharing, critical, ordered, master or "
2405 "explicit task region");
2406 return false;
a68ab351 2407 }
26127932
JJ
2408 error_at (gimple_location (stmt),
2409 "work-sharing region may not be closely nested inside "
2410 "of work-sharing, critical, ordered, master or explicit "
2411 "task region");
2412 return false;
726a989a 2413 case GIMPLE_OMP_PARALLEL:
26127932 2414 return true;
a6fc8e21
JJ
2415 default:
2416 break;
2417 }
2418 break;
726a989a 2419 case GIMPLE_OMP_MASTER:
a6fc8e21 2420 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2421 switch (gimple_code (ctx->stmt))
a6fc8e21 2422 {
726a989a
RB
2423 case GIMPLE_OMP_FOR:
2424 case GIMPLE_OMP_SECTIONS:
2425 case GIMPLE_OMP_SINGLE:
2426 case GIMPLE_OMP_TASK:
26127932
JJ
2427 error_at (gimple_location (stmt),
2428 "master region may not be closely nested inside "
2429 "of work-sharing or explicit task region");
2430 return false;
726a989a 2431 case GIMPLE_OMP_PARALLEL:
26127932 2432 return true;
a6fc8e21
JJ
2433 default:
2434 break;
2435 }
2436 break;
726a989a 2437 case GIMPLE_OMP_ORDERED:
a6fc8e21 2438 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2439 switch (gimple_code (ctx->stmt))
a6fc8e21 2440 {
726a989a
RB
2441 case GIMPLE_OMP_CRITICAL:
2442 case GIMPLE_OMP_TASK:
26127932
JJ
2443 error_at (gimple_location (stmt),
2444 "ordered region may not be closely nested inside "
2445 "of critical or explicit task region");
2446 return false;
726a989a
RB
2447 case GIMPLE_OMP_FOR:
2448 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
a6fc8e21 2449 OMP_CLAUSE_ORDERED) == NULL)
26127932
JJ
2450 {
2451 error_at (gimple_location (stmt),
2452 "ordered region must be closely nested inside "
a6fc8e21 2453 "a loop region with an ordered clause");
26127932
JJ
2454 return false;
2455 }
2456 return true;
726a989a 2457 case GIMPLE_OMP_PARALLEL:
acf0174b
JJ
2458 error_at (gimple_location (stmt),
2459 "ordered region must be closely nested inside "
2460 "a loop region with an ordered clause");
2461 return false;
a6fc8e21
JJ
2462 default:
2463 break;
2464 }
2465 break;
726a989a 2466 case GIMPLE_OMP_CRITICAL:
a6fc8e21 2467 for (; ctx != NULL; ctx = ctx->outer)
726a989a
RB
2468 if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
2469 && (gimple_omp_critical_name (stmt)
2470 == gimple_omp_critical_name (ctx->stmt)))
a6fc8e21 2471 {
26127932
JJ
2472 error_at (gimple_location (stmt),
2473 "critical region may not be nested inside a critical "
2474 "region with the same name");
2475 return false;
a6fc8e21
JJ
2476 }
2477 break;
acf0174b
JJ
2478 case GIMPLE_OMP_TEAMS:
2479 if (ctx == NULL
2480 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2481 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2482 {
2483 error_at (gimple_location (stmt),
2484 "teams construct not closely nested inside of target "
2485 "region");
2486 return false;
2487 }
2488 break;
a6fc8e21
JJ
2489 default:
2490 break;
2491 }
26127932 2492 return true;
a6fc8e21
JJ
2493}
2494
2495
726a989a
RB
2496/* Helper function scan_omp.
2497
2498 Callback for walk_tree or operators in walk_gimple_stmt used to
2499 scan for OpenMP directives in TP. */
953ff289
DN
2500
2501static tree
726a989a 2502scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
953ff289 2503{
d3bfe4de
KG
2504 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2505 omp_context *ctx = (omp_context *) wi->info;
953ff289
DN
2506 tree t = *tp;
2507
726a989a
RB
2508 switch (TREE_CODE (t))
2509 {
2510 case VAR_DECL:
2511 case PARM_DECL:
2512 case LABEL_DECL:
2513 case RESULT_DECL:
2514 if (ctx)
2515 *tp = remap_decl (t, &ctx->cb);
2516 break;
2517
2518 default:
2519 if (ctx && TYPE_P (t))
2520 *tp = remap_type (t, &ctx->cb);
2521 else if (!DECL_P (t))
a900ae6b
JJ
2522 {
2523 *walk_subtrees = 1;
2524 if (ctx)
70f34814
RG
2525 {
2526 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2527 if (tem != TREE_TYPE (t))
2528 {
2529 if (TREE_CODE (t) == INTEGER_CST)
2530 *tp = build_int_cst_wide (tem,
2531 TREE_INT_CST_LOW (t),
2532 TREE_INT_CST_HIGH (t));
2533 else
2534 TREE_TYPE (t) = tem;
2535 }
2536 }
a900ae6b 2537 }
726a989a
RB
2538 break;
2539 }
2540
2541 return NULL_TREE;
2542}
2543
c02065fc
AH
2544/* Return true if FNDECL is a setjmp or a longjmp. */
2545
2546static bool
2547setjmp_or_longjmp_p (const_tree fndecl)
2548{
2549 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2550 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
2551 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
2552 return true;
2553
2554 tree declname = DECL_NAME (fndecl);
2555 if (!declname)
2556 return false;
2557 const char *name = IDENTIFIER_POINTER (declname);
2558 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
2559}
2560
726a989a
RB
2561
2562/* Helper function for scan_omp.
2563
2564 Callback for walk_gimple_stmt used to scan for OpenMP directives in
2565 the current statement in GSI. */
2566
2567static tree
2568scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2569 struct walk_stmt_info *wi)
2570{
2571 gimple stmt = gsi_stmt (*gsi);
2572 omp_context *ctx = (omp_context *) wi->info;
2573
2574 if (gimple_has_location (stmt))
2575 input_location = gimple_location (stmt);
953ff289 2576
a6fc8e21 2577 /* Check the OpenMP nesting restrictions. */
acf0174b
JJ
2578 bool remove = false;
2579 if (is_gimple_omp (stmt))
2580 remove = !check_omp_nesting_restrictions (stmt, ctx);
2581 else if (is_gimple_call (stmt))
2582 {
2583 tree fndecl = gimple_call_fndecl (stmt);
c02065fc
AH
2584 if (fndecl)
2585 {
2586 if (setjmp_or_longjmp_p (fndecl)
2587 && ctx
2588 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2589 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
2590 {
2591 remove = true;
2592 error_at (gimple_location (stmt),
2593 "setjmp/longjmp inside simd construct");
2594 }
2595 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2596 switch (DECL_FUNCTION_CODE (fndecl))
2597 {
2598 case BUILT_IN_GOMP_BARRIER:
2599 case BUILT_IN_GOMP_CANCEL:
2600 case BUILT_IN_GOMP_CANCELLATION_POINT:
2601 case BUILT_IN_GOMP_TASKYIELD:
2602 case BUILT_IN_GOMP_TASKWAIT:
2603 case BUILT_IN_GOMP_TASKGROUP_START:
2604 case BUILT_IN_GOMP_TASKGROUP_END:
2605 remove = !check_omp_nesting_restrictions (stmt, ctx);
2606 break;
2607 default:
2608 break;
2609 }
2610 }
acf0174b
JJ
2611 }
2612 if (remove)
2613 {
2614 stmt = gimple_build_nop ();
2615 gsi_replace (gsi, stmt, false);
a68ab351 2616 }
a6fc8e21 2617
726a989a
RB
2618 *handled_ops_p = true;
2619
2620 switch (gimple_code (stmt))
953ff289 2621 {
726a989a 2622 case GIMPLE_OMP_PARALLEL:
a68ab351 2623 taskreg_nesting_level++;
726a989a 2624 scan_omp_parallel (gsi, ctx);
a68ab351
JJ
2625 taskreg_nesting_level--;
2626 break;
2627
726a989a 2628 case GIMPLE_OMP_TASK:
a68ab351 2629 taskreg_nesting_level++;
726a989a 2630 scan_omp_task (gsi, ctx);
a68ab351 2631 taskreg_nesting_level--;
953ff289
DN
2632 break;
2633
726a989a
RB
2634 case GIMPLE_OMP_FOR:
2635 scan_omp_for (stmt, ctx);
953ff289
DN
2636 break;
2637
726a989a
RB
2638 case GIMPLE_OMP_SECTIONS:
2639 scan_omp_sections (stmt, ctx);
953ff289
DN
2640 break;
2641
726a989a
RB
2642 case GIMPLE_OMP_SINGLE:
2643 scan_omp_single (stmt, ctx);
953ff289
DN
2644 break;
2645
726a989a
RB
2646 case GIMPLE_OMP_SECTION:
2647 case GIMPLE_OMP_MASTER:
acf0174b 2648 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
2649 case GIMPLE_OMP_ORDERED:
2650 case GIMPLE_OMP_CRITICAL:
2651 ctx = new_omp_context (stmt, ctx);
26127932 2652 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2653 break;
2654
acf0174b
JJ
2655 case GIMPLE_OMP_TARGET:
2656 scan_omp_target (stmt, ctx);
2657 break;
2658
2659 case GIMPLE_OMP_TEAMS:
2660 scan_omp_teams (stmt, ctx);
2661 break;
2662
726a989a 2663 case GIMPLE_BIND:
953ff289
DN
2664 {
2665 tree var;
953ff289 2666
726a989a
RB
2667 *handled_ops_p = false;
2668 if (ctx)
910ad8de 2669 for (var = gimple_bind_vars (stmt); var ; var = DECL_CHAIN (var))
726a989a 2670 insert_decl_map (&ctx->cb, var, var);
953ff289
DN
2671 }
2672 break;
953ff289 2673 default:
726a989a 2674 *handled_ops_p = false;
953ff289
DN
2675 break;
2676 }
2677
2678 return NULL_TREE;
2679}
2680
2681
726a989a
RB
2682/* Scan all the statements starting at the current statement. CTX
2683 contains context information about the OpenMP directives and
2684 clauses found during the scan. */
953ff289
DN
2685
2686static void
26127932 2687scan_omp (gimple_seq *body_p, omp_context *ctx)
953ff289
DN
2688{
2689 location_t saved_location;
2690 struct walk_stmt_info wi;
2691
2692 memset (&wi, 0, sizeof (wi));
953ff289 2693 wi.info = ctx;
953ff289
DN
2694 wi.want_locations = true;
2695
2696 saved_location = input_location;
26127932 2697 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
953ff289
DN
2698 input_location = saved_location;
2699}
2700\f
2701/* Re-gimplification and code generation routines. */
2702
2703/* Build a call to GOMP_barrier. */
2704
acf0174b
JJ
2705static gimple
2706build_omp_barrier (tree lhs)
2707{
2708 tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
2709 : BUILT_IN_GOMP_BARRIER);
2710 gimple g = gimple_build_call (fndecl, 0);
2711 if (lhs)
2712 gimple_call_set_lhs (g, lhs);
2713 return g;
953ff289
DN
2714}
2715
2716/* If a context was created for STMT when it was scanned, return it. */
2717
2718static omp_context *
726a989a 2719maybe_lookup_ctx (gimple stmt)
953ff289
DN
2720{
2721 splay_tree_node n;
2722 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
2723 return n ? (omp_context *) n->value : NULL;
2724}
2725
50674e96
DN
2726
2727/* Find the mapping for DECL in CTX or the immediately enclosing
2728 context that has a mapping for DECL.
2729
2730 If CTX is a nested parallel directive, we may have to use the decl
2731 mappings created in CTX's parent context. Suppose that we have the
2732 following parallel nesting (variable UIDs showed for clarity):
2733
2734 iD.1562 = 0;
2735 #omp parallel shared(iD.1562) -> outer parallel
2736 iD.1562 = iD.1562 + 1;
2737
2738 #omp parallel shared (iD.1562) -> inner parallel
2739 iD.1562 = iD.1562 - 1;
2740
2741 Each parallel structure will create a distinct .omp_data_s structure
2742 for copying iD.1562 in/out of the directive:
2743
2744 outer parallel .omp_data_s.1.i -> iD.1562
2745 inner parallel .omp_data_s.2.i -> iD.1562
2746
2747 A shared variable mapping will produce a copy-out operation before
2748 the parallel directive and a copy-in operation after it. So, in
2749 this case we would have:
2750
2751 iD.1562 = 0;
2752 .omp_data_o.1.i = iD.1562;
2753 #omp parallel shared(iD.1562) -> outer parallel
2754 .omp_data_i.1 = &.omp_data_o.1
2755 .omp_data_i.1->i = .omp_data_i.1->i + 1;
2756
2757 .omp_data_o.2.i = iD.1562; -> **
2758 #omp parallel shared(iD.1562) -> inner parallel
2759 .omp_data_i.2 = &.omp_data_o.2
2760 .omp_data_i.2->i = .omp_data_i.2->i - 1;
2761
2762
2763 ** This is a problem. The symbol iD.1562 cannot be referenced
2764 inside the body of the outer parallel region. But since we are
2765 emitting this copy operation while expanding the inner parallel
2766 directive, we need to access the CTX structure of the outer
2767 parallel directive to get the correct mapping:
2768
2769 .omp_data_o.2.i = .omp_data_i.1->i
2770
2771 Since there may be other workshare or parallel directives enclosing
2772 the parallel directive, it may be necessary to walk up the context
2773 parent chain. This is not a problem in general because nested
2774 parallelism happens only rarely. */
2775
2776static tree
2777lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2778{
2779 tree t;
2780 omp_context *up;
2781
50674e96
DN
2782 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2783 t = maybe_lookup_decl (decl, up);
2784
d2dda7fe 2785 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
50674e96 2786
64964499 2787 return t ? t : decl;
50674e96
DN
2788}
2789
2790
8ca5b2a2
JJ
2791/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
2792 in outer contexts. */
2793
2794static tree
2795maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2796{
2797 tree t = NULL;
2798 omp_context *up;
2799
d2dda7fe
JJ
2800 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2801 t = maybe_lookup_decl (decl, up);
8ca5b2a2
JJ
2802
2803 return t ? t : decl;
2804}
2805
2806
953ff289
DN
2807/* Construct the initialization value for reduction CLAUSE. */
2808
2809tree
2810omp_reduction_init (tree clause, tree type)
2811{
db3927fb 2812 location_t loc = OMP_CLAUSE_LOCATION (clause);
953ff289
DN
2813 switch (OMP_CLAUSE_REDUCTION_CODE (clause))
2814 {
2815 case PLUS_EXPR:
2816 case MINUS_EXPR:
2817 case BIT_IOR_EXPR:
2818 case BIT_XOR_EXPR:
2819 case TRUTH_OR_EXPR:
2820 case TRUTH_ORIF_EXPR:
2821 case TRUTH_XOR_EXPR:
2822 case NE_EXPR:
e8160c9a 2823 return build_zero_cst (type);
953ff289
DN
2824
2825 case MULT_EXPR:
2826 case TRUTH_AND_EXPR:
2827 case TRUTH_ANDIF_EXPR:
2828 case EQ_EXPR:
db3927fb 2829 return fold_convert_loc (loc, type, integer_one_node);
953ff289
DN
2830
2831 case BIT_AND_EXPR:
db3927fb 2832 return fold_convert_loc (loc, type, integer_minus_one_node);
953ff289
DN
2833
2834 case MAX_EXPR:
2835 if (SCALAR_FLOAT_TYPE_P (type))
2836 {
2837 REAL_VALUE_TYPE max, min;
2838 if (HONOR_INFINITIES (TYPE_MODE (type)))
2839 {
2840 real_inf (&max);
2841 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
2842 }
2843 else
2844 real_maxval (&min, 1, TYPE_MODE (type));
2845 return build_real (type, min);
2846 }
2847 else
2848 {
2849 gcc_assert (INTEGRAL_TYPE_P (type));
2850 return TYPE_MIN_VALUE (type);
2851 }
2852
2853 case MIN_EXPR:
2854 if (SCALAR_FLOAT_TYPE_P (type))
2855 {
2856 REAL_VALUE_TYPE max;
2857 if (HONOR_INFINITIES (TYPE_MODE (type)))
2858 real_inf (&max);
2859 else
2860 real_maxval (&max, 0, TYPE_MODE (type));
2861 return build_real (type, max);
2862 }
2863 else
2864 {
2865 gcc_assert (INTEGRAL_TYPE_P (type));
2866 return TYPE_MAX_VALUE (type);
2867 }
2868
2869 default:
2870 gcc_unreachable ();
2871 }
2872}
2873
acf0174b
JJ
2874/* Return alignment to be assumed for var in CLAUSE, which should be
2875 OMP_CLAUSE_ALIGNED. */
2876
2877static tree
2878omp_clause_aligned_alignment (tree clause)
2879{
2880 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2881 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
2882
2883 /* Otherwise return implementation defined alignment. */
2884 unsigned int al = 1;
2885 enum machine_mode mode, vmode;
2886 int vs = targetm.vectorize.autovectorize_vector_sizes ();
2887 if (vs)
2888 vs = 1 << floor_log2 (vs);
2889 static enum mode_class classes[]
2890 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
2891 for (int i = 0; i < 4; i += 2)
2892 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
2893 mode != VOIDmode;
2894 mode = GET_MODE_WIDER_MODE (mode))
2895 {
2896 vmode = targetm.vectorize.preferred_simd_mode (mode);
2897 if (GET_MODE_CLASS (vmode) != classes[i + 1])
2898 continue;
2899 while (vs
2900 && GET_MODE_SIZE (vmode) < vs
2901 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
2902 vmode = GET_MODE_2XWIDER_MODE (vmode);
2903
2904 tree type = lang_hooks.types.type_for_mode (mode, 1);
2905 if (type == NULL_TREE || TYPE_MODE (type) != mode)
2906 continue;
2907 type = build_vector_type (type, GET_MODE_SIZE (vmode)
2908 / GET_MODE_SIZE (mode));
2909 if (TYPE_MODE (type) != vmode)
2910 continue;
2911 if (TYPE_ALIGN_UNIT (type) > al)
2912 al = TYPE_ALIGN_UNIT (type);
2913 }
2914 return build_int_cst (integer_type_node, al);
2915}
2916
74bf76ed
JJ
2917/* Return maximum possible vectorization factor for the target. */
2918
2919static int
2920omp_max_vf (void)
2921{
2922 if (!optimize
2923 || optimize_debug
ea0f3e87
XDL
2924 || (!flag_tree_loop_vectorize
2925 && (global_options_set.x_flag_tree_loop_vectorize
2926 || global_options_set.x_flag_tree_vectorize)))
74bf76ed
JJ
2927 return 1;
2928
2929 int vs = targetm.vectorize.autovectorize_vector_sizes ();
2930 if (vs)
2931 {
2932 vs = 1 << floor_log2 (vs);
2933 return vs;
2934 }
2935 enum machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
2936 if (GET_MODE_CLASS (vqimode) == MODE_VECTOR_INT)
2937 return GET_MODE_NUNITS (vqimode);
2938 return 1;
2939}
2940
2941/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
2942 privatization. */
2943
2944static bool
2945lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, int &max_vf,
2946 tree &idx, tree &lane, tree &ivar, tree &lvar)
2947{
2948 if (max_vf == 0)
2949 {
2950 max_vf = omp_max_vf ();
2951 if (max_vf > 1)
2952 {
2953 tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2954 OMP_CLAUSE_SAFELEN);
2955 if (c
2956 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c), max_vf) == -1)
9439e9a1 2957 max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
74bf76ed
JJ
2958 }
2959 if (max_vf > 1)
2960 {
2961 idx = create_tmp_var (unsigned_type_node, NULL);
2962 lane = create_tmp_var (unsigned_type_node, NULL);
2963 }
2964 }
2965 if (max_vf == 1)
2966 return false;
2967
2968 tree atype = build_array_type_nelts (TREE_TYPE (new_var), max_vf);
2969 tree avar = create_tmp_var_raw (atype, NULL);
2970 if (TREE_ADDRESSABLE (new_var))
2971 TREE_ADDRESSABLE (avar) = 1;
2972 DECL_ATTRIBUTES (avar)
2973 = tree_cons (get_identifier ("omp simd array"), NULL,
2974 DECL_ATTRIBUTES (avar));
2975 gimple_add_tmp_var (avar);
2976 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, idx,
2977 NULL_TREE, NULL_TREE);
2978 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, lane,
2979 NULL_TREE, NULL_TREE);
acf0174b
JJ
2980 if (DECL_P (new_var))
2981 {
2982 SET_DECL_VALUE_EXPR (new_var, lvar);
2983 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2984 }
74bf76ed
JJ
2985 return true;
2986}
2987
953ff289
DN
2988/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
2989 from the receiver (aka child) side and initializers for REFERENCE_TYPE
2990 private variables. Initialization statements go in ILIST, while calls
2991 to destructors go in DLIST. */
2992
2993static void
726a989a 2994lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
acf0174b 2995 omp_context *ctx, struct omp_for_data *fd)
953ff289 2996{
5039610b 2997 tree c, dtor, copyin_seq, x, ptr;
953ff289 2998 bool copyin_by_ref = false;
8ca5b2a2 2999 bool lastprivate_firstprivate = false;
acf0174b 3000 bool reduction_omp_orig_ref = false;
953ff289 3001 int pass;
74bf76ed 3002 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
c02065fc 3003 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD);
74bf76ed
JJ
3004 int max_vf = 0;
3005 tree lane = NULL_TREE, idx = NULL_TREE;
3006 tree ivar = NULL_TREE, lvar = NULL_TREE;
3007 gimple_seq llist[2] = { NULL, NULL };
953ff289 3008
953ff289
DN
3009 copyin_seq = NULL;
3010
74bf76ed
JJ
3011 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3012 with data sharing clauses referencing variable sized vars. That
3013 is unnecessarily hard to support and very unlikely to result in
3014 vectorized code anyway. */
3015 if (is_simd)
3016 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3017 switch (OMP_CLAUSE_CODE (c))
3018 {
3019 case OMP_CLAUSE_REDUCTION:
74bf76ed
JJ
3020 case OMP_CLAUSE_PRIVATE:
3021 case OMP_CLAUSE_FIRSTPRIVATE:
3022 case OMP_CLAUSE_LASTPRIVATE:
3023 case OMP_CLAUSE_LINEAR:
3024 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3025 max_vf = 1;
3026 break;
3027 default:
3028 continue;
3029 }
3030
953ff289
DN
3031 /* Do all the fixed sized types in the first pass, and the variable sized
3032 types in the second pass. This makes sure that the scalar arguments to
b8698a0f 3033 the variable sized types are processed before we use them in the
953ff289
DN
3034 variable sized operations. */
3035 for (pass = 0; pass < 2; ++pass)
3036 {
3037 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3038 {
aaf46ef9 3039 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
953ff289
DN
3040 tree var, new_var;
3041 bool by_ref;
db3927fb 3042 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289
DN
3043
3044 switch (c_kind)
3045 {
3046 case OMP_CLAUSE_PRIVATE:
3047 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3048 continue;
3049 break;
3050 case OMP_CLAUSE_SHARED:
acf0174b
JJ
3051 /* Ignore shared directives in teams construct. */
3052 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3053 continue;
8ca5b2a2
JJ
3054 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3055 {
3056 gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
3057 continue;
3058 }
953ff289 3059 case OMP_CLAUSE_FIRSTPRIVATE:
953ff289 3060 case OMP_CLAUSE_COPYIN:
acf0174b
JJ
3061 case OMP_CLAUSE_LINEAR:
3062 break;
953ff289 3063 case OMP_CLAUSE_REDUCTION:
acf0174b
JJ
3064 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3065 reduction_omp_orig_ref = true;
953ff289 3066 break;
acf0174b
JJ
3067 case OMP_CLAUSE__LOOPTEMP_:
3068 /* Handle _looptemp_ clauses only on parallel. */
3069 if (fd)
3070 continue;
74bf76ed 3071 break;
077b0dfb 3072 case OMP_CLAUSE_LASTPRIVATE:
8ca5b2a2
JJ
3073 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3074 {
3075 lastprivate_firstprivate = true;
3076 if (pass != 0)
3077 continue;
3078 }
077b0dfb 3079 break;
acf0174b
JJ
3080 case OMP_CLAUSE_ALIGNED:
3081 if (pass == 0)
3082 continue;
3083 var = OMP_CLAUSE_DECL (c);
3084 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3085 && !is_global_var (var))
3086 {
3087 new_var = maybe_lookup_decl (var, ctx);
3088 if (new_var == NULL_TREE)
3089 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3090 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3091 x = build_call_expr_loc (clause_loc, x, 2, new_var,
3092 omp_clause_aligned_alignment (c));
3093 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3094 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3095 gimplify_and_add (x, ilist);
3096 }
3097 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3098 && is_global_var (var))
3099 {
3100 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3101 new_var = lookup_decl (var, ctx);
3102 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3103 t = build_fold_addr_expr_loc (clause_loc, t);
3104 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3105 t = build_call_expr_loc (clause_loc, t2, 2, t,
3106 omp_clause_aligned_alignment (c));
3107 t = fold_convert_loc (clause_loc, ptype, t);
3108 x = create_tmp_var (ptype, NULL);
3109 t = build2 (MODIFY_EXPR, ptype, x, t);
3110 gimplify_and_add (t, ilist);
3111 t = build_simple_mem_ref_loc (clause_loc, x);
3112 SET_DECL_VALUE_EXPR (new_var, t);
3113 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3114 }
3115 continue;
953ff289
DN
3116 default:
3117 continue;
3118 }
3119
3120 new_var = var = OMP_CLAUSE_DECL (c);
3121 if (c_kind != OMP_CLAUSE_COPYIN)
3122 new_var = lookup_decl (var, ctx);
3123
3124 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3125 {
3126 if (pass != 0)
3127 continue;
3128 }
953ff289
DN
3129 else if (is_variable_sized (var))
3130 {
50674e96
DN
3131 /* For variable sized types, we need to allocate the
3132 actual storage here. Call alloca and store the
3133 result in the pointer decl that we created elsewhere. */
953ff289
DN
3134 if (pass == 0)
3135 continue;
3136
a68ab351
JJ
3137 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3138 {
726a989a 3139 gimple stmt;
e79983f4 3140 tree tmp, atmp;
726a989a 3141
a68ab351
JJ
3142 ptr = DECL_VALUE_EXPR (new_var);
3143 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3144 ptr = TREE_OPERAND (ptr, 0);
3145 gcc_assert (DECL_P (ptr));
3146 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
726a989a
RB
3147
3148 /* void *tmp = __builtin_alloca */
e79983f4
MM
3149 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
3150 stmt = gimple_build_call (atmp, 1, x);
726a989a
RB
3151 tmp = create_tmp_var_raw (ptr_type_node, NULL);
3152 gimple_add_tmp_var (tmp);
3153 gimple_call_set_lhs (stmt, tmp);
3154
3155 gimple_seq_add_stmt (ilist, stmt);
3156
db3927fb 3157 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
726a989a 3158 gimplify_assign (ptr, x, ilist);
a68ab351 3159 }
953ff289 3160 }
953ff289
DN
3161 else if (is_reference (var))
3162 {
50674e96
DN
3163 /* For references that are being privatized for Fortran,
3164 allocate new backing storage for the new pointer
3165 variable. This allows us to avoid changing all the
3166 code that expects a pointer to something that expects
acf0174b 3167 a direct variable. */
953ff289
DN
3168 if (pass == 0)
3169 continue;
3170
3171 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
a68ab351
JJ
3172 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
3173 {
3174 x = build_receiver_ref (var, false, ctx);
db3927fb 3175 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
3176 }
3177 else if (TREE_CONSTANT (x))
953ff289
DN
3178 {
3179 const char *name = NULL;
3180 if (DECL_NAME (var))
3181 name = IDENTIFIER_POINTER (DECL_NAME (new_var));
3182
077b0dfb
JJ
3183 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
3184 name);
3185 gimple_add_tmp_var (x);
628c189e 3186 TREE_ADDRESSABLE (x) = 1;
db3927fb 3187 x = build_fold_addr_expr_loc (clause_loc, x);
953ff289
DN
3188 }
3189 else
3190 {
e79983f4
MM
3191 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
3192 x = build_call_expr_loc (clause_loc, atmp, 1, x);
953ff289
DN
3193 }
3194
db3927fb 3195 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
726a989a 3196 gimplify_assign (new_var, x, ilist);
953ff289 3197
70f34814 3198 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289
DN
3199 }
3200 else if (c_kind == OMP_CLAUSE_REDUCTION
3201 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3202 {
3203 if (pass == 0)
3204 continue;
3205 }
3206 else if (pass != 0)
3207 continue;
3208
aaf46ef9 3209 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
3210 {
3211 case OMP_CLAUSE_SHARED:
acf0174b
JJ
3212 /* Ignore shared directives in teams construct. */
3213 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3214 continue;
8ca5b2a2
JJ
3215 /* Shared global vars are just accessed directly. */
3216 if (is_global_var (new_var))
3217 break;
953ff289
DN
3218 /* Set up the DECL_VALUE_EXPR for shared variables now. This
3219 needs to be delayed until after fixup_child_record_type so
3220 that we get the correct type during the dereference. */
7c8f7639 3221 by_ref = use_pointer_for_field (var, ctx);
953ff289
DN
3222 x = build_receiver_ref (var, by_ref, ctx);
3223 SET_DECL_VALUE_EXPR (new_var, x);
3224 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3225
3226 /* ??? If VAR is not passed by reference, and the variable
3227 hasn't been initialized yet, then we'll get a warning for
3228 the store into the omp_data_s structure. Ideally, we'd be
b8698a0f 3229 able to notice this and not store anything at all, but
953ff289
DN
3230 we're generating code too early. Suppress the warning. */
3231 if (!by_ref)
3232 TREE_NO_WARNING (var) = 1;
3233 break;
3234
3235 case OMP_CLAUSE_LASTPRIVATE:
3236 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3237 break;
3238 /* FALLTHRU */
3239
3240 case OMP_CLAUSE_PRIVATE:
a68ab351
JJ
3241 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
3242 x = build_outer_var_ref (var, ctx);
3243 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
3244 {
3245 if (is_task_ctx (ctx))
3246 x = build_receiver_ref (var, false, ctx);
3247 else
3248 x = build_outer_var_ref (var, ctx);
3249 }
3250 else
3251 x = NULL;
74bf76ed 3252 do_private:
acf0174b
JJ
3253 tree nx;
3254 nx = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
74bf76ed
JJ
3255 if (is_simd)
3256 {
3257 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
acf0174b 3258 if ((TREE_ADDRESSABLE (new_var) || nx || y
74bf76ed
JJ
3259 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
3260 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3261 idx, lane, ivar, lvar))
3262 {
acf0174b 3263 if (nx)
74bf76ed
JJ
3264 x = lang_hooks.decls.omp_clause_default_ctor
3265 (c, unshare_expr (ivar), x);
acf0174b 3266 if (nx && x)
74bf76ed
JJ
3267 gimplify_and_add (x, &llist[0]);
3268 if (y)
3269 {
3270 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
3271 if (y)
3272 {
3273 gimple_seq tseq = NULL;
3274
3275 dtor = y;
3276 gimplify_stmt (&dtor, &tseq);
3277 gimple_seq_add_seq (&llist[1], tseq);
3278 }
3279 }
3280 break;
3281 }
3282 }
acf0174b
JJ
3283 if (nx)
3284 gimplify_and_add (nx, ilist);
953ff289
DN
3285 /* FALLTHRU */
3286
3287 do_dtor:
3288 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
3289 if (x)
3290 {
726a989a
RB
3291 gimple_seq tseq = NULL;
3292
953ff289 3293 dtor = x;
726a989a 3294 gimplify_stmt (&dtor, &tseq);
355a7673 3295 gimple_seq_add_seq (dlist, tseq);
953ff289
DN
3296 }
3297 break;
3298
74bf76ed
JJ
3299 case OMP_CLAUSE_LINEAR:
3300 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
3301 goto do_firstprivate;
3302 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3303 x = NULL;
3304 else
3305 x = build_outer_var_ref (var, ctx);
3306 goto do_private;
3307
953ff289 3308 case OMP_CLAUSE_FIRSTPRIVATE:
a68ab351
JJ
3309 if (is_task_ctx (ctx))
3310 {
3311 if (is_reference (var) || is_variable_sized (var))
3312 goto do_dtor;
3313 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
3314 ctx))
3315 || use_pointer_for_field (var, NULL))
3316 {
3317 x = build_receiver_ref (var, false, ctx);
3318 SET_DECL_VALUE_EXPR (new_var, x);
3319 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3320 goto do_dtor;
3321 }
3322 }
74bf76ed 3323 do_firstprivate:
953ff289 3324 x = build_outer_var_ref (var, ctx);
74bf76ed
JJ
3325 if (is_simd)
3326 {
acf0174b
JJ
3327 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3328 && gimple_omp_for_combined_into_p (ctx->stmt))
3329 {
3330 tree stept = POINTER_TYPE_P (TREE_TYPE (x))
3331 ? sizetype : TREE_TYPE (x);
3332 tree t = fold_convert (stept,
3333 OMP_CLAUSE_LINEAR_STEP (c));
3334 tree c = find_omp_clause (clauses,
3335 OMP_CLAUSE__LOOPTEMP_);
3336 gcc_assert (c);
3337 tree l = OMP_CLAUSE_DECL (c);
3338 if (fd->collapse == 1)
3339 {
3340 tree n1 = fd->loop.n1;
3341 tree step = fd->loop.step;
3342 tree itype = TREE_TYPE (l);
3343 if (POINTER_TYPE_P (itype))
3344 itype = signed_type_for (itype);
3345 l = fold_build2 (MINUS_EXPR, itype, l, n1);
3346 if (TYPE_UNSIGNED (itype)
3347 && fd->loop.cond_code == GT_EXPR)
3348 l = fold_build2 (TRUNC_DIV_EXPR, itype,
3349 fold_build1 (NEGATE_EXPR,
3350 itype, l),
3351 fold_build1 (NEGATE_EXPR,
3352 itype, step));
3353 else
3354 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
3355 }
3356 t = fold_build2 (MULT_EXPR, stept,
3357 fold_convert (stept, l), t);
3358 if (POINTER_TYPE_P (TREE_TYPE (x)))
3359 x = fold_build2 (POINTER_PLUS_EXPR,
3360 TREE_TYPE (x), x, t);
3361 else
3362 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
3363 }
3364
74bf76ed
JJ
3365 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
3366 || TREE_ADDRESSABLE (new_var))
3367 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3368 idx, lane, ivar, lvar))
3369 {
3370 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
3371 {
3372 tree iv = create_tmp_var (TREE_TYPE (new_var), NULL);
3373 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
3374 gimplify_and_add (x, ilist);
3375 gimple_stmt_iterator gsi
3376 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
3377 gimple g
3378 = gimple_build_assign (unshare_expr (lvar), iv);
3379 gsi_insert_before_without_update (&gsi, g,
3380 GSI_SAME_STMT);
3381 tree stept = POINTER_TYPE_P (TREE_TYPE (x))
3382 ? sizetype : TREE_TYPE (x);
3383 tree t = fold_convert (stept,
3384 OMP_CLAUSE_LINEAR_STEP (c));
3385 enum tree_code code = PLUS_EXPR;
3386 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
3387 code = POINTER_PLUS_EXPR;
3388 g = gimple_build_assign_with_ops (code, iv, iv, t);
3389 gsi_insert_before_without_update (&gsi, g,
3390 GSI_SAME_STMT);
3391 break;
3392 }
3393 x = lang_hooks.decls.omp_clause_copy_ctor
3394 (c, unshare_expr (ivar), x);
3395 gimplify_and_add (x, &llist[0]);
3396 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
3397 if (x)
3398 {
3399 gimple_seq tseq = NULL;
3400
3401 dtor = x;
3402 gimplify_stmt (&dtor, &tseq);
3403 gimple_seq_add_seq (&llist[1], tseq);
3404 }
3405 break;
3406 }
3407 }
953ff289
DN
3408 x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
3409 gimplify_and_add (x, ilist);
3410 goto do_dtor;
953ff289 3411
acf0174b
JJ
3412 case OMP_CLAUSE__LOOPTEMP_:
3413 gcc_assert (is_parallel_ctx (ctx));
3414 x = build_outer_var_ref (var, ctx);
3415 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3416 gimplify_and_add (x, ilist);
3417 break;
3418
953ff289 3419 case OMP_CLAUSE_COPYIN:
7c8f7639 3420 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
3421 x = build_receiver_ref (var, by_ref, ctx);
3422 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
3423 append_to_statement_list (x, &copyin_seq);
3424 copyin_by_ref |= by_ref;
3425 break;
3426
3427 case OMP_CLAUSE_REDUCTION:
3428 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3429 {
a68ab351 3430 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
acf0174b 3431 gimple tseq;
a68ab351
JJ
3432 x = build_outer_var_ref (var, ctx);
3433
acf0174b
JJ
3434 if (is_reference (var)
3435 && !useless_type_conversion_p (TREE_TYPE (placeholder),
3436 TREE_TYPE (x)))
db3927fb 3437 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
3438 SET_DECL_VALUE_EXPR (placeholder, x);
3439 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
acf0174b
JJ
3440 tree new_vard = new_var;
3441 if (is_reference (var))
3442 {
3443 gcc_assert (TREE_CODE (new_var) == MEM_REF);
3444 new_vard = TREE_OPERAND (new_var, 0);
3445 gcc_assert (DECL_P (new_vard));
3446 }
74bf76ed
JJ
3447 if (is_simd
3448 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3449 idx, lane, ivar, lvar))
3450 {
acf0174b
JJ
3451 if (new_vard == new_var)
3452 {
3453 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
3454 SET_DECL_VALUE_EXPR (new_var, ivar);
3455 }
3456 else
3457 {
3458 SET_DECL_VALUE_EXPR (new_vard,
3459 build_fold_addr_expr (ivar));
3460 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
3461 }
3462 x = lang_hooks.decls.omp_clause_default_ctor
3463 (c, unshare_expr (ivar),
3464 build_outer_var_ref (var, ctx));
3465 if (x)
3466 gimplify_and_add (x, &llist[0]);
3467 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3468 {
3469 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3470 lower_omp (&tseq, ctx);
3471 gimple_seq_add_seq (&llist[0], tseq);
3472 }
3473 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3474 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3475 lower_omp (&tseq, ctx);
3476 gimple_seq_add_seq (&llist[1], tseq);
3477 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3478 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3479 if (new_vard == new_var)
3480 SET_DECL_VALUE_EXPR (new_var, lvar);
3481 else
3482 SET_DECL_VALUE_EXPR (new_vard,
3483 build_fold_addr_expr (lvar));
3484 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
3485 if (x)
3486 {
3487 tseq = NULL;
3488 dtor = x;
3489 gimplify_stmt (&dtor, &tseq);
3490 gimple_seq_add_seq (&llist[1], tseq);
3491 }
3492 break;
3493 }
3494 x = lang_hooks.decls.omp_clause_default_ctor
3495 (c, new_var, unshare_expr (x));
3496 if (x)
3497 gimplify_and_add (x, ilist);
3498 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3499 {
3500 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3501 lower_omp (&tseq, ctx);
3502 gimple_seq_add_seq (ilist, tseq);
3503 }
3504 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3505 if (is_simd)
3506 {
3507 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3508 lower_omp (&tseq, ctx);
3509 gimple_seq_add_seq (dlist, tseq);
3510 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3511 }
3512 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3513 goto do_dtor;
3514 }
3515 else
3516 {
3517 x = omp_reduction_init (c, TREE_TYPE (new_var));
3518 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
3519 if (is_simd
3520 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3521 idx, lane, ivar, lvar))
3522 {
3523 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3524 tree ref = build_outer_var_ref (var, ctx);
3525
3526 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
3527
3528 /* reduction(-:var) sums up the partial results, so it
3529 acts identically to reduction(+:var). */
3530 if (code == MINUS_EXPR)
3531 code = PLUS_EXPR;
3532
3533 x = build2 (code, TREE_TYPE (ref), ref, ivar);
74bf76ed
JJ
3534 ref = build_outer_var_ref (var, ctx);
3535 gimplify_assign (ref, x, &llist[1]);
3536 }
3537 else
3538 {
3539 gimplify_assign (new_var, x, ilist);
3540 if (is_simd)
3541 gimplify_assign (build_outer_var_ref (var, ctx),
3542 new_var, dlist);
3543 }
953ff289
DN
3544 }
3545 break;
3546
3547 default:
3548 gcc_unreachable ();
3549 }
3550 }
3551 }
3552
74bf76ed
JJ
3553 if (lane)
3554 {
3555 tree uid = create_tmp_var (ptr_type_node, "simduid");
8928eff3
JJ
3556 /* Don't want uninit warnings on simduid, it is always uninitialized,
3557 but we use it not for the value, but for the DECL_UID only. */
3558 TREE_NO_WARNING (uid) = 1;
74bf76ed
JJ
3559 gimple g
3560 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
3561 gimple_call_set_lhs (g, lane);
3562 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
3563 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
3564 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
3565 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
3566 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
3567 gimple_omp_for_set_clauses (ctx->stmt, c);
3568 g = gimple_build_assign_with_ops (INTEGER_CST, lane,
3569 build_int_cst (unsigned_type_node, 0),
3570 NULL_TREE);
3571 gimple_seq_add_stmt (ilist, g);
3572 for (int i = 0; i < 2; i++)
3573 if (llist[i])
3574 {
3575 tree vf = create_tmp_var (unsigned_type_node, NULL);
3576 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
3577 gimple_call_set_lhs (g, vf);
3578 gimple_seq *seq = i == 0 ? ilist : dlist;
3579 gimple_seq_add_stmt (seq, g);
3580 tree t = build_int_cst (unsigned_type_node, 0);
3581 g = gimple_build_assign_with_ops (INTEGER_CST, idx, t, NULL_TREE);
3582 gimple_seq_add_stmt (seq, g);
3583 tree body = create_artificial_label (UNKNOWN_LOCATION);
3584 tree header = create_artificial_label (UNKNOWN_LOCATION);
3585 tree end = create_artificial_label (UNKNOWN_LOCATION);
3586 gimple_seq_add_stmt (seq, gimple_build_goto (header));
3587 gimple_seq_add_stmt (seq, gimple_build_label (body));
3588 gimple_seq_add_seq (seq, llist[i]);
3589 t = build_int_cst (unsigned_type_node, 1);
3590 g = gimple_build_assign_with_ops (PLUS_EXPR, idx, idx, t);
3591 gimple_seq_add_stmt (seq, g);
3592 gimple_seq_add_stmt (seq, gimple_build_label (header));
3593 g = gimple_build_cond (LT_EXPR, idx, vf, body, end);
3594 gimple_seq_add_stmt (seq, g);
3595 gimple_seq_add_stmt (seq, gimple_build_label (end));
3596 }
3597 }
3598
953ff289
DN
3599 /* The copyin sequence is not to be executed by the main thread, since
3600 that would result in self-copies. Perhaps not visible to scalars,
3601 but it certainly is to C++ operator=. */
3602 if (copyin_seq)
3603 {
e79983f4
MM
3604 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
3605 0);
953ff289
DN
3606 x = build2 (NE_EXPR, boolean_type_node, x,
3607 build_int_cst (TREE_TYPE (x), 0));
3608 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
3609 gimplify_and_add (x, ilist);
3610 }
3611
3612 /* If any copyin variable is passed by reference, we must ensure the
3613 master thread doesn't modify it before it is copied over in all
8ca5b2a2
JJ
3614 threads. Similarly for variables in both firstprivate and
3615 lastprivate clauses we need to ensure the lastprivate copying
acf0174b
JJ
3616 happens after firstprivate copying in all threads. And similarly
3617 for UDRs if initializer expression refers to omp_orig. */
3618 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
74bf76ed
JJ
3619 {
3620 /* Don't add any barrier for #pragma omp simd or
3621 #pragma omp distribute. */
3622 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
c02065fc 3623 || gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_FOR)
acf0174b 3624 gimple_seq_add_stmt (ilist, build_omp_barrier (NULL_TREE));
74bf76ed
JJ
3625 }
3626
3627 /* If max_vf is non-zero, then we can use only a vectorization factor
3628 up to the max_vf we chose. So stick it into the safelen clause. */
3629 if (max_vf)
3630 {
3631 tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
3632 OMP_CLAUSE_SAFELEN);
3633 if (c == NULL_TREE
3634 || compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3635 max_vf) == 1)
3636 {
3637 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
3638 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
3639 max_vf);
3640 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
3641 gimple_omp_for_set_clauses (ctx->stmt, c);
3642 }
3643 }
953ff289
DN
3644}
3645
50674e96 3646
953ff289
DN
3647/* Generate code to implement the LASTPRIVATE clauses. This is used for
3648 both parallel and workshare constructs. PREDICATE may be NULL if it's
3649 always true. */
3650
3651static void
726a989a 3652lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
acf0174b 3653 omp_context *ctx)
953ff289 3654{
74bf76ed 3655 tree x, c, label = NULL, orig_clauses = clauses;
a68ab351 3656 bool par_clauses = false;
74bf76ed 3657 tree simduid = NULL, lastlane = NULL;
953ff289 3658
74bf76ed
JJ
3659 /* Early exit if there are no lastprivate or linear clauses. */
3660 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
3661 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
3662 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
3663 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
3664 break;
953ff289
DN
3665 if (clauses == NULL)
3666 {
3667 /* If this was a workshare clause, see if it had been combined
3668 with its parallel. In that case, look for the clauses on the
3669 parallel statement itself. */
3670 if (is_parallel_ctx (ctx))
3671 return;
3672
3673 ctx = ctx->outer;
3674 if (ctx == NULL || !is_parallel_ctx (ctx))
3675 return;
3676
726a989a 3677 clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
953ff289
DN
3678 OMP_CLAUSE_LASTPRIVATE);
3679 if (clauses == NULL)
3680 return;
a68ab351 3681 par_clauses = true;
953ff289
DN
3682 }
3683
726a989a
RB
3684 if (predicate)
3685 {
3686 gimple stmt;
3687 tree label_true, arm1, arm2;
3688
c2255bc4
AH
3689 label = create_artificial_label (UNKNOWN_LOCATION);
3690 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3691 arm1 = TREE_OPERAND (predicate, 0);
3692 arm2 = TREE_OPERAND (predicate, 1);
3693 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
3694 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
3695 stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
3696 label_true, label);
3697 gimple_seq_add_stmt (stmt_list, stmt);
3698 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
3699 }
953ff289 3700
74bf76ed 3701 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
c02065fc 3702 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
74bf76ed
JJ
3703 {
3704 simduid = find_omp_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
3705 if (simduid)
3706 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
3707 }
3708
a68ab351 3709 for (c = clauses; c ;)
953ff289
DN
3710 {
3711 tree var, new_var;
db3927fb 3712 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 3713
74bf76ed
JJ
3714 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3715 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3716 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
a68ab351
JJ
3717 {
3718 var = OMP_CLAUSE_DECL (c);
3719 new_var = lookup_decl (var, ctx);
953ff289 3720
74bf76ed
JJ
3721 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
3722 {
3723 tree val = DECL_VALUE_EXPR (new_var);
3724 if (TREE_CODE (val) == ARRAY_REF
3725 && VAR_P (TREE_OPERAND (val, 0))
3726 && lookup_attribute ("omp simd array",
3727 DECL_ATTRIBUTES (TREE_OPERAND (val,
3728 0))))
3729 {
3730 if (lastlane == NULL)
3731 {
3732 lastlane = create_tmp_var (unsigned_type_node, NULL);
3733 gimple g
3734 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
3735 2, simduid,
3736 TREE_OPERAND (val, 1));
3737 gimple_call_set_lhs (g, lastlane);
3738 gimple_seq_add_stmt (stmt_list, g);
3739 }
3740 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
3741 TREE_OPERAND (val, 0), lastlane,
3742 NULL_TREE, NULL_TREE);
3743 }
3744 }
3745
3746 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3747 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
726a989a 3748 {
355a7673 3749 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
726a989a
RB
3750 gimple_seq_add_seq (stmt_list,
3751 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
74bf76ed 3752 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
726a989a 3753 }
953ff289 3754
a68ab351
JJ
3755 x = build_outer_var_ref (var, ctx);
3756 if (is_reference (var))
70f34814 3757 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
a68ab351 3758 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
726a989a 3759 gimplify_and_add (x, stmt_list);
a68ab351
JJ
3760 }
3761 c = OMP_CLAUSE_CHAIN (c);
3762 if (c == NULL && !par_clauses)
3763 {
3764 /* If this was a workshare clause, see if it had been combined
3765 with its parallel. In that case, continue looking for the
3766 clauses also on the parallel statement itself. */
3767 if (is_parallel_ctx (ctx))
3768 break;
3769
3770 ctx = ctx->outer;
3771 if (ctx == NULL || !is_parallel_ctx (ctx))
3772 break;
3773
726a989a 3774 c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
a68ab351
JJ
3775 OMP_CLAUSE_LASTPRIVATE);
3776 par_clauses = true;
3777 }
953ff289
DN
3778 }
3779
726a989a
RB
3780 if (label)
3781 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
953ff289
DN
3782}
3783
50674e96 3784
953ff289
DN
3785/* Generate code to implement the REDUCTION clauses. */
3786
3787static void
726a989a 3788lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
953ff289 3789{
726a989a
RB
3790 gimple_seq sub_seq = NULL;
3791 gimple stmt;
3792 tree x, c;
953ff289
DN
3793 int count = 0;
3794
74bf76ed
JJ
3795 /* SIMD reductions are handled in lower_rec_input_clauses. */
3796 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
c02065fc 3797 && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_KIND_SIMD)
74bf76ed
JJ
3798 return;
3799
953ff289
DN
3800 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
3801 update in that case, otherwise use a lock. */
3802 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
aaf46ef9 3803 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
953ff289
DN
3804 {
3805 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3806 {
acf0174b 3807 /* Never use OMP_ATOMIC for array reductions or UDRs. */
953ff289
DN
3808 count = -1;
3809 break;
3810 }
3811 count++;
3812 }
3813
3814 if (count == 0)
3815 return;
3816
3817 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3818 {
3819 tree var, ref, new_var;
3820 enum tree_code code;
db3927fb 3821 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 3822
aaf46ef9 3823 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
953ff289
DN
3824 continue;
3825
3826 var = OMP_CLAUSE_DECL (c);
3827 new_var = lookup_decl (var, ctx);
3828 if (is_reference (var))
70f34814 3829 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289
DN
3830 ref = build_outer_var_ref (var, ctx);
3831 code = OMP_CLAUSE_REDUCTION_CODE (c);
50674e96
DN
3832
3833 /* reduction(-:var) sums up the partial results, so it acts
3834 identically to reduction(+:var). */
953ff289
DN
3835 if (code == MINUS_EXPR)
3836 code = PLUS_EXPR;
3837
3838 if (count == 1)
3839 {
db3927fb 3840 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
3841
3842 addr = save_expr (addr);
3843 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
db3927fb 3844 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
953ff289 3845 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
726a989a 3846 gimplify_and_add (x, stmt_seqp);
953ff289
DN
3847 return;
3848 }
3849
3850 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3851 {
3852 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3853
acf0174b
JJ
3854 if (is_reference (var)
3855 && !useless_type_conversion_p (TREE_TYPE (placeholder),
3856 TREE_TYPE (ref)))
db3927fb 3857 ref = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
3858 SET_DECL_VALUE_EXPR (placeholder, ref);
3859 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
355a7673 3860 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
726a989a
RB
3861 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
3862 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
953ff289
DN
3863 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
3864 }
3865 else
3866 {
3867 x = build2 (code, TREE_TYPE (ref), ref, new_var);
3868 ref = build_outer_var_ref (var, ctx);
726a989a 3869 gimplify_assign (ref, x, &sub_seq);
953ff289
DN
3870 }
3871 }
3872
e79983f4
MM
3873 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
3874 0);
726a989a 3875 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289 3876
726a989a 3877 gimple_seq_add_seq (stmt_seqp, sub_seq);
953ff289 3878
e79983f4
MM
3879 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
3880 0);
726a989a 3881 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289
DN
3882}
3883
50674e96 3884
953ff289
DN
3885/* Generate code to implement the COPYPRIVATE clauses. */
3886
3887static void
726a989a 3888lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
953ff289
DN
3889 omp_context *ctx)
3890{
3891 tree c;
3892
3893 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3894 {
78db7d92 3895 tree var, new_var, ref, x;
953ff289 3896 bool by_ref;
db3927fb 3897 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 3898
aaf46ef9 3899 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
953ff289
DN
3900 continue;
3901
3902 var = OMP_CLAUSE_DECL (c);
7c8f7639 3903 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
3904
3905 ref = build_sender_ref (var, ctx);
78db7d92
JJ
3906 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
3907 if (by_ref)
3908 {
3909 x = build_fold_addr_expr_loc (clause_loc, new_var);
3910 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
3911 }
726a989a 3912 gimplify_assign (ref, x, slist);
953ff289 3913
78db7d92
JJ
3914 ref = build_receiver_ref (var, false, ctx);
3915 if (by_ref)
3916 {
3917 ref = fold_convert_loc (clause_loc,
3918 build_pointer_type (TREE_TYPE (new_var)),
3919 ref);
3920 ref = build_fold_indirect_ref_loc (clause_loc, ref);
3921 }
953ff289
DN
3922 if (is_reference (var))
3923 {
78db7d92 3924 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
70f34814
RG
3925 ref = build_simple_mem_ref_loc (clause_loc, ref);
3926 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289 3927 }
78db7d92 3928 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
953ff289
DN
3929 gimplify_and_add (x, rlist);
3930 }
3931}
3932
50674e96 3933
953ff289
DN
3934/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
3935 and REDUCTION from the sender (aka parent) side. */
3936
3937static void
726a989a
RB
3938lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
3939 omp_context *ctx)
953ff289
DN
3940{
3941 tree c;
3942
3943 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3944 {
50674e96 3945 tree val, ref, x, var;
953ff289 3946 bool by_ref, do_in = false, do_out = false;
db3927fb 3947 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 3948
aaf46ef9 3949 switch (OMP_CLAUSE_CODE (c))
953ff289 3950 {
a68ab351
JJ
3951 case OMP_CLAUSE_PRIVATE:
3952 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
3953 break;
3954 continue;
953ff289
DN
3955 case OMP_CLAUSE_FIRSTPRIVATE:
3956 case OMP_CLAUSE_COPYIN:
3957 case OMP_CLAUSE_LASTPRIVATE:
3958 case OMP_CLAUSE_REDUCTION:
acf0174b 3959 case OMP_CLAUSE__LOOPTEMP_:
953ff289
DN
3960 break;
3961 default:
3962 continue;
3963 }
3964
d2dda7fe
JJ
3965 val = OMP_CLAUSE_DECL (c);
3966 var = lookup_decl_in_outer_ctx (val, ctx);
50674e96 3967
8ca5b2a2
JJ
3968 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
3969 && is_global_var (var))
3970 continue;
953ff289
DN
3971 if (is_variable_sized (val))
3972 continue;
7c8f7639 3973 by_ref = use_pointer_for_field (val, NULL);
953ff289 3974
aaf46ef9 3975 switch (OMP_CLAUSE_CODE (c))
953ff289 3976 {
a68ab351 3977 case OMP_CLAUSE_PRIVATE:
953ff289
DN
3978 case OMP_CLAUSE_FIRSTPRIVATE:
3979 case OMP_CLAUSE_COPYIN:
acf0174b 3980 case OMP_CLAUSE__LOOPTEMP_:
953ff289
DN
3981 do_in = true;
3982 break;
3983
3984 case OMP_CLAUSE_LASTPRIVATE:
3985 if (by_ref || is_reference (val))
3986 {
3987 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3988 continue;
3989 do_in = true;
3990 }
3991 else
a68ab351
JJ
3992 {
3993 do_out = true;
3994 if (lang_hooks.decls.omp_private_outer_ref (val))
3995 do_in = true;
3996 }
953ff289
DN
3997 break;
3998
3999 case OMP_CLAUSE_REDUCTION:
4000 do_in = true;
4001 do_out = !(by_ref || is_reference (val));
4002 break;
4003
4004 default:
4005 gcc_unreachable ();
4006 }
4007
4008 if (do_in)
4009 {
4010 ref = build_sender_ref (val, ctx);
db3927fb 4011 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
726a989a 4012 gimplify_assign (ref, x, ilist);
a68ab351
JJ
4013 if (is_task_ctx (ctx))
4014 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
953ff289 4015 }
50674e96 4016
953ff289
DN
4017 if (do_out)
4018 {
4019 ref = build_sender_ref (val, ctx);
726a989a 4020 gimplify_assign (var, ref, olist);
953ff289
DN
4021 }
4022 }
4023}
4024
726a989a
RB
4025/* Generate code to implement SHARED from the sender (aka parent)
4026 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
4027 list things that got automatically shared. */
953ff289
DN
4028
4029static void
726a989a 4030lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
953ff289 4031{
a68ab351 4032 tree var, ovar, nvar, f, x, record_type;
953ff289
DN
4033
4034 if (ctx->record_type == NULL)
4035 return;
50674e96 4036
a68ab351 4037 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
910ad8de 4038 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
4039 {
4040 ovar = DECL_ABSTRACT_ORIGIN (f);
4041 nvar = maybe_lookup_decl (ovar, ctx);
4042 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
4043 continue;
4044
50674e96
DN
4045 /* If CTX is a nested parallel directive. Find the immediately
4046 enclosing parallel or workshare construct that contains a
4047 mapping for OVAR. */
d2dda7fe 4048 var = lookup_decl_in_outer_ctx (ovar, ctx);
50674e96 4049
7c8f7639 4050 if (use_pointer_for_field (ovar, ctx))
953ff289
DN
4051 {
4052 x = build_sender_ref (ovar, ctx);
50674e96 4053 var = build_fold_addr_expr (var);
726a989a 4054 gimplify_assign (x, var, ilist);
953ff289
DN
4055 }
4056 else
4057 {
4058 x = build_sender_ref (ovar, ctx);
726a989a 4059 gimplify_assign (x, var, ilist);
953ff289 4060
14e5b285
RG
4061 if (!TREE_READONLY (var)
4062 /* We don't need to receive a new reference to a result
4063 or parm decl. In fact we may not store to it as we will
4064 invalidate any pending RSO and generate wrong gimple
4065 during inlining. */
4066 && !((TREE_CODE (var) == RESULT_DECL
4067 || TREE_CODE (var) == PARM_DECL)
4068 && DECL_BY_REFERENCE (var)))
a68ab351
JJ
4069 {
4070 x = build_sender_ref (ovar, ctx);
726a989a 4071 gimplify_assign (var, x, olist);
a68ab351 4072 }
953ff289
DN
4073 }
4074 }
4075}
4076
726a989a
RB
4077
4078/* A convenience function to build an empty GIMPLE_COND with just the
4079 condition. */
4080
4081static gimple
4082gimple_build_cond_empty (tree cond)
4083{
4084 enum tree_code pred_code;
4085 tree lhs, rhs;
4086
4087 gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
4088 return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
4089}
4090
4091
b8698a0f 4092/* Build the function calls to GOMP_parallel_start etc to actually
50674e96
DN
4093 generate the parallel operation. REGION is the parallel region
4094 being expanded. BB is the block where to insert the code. WS_ARGS
4095 will be set if this is a call to a combined parallel+workshare
4096 construct, it contains the list of additional arguments needed by
4097 the workshare construct. */
953ff289
DN
4098
4099static void
777f7f9a 4100expand_parallel_call (struct omp_region *region, basic_block bb,
9771b263 4101 gimple entry_stmt, vec<tree, va_gc> *ws_args)
953ff289 4102{
acf0174b 4103 tree t, t1, t2, val, cond, c, clauses, flags;
726a989a
RB
4104 gimple_stmt_iterator gsi;
4105 gimple stmt;
e79983f4
MM
4106 enum built_in_function start_ix;
4107 int start_ix2;
db3927fb 4108 location_t clause_loc;
9771b263 4109 vec<tree, va_gc> *args;
50674e96 4110
726a989a 4111 clauses = gimple_omp_parallel_clauses (entry_stmt);
50674e96 4112
acf0174b 4113 /* Determine what flavor of GOMP_parallel we will be
50674e96 4114 emitting. */
acf0174b 4115 start_ix = BUILT_IN_GOMP_PARALLEL;
50674e96
DN
4116 if (is_combined_parallel (region))
4117 {
777f7f9a 4118 switch (region->inner->type)
50674e96 4119 {
726a989a 4120 case GIMPLE_OMP_FOR:
a68ab351 4121 gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
acf0174b 4122 start_ix2 = ((int)BUILT_IN_GOMP_PARALLEL_LOOP_STATIC
e79983f4
MM
4123 + (region->inner->sched_kind
4124 == OMP_CLAUSE_SCHEDULE_RUNTIME
4125 ? 3 : region->inner->sched_kind));
4126 start_ix = (enum built_in_function)start_ix2;
777f7f9a 4127 break;
726a989a 4128 case GIMPLE_OMP_SECTIONS:
acf0174b 4129 start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS;
777f7f9a
RH
4130 break;
4131 default:
4132 gcc_unreachable ();
50674e96 4133 }
50674e96 4134 }
953ff289
DN
4135
4136 /* By default, the value of NUM_THREADS is zero (selected at run time)
4137 and there is no conditional. */
4138 cond = NULL_TREE;
4139 val = build_int_cst (unsigned_type_node, 0);
acf0174b 4140 flags = build_int_cst (unsigned_type_node, 0);
953ff289
DN
4141
4142 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
4143 if (c)
4144 cond = OMP_CLAUSE_IF_EXPR (c);
4145
4146 c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
4147 if (c)
db3927fb
AH
4148 {
4149 val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
4150 clause_loc = OMP_CLAUSE_LOCATION (c);
4151 }
4152 else
4153 clause_loc = gimple_location (entry_stmt);
953ff289 4154
acf0174b
JJ
4155 c = find_omp_clause (clauses, OMP_CLAUSE_PROC_BIND);
4156 if (c)
4157 flags = build_int_cst (unsigned_type_node, OMP_CLAUSE_PROC_BIND_KIND (c));
4158
953ff289 4159 /* Ensure 'val' is of the correct type. */
db3927fb 4160 val = fold_convert_loc (clause_loc, unsigned_type_node, val);
953ff289
DN
4161
4162 /* If we found the clause 'if (cond)', build either
4163 (cond != 0) or (cond ? val : 1u). */
4164 if (cond)
4165 {
726a989a 4166 gimple_stmt_iterator gsi;
50674e96
DN
4167
4168 cond = gimple_boolify (cond);
4169
953ff289 4170 if (integer_zerop (val))
db3927fb
AH
4171 val = fold_build2_loc (clause_loc,
4172 EQ_EXPR, unsigned_type_node, cond,
917948d3 4173 build_int_cst (TREE_TYPE (cond), 0));
953ff289 4174 else
50674e96
DN
4175 {
4176 basic_block cond_bb, then_bb, else_bb;
917948d3 4177 edge e, e_then, e_else;
726a989a 4178 tree tmp_then, tmp_else, tmp_join, tmp_var;
917948d3
ZD
4179
4180 tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
4181 if (gimple_in_ssa_p (cfun))
4182 {
726a989a
RB
4183 tmp_then = make_ssa_name (tmp_var, NULL);
4184 tmp_else = make_ssa_name (tmp_var, NULL);
4185 tmp_join = make_ssa_name (tmp_var, NULL);
917948d3
ZD
4186 }
4187 else
4188 {
4189 tmp_then = tmp_var;
4190 tmp_else = tmp_var;
4191 tmp_join = tmp_var;
4192 }
50674e96 4193
50674e96
DN
4194 e = split_block (bb, NULL);
4195 cond_bb = e->src;
4196 bb = e->dest;
4197 remove_edge (e);
4198
4199 then_bb = create_empty_bb (cond_bb);
4200 else_bb = create_empty_bb (then_bb);
917948d3
ZD
4201 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
4202 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
50674e96 4203
726a989a
RB
4204 stmt = gimple_build_cond_empty (cond);
4205 gsi = gsi_start_bb (cond_bb);
4206 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
50674e96 4207
726a989a
RB
4208 gsi = gsi_start_bb (then_bb);
4209 stmt = gimple_build_assign (tmp_then, val);
4210 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
50674e96 4211
726a989a
RB
4212 gsi = gsi_start_bb (else_bb);
4213 stmt = gimple_build_assign
4214 (tmp_else, build_int_cst (unsigned_type_node, 1));
4215 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
50674e96
DN
4216
4217 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
4218 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
a9e0d843
RB
4219 if (current_loops)
4220 {
4221 add_bb_to_loop (then_bb, cond_bb->loop_father);
4222 add_bb_to_loop (else_bb, cond_bb->loop_father);
4223 }
917948d3
ZD
4224 e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
4225 e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);
50674e96 4226
917948d3
ZD
4227 if (gimple_in_ssa_p (cfun))
4228 {
726a989a 4229 gimple phi = create_phi_node (tmp_join, bb);
9e227d60
DC
4230 add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
4231 add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
917948d3
ZD
4232 }
4233
4234 val = tmp_join;
50674e96
DN
4235 }
4236
726a989a
RB
4237 gsi = gsi_start_bb (bb);
4238 val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
4239 false, GSI_CONTINUE_LINKING);
953ff289
DN
4240 }
4241
726a989a
RB
4242 gsi = gsi_last_bb (bb);
4243 t = gimple_omp_parallel_data_arg (entry_stmt);
953ff289 4244 if (t == NULL)
5039610b 4245 t1 = null_pointer_node;
953ff289 4246 else
5039610b 4247 t1 = build_fold_addr_expr (t);
726a989a 4248 t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));
50674e96 4249
acf0174b 4250 vec_alloc (args, 4 + vec_safe_length (ws_args));
9771b263
DN
4251 args->quick_push (t2);
4252 args->quick_push (t1);
4253 args->quick_push (val);
4254 if (ws_args)
4255 args->splice (*ws_args);
acf0174b 4256 args->quick_push (flags);
3bb06db4
NF
4257
4258 t = build_call_expr_loc_vec (UNKNOWN_LOCATION,
e79983f4 4259 builtin_decl_explicit (start_ix), args);
50674e96 4260
726a989a
RB
4261 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4262 false, GSI_CONTINUE_LINKING);
953ff289
DN
4263}
4264
50674e96 4265
a68ab351
JJ
4266/* Build the function call to GOMP_task to actually
4267 generate the task operation. BB is the block where to insert the code. */
4268
4269static void
726a989a 4270expand_task_call (basic_block bb, gimple entry_stmt)
a68ab351 4271{
acf0174b 4272 tree t, t1, t2, t3, flags, cond, c, c2, clauses, depend;
726a989a 4273 gimple_stmt_iterator gsi;
db3927fb 4274 location_t loc = gimple_location (entry_stmt);
a68ab351 4275
726a989a 4276 clauses = gimple_omp_task_clauses (entry_stmt);
a68ab351 4277
a68ab351
JJ
4278 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
4279 if (c)
4280 cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
4281 else
4282 cond = boolean_true_node;
4283
4284 c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
20906c66 4285 c2 = find_omp_clause (clauses, OMP_CLAUSE_MERGEABLE);
acf0174b 4286 depend = find_omp_clause (clauses, OMP_CLAUSE_DEPEND);
20906c66 4287 flags = build_int_cst (unsigned_type_node,
acf0174b 4288 (c ? 1 : 0) + (c2 ? 4 : 0) + (depend ? 8 : 0));
20906c66
JJ
4289
4290 c = find_omp_clause (clauses, OMP_CLAUSE_FINAL);
4291 if (c)
4292 {
4293 c = gimple_boolify (OMP_CLAUSE_FINAL_EXPR (c));
4294 c = fold_build3_loc (loc, COND_EXPR, unsigned_type_node, c,
4295 build_int_cst (unsigned_type_node, 2),
4296 build_int_cst (unsigned_type_node, 0));
4297 flags = fold_build2_loc (loc, PLUS_EXPR, unsigned_type_node, flags, c);
4298 }
acf0174b
JJ
4299 if (depend)
4300 depend = OMP_CLAUSE_DECL (depend);
4301 else
4302 depend = build_int_cst (ptr_type_node, 0);
a68ab351 4303
726a989a
RB
4304 gsi = gsi_last_bb (bb);
4305 t = gimple_omp_task_data_arg (entry_stmt);
a68ab351
JJ
4306 if (t == NULL)
4307 t2 = null_pointer_node;
4308 else
db3927fb
AH
4309 t2 = build_fold_addr_expr_loc (loc, t);
4310 t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
726a989a 4311 t = gimple_omp_task_copy_fn (entry_stmt);
a68ab351
JJ
4312 if (t == NULL)
4313 t3 = null_pointer_node;
4314 else
db3927fb 4315 t3 = build_fold_addr_expr_loc (loc, t);
a68ab351 4316
e79983f4 4317 t = build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_TASK),
acf0174b 4318 8, t1, t2, t3,
726a989a 4319 gimple_omp_task_arg_size (entry_stmt),
acf0174b
JJ
4320 gimple_omp_task_arg_align (entry_stmt), cond, flags,
4321 depend);
a68ab351 4322
726a989a
RB
4323 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4324 false, GSI_CONTINUE_LINKING);
a68ab351
JJ
4325}
4326
4327
726a989a
RB
4328/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
4329 catch handler and return it. This prevents programs from violating the
4330 structured block semantics with throws. */
953ff289 4331
726a989a
RB
4332static gimple_seq
4333maybe_catch_exception (gimple_seq body)
953ff289 4334{
1d65f45c
RH
4335 gimple g;
4336 tree decl;
953ff289
DN
4337
4338 if (!flag_exceptions)
726a989a 4339 return body;
953ff289 4340
3b06d379
SB
4341 if (lang_hooks.eh_protect_cleanup_actions != NULL)
4342 decl = lang_hooks.eh_protect_cleanup_actions ();
953ff289 4343 else
e79983f4 4344 decl = builtin_decl_explicit (BUILT_IN_TRAP);
726a989a 4345
1d65f45c
RH
4346 g = gimple_build_eh_must_not_throw (decl);
4347 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
726a989a 4348 GIMPLE_TRY_CATCH);
953ff289 4349
1d65f45c 4350 return gimple_seq_alloc_with_stmt (g);
953ff289
DN
4351}
4352
50674e96 4353/* Chain all the DECLs in LIST by their TREE_CHAIN fields. */
953ff289 4354
50674e96 4355static tree
9771b263 4356vec2chain (vec<tree, va_gc> *v)
953ff289 4357{
c021f10b
NF
4358 tree chain = NULL_TREE, t;
4359 unsigned ix;
953ff289 4360
9771b263 4361 FOR_EACH_VEC_SAFE_ELT_REVERSE (v, ix, t)
50674e96 4362 {
910ad8de 4363 DECL_CHAIN (t) = chain;
c021f10b 4364 chain = t;
50674e96 4365 }
953ff289 4366
c021f10b 4367 return chain;
50674e96 4368}
953ff289 4369
953ff289 4370
50674e96 4371/* Remove barriers in REGION->EXIT's block. Note that this is only
726a989a
RB
4372 valid for GIMPLE_OMP_PARALLEL regions. Since the end of a parallel region
4373 is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
4374 left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
50674e96 4375 removed. */
953ff289 4376
50674e96
DN
4377static void
4378remove_exit_barrier (struct omp_region *region)
4379{
726a989a 4380 gimple_stmt_iterator gsi;
50674e96 4381 basic_block exit_bb;
777f7f9a
RH
4382 edge_iterator ei;
4383 edge e;
726a989a 4384 gimple stmt;
03742a9b 4385 int any_addressable_vars = -1;
953ff289 4386
777f7f9a 4387 exit_bb = region->exit;
953ff289 4388
2aee3e57
JJ
4389 /* If the parallel region doesn't return, we don't have REGION->EXIT
4390 block at all. */
4391 if (! exit_bb)
4392 return;
4393
726a989a
RB
4394 /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN. The
4395 workshare's GIMPLE_OMP_RETURN will be in a preceding block. The kinds of
777f7f9a
RH
4396 statements that can appear in between are extremely limited -- no
4397 memory operations at all. Here, we allow nothing at all, so the
726a989a
RB
4398 only thing we allow to precede this GIMPLE_OMP_RETURN is a label. */
4399 gsi = gsi_last_bb (exit_bb);
4400 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
4401 gsi_prev (&gsi);
4402 if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
50674e96 4403 return;
953ff289 4404
777f7f9a
RH
4405 FOR_EACH_EDGE (e, ei, exit_bb->preds)
4406 {
726a989a
RB
4407 gsi = gsi_last_bb (e->src);
4408 if (gsi_end_p (gsi))
777f7f9a 4409 continue;
726a989a 4410 stmt = gsi_stmt (gsi);
03742a9b
JJ
4411 if (gimple_code (stmt) == GIMPLE_OMP_RETURN
4412 && !gimple_omp_return_nowait_p (stmt))
4413 {
4414 /* OpenMP 3.0 tasks unfortunately prevent this optimization
4415 in many cases. If there could be tasks queued, the barrier
4416 might be needed to let the tasks run before some local
4417 variable of the parallel that the task uses as shared
4418 runs out of scope. The task can be spawned either
4419 from within current function (this would be easy to check)
4420 or from some function it calls and gets passed an address
4421 of such a variable. */
4422 if (any_addressable_vars < 0)
4423 {
4424 gimple parallel_stmt = last_stmt (region->entry);
4425 tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
c021f10b
NF
4426 tree local_decls, block, decl;
4427 unsigned ix;
03742a9b
JJ
4428
4429 any_addressable_vars = 0;
c021f10b
NF
4430 FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (child_fun), ix, decl)
4431 if (TREE_ADDRESSABLE (decl))
03742a9b
JJ
4432 {
4433 any_addressable_vars = 1;
4434 break;
4435 }
4436 for (block = gimple_block (stmt);
4437 !any_addressable_vars
4438 && block
4439 && TREE_CODE (block) == BLOCK;
4440 block = BLOCK_SUPERCONTEXT (block))
4441 {
4442 for (local_decls = BLOCK_VARS (block);
4443 local_decls;
910ad8de 4444 local_decls = DECL_CHAIN (local_decls))
03742a9b
JJ
4445 if (TREE_ADDRESSABLE (local_decls))
4446 {
4447 any_addressable_vars = 1;
4448 break;
4449 }
4450 if (block == gimple_block (parallel_stmt))
4451 break;
4452 }
4453 }
4454 if (!any_addressable_vars)
4455 gimple_omp_return_set_nowait (stmt);
4456 }
777f7f9a 4457 }
953ff289
DN
4458}
4459
777f7f9a
RH
4460static void
4461remove_exit_barriers (struct omp_region *region)
4462{
726a989a 4463 if (region->type == GIMPLE_OMP_PARALLEL)
777f7f9a
RH
4464 remove_exit_barrier (region);
4465
4466 if (region->inner)
4467 {
4468 region = region->inner;
4469 remove_exit_barriers (region);
4470 while (region->next)
4471 {
4472 region = region->next;
4473 remove_exit_barriers (region);
4474 }
4475 }
4476}
50674e96 4477
2b4cf991
JJ
4478/* Optimize omp_get_thread_num () and omp_get_num_threads ()
4479 calls. These can't be declared as const functions, but
4480 within one parallel body they are constant, so they can be
4481 transformed there into __builtin_omp_get_{thread_num,num_threads} ()
a68ab351
JJ
4482 which are declared const. Similarly for task body, except
4483 that in untied task omp_get_thread_num () can change at any task
4484 scheduling point. */
2b4cf991
JJ
4485
4486static void
726a989a 4487optimize_omp_library_calls (gimple entry_stmt)
2b4cf991
JJ
4488{
4489 basic_block bb;
726a989a 4490 gimple_stmt_iterator gsi;
e79983f4
MM
4491 tree thr_num_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4492 tree thr_num_id = DECL_ASSEMBLER_NAME (thr_num_tree);
4493 tree num_thr_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
4494 tree num_thr_id = DECL_ASSEMBLER_NAME (num_thr_tree);
726a989a
RB
4495 bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
4496 && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
a68ab351 4497 OMP_CLAUSE_UNTIED) != NULL);
2b4cf991
JJ
4498
4499 FOR_EACH_BB (bb)
726a989a 4500 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2b4cf991 4501 {
726a989a 4502 gimple call = gsi_stmt (gsi);
2b4cf991
JJ
4503 tree decl;
4504
726a989a
RB
4505 if (is_gimple_call (call)
4506 && (decl = gimple_call_fndecl (call))
2b4cf991
JJ
4507 && DECL_EXTERNAL (decl)
4508 && TREE_PUBLIC (decl)
4509 && DECL_INITIAL (decl) == NULL)
4510 {
4511 tree built_in;
4512
4513 if (DECL_NAME (decl) == thr_num_id)
a68ab351
JJ
4514 {
4515 /* In #pragma omp task untied omp_get_thread_num () can change
4516 during the execution of the task region. */
4517 if (untied_task)
4518 continue;
e79983f4 4519 built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
a68ab351 4520 }
2b4cf991 4521 else if (DECL_NAME (decl) == num_thr_id)
e79983f4 4522 built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
2b4cf991
JJ
4523 else
4524 continue;
4525
4526 if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
726a989a 4527 || gimple_call_num_args (call) != 0)
2b4cf991
JJ
4528 continue;
4529
4530 if (flag_exceptions && !TREE_NOTHROW (decl))
4531 continue;
4532
4533 if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
9600efe1
MM
4534 || !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
4535 TREE_TYPE (TREE_TYPE (built_in))))
2b4cf991
JJ
4536 continue;
4537
7c9577be 4538 gimple_call_set_fndecl (call, built_in);
2b4cf991
JJ
4539 }
4540 }
4541}
4542
5a0f4dd3
JJ
4543/* Callback for expand_omp_build_assign. Return non-NULL if *tp needs to be
4544 regimplified. */
4545
4546static tree
4547expand_omp_regimplify_p (tree *tp, int *walk_subtrees, void *)
4548{
4549 tree t = *tp;
4550
4551 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
4552 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
4553 return t;
4554
4555 if (TREE_CODE (t) == ADDR_EXPR)
4556 recompute_tree_invariant_for_addr_expr (t);
4557
4558 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
4559 return NULL_TREE;
4560}
4561
74bf76ed
JJ
4562/* Prepend TO = FROM assignment before *GSI_P. */
4563
4564static void
4565expand_omp_build_assign (gimple_stmt_iterator *gsi_p, tree to, tree from)
4566{
4567 bool simple_p = DECL_P (to) && TREE_ADDRESSABLE (to);
4568 from = force_gimple_operand_gsi (gsi_p, from, simple_p, NULL_TREE,
4569 true, GSI_SAME_STMT);
4570 gimple stmt = gimple_build_assign (to, from);
4571 gsi_insert_before (gsi_p, stmt, GSI_SAME_STMT);
4572 if (walk_tree (&from, expand_omp_regimplify_p, NULL, NULL)
4573 || walk_tree (&to, expand_omp_regimplify_p, NULL, NULL))
4574 {
4575 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4576 gimple_regimplify_operands (stmt, &gsi);
4577 }
4578}
4579
a68ab351 4580/* Expand the OpenMP parallel or task directive starting at REGION. */
953ff289
DN
4581
4582static void
a68ab351 4583expand_omp_taskreg (struct omp_region *region)
953ff289 4584{
50674e96 4585 basic_block entry_bb, exit_bb, new_bb;
db2960f4 4586 struct function *child_cfun;
3bb06db4 4587 tree child_fn, block, t;
726a989a
RB
4588 gimple_stmt_iterator gsi;
4589 gimple entry_stmt, stmt;
50674e96 4590 edge e;
9771b263 4591 vec<tree, va_gc> *ws_args;
50674e96 4592
777f7f9a 4593 entry_stmt = last_stmt (region->entry);
726a989a 4594 child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
50674e96 4595 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
50674e96 4596
777f7f9a
RH
4597 entry_bb = region->entry;
4598 exit_bb = region->exit;
50674e96 4599
50674e96 4600 if (is_combined_parallel (region))
777f7f9a 4601 ws_args = region->ws_args;
50674e96 4602 else
3bb06db4 4603 ws_args = NULL;
953ff289 4604
777f7f9a 4605 if (child_cfun->cfg)
953ff289 4606 {
50674e96
DN
4607 /* Due to inlining, it may happen that we have already outlined
4608 the region, in which case all we need to do is make the
4609 sub-graph unreachable and emit the parallel call. */
4610 edge entry_succ_e, exit_succ_e;
726a989a 4611 gimple_stmt_iterator gsi;
50674e96
DN
4612
4613 entry_succ_e = single_succ_edge (entry_bb);
50674e96 4614
726a989a
RB
4615 gsi = gsi_last_bb (entry_bb);
4616 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
4617 || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
4618 gsi_remove (&gsi, true);
50674e96
DN
4619
4620 new_bb = entry_bb;
d3c673c7
JJ
4621 if (exit_bb)
4622 {
4623 exit_succ_e = single_succ_edge (exit_bb);
4624 make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
4625 }
917948d3 4626 remove_edge_and_dominated_blocks (entry_succ_e);
953ff289 4627 }
50674e96
DN
4628 else
4629 {
2fed2012 4630 unsigned srcidx, dstidx, num;
c021f10b 4631
50674e96 4632 /* If the parallel region needs data sent from the parent
b570947c
JJ
4633 function, then the very first statement (except possible
4634 tree profile counter updates) of the parallel body
50674e96
DN
4635 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
4636 &.OMP_DATA_O is passed as an argument to the child function,
4637 we need to replace it with the argument as seen by the child
4638 function.
4639
4640 In most cases, this will end up being the identity assignment
4641 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
4642 a function call that has been inlined, the original PARM_DECL
4643 .OMP_DATA_I may have been converted into a different local
4644 variable. In which case, we need to keep the assignment. */
726a989a 4645 if (gimple_omp_taskreg_data_arg (entry_stmt))
50674e96
DN
4646 {
4647 basic_block entry_succ_bb = single_succ (entry_bb);
726a989a
RB
4648 gimple_stmt_iterator gsi;
4649 tree arg, narg;
4650 gimple parcopy_stmt = NULL;
953ff289 4651
726a989a 4652 for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
b570947c 4653 {
726a989a 4654 gimple stmt;
b570947c 4655
726a989a
RB
4656 gcc_assert (!gsi_end_p (gsi));
4657 stmt = gsi_stmt (gsi);
4658 if (gimple_code (stmt) != GIMPLE_ASSIGN)
018b899b
JJ
4659 continue;
4660
726a989a 4661 if (gimple_num_ops (stmt) == 2)
b570947c 4662 {
726a989a
RB
4663 tree arg = gimple_assign_rhs1 (stmt);
4664
4665 /* We're ignore the subcode because we're
4666 effectively doing a STRIP_NOPS. */
4667
4668 if (TREE_CODE (arg) == ADDR_EXPR
4669 && TREE_OPERAND (arg, 0)
4670 == gimple_omp_taskreg_data_arg (entry_stmt))
4671 {
4672 parcopy_stmt = stmt;
4673 break;
4674 }
b570947c
JJ
4675 }
4676 }
917948d3 4677
726a989a 4678 gcc_assert (parcopy_stmt != NULL);
917948d3
ZD
4679 arg = DECL_ARGUMENTS (child_fn);
4680
4681 if (!gimple_in_ssa_p (cfun))
4682 {
726a989a
RB
4683 if (gimple_assign_lhs (parcopy_stmt) == arg)
4684 gsi_remove (&gsi, true);
917948d3 4685 else
726a989a
RB
4686 {
4687 /* ?? Is setting the subcode really necessary ?? */
4688 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
4689 gimple_assign_set_rhs1 (parcopy_stmt, arg);
4690 }
917948d3
ZD
4691 }
4692 else
4693 {
4694 /* If we are in ssa form, we must load the value from the default
4695 definition of the argument. That should not be defined now,
4696 since the argument is not used uninitialized. */
32244553 4697 gcc_assert (ssa_default_def (cfun, arg) == NULL);
726a989a 4698 narg = make_ssa_name (arg, gimple_build_nop ());
32244553 4699 set_ssa_default_def (cfun, arg, narg);
726a989a
RB
4700 /* ?? Is setting the subcode really necessary ?? */
4701 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
4702 gimple_assign_set_rhs1 (parcopy_stmt, narg);
917948d3
ZD
4703 update_stmt (parcopy_stmt);
4704 }
50674e96
DN
4705 }
4706
4707 /* Declare local variables needed in CHILD_CFUN. */
4708 block = DECL_INITIAL (child_fn);
c021f10b 4709 BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
4f0ae266
JJ
4710 /* The gimplifier could record temporaries in parallel/task block
4711 rather than in containing function's local_decls chain,
4712 which would mean cgraph missed finalizing them. Do it now. */
910ad8de 4713 for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
4f0ae266
JJ
4714 if (TREE_CODE (t) == VAR_DECL
4715 && TREE_STATIC (t)
4716 && !DECL_EXTERNAL (t))
4717 varpool_finalize_decl (t);
726a989a 4718 DECL_SAVED_TREE (child_fn) = NULL;
355a7673
MM
4719 /* We'll create a CFG for child_fn, so no gimple body is needed. */
4720 gimple_set_body (child_fn, NULL);
b357f682 4721 TREE_USED (block) = 1;
50674e96 4722
917948d3 4723 /* Reset DECL_CONTEXT on function arguments. */
910ad8de 4724 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
50674e96
DN
4725 DECL_CONTEXT (t) = child_fn;
4726
726a989a
RB
4727 /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
4728 so that it can be moved to the child function. */
4729 gsi = gsi_last_bb (entry_bb);
4730 stmt = gsi_stmt (gsi);
4731 gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
4732 || gimple_code (stmt) == GIMPLE_OMP_TASK));
4733 gsi_remove (&gsi, true);
4734 e = split_block (entry_bb, stmt);
50674e96
DN
4735 entry_bb = e->dest;
4736 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4737
726a989a 4738 /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR. */
2aee3e57
JJ
4739 if (exit_bb)
4740 {
726a989a
RB
4741 gsi = gsi_last_bb (exit_bb);
4742 gcc_assert (!gsi_end_p (gsi)
4743 && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
4744 stmt = gimple_build_return (NULL);
4745 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
4746 gsi_remove (&gsi, true);
2aee3e57 4747 }
917948d3
ZD
4748
4749 /* Move the parallel region into CHILD_CFUN. */
b8698a0f 4750
917948d3
ZD
4751 if (gimple_in_ssa_p (cfun))
4752 {
5db9ba0c 4753 init_tree_ssa (child_cfun);
3828719a
RG
4754 init_ssa_operands (child_cfun);
4755 child_cfun->gimple_df->in_ssa_p = true;
b357f682 4756 block = NULL_TREE;
917948d3 4757 }
b357f682 4758 else
726a989a 4759 block = gimple_block (entry_stmt);
b357f682
JJ
4760
4761 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
917948d3
ZD
4762 if (exit_bb)
4763 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
6093bc06
RB
4764 /* When the OMP expansion process cannot guarantee an up-to-date
4765 loop tree arrange for the child function to fixup loops. */
4766 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
4767 child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;
917948d3 4768
b357f682 4769 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
9771b263 4770 num = vec_safe_length (child_cfun->local_decls);
2fed2012
JJ
4771 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
4772 {
9771b263 4773 t = (*child_cfun->local_decls)[srcidx];
2fed2012
JJ
4774 if (DECL_CONTEXT (t) == cfun->decl)
4775 continue;
4776 if (srcidx != dstidx)
9771b263 4777 (*child_cfun->local_decls)[dstidx] = t;
2fed2012
JJ
4778 dstidx++;
4779 }
4780 if (dstidx != num)
9771b263 4781 vec_safe_truncate (child_cfun->local_decls, dstidx);
b357f682 4782
917948d3 4783 /* Inform the callgraph about the new function. */
d7ed20db 4784 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
917948d3
ZD
4785 cgraph_add_new_function (child_fn, true);
4786
4787 /* Fix the callgraph edges for child_cfun. Those for cfun will be
4788 fixed in a following pass. */
4789 push_cfun (child_cfun);
2b4cf991 4790 if (optimize)
a68ab351 4791 optimize_omp_library_calls (entry_stmt);
917948d3 4792 rebuild_cgraph_edges ();
99819c63
JJ
4793
4794 /* Some EH regions might become dead, see PR34608. If
4795 pass_cleanup_cfg isn't the first pass to happen with the
4796 new child, these dead EH edges might cause problems.
4797 Clean them up now. */
4798 if (flag_exceptions)
4799 {
4800 basic_block bb;
99819c63
JJ
4801 bool changed = false;
4802
99819c63 4803 FOR_EACH_BB (bb)
726a989a 4804 changed |= gimple_purge_dead_eh_edges (bb);
99819c63
JJ
4805 if (changed)
4806 cleanup_tree_cfg ();
99819c63 4807 }
5006671f
RG
4808 if (gimple_in_ssa_p (cfun))
4809 update_ssa (TODO_update_ssa);
917948d3 4810 pop_cfun ();
50674e96 4811 }
b8698a0f 4812
50674e96 4813 /* Emit a library call to launch the children threads. */
726a989a 4814 if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
a68ab351
JJ
4815 expand_parallel_call (region, new_bb, entry_stmt, ws_args);
4816 else
4817 expand_task_call (new_bb, entry_stmt);
a5efada7
RG
4818 if (gimple_in_ssa_p (cfun))
4819 update_ssa (TODO_update_ssa_only_virtuals);
953ff289
DN
4820}
4821
50674e96 4822
74bf76ed
JJ
4823/* Helper function for expand_omp_{for_*,simd}. If this is the outermost
4824 of the combined collapse > 1 loop constructs, generate code like:
4825 if (__builtin_expect (N32 cond3 N31, 0)) goto ZERO_ITER_BB;
4826 if (cond3 is <)
4827 adj = STEP3 - 1;
4828 else
4829 adj = STEP3 + 1;
4830 count3 = (adj + N32 - N31) / STEP3;
4831 if (__builtin_expect (N22 cond2 N21, 0)) goto ZERO_ITER_BB;
4832 if (cond2 is <)
4833 adj = STEP2 - 1;
4834 else
4835 adj = STEP2 + 1;
4836 count2 = (adj + N22 - N21) / STEP2;
4837 if (__builtin_expect (N12 cond1 N11, 0)) goto ZERO_ITER_BB;
4838 if (cond1 is <)
4839 adj = STEP1 - 1;
4840 else
4841 adj = STEP1 + 1;
4842 count1 = (adj + N12 - N11) / STEP1;
4843 count = count1 * count2 * count3;
4844 Furthermore, if ZERO_ITER_BB is NULL, create a BB which does:
4845 count = 0;
acf0174b
JJ
4846 and set ZERO_ITER_BB to that bb. If this isn't the outermost
4847 of the combined loop constructs, just initialize COUNTS array
4848 from the _looptemp_ clauses. */
74bf76ed
JJ
4849
4850/* NOTE: It *could* be better to moosh all of the BBs together,
4851 creating one larger BB with all the computation and the unexpected
4852 jump at the end. I.e.
4853
4854 bool zero3, zero2, zero1, zero;
4855
4856 zero3 = N32 c3 N31;
4857 count3 = (N32 - N31) /[cl] STEP3;
4858 zero2 = N22 c2 N21;
4859 count2 = (N22 - N21) /[cl] STEP2;
4860 zero1 = N12 c1 N11;
4861 count1 = (N12 - N11) /[cl] STEP1;
4862 zero = zero3 || zero2 || zero1;
4863 count = count1 * count2 * count3;
4864 if (__builtin_expect(zero, false)) goto zero_iter_bb;
4865
4866 After all, we expect the zero=false, and thus we expect to have to
4867 evaluate all of the comparison expressions, so short-circuiting
4868 oughtn't be a win. Since the condition isn't protecting a
4869 denominator, we're not concerned about divide-by-zero, so we can
4870 fully evaluate count even if a numerator turned out to be wrong.
4871
4872 It seems like putting this all together would create much better
4873 scheduling opportunities, and less pressure on the chip's branch
4874 predictor. */
4875
4876static void
4877expand_omp_for_init_counts (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
4878 basic_block &entry_bb, tree *counts,
4879 basic_block &zero_iter_bb, int &first_zero_iter,
4880 basic_block &l2_dom_bb)
4881{
4882 tree t, type = TREE_TYPE (fd->loop.v);
4883 gimple stmt;
4884 edge e, ne;
4885 int i;
4886
4887 /* Collapsed loops need work for expansion into SSA form. */
4888 gcc_assert (!gimple_in_ssa_p (cfun));
4889
acf0174b
JJ
4890 if (gimple_omp_for_combined_into_p (fd->for_stmt)
4891 && TREE_CODE (fd->loop.n2) != INTEGER_CST)
4892 {
4893 /* First two _looptemp_ clauses are for istart/iend, counts[0]
4894 isn't supposed to be handled, as the inner loop doesn't
4895 use it. */
4896 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
4897 OMP_CLAUSE__LOOPTEMP_);
4898 gcc_assert (innerc);
4899 for (i = 0; i < fd->collapse; i++)
4900 {
4901 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
4902 OMP_CLAUSE__LOOPTEMP_);
4903 gcc_assert (innerc);
4904 if (i)
4905 counts[i] = OMP_CLAUSE_DECL (innerc);
4906 else
4907 counts[0] = NULL_TREE;
4908 }
4909 return;
4910 }
4911
74bf76ed
JJ
4912 for (i = 0; i < fd->collapse; i++)
4913 {
4914 tree itype = TREE_TYPE (fd->loops[i].v);
4915
4916 if (SSA_VAR_P (fd->loop.n2)
4917 && ((t = fold_binary (fd->loops[i].cond_code, boolean_type_node,
4918 fold_convert (itype, fd->loops[i].n1),
4919 fold_convert (itype, fd->loops[i].n2)))
4920 == NULL_TREE || !integer_onep (t)))
4921 {
4922 tree n1, n2;
4923 n1 = fold_convert (itype, unshare_expr (fd->loops[i].n1));
4924 n1 = force_gimple_operand_gsi (gsi, n1, true, NULL_TREE,
4925 true, GSI_SAME_STMT);
4926 n2 = fold_convert (itype, unshare_expr (fd->loops[i].n2));
4927 n2 = force_gimple_operand_gsi (gsi, n2, true, NULL_TREE,
4928 true, GSI_SAME_STMT);
4929 stmt = gimple_build_cond (fd->loops[i].cond_code, n1, n2,
4930 NULL_TREE, NULL_TREE);
4931 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
4932 if (walk_tree (gimple_cond_lhs_ptr (stmt),
4933 expand_omp_regimplify_p, NULL, NULL)
4934 || walk_tree (gimple_cond_rhs_ptr (stmt),
4935 expand_omp_regimplify_p, NULL, NULL))
4936 {
4937 *gsi = gsi_for_stmt (stmt);
4938 gimple_regimplify_operands (stmt, gsi);
4939 }
4940 e = split_block (entry_bb, stmt);
4941 if (zero_iter_bb == NULL)
4942 {
4943 first_zero_iter = i;
4944 zero_iter_bb = create_empty_bb (entry_bb);
4945 if (current_loops)
4946 add_bb_to_loop (zero_iter_bb, entry_bb->loop_father);
4947 *gsi = gsi_after_labels (zero_iter_bb);
4948 stmt = gimple_build_assign (fd->loop.n2,
4949 build_zero_cst (type));
4950 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
4951 set_immediate_dominator (CDI_DOMINATORS, zero_iter_bb,
4952 entry_bb);
4953 }
4954 ne = make_edge (entry_bb, zero_iter_bb, EDGE_FALSE_VALUE);
4955 ne->probability = REG_BR_PROB_BASE / 2000 - 1;
4956 e->flags = EDGE_TRUE_VALUE;
4957 e->probability = REG_BR_PROB_BASE - ne->probability;
4958 if (l2_dom_bb == NULL)
4959 l2_dom_bb = entry_bb;
4960 entry_bb = e->dest;
4961 *gsi = gsi_last_bb (entry_bb);
4962 }
4963
4964 if (POINTER_TYPE_P (itype))
4965 itype = signed_type_for (itype);
4966 t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
4967 ? -1 : 1));
4968 t = fold_build2 (PLUS_EXPR, itype,
4969 fold_convert (itype, fd->loops[i].step), t);
4970 t = fold_build2 (PLUS_EXPR, itype, t,
4971 fold_convert (itype, fd->loops[i].n2));
4972 t = fold_build2 (MINUS_EXPR, itype, t,
4973 fold_convert (itype, fd->loops[i].n1));
4974 /* ?? We could probably use CEIL_DIV_EXPR instead of
4975 TRUNC_DIV_EXPR and adjusting by hand. Unless we can't
4976 generate the same code in the end because generically we
4977 don't know that the values involved must be negative for
4978 GT?? */
4979 if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
4980 t = fold_build2 (TRUNC_DIV_EXPR, itype,
4981 fold_build1 (NEGATE_EXPR, itype, t),
4982 fold_build1 (NEGATE_EXPR, itype,
4983 fold_convert (itype,
4984 fd->loops[i].step)));
4985 else
4986 t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
4987 fold_convert (itype, fd->loops[i].step));
4988 t = fold_convert (type, t);
4989 if (TREE_CODE (t) == INTEGER_CST)
4990 counts[i] = t;
4991 else
4992 {
4993 counts[i] = create_tmp_reg (type, ".count");
4994 expand_omp_build_assign (gsi, counts[i], t);
4995 }
4996 if (SSA_VAR_P (fd->loop.n2))
4997 {
4998 if (i == 0)
4999 t = counts[0];
5000 else
5001 t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
5002 expand_omp_build_assign (gsi, fd->loop.n2, t);
5003 }
5004 }
5005}
5006
5007
5008/* Helper function for expand_omp_{for_*,simd}. Generate code like:
5009 T = V;
5010 V3 = N31 + (T % count3) * STEP3;
5011 T = T / count3;
5012 V2 = N21 + (T % count2) * STEP2;
5013 T = T / count2;
5014 V1 = N11 + T * STEP1;
acf0174b
JJ
5015 if this loop doesn't have an inner loop construct combined with it.
5016 If it does have an inner loop construct combined with it and the
5017 iteration count isn't known constant, store values from counts array
5018 into its _looptemp_ temporaries instead. */
74bf76ed
JJ
5019
5020static void
5021expand_omp_for_init_vars (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
acf0174b 5022 tree *counts, gimple inner_stmt, tree startvar)
74bf76ed
JJ
5023{
5024 int i;
acf0174b
JJ
5025 if (gimple_omp_for_combined_p (fd->for_stmt))
5026 {
5027 /* If fd->loop.n2 is constant, then no propagation of the counts
5028 is needed, they are constant. */
5029 if (TREE_CODE (fd->loop.n2) == INTEGER_CST)
5030 return;
5031
5032 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
5033 ? gimple_omp_parallel_clauses (inner_stmt)
5034 : gimple_omp_for_clauses (inner_stmt);
5035 /* First two _looptemp_ clauses are for istart/iend, counts[0]
5036 isn't supposed to be handled, as the inner loop doesn't
5037 use it. */
5038 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
5039 gcc_assert (innerc);
5040 for (i = 0; i < fd->collapse; i++)
5041 {
5042 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5043 OMP_CLAUSE__LOOPTEMP_);
5044 gcc_assert (innerc);
5045 if (i)
5046 {
5047 tree tem = OMP_CLAUSE_DECL (innerc);
5048 tree t = fold_convert (TREE_TYPE (tem), counts[i]);
5049 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
5050 false, GSI_CONTINUE_LINKING);
5051 gimple stmt = gimple_build_assign (tem, t);
5052 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5053 }
5054 }
5055 return;
5056 }
5057
74bf76ed
JJ
5058 tree type = TREE_TYPE (fd->loop.v);
5059 tree tem = create_tmp_reg (type, ".tem");
5060 gimple stmt = gimple_build_assign (tem, startvar);
5061 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5062
5063 for (i = fd->collapse - 1; i >= 0; i--)
5064 {
5065 tree vtype = TREE_TYPE (fd->loops[i].v), itype, t;
5066 itype = vtype;
5067 if (POINTER_TYPE_P (vtype))
5068 itype = signed_type_for (vtype);
5069 if (i != 0)
5070 t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
5071 else
5072 t = tem;
5073 t = fold_convert (itype, t);
5074 t = fold_build2 (MULT_EXPR, itype, t,
5075 fold_convert (itype, fd->loops[i].step));
5076 if (POINTER_TYPE_P (vtype))
5077 t = fold_build_pointer_plus (fd->loops[i].n1, t);
5078 else
5079 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
5080 t = force_gimple_operand_gsi (gsi, t,
5081 DECL_P (fd->loops[i].v)
5082 && TREE_ADDRESSABLE (fd->loops[i].v),
5083 NULL_TREE, false,
5084 GSI_CONTINUE_LINKING);
5085 stmt = gimple_build_assign (fd->loops[i].v, t);
5086 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5087 if (i != 0)
5088 {
5089 t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
5090 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
5091 false, GSI_CONTINUE_LINKING);
5092 stmt = gimple_build_assign (tem, t);
5093 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5094 }
5095 }
5096}
5097
5098
5099/* Helper function for expand_omp_for_*. Generate code like:
5100 L10:
5101 V3 += STEP3;
5102 if (V3 cond3 N32) goto BODY_BB; else goto L11;
5103 L11:
5104 V3 = N31;
5105 V2 += STEP2;
5106 if (V2 cond2 N22) goto BODY_BB; else goto L12;
5107 L12:
5108 V2 = N21;
5109 V1 += STEP1;
5110 goto BODY_BB; */
5111
5112static basic_block
5113extract_omp_for_update_vars (struct omp_for_data *fd, basic_block cont_bb,
5114 basic_block body_bb)
5115{
5116 basic_block last_bb, bb, collapse_bb = NULL;
5117 int i;
5118 gimple_stmt_iterator gsi;
5119 edge e;
5120 tree t;
5121 gimple stmt;
5122
5123 last_bb = cont_bb;
5124 for (i = fd->collapse - 1; i >= 0; i--)
5125 {
5126 tree vtype = TREE_TYPE (fd->loops[i].v);
5127
5128 bb = create_empty_bb (last_bb);
5129 if (current_loops)
5130 add_bb_to_loop (bb, last_bb->loop_father);
5131 gsi = gsi_start_bb (bb);
5132
5133 if (i < fd->collapse - 1)
5134 {
5135 e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
5136 e->probability = REG_BR_PROB_BASE / 8;
5137
5138 t = fd->loops[i + 1].n1;
5139 t = force_gimple_operand_gsi (&gsi, t,
5140 DECL_P (fd->loops[i + 1].v)
5141 && TREE_ADDRESSABLE (fd->loops[i
5142 + 1].v),
5143 NULL_TREE, false,
5144 GSI_CONTINUE_LINKING);
5145 stmt = gimple_build_assign (fd->loops[i + 1].v, t);
5146 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5147 }
5148 else
5149 collapse_bb = bb;
5150
5151 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
5152
5153 if (POINTER_TYPE_P (vtype))
5154 t = fold_build_pointer_plus (fd->loops[i].v, fd->loops[i].step);
5155 else
5156 t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v, fd->loops[i].step);
5157 t = force_gimple_operand_gsi (&gsi, t,
5158 DECL_P (fd->loops[i].v)
5159 && TREE_ADDRESSABLE (fd->loops[i].v),
5160 NULL_TREE, false, GSI_CONTINUE_LINKING);
5161 stmt = gimple_build_assign (fd->loops[i].v, t);
5162 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5163
5164 if (i > 0)
5165 {
5166 t = fd->loops[i].n2;
5167 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5168 false, GSI_CONTINUE_LINKING);
5169 tree v = fd->loops[i].v;
5170 if (DECL_P (v) && TREE_ADDRESSABLE (v))
5171 v = force_gimple_operand_gsi (&gsi, v, true, NULL_TREE,
5172 false, GSI_CONTINUE_LINKING);
5173 t = fold_build2 (fd->loops[i].cond_code, boolean_type_node, v, t);
5174 stmt = gimple_build_cond_empty (t);
5175 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5176 e = make_edge (bb, body_bb, EDGE_TRUE_VALUE);
5177 e->probability = REG_BR_PROB_BASE * 7 / 8;
5178 }
5179 else
5180 make_edge (bb, body_bb, EDGE_FALLTHRU);
5181 last_bb = bb;
5182 }
5183
5184 return collapse_bb;
5185}
5186
5187
50674e96 5188/* A subroutine of expand_omp_for. Generate code for a parallel
953ff289
DN
5189 loop with any schedule. Given parameters:
5190
5191 for (V = N1; V cond N2; V += STEP) BODY;
5192
5193 where COND is "<" or ">", we generate pseudocode
5194
5195 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
50674e96 5196 if (more) goto L0; else goto L3;
953ff289
DN
5197 L0:
5198 V = istart0;
5199 iend = iend0;
5200 L1:
5201 BODY;
5202 V += STEP;
50674e96 5203 if (V cond iend) goto L1; else goto L2;
953ff289 5204 L2:
50674e96
DN
5205 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5206 L3:
953ff289 5207
50674e96 5208 If this is a combined omp parallel loop, instead of the call to
a68ab351 5209 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
acf0174b
JJ
5210 If this is gimple_omp_for_combined_p loop, then instead of assigning
5211 V and iend in L0 we assign the first two _looptemp_ clause decls of the
5212 inner GIMPLE_OMP_FOR and V += STEP; and
5213 if (V cond iend) goto L1; else goto L2; are removed.
a68ab351
JJ
5214
5215 For collapsed loops, given parameters:
5216 collapse(3)
5217 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
5218 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
5219 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
5220 BODY;
5221
5222 we generate pseudocode
5223
5a0f4dd3 5224 if (__builtin_expect (N32 cond3 N31, 0)) goto Z0;
a68ab351
JJ
5225 if (cond3 is <)
5226 adj = STEP3 - 1;
5227 else
5228 adj = STEP3 + 1;
5229 count3 = (adj + N32 - N31) / STEP3;
5a0f4dd3 5230 if (__builtin_expect (N22 cond2 N21, 0)) goto Z0;
a68ab351
JJ
5231 if (cond2 is <)
5232 adj = STEP2 - 1;
5233 else
5234 adj = STEP2 + 1;
5235 count2 = (adj + N22 - N21) / STEP2;
5a0f4dd3 5236 if (__builtin_expect (N12 cond1 N11, 0)) goto Z0;
a68ab351
JJ
5237 if (cond1 is <)
5238 adj = STEP1 - 1;
5239 else
5240 adj = STEP1 + 1;
5241 count1 = (adj + N12 - N11) / STEP1;
5242 count = count1 * count2 * count3;
5a0f4dd3
JJ
5243 goto Z1;
5244 Z0:
5245 count = 0;
5246 Z1:
a68ab351
JJ
5247 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
5248 if (more) goto L0; else goto L3;
5249 L0:
5250 V = istart0;
5251 T = V;
5252 V3 = N31 + (T % count3) * STEP3;
5253 T = T / count3;
5254 V2 = N21 + (T % count2) * STEP2;
5255 T = T / count2;
5256 V1 = N11 + T * STEP1;
5257 iend = iend0;
5258 L1:
5259 BODY;
5260 V += 1;
5261 if (V < iend) goto L10; else goto L2;
5262 L10:
5263 V3 += STEP3;
5264 if (V3 cond3 N32) goto L1; else goto L11;
5265 L11:
5266 V3 = N31;
5267 V2 += STEP2;
5268 if (V2 cond2 N22) goto L1; else goto L12;
5269 L12:
5270 V2 = N21;
5271 V1 += STEP1;
5272 goto L1;
5273 L2:
5274 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5275 L3:
5276
5277 */
953ff289 5278
777f7f9a 5279static void
50674e96
DN
5280expand_omp_for_generic (struct omp_region *region,
5281 struct omp_for_data *fd,
953ff289 5282 enum built_in_function start_fn,
acf0174b
JJ
5283 enum built_in_function next_fn,
5284 gimple inner_stmt)
953ff289 5285{
726a989a 5286 tree type, istart0, iend0, iend;
a68ab351
JJ
5287 tree t, vmain, vback, bias = NULL_TREE;
5288 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
d3c673c7 5289 basic_block l2_bb = NULL, l3_bb = NULL;
726a989a
RB
5290 gimple_stmt_iterator gsi;
5291 gimple stmt;
50674e96 5292 bool in_combined_parallel = is_combined_parallel (region);
e5c95afe 5293 bool broken_loop = region->cont == NULL;
917948d3 5294 edge e, ne;
a68ab351
JJ
5295 tree *counts = NULL;
5296 int i;
e5c95afe
ZD
5297
5298 gcc_assert (!broken_loop || !in_combined_parallel);
a68ab351
JJ
5299 gcc_assert (fd->iter_type == long_integer_type_node
5300 || !in_combined_parallel);
953ff289 5301
a68ab351
JJ
5302 type = TREE_TYPE (fd->loop.v);
5303 istart0 = create_tmp_var (fd->iter_type, ".istart0");
5304 iend0 = create_tmp_var (fd->iter_type, ".iend0");
5b4fc8fb
JJ
5305 TREE_ADDRESSABLE (istart0) = 1;
5306 TREE_ADDRESSABLE (iend0) = 1;
953ff289 5307
a68ab351
JJ
5308 /* See if we need to bias by LLONG_MIN. */
5309 if (fd->iter_type == long_long_unsigned_type_node
5310 && TREE_CODE (type) == INTEGER_TYPE
5311 && !TYPE_UNSIGNED (type))
5312 {
5313 tree n1, n2;
5314
5315 if (fd->loop.cond_code == LT_EXPR)
5316 {
5317 n1 = fd->loop.n1;
5318 n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
5319 }
5320 else
5321 {
5322 n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
5323 n2 = fd->loop.n1;
5324 }
5325 if (TREE_CODE (n1) != INTEGER_CST
5326 || TREE_CODE (n2) != INTEGER_CST
5327 || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
5328 bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
5329 }
5330
777f7f9a 5331 entry_bb = region->entry;
d3c673c7 5332 cont_bb = region->cont;
a68ab351 5333 collapse_bb = NULL;
e5c95afe
ZD
5334 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
5335 gcc_assert (broken_loop
5336 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
5337 l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
5338 l1_bb = single_succ (l0_bb);
5339 if (!broken_loop)
d3c673c7
JJ
5340 {
5341 l2_bb = create_empty_bb (cont_bb);
e5c95afe
ZD
5342 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
5343 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
d3c673c7 5344 }
e5c95afe
ZD
5345 else
5346 l2_bb = NULL;
5347 l3_bb = BRANCH_EDGE (entry_bb)->dest;
5348 exit_bb = region->exit;
50674e96 5349
726a989a 5350 gsi = gsi_last_bb (entry_bb);
a68ab351 5351
726a989a 5352 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
a68ab351
JJ
5353 if (fd->collapse > 1)
5354 {
5a0f4dd3 5355 int first_zero_iter = -1;
74bf76ed 5356 basic_block zero_iter_bb = NULL, l2_dom_bb = NULL;
5a0f4dd3 5357
74bf76ed
JJ
5358 counts = XALLOCAVEC (tree, fd->collapse);
5359 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
5360 zero_iter_bb, first_zero_iter,
5361 l2_dom_bb);
a68ab351 5362
5a0f4dd3
JJ
5363 if (zero_iter_bb)
5364 {
5365 /* Some counts[i] vars might be uninitialized if
5366 some loop has zero iterations. But the body shouldn't
5367 be executed in that case, so just avoid uninit warnings. */
5368 for (i = first_zero_iter; i < fd->collapse; i++)
5369 if (SSA_VAR_P (counts[i]))
5370 TREE_NO_WARNING (counts[i]) = 1;
5371 gsi_prev (&gsi);
5372 e = split_block (entry_bb, gsi_stmt (gsi));
5373 entry_bb = e->dest;
5374 make_edge (zero_iter_bb, entry_bb, EDGE_FALLTHRU);
5375 gsi = gsi_last_bb (entry_bb);
5376 set_immediate_dominator (CDI_DOMINATORS, entry_bb,
5377 get_immediate_dominator (CDI_DOMINATORS,
5378 zero_iter_bb));
5379 }
a68ab351 5380 }
917948d3
ZD
5381 if (in_combined_parallel)
5382 {
5383 /* In a combined parallel loop, emit a call to
5384 GOMP_loop_foo_next. */
e79983f4 5385 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
917948d3
ZD
5386 build_fold_addr_expr (istart0),
5387 build_fold_addr_expr (iend0));
5388 }
5389 else
953ff289 5390 {
5039610b 5391 tree t0, t1, t2, t3, t4;
50674e96
DN
5392 /* If this is not a combined parallel loop, emit a call to
5393 GOMP_loop_foo_start in ENTRY_BB. */
5039610b
SL
5394 t4 = build_fold_addr_expr (iend0);
5395 t3 = build_fold_addr_expr (istart0);
a68ab351 5396 t2 = fold_convert (fd->iter_type, fd->loop.step);
74bf76ed
JJ
5397 t1 = fd->loop.n2;
5398 t0 = fd->loop.n1;
acf0174b
JJ
5399 if (gimple_omp_for_combined_into_p (fd->for_stmt))
5400 {
5401 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
5402 OMP_CLAUSE__LOOPTEMP_);
5403 gcc_assert (innerc);
5404 t0 = OMP_CLAUSE_DECL (innerc);
5405 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5406 OMP_CLAUSE__LOOPTEMP_);
5407 gcc_assert (innerc);
5408 t1 = OMP_CLAUSE_DECL (innerc);
5409 }
74bf76ed
JJ
5410 if (POINTER_TYPE_P (TREE_TYPE (t0))
5411 && TYPE_PRECISION (TREE_TYPE (t0))
5412 != TYPE_PRECISION (fd->iter_type))
c6ff4493
SE
5413 {
5414 /* Avoid casting pointers to integer of a different size. */
96f9265a 5415 tree itype = signed_type_for (type);
74bf76ed
JJ
5416 t1 = fold_convert (fd->iter_type, fold_convert (itype, t1));
5417 t0 = fold_convert (fd->iter_type, fold_convert (itype, t0));
c6ff4493
SE
5418 }
5419 else
5420 {
74bf76ed
JJ
5421 t1 = fold_convert (fd->iter_type, t1);
5422 t0 = fold_convert (fd->iter_type, t0);
c6ff4493 5423 }
a68ab351 5424 if (bias)
953ff289 5425 {
a68ab351
JJ
5426 t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
5427 t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
5428 }
5429 if (fd->iter_type == long_integer_type_node)
5430 {
5431 if (fd->chunk_size)
5432 {
5433 t = fold_convert (fd->iter_type, fd->chunk_size);
e79983f4
MM
5434 t = build_call_expr (builtin_decl_explicit (start_fn),
5435 6, t0, t1, t2, t, t3, t4);
a68ab351
JJ
5436 }
5437 else
e79983f4
MM
5438 t = build_call_expr (builtin_decl_explicit (start_fn),
5439 5, t0, t1, t2, t3, t4);
953ff289 5440 }
5039610b 5441 else
a68ab351
JJ
5442 {
5443 tree t5;
5444 tree c_bool_type;
e79983f4 5445 tree bfn_decl;
a68ab351
JJ
5446
5447 /* The GOMP_loop_ull_*start functions have additional boolean
5448 argument, true for < loops and false for > loops.
5449 In Fortran, the C bool type can be different from
5450 boolean_type_node. */
e79983f4
MM
5451 bfn_decl = builtin_decl_explicit (start_fn);
5452 c_bool_type = TREE_TYPE (TREE_TYPE (bfn_decl));
a68ab351
JJ
5453 t5 = build_int_cst (c_bool_type,
5454 fd->loop.cond_code == LT_EXPR ? 1 : 0);
5455 if (fd->chunk_size)
5456 {
e79983f4 5457 tree bfn_decl = builtin_decl_explicit (start_fn);
a68ab351 5458 t = fold_convert (fd->iter_type, fd->chunk_size);
e79983f4 5459 t = build_call_expr (bfn_decl, 7, t5, t0, t1, t2, t, t3, t4);
a68ab351
JJ
5460 }
5461 else
e79983f4
MM
5462 t = build_call_expr (builtin_decl_explicit (start_fn),
5463 6, t5, t0, t1, t2, t3, t4);
a68ab351 5464 }
953ff289 5465 }
a68ab351
JJ
5466 if (TREE_TYPE (t) != boolean_type_node)
5467 t = fold_build2 (NE_EXPR, boolean_type_node,
5468 t, build_int_cst (TREE_TYPE (t), 0));
726a989a
RB
5469 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5470 true, GSI_SAME_STMT);
5471 gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
917948d3 5472
726a989a
RB
5473 /* Remove the GIMPLE_OMP_FOR statement. */
5474 gsi_remove (&gsi, true);
953ff289 5475
50674e96 5476 /* Iteration setup for sequential loop goes in L0_BB. */
74bf76ed
JJ
5477 tree startvar = fd->loop.v;
5478 tree endvar = NULL_TREE;
5479
acf0174b
JJ
5480 if (gimple_omp_for_combined_p (fd->for_stmt))
5481 {
5482 gcc_assert (gimple_code (inner_stmt) == GIMPLE_OMP_FOR
5483 && gimple_omp_for_kind (inner_stmt)
5484 == GF_OMP_FOR_KIND_SIMD);
5485 tree innerc = find_omp_clause (gimple_omp_for_clauses (inner_stmt),
5486 OMP_CLAUSE__LOOPTEMP_);
5487 gcc_assert (innerc);
5488 startvar = OMP_CLAUSE_DECL (innerc);
5489 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5490 OMP_CLAUSE__LOOPTEMP_);
5491 gcc_assert (innerc);
5492 endvar = OMP_CLAUSE_DECL (innerc);
5493 }
5494
726a989a 5495 gsi = gsi_start_bb (l0_bb);
550918ca 5496 t = istart0;
a68ab351 5497 if (bias)
550918ca 5498 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
74bf76ed
JJ
5499 if (POINTER_TYPE_P (TREE_TYPE (startvar)))
5500 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
5501 t = fold_convert (TREE_TYPE (startvar), t);
ea3a0fde 5502 t = force_gimple_operand_gsi (&gsi, t,
74bf76ed
JJ
5503 DECL_P (startvar)
5504 && TREE_ADDRESSABLE (startvar),
ea3a0fde 5505 NULL_TREE, false, GSI_CONTINUE_LINKING);
74bf76ed 5506 stmt = gimple_build_assign (startvar, t);
726a989a 5507 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
953ff289 5508
550918ca 5509 t = iend0;
a68ab351 5510 if (bias)
550918ca 5511 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
74bf76ed
JJ
5512 if (POINTER_TYPE_P (TREE_TYPE (startvar)))
5513 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
5514 t = fold_convert (TREE_TYPE (startvar), t);
726a989a
RB
5515 iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5516 false, GSI_CONTINUE_LINKING);
74bf76ed 5517 if (endvar)
a68ab351 5518 {
74bf76ed 5519 stmt = gimple_build_assign (endvar, iend);
726a989a 5520 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
a68ab351 5521 }
74bf76ed 5522 if (fd->collapse > 1)
acf0174b 5523 expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
50674e96 5524
e5c95afe 5525 if (!broken_loop)
d3c673c7 5526 {
e5c95afe
ZD
5527 /* Code to control the increment and predicate for the sequential
5528 loop goes in the CONT_BB. */
726a989a
RB
5529 gsi = gsi_last_bb (cont_bb);
5530 stmt = gsi_stmt (gsi);
5531 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
5532 vmain = gimple_omp_continue_control_use (stmt);
5533 vback = gimple_omp_continue_control_def (stmt);
917948d3 5534
acf0174b 5535 if (!gimple_omp_for_combined_p (fd->for_stmt))
74bf76ed
JJ
5536 {
5537 if (POINTER_TYPE_P (type))
5538 t = fold_build_pointer_plus (vmain, fd->loop.step);
5539 else
5540 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
5541 t = force_gimple_operand_gsi (&gsi, t,
5542 DECL_P (vback)
5543 && TREE_ADDRESSABLE (vback),
5544 NULL_TREE, true, GSI_SAME_STMT);
5545 stmt = gimple_build_assign (vback, t);
5546 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5547
5548 t = build2 (fd->loop.cond_code, boolean_type_node,
5549 DECL_P (vback) && TREE_ADDRESSABLE (vback) ? t : vback,
5550 iend);
5551 stmt = gimple_build_cond_empty (t);
5552 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5553 }
50674e96 5554
726a989a
RB
5555 /* Remove GIMPLE_OMP_CONTINUE. */
5556 gsi_remove (&gsi, true);
50674e96 5557
acf0174b 5558 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
74bf76ed 5559 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, l1_bb);
a68ab351 5560
e5c95afe 5561 /* Emit code to get the next parallel iteration in L2_BB. */
726a989a 5562 gsi = gsi_start_bb (l2_bb);
50674e96 5563
e79983f4 5564 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
e5c95afe
ZD
5565 build_fold_addr_expr (istart0),
5566 build_fold_addr_expr (iend0));
726a989a
RB
5567 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5568 false, GSI_CONTINUE_LINKING);
a68ab351
JJ
5569 if (TREE_TYPE (t) != boolean_type_node)
5570 t = fold_build2 (NE_EXPR, boolean_type_node,
5571 t, build_int_cst (TREE_TYPE (t), 0));
726a989a
RB
5572 stmt = gimple_build_cond_empty (t);
5573 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
e5c95afe 5574 }
953ff289 5575
777f7f9a 5576 /* Add the loop cleanup function. */
726a989a
RB
5577 gsi = gsi_last_bb (exit_bb);
5578 if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
e79983f4 5579 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_NOWAIT);
acf0174b
JJ
5580 else if (gimple_omp_return_lhs (gsi_stmt (gsi)))
5581 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_CANCEL);
777f7f9a 5582 else
e79983f4 5583 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END);
726a989a 5584 stmt = gimple_build_call (t, 0);
acf0174b
JJ
5585 if (gimple_omp_return_lhs (gsi_stmt (gsi)))
5586 gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (gsi)));
726a989a
RB
5587 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
5588 gsi_remove (&gsi, true);
50674e96
DN
5589
5590 /* Connect the new blocks. */
917948d3
ZD
5591 find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
5592 find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
953ff289 5593
e5c95afe
ZD
5594 if (!broken_loop)
5595 {
726a989a
RB
5596 gimple_seq phis;
5597
917948d3
ZD
5598 e = find_edge (cont_bb, l3_bb);
5599 ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
5600
726a989a
RB
5601 phis = phi_nodes (l3_bb);
5602 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5603 {
5604 gimple phi = gsi_stmt (gsi);
5605 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
5606 PHI_ARG_DEF_FROM_EDGE (phi, e));
5607 }
917948d3
ZD
5608 remove_edge (e);
5609
e5c95afe 5610 make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
a9e0d843
RB
5611 if (current_loops)
5612 add_bb_to_loop (l2_bb, cont_bb->loop_father);
74bf76ed 5613 e = find_edge (cont_bb, l1_bb);
acf0174b
JJ
5614 if (gimple_omp_for_combined_p (fd->for_stmt))
5615 {
5616 remove_edge (e);
5617 e = NULL;
5618 }
74bf76ed 5619 else if (fd->collapse > 1)
a68ab351 5620 {
a68ab351
JJ
5621 remove_edge (e);
5622 e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
5623 }
5624 else
74bf76ed
JJ
5625 e->flags = EDGE_TRUE_VALUE;
5626 if (e)
a68ab351 5627 {
74bf76ed
JJ
5628 e->probability = REG_BR_PROB_BASE * 7 / 8;
5629 find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
5630 }
5631 else
5632 {
5633 e = find_edge (cont_bb, l2_bb);
5634 e->flags = EDGE_FALLTHRU;
a68ab351 5635 }
e5c95afe 5636 make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
917948d3
ZD
5637
5638 set_immediate_dominator (CDI_DOMINATORS, l2_bb,
5639 recompute_dominator (CDI_DOMINATORS, l2_bb));
5640 set_immediate_dominator (CDI_DOMINATORS, l3_bb,
5641 recompute_dominator (CDI_DOMINATORS, l3_bb));
5642 set_immediate_dominator (CDI_DOMINATORS, l0_bb,
5643 recompute_dominator (CDI_DOMINATORS, l0_bb));
5644 set_immediate_dominator (CDI_DOMINATORS, l1_bb,
5645 recompute_dominator (CDI_DOMINATORS, l1_bb));
6093bc06
RB
5646
5647 struct loop *outer_loop = alloc_loop ();
5648 outer_loop->header = l0_bb;
5649 outer_loop->latch = l2_bb;
5650 add_loop (outer_loop, l0_bb->loop_father);
5651
acf0174b 5652 if (!gimple_omp_for_combined_p (fd->for_stmt))
74bf76ed
JJ
5653 {
5654 struct loop *loop = alloc_loop ();
5655 loop->header = l1_bb;
5656 /* The loop may have multiple latches. */
5657 add_loop (loop, outer_loop);
5658 }
e5c95afe 5659 }
953ff289
DN
5660}
5661
5662
50674e96
DN
5663/* A subroutine of expand_omp_for. Generate code for a parallel
5664 loop with static schedule and no specified chunk size. Given
5665 parameters:
953ff289
DN
5666
5667 for (V = N1; V cond N2; V += STEP) BODY;
5668
5669 where COND is "<" or ">", we generate pseudocode
5670
5a0f4dd3 5671 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
953ff289
DN
5672 if (cond is <)
5673 adj = STEP - 1;
5674 else
5675 adj = STEP + 1;
a68ab351
JJ
5676 if ((__typeof (V)) -1 > 0 && cond is >)
5677 n = -(adj + N2 - N1) / -STEP;
5678 else
5679 n = (adj + N2 - N1) / STEP;
953ff289 5680 q = n / nthreads;
fb79f500
JJ
5681 tt = n % nthreads;
5682 if (threadid < tt) goto L3; else goto L4;
5683 L3:
5684 tt = 0;
5685 q = q + 1;
5686 L4:
5687 s0 = q * threadid + tt;
5688 e0 = s0 + q;
917948d3 5689 V = s0 * STEP + N1;
953ff289
DN
5690 if (s0 >= e0) goto L2; else goto L0;
5691 L0:
953ff289
DN
5692 e = e0 * STEP + N1;
5693 L1:
5694 BODY;
5695 V += STEP;
5696 if (V cond e) goto L1;
953ff289
DN
5697 L2:
5698*/
5699
777f7f9a 5700static void
50674e96 5701expand_omp_for_static_nochunk (struct omp_region *region,
acf0174b
JJ
5702 struct omp_for_data *fd,
5703 gimple inner_stmt)
953ff289 5704{
fb79f500 5705 tree n, q, s0, e0, e, t, tt, nthreads, threadid;
a68ab351 5706 tree type, itype, vmain, vback;
fb79f500 5707 basic_block entry_bb, second_bb, third_bb, exit_bb, seq_start_bb;
acf0174b 5708 basic_block body_bb, cont_bb, collapse_bb = NULL;
777f7f9a 5709 basic_block fin_bb;
726a989a
RB
5710 gimple_stmt_iterator gsi;
5711 gimple stmt;
fb79f500 5712 edge ep;
acf0174b
JJ
5713 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
5714 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
5715 bool broken_loop = region->cont == NULL;
5716 tree *counts = NULL;
5717 tree n1, n2, step;
953ff289 5718
a68ab351
JJ
5719 itype = type = TREE_TYPE (fd->loop.v);
5720 if (POINTER_TYPE_P (type))
96f9265a 5721 itype = signed_type_for (type);
953ff289 5722
777f7f9a 5723 entry_bb = region->entry;
777f7f9a 5724 cont_bb = region->cont;
e5c95afe 5725 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
acf0174b
JJ
5726 fin_bb = BRANCH_EDGE (entry_bb)->dest;
5727 gcc_assert (broken_loop
5728 || (fin_bb == FALLTHRU_EDGE (cont_bb)->dest));
e5c95afe
ZD
5729 seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
5730 body_bb = single_succ (seq_start_bb);
acf0174b
JJ
5731 if (!broken_loop)
5732 {
5733 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
5734 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
5735 }
777f7f9a
RH
5736 exit_bb = region->exit;
5737
50674e96 5738 /* Iteration space partitioning goes in ENTRY_BB. */
726a989a
RB
5739 gsi = gsi_last_bb (entry_bb);
5740 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
777f7f9a 5741
acf0174b
JJ
5742 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
5743 {
5744 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
5745 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
5746 }
5747
5748 if (fd->collapse > 1)
5749 {
5750 int first_zero_iter = -1;
5751 basic_block l2_dom_bb = NULL;
5752
5753 counts = XALLOCAVEC (tree, fd->collapse);
5754 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
5755 fin_bb, first_zero_iter,
5756 l2_dom_bb);
5757 t = NULL_TREE;
5758 }
5759 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
5760 t = integer_one_node;
5761 else
5762 t = fold_binary (fd->loop.cond_code, boolean_type_node,
5763 fold_convert (type, fd->loop.n1),
5764 fold_convert (type, fd->loop.n2));
5765 if (fd->collapse == 1
5766 && TYPE_UNSIGNED (type)
5a0f4dd3
JJ
5767 && (t == NULL_TREE || !integer_onep (t)))
5768 {
5a0f4dd3
JJ
5769 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
5770 n1 = force_gimple_operand_gsi (&gsi, n1, true, NULL_TREE,
5771 true, GSI_SAME_STMT);
5772 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
5773 n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
5774 true, GSI_SAME_STMT);
5775 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
5776 NULL_TREE, NULL_TREE);
5777 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5778 if (walk_tree (gimple_cond_lhs_ptr (stmt),
5779 expand_omp_regimplify_p, NULL, NULL)
5780 || walk_tree (gimple_cond_rhs_ptr (stmt),
5781 expand_omp_regimplify_p, NULL, NULL))
5782 {
5783 gsi = gsi_for_stmt (stmt);
5784 gimple_regimplify_operands (stmt, &gsi);
5785 }
5786 ep = split_block (entry_bb, stmt);
5787 ep->flags = EDGE_TRUE_VALUE;
5788 entry_bb = ep->dest;
5789 ep->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
5790 ep = make_edge (ep->src, fin_bb, EDGE_FALSE_VALUE);
5791 ep->probability = REG_BR_PROB_BASE / 2000 - 1;
5792 if (gimple_in_ssa_p (cfun))
5793 {
5794 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
5795 for (gsi = gsi_start_phis (fin_bb);
5796 !gsi_end_p (gsi); gsi_next (&gsi))
5797 {
5798 gimple phi = gsi_stmt (gsi);
5799 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
5800 ep, UNKNOWN_LOCATION);
5801 }
5802 }
5803 gsi = gsi_last_bb (entry_bb);
5804 }
5805
acf0174b 5806 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
a68ab351 5807 t = fold_convert (itype, t);
726a989a
RB
5808 nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5809 true, GSI_SAME_STMT);
b8698a0f 5810
acf0174b 5811 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
a68ab351 5812 t = fold_convert (itype, t);
726a989a
RB
5813 threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5814 true, GSI_SAME_STMT);
953ff289 5815
acf0174b
JJ
5816 n1 = fd->loop.n1;
5817 n2 = fd->loop.n2;
5818 step = fd->loop.step;
5819 if (gimple_omp_for_combined_into_p (fd->for_stmt))
5820 {
5821 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
5822 OMP_CLAUSE__LOOPTEMP_);
5823 gcc_assert (innerc);
5824 n1 = OMP_CLAUSE_DECL (innerc);
5825 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5826 OMP_CLAUSE__LOOPTEMP_);
5827 gcc_assert (innerc);
5828 n2 = OMP_CLAUSE_DECL (innerc);
5829 }
5830 n1 = force_gimple_operand_gsi (&gsi, fold_convert (type, n1),
5831 true, NULL_TREE, true, GSI_SAME_STMT);
5832 n2 = force_gimple_operand_gsi (&gsi, fold_convert (itype, n2),
5833 true, NULL_TREE, true, GSI_SAME_STMT);
5834 step = force_gimple_operand_gsi (&gsi, fold_convert (itype, step),
5835 true, NULL_TREE, true, GSI_SAME_STMT);
a68ab351
JJ
5836
5837 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
acf0174b
JJ
5838 t = fold_build2 (PLUS_EXPR, itype, step, t);
5839 t = fold_build2 (PLUS_EXPR, itype, t, n2);
5840 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
a68ab351
JJ
5841 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
5842 t = fold_build2 (TRUNC_DIV_EXPR, itype,
5843 fold_build1 (NEGATE_EXPR, itype, t),
acf0174b 5844 fold_build1 (NEGATE_EXPR, itype, step));
a68ab351 5845 else
acf0174b 5846 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
a68ab351 5847 t = fold_convert (itype, t);
726a989a 5848 n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
953ff289 5849
7cc434a3 5850 q = create_tmp_reg (itype, "q");
a68ab351 5851 t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
fb79f500
JJ
5852 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
5853 gsi_insert_before (&gsi, gimple_build_assign (q, t), GSI_SAME_STMT);
5854
7cc434a3 5855 tt = create_tmp_reg (itype, "tt");
fb79f500
JJ
5856 t = fold_build2 (TRUNC_MOD_EXPR, itype, n, nthreads);
5857 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
5858 gsi_insert_before (&gsi, gimple_build_assign (tt, t), GSI_SAME_STMT);
953ff289 5859
fb79f500
JJ
5860 t = build2 (LT_EXPR, boolean_type_node, threadid, tt);
5861 stmt = gimple_build_cond_empty (t);
5862 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5863
5864 second_bb = split_block (entry_bb, stmt)->dest;
5865 gsi = gsi_last_bb (second_bb);
5866 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
5867
5868 gsi_insert_before (&gsi, gimple_build_assign (tt, build_int_cst (itype, 0)),
5869 GSI_SAME_STMT);
5870 stmt = gimple_build_assign_with_ops (PLUS_EXPR, q, q,
5871 build_int_cst (itype, 1));
5872 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5873
5874 third_bb = split_block (second_bb, stmt)->dest;
5875 gsi = gsi_last_bb (third_bb);
5876 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
953ff289 5877
a68ab351 5878 t = build2 (MULT_EXPR, itype, q, threadid);
fb79f500 5879 t = build2 (PLUS_EXPR, itype, t, tt);
726a989a 5880 s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
953ff289 5881
a68ab351 5882 t = fold_build2 (PLUS_EXPR, itype, s0, q);
726a989a 5883 e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
917948d3 5884
953ff289 5885 t = build2 (GE_EXPR, boolean_type_node, s0, e0);
726a989a 5886 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
50674e96 5887
726a989a
RB
5888 /* Remove the GIMPLE_OMP_FOR statement. */
5889 gsi_remove (&gsi, true);
50674e96
DN
5890
5891 /* Setup code for sequential iteration goes in SEQ_START_BB. */
726a989a 5892 gsi = gsi_start_bb (seq_start_bb);
953ff289 5893
acf0174b
JJ
5894 tree startvar = fd->loop.v;
5895 tree endvar = NULL_TREE;
5896
5897 if (gimple_omp_for_combined_p (fd->for_stmt))
5898 {
5899 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
5900 ? gimple_omp_parallel_clauses (inner_stmt)
5901 : gimple_omp_for_clauses (inner_stmt);
5902 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
5903 gcc_assert (innerc);
5904 startvar = OMP_CLAUSE_DECL (innerc);
5905 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5906 OMP_CLAUSE__LOOPTEMP_);
5907 gcc_assert (innerc);
5908 endvar = OMP_CLAUSE_DECL (innerc);
5909 }
a68ab351 5910 t = fold_convert (itype, s0);
acf0174b 5911 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 5912 if (POINTER_TYPE_P (type))
acf0174b 5913 t = fold_build_pointer_plus (n1, t);
a68ab351 5914 else
acf0174b
JJ
5915 t = fold_build2 (PLUS_EXPR, type, t, n1);
5916 t = fold_convert (TREE_TYPE (startvar), t);
ea3a0fde 5917 t = force_gimple_operand_gsi (&gsi, t,
acf0174b
JJ
5918 DECL_P (startvar)
5919 && TREE_ADDRESSABLE (startvar),
ea3a0fde 5920 NULL_TREE, false, GSI_CONTINUE_LINKING);
acf0174b 5921 stmt = gimple_build_assign (startvar, t);
726a989a 5922 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
b8698a0f 5923
a68ab351 5924 t = fold_convert (itype, e0);
acf0174b 5925 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 5926 if (POINTER_TYPE_P (type))
acf0174b 5927 t = fold_build_pointer_plus (n1, t);
a68ab351 5928 else
acf0174b
JJ
5929 t = fold_build2 (PLUS_EXPR, type, t, n1);
5930 t = fold_convert (TREE_TYPE (startvar), t);
726a989a
RB
5931 e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5932 false, GSI_CONTINUE_LINKING);
acf0174b
JJ
5933 if (endvar)
5934 {
5935 stmt = gimple_build_assign (endvar, e);
5936 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5937 }
5938 if (fd->collapse > 1)
5939 expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
953ff289 5940
acf0174b
JJ
5941 if (!broken_loop)
5942 {
5943 /* The code controlling the sequential loop replaces the
5944 GIMPLE_OMP_CONTINUE. */
5945 gsi = gsi_last_bb (cont_bb);
5946 stmt = gsi_stmt (gsi);
5947 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
5948 vmain = gimple_omp_continue_control_use (stmt);
5949 vback = gimple_omp_continue_control_def (stmt);
917948d3 5950
acf0174b
JJ
5951 if (!gimple_omp_for_combined_p (fd->for_stmt))
5952 {
5953 if (POINTER_TYPE_P (type))
5954 t = fold_build_pointer_plus (vmain, step);
5955 else
5956 t = fold_build2 (PLUS_EXPR, type, vmain, step);
5957 t = force_gimple_operand_gsi (&gsi, t,
5958 DECL_P (vback)
5959 && TREE_ADDRESSABLE (vback),
5960 NULL_TREE, true, GSI_SAME_STMT);
5961 stmt = gimple_build_assign (vback, t);
5962 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
917948d3 5963
acf0174b
JJ
5964 t = build2 (fd->loop.cond_code, boolean_type_node,
5965 DECL_P (vback) && TREE_ADDRESSABLE (vback)
5966 ? t : vback, e);
5967 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
5968 }
953ff289 5969
acf0174b
JJ
5970 /* Remove the GIMPLE_OMP_CONTINUE statement. */
5971 gsi_remove (&gsi, true);
5972
5973 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
5974 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
5975 }
50674e96 5976
726a989a
RB
5977 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
5978 gsi = gsi_last_bb (exit_bb);
5979 if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
acf0174b
JJ
5980 {
5981 t = gimple_omp_return_lhs (gsi_stmt (gsi));
5982 gsi_insert_after (&gsi, build_omp_barrier (t), GSI_SAME_STMT);
5983 }
726a989a 5984 gsi_remove (&gsi, true);
50674e96
DN
5985
5986 /* Connect all the blocks. */
fb79f500
JJ
5987 ep = make_edge (entry_bb, third_bb, EDGE_FALSE_VALUE);
5988 ep->probability = REG_BR_PROB_BASE / 4 * 3;
5989 ep = find_edge (entry_bb, second_bb);
5990 ep->flags = EDGE_TRUE_VALUE;
5991 ep->probability = REG_BR_PROB_BASE / 4;
5992 find_edge (third_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
5993 find_edge (third_bb, fin_bb)->flags = EDGE_TRUE_VALUE;
917948d3 5994
acf0174b
JJ
5995 if (!broken_loop)
5996 {
5997 ep = find_edge (cont_bb, body_bb);
5998 if (gimple_omp_for_combined_p (fd->for_stmt))
5999 {
6000 remove_edge (ep);
6001 ep = NULL;
6002 }
6003 else if (fd->collapse > 1)
6004 {
6005 remove_edge (ep);
6006 ep = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
6007 }
6008 else
6009 ep->flags = EDGE_TRUE_VALUE;
6010 find_edge (cont_bb, fin_bb)->flags
6011 = ep ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
6012 }
b8698a0f 6013
fb79f500
JJ
6014 set_immediate_dominator (CDI_DOMINATORS, second_bb, entry_bb);
6015 set_immediate_dominator (CDI_DOMINATORS, third_bb, entry_bb);
6016 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, third_bb);
acf0174b 6017
917948d3
ZD
6018 set_immediate_dominator (CDI_DOMINATORS, body_bb,
6019 recompute_dominator (CDI_DOMINATORS, body_bb));
6020 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
6021 recompute_dominator (CDI_DOMINATORS, fin_bb));
6093bc06 6022
acf0174b
JJ
6023 if (!broken_loop && !gimple_omp_for_combined_p (fd->for_stmt))
6024 {
6025 struct loop *loop = alloc_loop ();
6026 loop->header = body_bb;
6027 if (collapse_bb == NULL)
6028 loop->latch = cont_bb;
6029 add_loop (loop, body_bb->loop_father);
6030 }
953ff289
DN
6031}
6032
50674e96
DN
6033
6034/* A subroutine of expand_omp_for. Generate code for a parallel
6035 loop with static schedule and a specified chunk size. Given
6036 parameters:
953ff289
DN
6037
6038 for (V = N1; V cond N2; V += STEP) BODY;
6039
6040 where COND is "<" or ">", we generate pseudocode
6041
5a0f4dd3 6042 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
953ff289
DN
6043 if (cond is <)
6044 adj = STEP - 1;
6045 else
6046 adj = STEP + 1;
a68ab351
JJ
6047 if ((__typeof (V)) -1 > 0 && cond is >)
6048 n = -(adj + N2 - N1) / -STEP;
6049 else
6050 n = (adj + N2 - N1) / STEP;
953ff289 6051 trip = 0;
917948d3
ZD
6052 V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
6053 here so that V is defined
6054 if the loop is not entered
953ff289
DN
6055 L0:
6056 s0 = (trip * nthreads + threadid) * CHUNK;
6057 e0 = min(s0 + CHUNK, n);
6058 if (s0 < n) goto L1; else goto L4;
6059 L1:
6060 V = s0 * STEP + N1;
6061 e = e0 * STEP + N1;
6062 L2:
6063 BODY;
6064 V += STEP;
6065 if (V cond e) goto L2; else goto L3;
6066 L3:
6067 trip += 1;
6068 goto L0;
6069 L4:
953ff289
DN
6070*/
6071
777f7f9a 6072static void
acf0174b
JJ
6073expand_omp_for_static_chunk (struct omp_region *region,
6074 struct omp_for_data *fd, gimple inner_stmt)
953ff289 6075{
726a989a 6076 tree n, s0, e0, e, t;
917948d3 6077 tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
726a989a 6078 tree type, itype, v_main, v_back, v_extra;
50674e96 6079 basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
acf0174b 6080 basic_block trip_update_bb = NULL, cont_bb, collapse_bb = NULL, fin_bb;
726a989a
RB
6081 gimple_stmt_iterator si;
6082 gimple stmt;
6083 edge se;
acf0174b
JJ
6084 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
6085 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
6086 bool broken_loop = region->cont == NULL;
6087 tree *counts = NULL;
6088 tree n1, n2, step;
953ff289 6089
a68ab351
JJ
6090 itype = type = TREE_TYPE (fd->loop.v);
6091 if (POINTER_TYPE_P (type))
96f9265a 6092 itype = signed_type_for (type);
953ff289 6093
777f7f9a 6094 entry_bb = region->entry;
e5c95afe
ZD
6095 se = split_block (entry_bb, last_stmt (entry_bb));
6096 entry_bb = se->src;
6097 iter_part_bb = se->dest;
777f7f9a 6098 cont_bb = region->cont;
e5c95afe 6099 gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
acf0174b
JJ
6100 fin_bb = BRANCH_EDGE (iter_part_bb)->dest;
6101 gcc_assert (broken_loop
6102 || fin_bb == FALLTHRU_EDGE (cont_bb)->dest);
e5c95afe
ZD
6103 seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
6104 body_bb = single_succ (seq_start_bb);
acf0174b
JJ
6105 if (!broken_loop)
6106 {
6107 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
6108 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6109 trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
6110 }
777f7f9a 6111 exit_bb = region->exit;
50674e96 6112
50674e96 6113 /* Trip and adjustment setup goes in ENTRY_BB. */
726a989a
RB
6114 si = gsi_last_bb (entry_bb);
6115 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);
50674e96 6116
acf0174b
JJ
6117 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
6118 {
6119 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
6120 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
6121 }
6122
6123 if (fd->collapse > 1)
6124 {
6125 int first_zero_iter = -1;
6126 basic_block l2_dom_bb = NULL;
6127
6128 counts = XALLOCAVEC (tree, fd->collapse);
6129 expand_omp_for_init_counts (fd, &si, entry_bb, counts,
6130 fin_bb, first_zero_iter,
6131 l2_dom_bb);
6132 t = NULL_TREE;
6133 }
6134 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
6135 t = integer_one_node;
6136 else
6137 t = fold_binary (fd->loop.cond_code, boolean_type_node,
6138 fold_convert (type, fd->loop.n1),
6139 fold_convert (type, fd->loop.n2));
6140 if (fd->collapse == 1
6141 && TYPE_UNSIGNED (type)
5a0f4dd3
JJ
6142 && (t == NULL_TREE || !integer_onep (t)))
6143 {
5a0f4dd3
JJ
6144 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
6145 n1 = force_gimple_operand_gsi (&si, n1, true, NULL_TREE,
6146 true, GSI_SAME_STMT);
6147 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
6148 n2 = force_gimple_operand_gsi (&si, n2, true, NULL_TREE,
6149 true, GSI_SAME_STMT);
6150 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
6151 NULL_TREE, NULL_TREE);
6152 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
6153 if (walk_tree (gimple_cond_lhs_ptr (stmt),
6154 expand_omp_regimplify_p, NULL, NULL)
6155 || walk_tree (gimple_cond_rhs_ptr (stmt),
6156 expand_omp_regimplify_p, NULL, NULL))
6157 {
6158 si = gsi_for_stmt (stmt);
6159 gimple_regimplify_operands (stmt, &si);
6160 }
6161 se = split_block (entry_bb, stmt);
6162 se->flags = EDGE_TRUE_VALUE;
6163 entry_bb = se->dest;
6164 se->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
6165 se = make_edge (se->src, fin_bb, EDGE_FALSE_VALUE);
6166 se->probability = REG_BR_PROB_BASE / 2000 - 1;
6167 if (gimple_in_ssa_p (cfun))
6168 {
6169 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
6170 for (si = gsi_start_phis (fin_bb);
6171 !gsi_end_p (si); gsi_next (&si))
6172 {
6173 gimple phi = gsi_stmt (si);
6174 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
6175 se, UNKNOWN_LOCATION);
6176 }
6177 }
6178 si = gsi_last_bb (entry_bb);
6179 }
6180
acf0174b 6181 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
a68ab351 6182 t = fold_convert (itype, t);
726a989a
RB
6183 nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6184 true, GSI_SAME_STMT);
b8698a0f 6185
acf0174b 6186 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
a68ab351 6187 t = fold_convert (itype, t);
726a989a
RB
6188 threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6189 true, GSI_SAME_STMT);
917948d3 6190
acf0174b
JJ
6191 n1 = fd->loop.n1;
6192 n2 = fd->loop.n2;
6193 step = fd->loop.step;
6194 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6195 {
6196 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6197 OMP_CLAUSE__LOOPTEMP_);
6198 gcc_assert (innerc);
6199 n1 = OMP_CLAUSE_DECL (innerc);
6200 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6201 OMP_CLAUSE__LOOPTEMP_);
6202 gcc_assert (innerc);
6203 n2 = OMP_CLAUSE_DECL (innerc);
6204 }
6205 n1 = force_gimple_operand_gsi (&si, fold_convert (type, n1),
6206 true, NULL_TREE, true, GSI_SAME_STMT);
6207 n2 = force_gimple_operand_gsi (&si, fold_convert (itype, n2),
6208 true, NULL_TREE, true, GSI_SAME_STMT);
6209 step = force_gimple_operand_gsi (&si, fold_convert (itype, step),
6210 true, NULL_TREE, true, GSI_SAME_STMT);
917948d3 6211 fd->chunk_size
726a989a
RB
6212 = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
6213 true, NULL_TREE, true, GSI_SAME_STMT);
a68ab351
JJ
6214
6215 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
acf0174b
JJ
6216 t = fold_build2 (PLUS_EXPR, itype, step, t);
6217 t = fold_build2 (PLUS_EXPR, itype, t, n2);
6218 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
a68ab351
JJ
6219 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
6220 t = fold_build2 (TRUNC_DIV_EXPR, itype,
6221 fold_build1 (NEGATE_EXPR, itype, t),
acf0174b 6222 fold_build1 (NEGATE_EXPR, itype, step));
a68ab351 6223 else
acf0174b 6224 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
a68ab351 6225 t = fold_convert (itype, t);
726a989a
RB
6226 n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6227 true, GSI_SAME_STMT);
917948d3 6228
a5efada7 6229 trip_var = create_tmp_reg (itype, ".trip");
917948d3
ZD
6230 if (gimple_in_ssa_p (cfun))
6231 {
726a989a
RB
6232 trip_init = make_ssa_name (trip_var, NULL);
6233 trip_main = make_ssa_name (trip_var, NULL);
6234 trip_back = make_ssa_name (trip_var, NULL);
917948d3 6235 }
953ff289 6236 else
917948d3
ZD
6237 {
6238 trip_init = trip_var;
6239 trip_main = trip_var;
6240 trip_back = trip_var;
6241 }
953ff289 6242
726a989a
RB
6243 stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
6244 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
50674e96 6245
a68ab351 6246 t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
acf0174b 6247 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 6248 if (POINTER_TYPE_P (type))
acf0174b 6249 t = fold_build_pointer_plus (n1, t);
a68ab351 6250 else
acf0174b 6251 t = fold_build2 (PLUS_EXPR, type, t, n1);
726a989a
RB
6252 v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6253 true, GSI_SAME_STMT);
917948d3 6254
726a989a
RB
6255 /* Remove the GIMPLE_OMP_FOR. */
6256 gsi_remove (&si, true);
50674e96
DN
6257
6258 /* Iteration space partitioning goes in ITER_PART_BB. */
726a989a 6259 si = gsi_last_bb (iter_part_bb);
953ff289 6260
a68ab351
JJ
6261 t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
6262 t = fold_build2 (PLUS_EXPR, itype, t, threadid);
6263 t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
726a989a
RB
6264 s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6265 false, GSI_CONTINUE_LINKING);
953ff289 6266
a68ab351
JJ
6267 t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
6268 t = fold_build2 (MIN_EXPR, itype, t, n);
726a989a
RB
6269 e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6270 false, GSI_CONTINUE_LINKING);
953ff289
DN
6271
6272 t = build2 (LT_EXPR, boolean_type_node, s0, n);
726a989a 6273 gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);
50674e96
DN
6274
6275 /* Setup code for sequential iteration goes in SEQ_START_BB. */
726a989a 6276 si = gsi_start_bb (seq_start_bb);
953ff289 6277
acf0174b
JJ
6278 tree startvar = fd->loop.v;
6279 tree endvar = NULL_TREE;
6280
6281 if (gimple_omp_for_combined_p (fd->for_stmt))
6282 {
6283 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
6284 ? gimple_omp_parallel_clauses (inner_stmt)
6285 : gimple_omp_for_clauses (inner_stmt);
6286 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
6287 gcc_assert (innerc);
6288 startvar = OMP_CLAUSE_DECL (innerc);
6289 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6290 OMP_CLAUSE__LOOPTEMP_);
6291 gcc_assert (innerc);
6292 endvar = OMP_CLAUSE_DECL (innerc);
6293 }
6294
a68ab351 6295 t = fold_convert (itype, s0);
acf0174b 6296 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 6297 if (POINTER_TYPE_P (type))
acf0174b 6298 t = fold_build_pointer_plus (n1, t);
a68ab351 6299 else
acf0174b
JJ
6300 t = fold_build2 (PLUS_EXPR, type, t, n1);
6301 t = fold_convert (TREE_TYPE (startvar), t);
ea3a0fde 6302 t = force_gimple_operand_gsi (&si, t,
acf0174b
JJ
6303 DECL_P (startvar)
6304 && TREE_ADDRESSABLE (startvar),
ea3a0fde 6305 NULL_TREE, false, GSI_CONTINUE_LINKING);
acf0174b 6306 stmt = gimple_build_assign (startvar, t);
726a989a 6307 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
953ff289 6308
a68ab351 6309 t = fold_convert (itype, e0);
acf0174b 6310 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 6311 if (POINTER_TYPE_P (type))
acf0174b 6312 t = fold_build_pointer_plus (n1, t);
a68ab351 6313 else
acf0174b
JJ
6314 t = fold_build2 (PLUS_EXPR, type, t, n1);
6315 t = fold_convert (TREE_TYPE (startvar), t);
726a989a
RB
6316 e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6317 false, GSI_CONTINUE_LINKING);
acf0174b
JJ
6318 if (endvar)
6319 {
6320 stmt = gimple_build_assign (endvar, e);
6321 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6322 }
6323 if (fd->collapse > 1)
6324 expand_omp_for_init_vars (fd, &si, counts, inner_stmt, startvar);
6325
6326 if (!broken_loop)
6327 {
6328 /* The code controlling the sequential loop goes in CONT_BB,
6329 replacing the GIMPLE_OMP_CONTINUE. */
6330 si = gsi_last_bb (cont_bb);
6331 stmt = gsi_stmt (si);
6332 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6333 v_main = gimple_omp_continue_control_use (stmt);
6334 v_back = gimple_omp_continue_control_def (stmt);
953ff289 6335
acf0174b
JJ
6336 if (!gimple_omp_for_combined_p (fd->for_stmt))
6337 {
6338 if (POINTER_TYPE_P (type))
6339 t = fold_build_pointer_plus (v_main, step);
6340 else
6341 t = fold_build2 (PLUS_EXPR, type, v_main, step);
6342 if (DECL_P (v_back) && TREE_ADDRESSABLE (v_back))
6343 t = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6344 true, GSI_SAME_STMT);
6345 stmt = gimple_build_assign (v_back, t);
6346 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
917948d3 6347
acf0174b
JJ
6348 t = build2 (fd->loop.cond_code, boolean_type_node,
6349 DECL_P (v_back) && TREE_ADDRESSABLE (v_back)
6350 ? t : v_back, e);
6351 gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);
6352 }
917948d3 6353
acf0174b
JJ
6354 /* Remove GIMPLE_OMP_CONTINUE. */
6355 gsi_remove (&si, true);
b8698a0f 6356
acf0174b
JJ
6357 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
6358 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
50674e96 6359
acf0174b
JJ
6360 /* Trip update code goes into TRIP_UPDATE_BB. */
6361 si = gsi_start_bb (trip_update_bb);
953ff289 6362
acf0174b
JJ
6363 t = build_int_cst (itype, 1);
6364 t = build2 (PLUS_EXPR, itype, trip_main, t);
6365 stmt = gimple_build_assign (trip_back, t);
6366 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6367 }
953ff289 6368
726a989a
RB
6369 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
6370 si = gsi_last_bb (exit_bb);
6371 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
acf0174b
JJ
6372 {
6373 t = gimple_omp_return_lhs (gsi_stmt (si));
6374 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
6375 }
726a989a 6376 gsi_remove (&si, true);
953ff289 6377
50674e96 6378 /* Connect the new blocks. */
e5c95afe
ZD
6379 find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
6380 find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
917948d3 6381
acf0174b
JJ
6382 if (!broken_loop)
6383 {
6384 se = find_edge (cont_bb, body_bb);
6385 if (gimple_omp_for_combined_p (fd->for_stmt))
6386 {
6387 remove_edge (se);
6388 se = NULL;
6389 }
6390 else if (fd->collapse > 1)
6391 {
6392 remove_edge (se);
6393 se = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
6394 }
6395 else
6396 se->flags = EDGE_TRUE_VALUE;
6397 find_edge (cont_bb, trip_update_bb)->flags
6398 = se ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
917948d3 6399
acf0174b
JJ
6400 redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
6401 }
917948d3
ZD
6402
6403 if (gimple_in_ssa_p (cfun))
6404 {
726a989a
RB
6405 gimple_stmt_iterator psi;
6406 gimple phi;
6407 edge re, ene;
9771b263 6408 edge_var_map_vector *head;
726a989a
RB
6409 edge_var_map *vm;
6410 size_t i;
6411
acf0174b
JJ
6412 gcc_assert (fd->collapse == 1 && !broken_loop);
6413
917948d3
ZD
6414 /* When we redirect the edge from trip_update_bb to iter_part_bb, we
6415 remove arguments of the phi nodes in fin_bb. We need to create
6416 appropriate phi nodes in iter_part_bb instead. */
6417 se = single_pred_edge (fin_bb);
6418 re = single_succ_edge (trip_update_bb);
726a989a 6419 head = redirect_edge_var_map_vector (re);
917948d3
ZD
6420 ene = single_succ_edge (entry_bb);
6421
726a989a 6422 psi = gsi_start_phis (fin_bb);
9771b263 6423 for (i = 0; !gsi_end_p (psi) && head->iterate (i, &vm);
726a989a 6424 gsi_next (&psi), ++i)
917948d3 6425 {
726a989a 6426 gimple nphi;
f5045c96 6427 source_location locus;
726a989a
RB
6428
6429 phi = gsi_stmt (psi);
6430 t = gimple_phi_result (phi);
6431 gcc_assert (t == redirect_edge_var_map_result (vm));
917948d3 6432 nphi = create_phi_node (t, iter_part_bb);
917948d3
ZD
6433
6434 t = PHI_ARG_DEF_FROM_EDGE (phi, se);
f5045c96
AM
6435 locus = gimple_phi_arg_location_from_edge (phi, se);
6436
a68ab351
JJ
6437 /* A special case -- fd->loop.v is not yet computed in
6438 iter_part_bb, we need to use v_extra instead. */
6439 if (t == fd->loop.v)
917948d3 6440 t = v_extra;
9e227d60 6441 add_phi_arg (nphi, t, ene, locus);
f5045c96 6442 locus = redirect_edge_var_map_location (vm);
9e227d60 6443 add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
726a989a 6444 }
9771b263 6445 gcc_assert (!gsi_end_p (psi) && i == head->length ());
726a989a
RB
6446 redirect_edge_var_map_clear (re);
6447 while (1)
6448 {
6449 psi = gsi_start_phis (fin_bb);
6450 if (gsi_end_p (psi))
6451 break;
6452 remove_phi_node (&psi, false);
917948d3 6453 }
917948d3
ZD
6454
6455 /* Make phi node for trip. */
6456 phi = create_phi_node (trip_main, iter_part_bb);
f5045c96 6457 add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb),
9e227d60 6458 UNKNOWN_LOCATION);
f5045c96 6459 add_phi_arg (phi, trip_init, single_succ_edge (entry_bb),
9e227d60 6460 UNKNOWN_LOCATION);
917948d3
ZD
6461 }
6462
acf0174b
JJ
6463 if (!broken_loop)
6464 set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
917948d3
ZD
6465 set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
6466 recompute_dominator (CDI_DOMINATORS, iter_part_bb));
6467 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
6468 recompute_dominator (CDI_DOMINATORS, fin_bb));
6469 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
6470 recompute_dominator (CDI_DOMINATORS, seq_start_bb));
6471 set_immediate_dominator (CDI_DOMINATORS, body_bb,
6472 recompute_dominator (CDI_DOMINATORS, body_bb));
6093bc06 6473
acf0174b
JJ
6474 if (!broken_loop)
6475 {
6476 struct loop *trip_loop = alloc_loop ();
6477 trip_loop->header = iter_part_bb;
6478 trip_loop->latch = trip_update_bb;
6479 add_loop (trip_loop, iter_part_bb->loop_father);
6093bc06 6480
acf0174b
JJ
6481 if (!gimple_omp_for_combined_p (fd->for_stmt))
6482 {
6483 struct loop *loop = alloc_loop ();
6484 loop->header = body_bb;
6485 loop->latch = cont_bb;
6486 add_loop (loop, trip_loop);
6487 }
6488 }
953ff289
DN
6489}
6490
acf0174b 6491
74bf76ed
JJ
6492/* A subroutine of expand_omp_for. Generate code for a simd non-worksharing
6493 loop. Given parameters:
6494
6495 for (V = N1; V cond N2; V += STEP) BODY;
6496
6497 where COND is "<" or ">", we generate pseudocode
6498
6499 V = N1;
6500 goto L1;
6501 L0:
6502 BODY;
6503 V += STEP;
6504 L1:
6505 if (V cond N2) goto L0; else goto L2;
6506 L2:
6507
6508 For collapsed loops, given parameters:
6509 collapse(3)
6510 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
6511 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
6512 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
6513 BODY;
6514
6515 we generate pseudocode
6516
6517 if (cond3 is <)
6518 adj = STEP3 - 1;
6519 else
6520 adj = STEP3 + 1;
6521 count3 = (adj + N32 - N31) / STEP3;
6522 if (cond2 is <)
6523 adj = STEP2 - 1;
6524 else
6525 adj = STEP2 + 1;
6526 count2 = (adj + N22 - N21) / STEP2;
6527 if (cond1 is <)
6528 adj = STEP1 - 1;
6529 else
6530 adj = STEP1 + 1;
6531 count1 = (adj + N12 - N11) / STEP1;
6532 count = count1 * count2 * count3;
6533 V = 0;
6534 V1 = N11;
6535 V2 = N21;
6536 V3 = N31;
6537 goto L1;
6538 L0:
6539 BODY;
6540 V += 1;
6541 V3 += STEP3;
6542 V2 += (V3 cond3 N32) ? 0 : STEP2;
6543 V3 = (V3 cond3 N32) ? V3 : N31;
6544 V1 += (V2 cond2 N22) ? 0 : STEP1;
6545 V2 = (V2 cond2 N22) ? V2 : N21;
6546 L1:
6547 if (V < count) goto L0; else goto L2;
6548 L2:
6549
6550 */
6551
6552static void
6553expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
6554{
6555 tree type, t;
6556 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
6557 gimple_stmt_iterator gsi;
6558 gimple stmt;
6559 bool broken_loop = region->cont == NULL;
6560 edge e, ne;
6561 tree *counts = NULL;
6562 int i;
6563 tree safelen = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6564 OMP_CLAUSE_SAFELEN);
6565 tree simduid = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6566 OMP_CLAUSE__SIMDUID_);
acf0174b 6567 tree n1, n2;
74bf76ed
JJ
6568
6569 type = TREE_TYPE (fd->loop.v);
6570 entry_bb = region->entry;
6571 cont_bb = region->cont;
6572 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
6573 gcc_assert (broken_loop
6574 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
6575 l0_bb = FALLTHRU_EDGE (entry_bb)->dest;
6576 if (!broken_loop)
6577 {
6578 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l0_bb);
6579 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6580 l1_bb = split_block (cont_bb, last_stmt (cont_bb))->dest;
6581 l2_bb = BRANCH_EDGE (entry_bb)->dest;
6582 }
6583 else
6584 {
6585 BRANCH_EDGE (entry_bb)->flags &= ~EDGE_ABNORMAL;
6586 l1_bb = split_edge (BRANCH_EDGE (entry_bb));
6587 l2_bb = single_succ (l1_bb);
6588 }
6589 exit_bb = region->exit;
6590 l2_dom_bb = NULL;
6591
6592 gsi = gsi_last_bb (entry_bb);
6593
6594 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
6595 /* Not needed in SSA form right now. */
6596 gcc_assert (!gimple_in_ssa_p (cfun));
6597 if (fd->collapse > 1)
6598 {
6599 int first_zero_iter = -1;
6600 basic_block zero_iter_bb = l2_bb;
6601
6602 counts = XALLOCAVEC (tree, fd->collapse);
6603 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
6604 zero_iter_bb, first_zero_iter,
6605 l2_dom_bb);
6606 }
6607 if (l2_dom_bb == NULL)
6608 l2_dom_bb = l1_bb;
6609
acf0174b 6610 n1 = fd->loop.n1;
74bf76ed 6611 n2 = fd->loop.n2;
acf0174b
JJ
6612 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6613 {
6614 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6615 OMP_CLAUSE__LOOPTEMP_);
6616 gcc_assert (innerc);
6617 n1 = OMP_CLAUSE_DECL (innerc);
6618 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6619 OMP_CLAUSE__LOOPTEMP_);
6620 gcc_assert (innerc);
6621 n2 = OMP_CLAUSE_DECL (innerc);
6622 expand_omp_build_assign (&gsi, fd->loop.v,
6623 fold_convert (type, n1));
6624 if (fd->collapse > 1)
6625 {
6626 gsi_prev (&gsi);
6627 expand_omp_for_init_vars (fd, &gsi, counts, NULL, n1);
6628 gsi_next (&gsi);
6629 }
6630 }
74bf76ed
JJ
6631 else
6632 {
6633 expand_omp_build_assign (&gsi, fd->loop.v,
6634 fold_convert (type, fd->loop.n1));
6635 if (fd->collapse > 1)
6636 for (i = 0; i < fd->collapse; i++)
6637 {
6638 tree itype = TREE_TYPE (fd->loops[i].v);
6639 if (POINTER_TYPE_P (itype))
6640 itype = signed_type_for (itype);
6641 t = fold_convert (TREE_TYPE (fd->loops[i].v), fd->loops[i].n1);
6642 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6643 }
6644 }
6645
6646 /* Remove the GIMPLE_OMP_FOR statement. */
6647 gsi_remove (&gsi, true);
6648
6649 if (!broken_loop)
6650 {
6651 /* Code to control the increment goes in the CONT_BB. */
6652 gsi = gsi_last_bb (cont_bb);
6653 stmt = gsi_stmt (gsi);
6654 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6655
6656 if (POINTER_TYPE_P (type))
6657 t = fold_build_pointer_plus (fd->loop.v, fd->loop.step);
6658 else
6659 t = fold_build2 (PLUS_EXPR, type, fd->loop.v, fd->loop.step);
6660 expand_omp_build_assign (&gsi, fd->loop.v, t);
6661
6662 if (fd->collapse > 1)
6663 {
6664 i = fd->collapse - 1;
6665 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v)))
6666 {
6667 t = fold_convert (sizetype, fd->loops[i].step);
6668 t = fold_build_pointer_plus (fd->loops[i].v, t);
6669 }
6670 else
6671 {
6672 t = fold_convert (TREE_TYPE (fd->loops[i].v),
6673 fd->loops[i].step);
6674 t = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->loops[i].v),
6675 fd->loops[i].v, t);
6676 }
6677 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6678
6679 for (i = fd->collapse - 1; i > 0; i--)
6680 {
6681 tree itype = TREE_TYPE (fd->loops[i].v);
6682 tree itype2 = TREE_TYPE (fd->loops[i - 1].v);
6683 if (POINTER_TYPE_P (itype2))
6684 itype2 = signed_type_for (itype2);
6685 t = build3 (COND_EXPR, itype2,
6686 build2 (fd->loops[i].cond_code, boolean_type_node,
6687 fd->loops[i].v,
6688 fold_convert (itype, fd->loops[i].n2)),
6689 build_int_cst (itype2, 0),
6690 fold_convert (itype2, fd->loops[i - 1].step));
6691 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i - 1].v)))
6692 t = fold_build_pointer_plus (fd->loops[i - 1].v, t);
6693 else
6694 t = fold_build2 (PLUS_EXPR, itype2, fd->loops[i - 1].v, t);
6695 expand_omp_build_assign (&gsi, fd->loops[i - 1].v, t);
6696
6697 t = build3 (COND_EXPR, itype,
6698 build2 (fd->loops[i].cond_code, boolean_type_node,
6699 fd->loops[i].v,
6700 fold_convert (itype, fd->loops[i].n2)),
6701 fd->loops[i].v,
6702 fold_convert (itype, fd->loops[i].n1));
6703 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6704 }
6705 }
6706
6707 /* Remove GIMPLE_OMP_CONTINUE. */
6708 gsi_remove (&gsi, true);
6709 }
6710
6711 /* Emit the condition in L1_BB. */
6712 gsi = gsi_start_bb (l1_bb);
6713
6714 t = fold_convert (type, n2);
6715 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
6716 false, GSI_CONTINUE_LINKING);
6717 t = build2 (fd->loop.cond_code, boolean_type_node, fd->loop.v, t);
6718 stmt = gimple_build_cond_empty (t);
6719 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
6720 if (walk_tree (gimple_cond_lhs_ptr (stmt), expand_omp_regimplify_p,
6721 NULL, NULL)
6722 || walk_tree (gimple_cond_rhs_ptr (stmt), expand_omp_regimplify_p,
6723 NULL, NULL))
6724 {
6725 gsi = gsi_for_stmt (stmt);
6726 gimple_regimplify_operands (stmt, &gsi);
6727 }
6728
6729 /* Remove GIMPLE_OMP_RETURN. */
6730 gsi = gsi_last_bb (exit_bb);
6731 gsi_remove (&gsi, true);
6732
6733 /* Connect the new blocks. */
6734 remove_edge (FALLTHRU_EDGE (entry_bb));
6735
6736 if (!broken_loop)
6737 {
6738 remove_edge (BRANCH_EDGE (entry_bb));
6739 make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);
6740
6741 e = BRANCH_EDGE (l1_bb);
6742 ne = FALLTHRU_EDGE (l1_bb);
6743 e->flags = EDGE_TRUE_VALUE;
6744 }
6745 else
6746 {
6747 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
6748
6749 ne = single_succ_edge (l1_bb);
6750 e = make_edge (l1_bb, l0_bb, EDGE_TRUE_VALUE);
6751
6752 }
6753 ne->flags = EDGE_FALSE_VALUE;
6754 e->probability = REG_BR_PROB_BASE * 7 / 8;
6755 ne->probability = REG_BR_PROB_BASE / 8;
6756
6757 set_immediate_dominator (CDI_DOMINATORS, l1_bb, entry_bb);
6758 set_immediate_dominator (CDI_DOMINATORS, l2_bb, l2_dom_bb);
6759 set_immediate_dominator (CDI_DOMINATORS, l0_bb, l1_bb);
6760
6761 if (!broken_loop)
6762 {
6763 struct loop *loop = alloc_loop ();
6764 loop->header = l1_bb;
6765 loop->latch = e->dest;
6766 add_loop (loop, l1_bb->loop_father);
6767 if (safelen == NULL_TREE)
6768 loop->safelen = INT_MAX;
6769 else
6770 {
6771 safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
cc269bb6 6772 if (!tree_fits_uhwi_p (safelen)
74bf76ed
JJ
6773 || (unsigned HOST_WIDE_INT) tree_low_cst (safelen, 1)
6774 > INT_MAX)
6775 loop->safelen = INT_MAX;
6776 else
6777 loop->safelen = tree_low_cst (safelen, 1);
6778 if (loop->safelen == 1)
6779 loop->safelen = 0;
6780 }
6781 if (simduid)
6782 {
6783 loop->simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6784 cfun->has_simduid_loops = true;
6785 }
ea0f3e87 6786 /* If not -fno-tree-loop-vectorize, hint that we want to vectorize
74bf76ed 6787 the loop. */
ea0f3e87
XDL
6788 if ((flag_tree_loop_vectorize
6789 || (!global_options_set.x_flag_tree_loop_vectorize
6790 && !global_options_set.x_flag_tree_vectorize))
74bf76ed
JJ
6791 && loop->safelen > 1)
6792 {
6793 loop->force_vect = true;
6794 cfun->has_force_vect_loops = true;
6795 }
6796 }
6797}
6798
953ff289 6799
50674e96 6800/* Expand the OpenMP loop defined by REGION. */
953ff289 6801
50674e96 6802static void
acf0174b 6803expand_omp_for (struct omp_region *region, gimple inner_stmt)
50674e96
DN
6804{
6805 struct omp_for_data fd;
a68ab351 6806 struct omp_for_data_loop *loops;
953ff289 6807
a68ab351
JJ
6808 loops
6809 = (struct omp_for_data_loop *)
726a989a 6810 alloca (gimple_omp_for_collapse (last_stmt (region->entry))
a68ab351 6811 * sizeof (struct omp_for_data_loop));
a68ab351 6812 extract_omp_for_data (last_stmt (region->entry), &fd, loops);
21a66e91 6813 region->sched_kind = fd.sched_kind;
953ff289 6814
135a171d
JJ
6815 gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
6816 BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
6817 FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
6818 if (region->cont)
6819 {
6820 gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
6821 BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
6822 FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
6823 }
6093bc06 6824 else
1aa95df7 6825 /* If there isn't a continue then this is a degerate case where
6093bc06
RB
6826 the introduction of abnormal edges during lowering will prevent
6827 original loops from being detected. Fix that up. */
6828 loops_state_set (LOOPS_NEED_FIXUP);
135a171d 6829
c02065fc 6830 if (gimple_omp_for_kind (fd.for_stmt) & GF_OMP_FOR_KIND_SIMD)
74bf76ed
JJ
6831 expand_omp_simd (region, &fd);
6832 else if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
acf0174b 6833 && !fd.have_ordered)
953ff289
DN
6834 {
6835 if (fd.chunk_size == NULL)
acf0174b 6836 expand_omp_for_static_nochunk (region, &fd, inner_stmt);
953ff289 6837 else
acf0174b 6838 expand_omp_for_static_chunk (region, &fd, inner_stmt);
953ff289
DN
6839 }
6840 else
6841 {
a68ab351
JJ
6842 int fn_index, start_ix, next_ix;
6843
74bf76ed
JJ
6844 gcc_assert (gimple_omp_for_kind (fd.for_stmt)
6845 == GF_OMP_FOR_KIND_FOR);
9abd5ed9
JJ
6846 if (fd.chunk_size == NULL
6847 && fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
6848 fd.chunk_size = integer_zero_node;
a68ab351
JJ
6849 gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
6850 fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
726a989a 6851 ? 3 : fd.sched_kind;
a68ab351 6852 fn_index += fd.have_ordered * 4;
e79983f4
MM
6853 start_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_START) + fn_index;
6854 next_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_NEXT) + fn_index;
a68ab351
JJ
6855 if (fd.iter_type == long_long_unsigned_type_node)
6856 {
e79983f4
MM
6857 start_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_START
6858 - (int)BUILT_IN_GOMP_LOOP_STATIC_START);
6859 next_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
6860 - (int)BUILT_IN_GOMP_LOOP_STATIC_NEXT);
a68ab351 6861 }
bbbbb16a 6862 expand_omp_for_generic (region, &fd, (enum built_in_function) start_ix,
acf0174b 6863 (enum built_in_function) next_ix, inner_stmt);
953ff289 6864 }
5f40b3cb 6865
a5efada7
RG
6866 if (gimple_in_ssa_p (cfun))
6867 update_ssa (TODO_update_ssa_only_virtuals);
953ff289
DN
6868}
6869
953ff289
DN
6870
6871/* Expand code for an OpenMP sections directive. In pseudo code, we generate
6872
953ff289
DN
6873 v = GOMP_sections_start (n);
6874 L0:
6875 switch (v)
6876 {
6877 case 0:
6878 goto L2;
6879 case 1:
6880 section 1;
6881 goto L1;
6882 case 2:
6883 ...
6884 case n:
6885 ...
953ff289
DN
6886 default:
6887 abort ();
6888 }
6889 L1:
6890 v = GOMP_sections_next ();
6891 goto L0;
6892 L2:
6893 reduction;
6894
50674e96 6895 If this is a combined parallel sections, replace the call to
917948d3 6896 GOMP_sections_start with call to GOMP_sections_next. */
953ff289
DN
6897
6898static void
50674e96 6899expand_omp_sections (struct omp_region *region)
953ff289 6900{
0f900dfa 6901 tree t, u, vin = NULL, vmain, vnext, l2;
9771b263 6902 vec<tree> label_vec;
726a989a 6903 unsigned len;
e5c95afe 6904 basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
726a989a
RB
6905 gimple_stmt_iterator si, switch_si;
6906 gimple sections_stmt, stmt, cont;
c34938a8
JJ
6907 edge_iterator ei;
6908 edge e;
777f7f9a 6909 struct omp_region *inner;
726a989a 6910 unsigned i, casei;
e5c95afe 6911 bool exit_reachable = region->cont != NULL;
953ff289 6912
65e7bfe3 6913 gcc_assert (region->exit != NULL);
777f7f9a 6914 entry_bb = region->entry;
e5c95afe 6915 l0_bb = single_succ (entry_bb);
777f7f9a 6916 l1_bb = region->cont;
e5c95afe 6917 l2_bb = region->exit;
65e7bfe3
JJ
6918 if (single_pred_p (l2_bb) && single_pred (l2_bb) == l0_bb)
6919 l2 = gimple_block_label (l2_bb);
6920 else
d3c673c7 6921 {
65e7bfe3
JJ
6922 /* This can happen if there are reductions. */
6923 len = EDGE_COUNT (l0_bb->succs);
6924 gcc_assert (len > 0);
6925 e = EDGE_SUCC (l0_bb, len - 1);
6926 si = gsi_last_bb (e->dest);
6927 l2 = NULL_TREE;
6928 if (gsi_end_p (si)
6929 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
6930 l2 = gimple_block_label (e->dest);
c34938a8 6931 else
65e7bfe3
JJ
6932 FOR_EACH_EDGE (e, ei, l0_bb->succs)
6933 {
6934 si = gsi_last_bb (e->dest);
6935 if (gsi_end_p (si)
6936 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
c34938a8 6937 {
65e7bfe3
JJ
6938 l2 = gimple_block_label (e->dest);
6939 break;
c34938a8 6940 }
65e7bfe3 6941 }
d3c673c7 6942 }
65e7bfe3
JJ
6943 if (exit_reachable)
6944 default_bb = create_empty_bb (l1_bb->prev_bb);
d3c673c7 6945 else
65e7bfe3 6946 default_bb = create_empty_bb (l0_bb);
50674e96
DN
6947
6948 /* We will build a switch() with enough cases for all the
726a989a 6949 GIMPLE_OMP_SECTION regions, a '0' case to handle the end of more work
50674e96 6950 and a default case to abort if something goes wrong. */
e5c95afe 6951 len = EDGE_COUNT (l0_bb->succs);
726a989a 6952
9771b263 6953 /* Use vec::quick_push on label_vec throughout, since we know the size
726a989a 6954 in advance. */
9771b263 6955 label_vec.create (len);
953ff289 6956
777f7f9a 6957 /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
726a989a
RB
6958 GIMPLE_OMP_SECTIONS statement. */
6959 si = gsi_last_bb (entry_bb);
6960 sections_stmt = gsi_stmt (si);
6961 gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
6962 vin = gimple_omp_sections_control (sections_stmt);
50674e96 6963 if (!is_combined_parallel (region))
953ff289 6964 {
50674e96
DN
6965 /* If we are not inside a combined parallel+sections region,
6966 call GOMP_sections_start. */
4befd127 6967 t = build_int_cst (unsigned_type_node, len - 1);
e79983f4 6968 u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_START);
726a989a 6969 stmt = gimple_build_call (u, 1, t);
953ff289 6970 }
917948d3
ZD
6971 else
6972 {
6973 /* Otherwise, call GOMP_sections_next. */
e79983f4 6974 u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
726a989a 6975 stmt = gimple_build_call (u, 0);
917948d3 6976 }
726a989a
RB
6977 gimple_call_set_lhs (stmt, vin);
6978 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
6979 gsi_remove (&si, true);
6980
6981 /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
6982 L0_BB. */
6983 switch_si = gsi_last_bb (l0_bb);
6984 gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
917948d3
ZD
6985 if (exit_reachable)
6986 {
6987 cont = last_stmt (l1_bb);
726a989a
RB
6988 gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
6989 vmain = gimple_omp_continue_control_use (cont);
6990 vnext = gimple_omp_continue_control_def (cont);
917948d3
ZD
6991 }
6992 else
6993 {
6994 vmain = vin;
6995 vnext = NULL_TREE;
6996 }
953ff289 6997
65e7bfe3 6998 t = build_case_label (build_int_cst (unsigned_type_node, 0), NULL, l2);
9771b263 6999 label_vec.quick_push (t);
65e7bfe3 7000 i = 1;
d3c673c7 7001
726a989a 7002 /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR. */
e5c95afe
ZD
7003 for (inner = region->inner, casei = 1;
7004 inner;
7005 inner = inner->next, i++, casei++)
953ff289 7006 {
50674e96
DN
7007 basic_block s_entry_bb, s_exit_bb;
7008
c34938a8 7009 /* Skip optional reduction region. */
726a989a 7010 if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
c34938a8
JJ
7011 {
7012 --i;
7013 --casei;
7014 continue;
7015 }
7016
777f7f9a
RH
7017 s_entry_bb = inner->entry;
7018 s_exit_bb = inner->exit;
953ff289 7019
726a989a 7020 t = gimple_block_label (s_entry_bb);
e5c95afe 7021 u = build_int_cst (unsigned_type_node, casei);
3d528853 7022 u = build_case_label (u, NULL, t);
9771b263 7023 label_vec.quick_push (u);
777f7f9a 7024
726a989a
RB
7025 si = gsi_last_bb (s_entry_bb);
7026 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
7027 gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
7028 gsi_remove (&si, true);
777f7f9a 7029 single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
d3c673c7
JJ
7030
7031 if (s_exit_bb == NULL)
7032 continue;
7033
726a989a
RB
7034 si = gsi_last_bb (s_exit_bb);
7035 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
7036 gsi_remove (&si, true);
d3c673c7 7037
50674e96 7038 single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
953ff289
DN
7039 }
7040
50674e96 7041 /* Error handling code goes in DEFAULT_BB. */
726a989a 7042 t = gimple_block_label (default_bb);
3d528853 7043 u = build_case_label (NULL, NULL, t);
777f7f9a 7044 make_edge (l0_bb, default_bb, 0);
a9e0d843 7045 if (current_loops)
6093bc06 7046 add_bb_to_loop (default_bb, current_loops->tree_root);
953ff289 7047
fd8d363e 7048 stmt = gimple_build_switch (vmain, u, label_vec);
726a989a
RB
7049 gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
7050 gsi_remove (&switch_si, true);
9771b263 7051 label_vec.release ();
726a989a
RB
7052
7053 si = gsi_start_bb (default_bb);
e79983f4 7054 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
726a989a 7055 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
50674e96 7056
e5c95afe 7057 if (exit_reachable)
d3c673c7 7058 {
e79983f4
MM
7059 tree bfn_decl;
7060
e5c95afe 7061 /* Code to get the next section goes in L1_BB. */
726a989a
RB
7062 si = gsi_last_bb (l1_bb);
7063 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);
953ff289 7064
e79983f4
MM
7065 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
7066 stmt = gimple_build_call (bfn_decl, 0);
726a989a
RB
7067 gimple_call_set_lhs (stmt, vnext);
7068 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
7069 gsi_remove (&si, true);
50674e96 7070
e5c95afe 7071 single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
d3c673c7 7072 }
50674e96 7073
65e7bfe3
JJ
7074 /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB. */
7075 si = gsi_last_bb (l2_bb);
7076 if (gimple_omp_return_nowait_p (gsi_stmt (si)))
7077 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_NOWAIT);
acf0174b
JJ
7078 else if (gimple_omp_return_lhs (gsi_stmt (si)))
7079 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_CANCEL);
65e7bfe3
JJ
7080 else
7081 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END);
7082 stmt = gimple_build_call (t, 0);
acf0174b
JJ
7083 if (gimple_omp_return_lhs (gsi_stmt (si)))
7084 gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (si)));
65e7bfe3
JJ
7085 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
7086 gsi_remove (&si, true);
7087
917948d3 7088 set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
50674e96 7089}
953ff289 7090
953ff289 7091
777f7f9a
RH
7092/* Expand code for an OpenMP single directive. We've already expanded
7093 much of the code, here we simply place the GOMP_barrier call. */
7094
7095static void
7096expand_omp_single (struct omp_region *region)
7097{
7098 basic_block entry_bb, exit_bb;
726a989a 7099 gimple_stmt_iterator si;
777f7f9a
RH
7100
7101 entry_bb = region->entry;
7102 exit_bb = region->exit;
7103
726a989a 7104 si = gsi_last_bb (entry_bb);
726a989a
RB
7105 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
7106 gsi_remove (&si, true);
777f7f9a
RH
7107 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7108
726a989a 7109 si = gsi_last_bb (exit_bb);
acf0174b
JJ
7110 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
7111 {
7112 tree t = gimple_omp_return_lhs (gsi_stmt (si));
7113 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
7114 }
726a989a 7115 gsi_remove (&si, true);
777f7f9a
RH
7116 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7117}
7118
7119
7120/* Generic expansion for OpenMP synchronization directives: master,
7121 ordered and critical. All we need to do here is remove the entry
7122 and exit markers for REGION. */
50674e96
DN
7123
7124static void
7125expand_omp_synch (struct omp_region *region)
7126{
7127 basic_block entry_bb, exit_bb;
726a989a 7128 gimple_stmt_iterator si;
50674e96 7129
777f7f9a
RH
7130 entry_bb = region->entry;
7131 exit_bb = region->exit;
50674e96 7132
726a989a
RB
7133 si = gsi_last_bb (entry_bb);
7134 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
7135 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
acf0174b 7136 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TASKGROUP
726a989a 7137 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
acf0174b
JJ
7138 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL
7139 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TEAMS);
726a989a 7140 gsi_remove (&si, true);
50674e96
DN
7141 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7142
d3c673c7
JJ
7143 if (exit_bb)
7144 {
726a989a
RB
7145 si = gsi_last_bb (exit_bb);
7146 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
7147 gsi_remove (&si, true);
d3c673c7
JJ
7148 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7149 }
50674e96 7150}
953ff289 7151
20906c66
JJ
7152/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7153 operation as a normal volatile load. */
7154
7155static bool
05409788
RH
7156expand_omp_atomic_load (basic_block load_bb, tree addr,
7157 tree loaded_val, int index)
20906c66 7158{
05409788
RH
7159 enum built_in_function tmpbase;
7160 gimple_stmt_iterator gsi;
7161 basic_block store_bb;
7162 location_t loc;
7163 gimple stmt;
7164 tree decl, call, type, itype;
7165
7166 gsi = gsi_last_bb (load_bb);
7167 stmt = gsi_stmt (gsi);
7168 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7169 loc = gimple_location (stmt);
7170
7171 /* ??? If the target does not implement atomic_load_optab[mode], and mode
7172 is smaller than word size, then expand_atomic_load assumes that the load
7173 is atomic. We could avoid the builtin entirely in this case. */
7174
7175 tmpbase = (enum built_in_function) (BUILT_IN_ATOMIC_LOAD_N + index + 1);
7176 decl = builtin_decl_explicit (tmpbase);
7177 if (decl == NULL_TREE)
7178 return false;
7179
7180 type = TREE_TYPE (loaded_val);
7181 itype = TREE_TYPE (TREE_TYPE (decl));
7182
7183 call = build_call_expr_loc (loc, decl, 2, addr,
acf0174b
JJ
7184 build_int_cst (NULL,
7185 gimple_omp_atomic_seq_cst_p (stmt)
7186 ? MEMMODEL_SEQ_CST
7187 : MEMMODEL_RELAXED));
05409788
RH
7188 if (!useless_type_conversion_p (type, itype))
7189 call = fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7190 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7191
7192 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7193 gsi_remove (&gsi, true);
7194
7195 store_bb = single_succ (load_bb);
7196 gsi = gsi_last_bb (store_bb);
7197 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7198 gsi_remove (&gsi, true);
7199
7200 if (gimple_in_ssa_p (cfun))
7201 update_ssa (TODO_update_ssa_no_phi);
7202
7203 return true;
20906c66
JJ
7204}
7205
7206/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7207 operation as a normal volatile store. */
7208
7209static bool
05409788
RH
7210expand_omp_atomic_store (basic_block load_bb, tree addr,
7211 tree loaded_val, tree stored_val, int index)
20906c66 7212{
05409788
RH
7213 enum built_in_function tmpbase;
7214 gimple_stmt_iterator gsi;
7215 basic_block store_bb = single_succ (load_bb);
7216 location_t loc;
7217 gimple stmt;
7218 tree decl, call, type, itype;
7219 enum machine_mode imode;
7220 bool exchange;
7221
7222 gsi = gsi_last_bb (load_bb);
7223 stmt = gsi_stmt (gsi);
7224 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7225
7226 /* If the load value is needed, then this isn't a store but an exchange. */
7227 exchange = gimple_omp_atomic_need_value_p (stmt);
7228
7229 gsi = gsi_last_bb (store_bb);
7230 stmt = gsi_stmt (gsi);
7231 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE);
7232 loc = gimple_location (stmt);
7233
7234 /* ??? If the target does not implement atomic_store_optab[mode], and mode
7235 is smaller than word size, then expand_atomic_store assumes that the store
7236 is atomic. We could avoid the builtin entirely in this case. */
7237
7238 tmpbase = (exchange ? BUILT_IN_ATOMIC_EXCHANGE_N : BUILT_IN_ATOMIC_STORE_N);
7239 tmpbase = (enum built_in_function) ((int) tmpbase + index + 1);
7240 decl = builtin_decl_explicit (tmpbase);
7241 if (decl == NULL_TREE)
7242 return false;
7243
7244 type = TREE_TYPE (stored_val);
7245
7246 /* Dig out the type of the function's second argument. */
7247 itype = TREE_TYPE (decl);
7248 itype = TYPE_ARG_TYPES (itype);
7249 itype = TREE_CHAIN (itype);
7250 itype = TREE_VALUE (itype);
7251 imode = TYPE_MODE (itype);
7252
7253 if (exchange && !can_atomic_exchange_p (imode, true))
7254 return false;
7255
7256 if (!useless_type_conversion_p (itype, type))
7257 stored_val = fold_build1_loc (loc, VIEW_CONVERT_EXPR, itype, stored_val);
7258 call = build_call_expr_loc (loc, decl, 3, addr, stored_val,
acf0174b
JJ
7259 build_int_cst (NULL,
7260 gimple_omp_atomic_seq_cst_p (stmt)
7261 ? MEMMODEL_SEQ_CST
7262 : MEMMODEL_RELAXED));
05409788
RH
7263 if (exchange)
7264 {
7265 if (!useless_type_conversion_p (type, itype))
7266 call = build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7267 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7268 }
7269
7270 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7271 gsi_remove (&gsi, true);
7272
7273 /* Remove the GIMPLE_OMP_ATOMIC_LOAD that we verified above. */
7274 gsi = gsi_last_bb (load_bb);
7275 gsi_remove (&gsi, true);
7276
7277 if (gimple_in_ssa_p (cfun))
7278 update_ssa (TODO_update_ssa_no_phi);
7279
7280 return true;
20906c66
JJ
7281}
7282
a509ebb5 7283/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
86951993 7284 operation as a __atomic_fetch_op builtin. INDEX is log2 of the
a509ebb5
RL
7285 size of the data type, and thus usable to find the index of the builtin
7286 decl. Returns false if the expression is not of the proper form. */
7287
7288static bool
7289expand_omp_atomic_fetch_op (basic_block load_bb,
7290 tree addr, tree loaded_val,
7291 tree stored_val, int index)
7292{
e79983f4 7293 enum built_in_function oldbase, newbase, tmpbase;
a509ebb5 7294 tree decl, itype, call;
20906c66 7295 tree lhs, rhs;
a509ebb5 7296 basic_block store_bb = single_succ (load_bb);
726a989a
RB
7297 gimple_stmt_iterator gsi;
7298 gimple stmt;
db3927fb 7299 location_t loc;
86951993 7300 enum tree_code code;
20906c66 7301 bool need_old, need_new;
86951993 7302 enum machine_mode imode;
acf0174b 7303 bool seq_cst;
a509ebb5
RL
7304
7305 /* We expect to find the following sequences:
b8698a0f 7306
a509ebb5 7307 load_bb:
726a989a 7308 GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)
a509ebb5
RL
7309
7310 store_bb:
7311 val = tmp OP something; (or: something OP tmp)
b8698a0f 7312 GIMPLE_OMP_STORE (val)
a509ebb5 7313
b8698a0f 7314 ???FIXME: Allow a more flexible sequence.
a509ebb5 7315 Perhaps use data flow to pick the statements.
b8698a0f 7316
a509ebb5
RL
7317 */
7318
726a989a
RB
7319 gsi = gsi_after_labels (store_bb);
7320 stmt = gsi_stmt (gsi);
db3927fb 7321 loc = gimple_location (stmt);
726a989a 7322 if (!is_gimple_assign (stmt))
a509ebb5 7323 return false;
726a989a
RB
7324 gsi_next (&gsi);
7325 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
a509ebb5 7326 return false;
20906c66
JJ
7327 need_new = gimple_omp_atomic_need_value_p (gsi_stmt (gsi));
7328 need_old = gimple_omp_atomic_need_value_p (last_stmt (load_bb));
acf0174b 7329 seq_cst = gimple_omp_atomic_seq_cst_p (last_stmt (load_bb));
20906c66 7330 gcc_checking_assert (!need_old || !need_new);
a509ebb5 7331
726a989a 7332 if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
a509ebb5
RL
7333 return false;
7334
a509ebb5 7335 /* Check for one of the supported fetch-op operations. */
86951993
AM
7336 code = gimple_assign_rhs_code (stmt);
7337 switch (code)
a509ebb5
RL
7338 {
7339 case PLUS_EXPR:
7340 case POINTER_PLUS_EXPR:
86951993
AM
7341 oldbase = BUILT_IN_ATOMIC_FETCH_ADD_N;
7342 newbase = BUILT_IN_ATOMIC_ADD_FETCH_N;
a509ebb5
RL
7343 break;
7344 case MINUS_EXPR:
86951993
AM
7345 oldbase = BUILT_IN_ATOMIC_FETCH_SUB_N;
7346 newbase = BUILT_IN_ATOMIC_SUB_FETCH_N;
a509ebb5
RL
7347 break;
7348 case BIT_AND_EXPR:
86951993
AM
7349 oldbase = BUILT_IN_ATOMIC_FETCH_AND_N;
7350 newbase = BUILT_IN_ATOMIC_AND_FETCH_N;
a509ebb5
RL
7351 break;
7352 case BIT_IOR_EXPR:
86951993
AM
7353 oldbase = BUILT_IN_ATOMIC_FETCH_OR_N;
7354 newbase = BUILT_IN_ATOMIC_OR_FETCH_N;
a509ebb5
RL
7355 break;
7356 case BIT_XOR_EXPR:
86951993
AM
7357 oldbase = BUILT_IN_ATOMIC_FETCH_XOR_N;
7358 newbase = BUILT_IN_ATOMIC_XOR_FETCH_N;
a509ebb5
RL
7359 break;
7360 default:
7361 return false;
7362 }
86951993 7363
a509ebb5 7364 /* Make sure the expression is of the proper form. */
726a989a
RB
7365 if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
7366 rhs = gimple_assign_rhs2 (stmt);
7367 else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
7368 && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
7369 rhs = gimple_assign_rhs1 (stmt);
a509ebb5
RL
7370 else
7371 return false;
7372
e79983f4
MM
7373 tmpbase = ((enum built_in_function)
7374 ((need_new ? newbase : oldbase) + index + 1));
7375 decl = builtin_decl_explicit (tmpbase);
20790697
JJ
7376 if (decl == NULL_TREE)
7377 return false;
a509ebb5 7378 itype = TREE_TYPE (TREE_TYPE (decl));
86951993 7379 imode = TYPE_MODE (itype);
a509ebb5 7380
86951993
AM
7381 /* We could test all of the various optabs involved, but the fact of the
7382 matter is that (with the exception of i486 vs i586 and xadd) all targets
7383 that support any atomic operaton optab also implements compare-and-swap.
7384 Let optabs.c take care of expanding any compare-and-swap loop. */
cedb4a1a 7385 if (!can_compare_and_swap_p (imode, true))
a509ebb5
RL
7386 return false;
7387
726a989a
RB
7388 gsi = gsi_last_bb (load_bb);
7389 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
86951993
AM
7390
7391 /* OpenMP does not imply any barrier-like semantics on its atomic ops.
7392 It only requires that the operation happen atomically. Thus we can
7393 use the RELAXED memory model. */
7394 call = build_call_expr_loc (loc, decl, 3, addr,
7395 fold_convert_loc (loc, itype, rhs),
acf0174b
JJ
7396 build_int_cst (NULL,
7397 seq_cst ? MEMMODEL_SEQ_CST
7398 : MEMMODEL_RELAXED));
86951993 7399
20906c66
JJ
7400 if (need_old || need_new)
7401 {
7402 lhs = need_old ? loaded_val : stored_val;
7403 call = fold_convert_loc (loc, TREE_TYPE (lhs), call);
7404 call = build2_loc (loc, MODIFY_EXPR, void_type_node, lhs, call);
7405 }
7406 else
7407 call = fold_convert_loc (loc, void_type_node, call);
726a989a
RB
7408 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7409 gsi_remove (&gsi, true);
a509ebb5 7410
726a989a
RB
7411 gsi = gsi_last_bb (store_bb);
7412 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7413 gsi_remove (&gsi, true);
7414 gsi = gsi_last_bb (store_bb);
7415 gsi_remove (&gsi, true);
a509ebb5
RL
7416
7417 if (gimple_in_ssa_p (cfun))
7418 update_ssa (TODO_update_ssa_no_phi);
7419
7420 return true;
7421}
7422
7423/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
7424
7425 oldval = *addr;
7426 repeat:
7427 newval = rhs; // with oldval replacing *addr in rhs
7428 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
7429 if (oldval != newval)
7430 goto repeat;
7431
7432 INDEX is log2 of the size of the data type, and thus usable to find the
7433 index of the builtin decl. */
7434
7435static bool
7436expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
7437 tree addr, tree loaded_val, tree stored_val,
7438 int index)
7439{
c18c98c0 7440 tree loadedi, storedi, initial, new_storedi, old_vali;
a509ebb5 7441 tree type, itype, cmpxchg, iaddr;
726a989a 7442 gimple_stmt_iterator si;
a509ebb5 7443 basic_block loop_header = single_succ (load_bb);
726a989a 7444 gimple phi, stmt;
a509ebb5 7445 edge e;
e79983f4 7446 enum built_in_function fncode;
a509ebb5 7447
86951993
AM
7448 /* ??? We need a non-pointer interface to __atomic_compare_exchange in
7449 order to use the RELAXED memory model effectively. */
e79983f4
MM
7450 fncode = (enum built_in_function)((int)BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N
7451 + index + 1);
7452 cmpxchg = builtin_decl_explicit (fncode);
20790697
JJ
7453 if (cmpxchg == NULL_TREE)
7454 return false;
a509ebb5
RL
7455 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7456 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
7457
cedb4a1a 7458 if (!can_compare_and_swap_p (TYPE_MODE (itype), true))
a509ebb5
RL
7459 return false;
7460
726a989a
RB
7461 /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD. */
7462 si = gsi_last_bb (load_bb);
7463 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
7464
c18c98c0
JJ
7465 /* For floating-point values, we'll need to view-convert them to integers
7466 so that we can perform the atomic compare and swap. Simplify the
7467 following code by always setting up the "i"ntegral variables. */
7468 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
7469 {
726a989a
RB
7470 tree iaddr_val;
7471
7cc434a3
RG
7472 iaddr = create_tmp_reg (build_pointer_type_for_mode (itype, ptr_mode,
7473 true), NULL);
726a989a
RB
7474 iaddr_val
7475 = force_gimple_operand_gsi (&si,
7476 fold_convert (TREE_TYPE (iaddr), addr),
7477 false, NULL_TREE, true, GSI_SAME_STMT);
7478 stmt = gimple_build_assign (iaddr, iaddr_val);
7479 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
c18c98c0
JJ
7480 loadedi = create_tmp_var (itype, NULL);
7481 if (gimple_in_ssa_p (cfun))
46eb666a 7482 loadedi = make_ssa_name (loadedi, NULL);
c18c98c0
JJ
7483 }
7484 else
7485 {
7486 iaddr = addr;
7487 loadedi = loaded_val;
7488 }
726a989a 7489
70f34814
RG
7490 initial
7491 = force_gimple_operand_gsi (&si,
7492 build2 (MEM_REF, TREE_TYPE (TREE_TYPE (iaddr)),
7493 iaddr,
7494 build_int_cst (TREE_TYPE (iaddr), 0)),
7495 true, NULL_TREE, true, GSI_SAME_STMT);
c18c98c0
JJ
7496
7497 /* Move the value to the LOADEDI temporary. */
a509ebb5
RL
7498 if (gimple_in_ssa_p (cfun))
7499 {
726a989a 7500 gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
c18c98c0 7501 phi = create_phi_node (loadedi, loop_header);
a509ebb5
RL
7502 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
7503 initial);
7504 }
7505 else
726a989a
RB
7506 gsi_insert_before (&si,
7507 gimple_build_assign (loadedi, initial),
7508 GSI_SAME_STMT);
c18c98c0
JJ
7509 if (loadedi != loaded_val)
7510 {
726a989a
RB
7511 gimple_stmt_iterator gsi2;
7512 tree x;
c18c98c0
JJ
7513
7514 x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
726a989a 7515 gsi2 = gsi_start_bb (loop_header);
c18c98c0
JJ
7516 if (gimple_in_ssa_p (cfun))
7517 {
726a989a
RB
7518 gimple stmt;
7519 x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
7520 true, GSI_SAME_STMT);
7521 stmt = gimple_build_assign (loaded_val, x);
7522 gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
c18c98c0
JJ
7523 }
7524 else
7525 {
726a989a
RB
7526 x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
7527 force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
7528 true, GSI_SAME_STMT);
c18c98c0
JJ
7529 }
7530 }
726a989a 7531 gsi_remove (&si, true);
a509ebb5 7532
726a989a
RB
7533 si = gsi_last_bb (store_bb);
7534 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
a509ebb5 7535
c18c98c0
JJ
7536 if (iaddr == addr)
7537 storedi = stored_val;
a509ebb5 7538 else
c18c98c0 7539 storedi =
726a989a 7540 force_gimple_operand_gsi (&si,
c18c98c0
JJ
7541 build1 (VIEW_CONVERT_EXPR, itype,
7542 stored_val), true, NULL_TREE, true,
726a989a 7543 GSI_SAME_STMT);
a509ebb5
RL
7544
7545 /* Build the compare&swap statement. */
7546 new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
726a989a 7547 new_storedi = force_gimple_operand_gsi (&si,
587aa063
RG
7548 fold_convert (TREE_TYPE (loadedi),
7549 new_storedi),
a509ebb5 7550 true, NULL_TREE,
726a989a 7551 true, GSI_SAME_STMT);
a509ebb5
RL
7552
7553 if (gimple_in_ssa_p (cfun))
7554 old_vali = loadedi;
7555 else
7556 {
587aa063 7557 old_vali = create_tmp_var (TREE_TYPE (loadedi), NULL);
726a989a
RB
7558 stmt = gimple_build_assign (old_vali, loadedi);
7559 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5 7560
726a989a
RB
7561 stmt = gimple_build_assign (loadedi, new_storedi);
7562 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5
RL
7563 }
7564
7565 /* Note that we always perform the comparison as an integer, even for
b8698a0f 7566 floating point. This allows the atomic operation to properly
a509ebb5 7567 succeed even with NaNs and -0.0. */
726a989a
RB
7568 stmt = gimple_build_cond_empty
7569 (build2 (NE_EXPR, boolean_type_node,
7570 new_storedi, old_vali));
7571 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5
RL
7572
7573 /* Update cfg. */
7574 e = single_succ_edge (store_bb);
7575 e->flags &= ~EDGE_FALLTHRU;
7576 e->flags |= EDGE_FALSE_VALUE;
7577
7578 e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);
7579
c18c98c0 7580 /* Copy the new value to loadedi (we already did that before the condition
a509ebb5
RL
7581 if we are not in SSA). */
7582 if (gimple_in_ssa_p (cfun))
7583 {
726a989a 7584 phi = gimple_seq_first_stmt (phi_nodes (loop_header));
c18c98c0 7585 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
a509ebb5
RL
7586 }
7587
726a989a
RB
7588 /* Remove GIMPLE_OMP_ATOMIC_STORE. */
7589 gsi_remove (&si, true);
a509ebb5 7590
6093bc06
RB
7591 struct loop *loop = alloc_loop ();
7592 loop->header = loop_header;
a1756c0a 7593 loop->latch = store_bb;
6093bc06
RB
7594 add_loop (loop, loop_header->loop_father);
7595
a509ebb5
RL
7596 if (gimple_in_ssa_p (cfun))
7597 update_ssa (TODO_update_ssa_no_phi);
7598
7599 return true;
7600}
7601
7602/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
7603
7604 GOMP_atomic_start ();
7605 *addr = rhs;
7606 GOMP_atomic_end ();
7607
7608 The result is not globally atomic, but works so long as all parallel
7609 references are within #pragma omp atomic directives. According to
7610 responses received from omp@openmp.org, appears to be within spec.
7611 Which makes sense, since that's how several other compilers handle
b8698a0f 7612 this situation as well.
726a989a
RB
7613 LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
7614 expanding. STORED_VAL is the operand of the matching
7615 GIMPLE_OMP_ATOMIC_STORE.
a509ebb5 7616
b8698a0f
L
7617 We replace
7618 GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with
a509ebb5
RL
7619 loaded_val = *addr;
7620
7621 and replace
05409788 7622 GIMPLE_OMP_ATOMIC_STORE (stored_val) with
b8698a0f 7623 *addr = stored_val;
a509ebb5
RL
7624*/
7625
7626static bool
7627expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
7628 tree addr, tree loaded_val, tree stored_val)
7629{
726a989a
RB
7630 gimple_stmt_iterator si;
7631 gimple stmt;
a509ebb5
RL
7632 tree t;
7633
726a989a
RB
7634 si = gsi_last_bb (load_bb);
7635 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
a509ebb5 7636
e79983f4 7637 t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
3bb06db4 7638 t = build_call_expr (t, 0);
726a989a 7639 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
a509ebb5 7640
70f34814 7641 stmt = gimple_build_assign (loaded_val, build_simple_mem_ref (addr));
726a989a
RB
7642 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7643 gsi_remove (&si, true);
a509ebb5 7644
726a989a
RB
7645 si = gsi_last_bb (store_bb);
7646 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
a509ebb5 7647
70f34814
RG
7648 stmt = gimple_build_assign (build_simple_mem_ref (unshare_expr (addr)),
7649 stored_val);
726a989a 7650 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
a509ebb5 7651
e79983f4 7652 t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
3bb06db4 7653 t = build_call_expr (t, 0);
726a989a
RB
7654 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
7655 gsi_remove (&si, true);
a509ebb5
RL
7656
7657 if (gimple_in_ssa_p (cfun))
7658 update_ssa (TODO_update_ssa_no_phi);
7659 return true;
7660}
7661
b8698a0f
L
7662/* Expand an GIMPLE_OMP_ATOMIC statement. We try to expand
7663 using expand_omp_atomic_fetch_op. If it failed, we try to
a509ebb5
RL
7664 call expand_omp_atomic_pipeline, and if it fails too, the
7665 ultimate fallback is wrapping the operation in a mutex
b8698a0f
L
7666 (expand_omp_atomic_mutex). REGION is the atomic region built
7667 by build_omp_regions_1(). */
a509ebb5
RL
7668
7669static void
7670expand_omp_atomic (struct omp_region *region)
7671{
7672 basic_block load_bb = region->entry, store_bb = region->exit;
726a989a
RB
7673 gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
7674 tree loaded_val = gimple_omp_atomic_load_lhs (load);
7675 tree addr = gimple_omp_atomic_load_rhs (load);
7676 tree stored_val = gimple_omp_atomic_store_val (store);
a509ebb5
RL
7677 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7678 HOST_WIDE_INT index;
7679
7680 /* Make sure the type is one of the supported sizes. */
7681 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
7682 index = exact_log2 (index);
7683 if (index >= 0 && index <= 4)
7684 {
7685 unsigned int align = TYPE_ALIGN_UNIT (type);
7686
7687 /* __sync builtins require strict data alignment. */
4999c62c 7688 if (exact_log2 (align) >= index)
a509ebb5 7689 {
05409788 7690 /* Atomic load. */
20906c66
JJ
7691 if (loaded_val == stored_val
7692 && (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7693 || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
7694 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
05409788 7695 && expand_omp_atomic_load (load_bb, addr, loaded_val, index))
20906c66
JJ
7696 return;
7697
05409788 7698 /* Atomic store. */
20906c66
JJ
7699 if ((GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7700 || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
7701 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
7702 && store_bb == single_succ (load_bb)
7703 && first_stmt (store_bb) == store
05409788
RH
7704 && expand_omp_atomic_store (load_bb, addr, loaded_val,
7705 stored_val, index))
20906c66
JJ
7706 return;
7707
a509ebb5
RL
7708 /* When possible, use specialized atomic update functions. */
7709 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
05409788
RH
7710 && store_bb == single_succ (load_bb)
7711 && expand_omp_atomic_fetch_op (load_bb, addr,
7712 loaded_val, stored_val, index))
7713 return;
a509ebb5
RL
7714
7715 /* If we don't have specialized __sync builtins, try and implement
7716 as a compare and swap loop. */
7717 if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
7718 loaded_val, stored_val, index))
7719 return;
7720 }
7721 }
7722
7723 /* The ultimate fallback is wrapping the operation in a mutex. */
7724 expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
7725}
7726
953ff289 7727
acf0174b
JJ
7728/* Expand the OpenMP target{, data, update} directive starting at REGION. */
7729
7730static void
7731expand_omp_target (struct omp_region *region)
7732{
7733 basic_block entry_bb, exit_bb, new_bb;
7734 struct function *child_cfun = NULL;
7735 tree child_fn = NULL_TREE, block, t;
7736 gimple_stmt_iterator gsi;
7737 gimple entry_stmt, stmt;
7738 edge e;
7739
7740 entry_stmt = last_stmt (region->entry);
7741 new_bb = region->entry;
7742 int kind = gimple_omp_target_kind (entry_stmt);
7743 if (kind == GF_OMP_TARGET_KIND_REGION)
7744 {
7745 child_fn = gimple_omp_target_child_fn (entry_stmt);
7746 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7747 }
7748
7749 entry_bb = region->entry;
7750 exit_bb = region->exit;
7751
7752 if (kind == GF_OMP_TARGET_KIND_REGION)
7753 {
7754 unsigned srcidx, dstidx, num;
7755
7756 /* If the target region needs data sent from the parent
7757 function, then the very first statement (except possible
7758 tree profile counter updates) of the parallel body
7759 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
7760 &.OMP_DATA_O is passed as an argument to the child function,
7761 we need to replace it with the argument as seen by the child
7762 function.
7763
7764 In most cases, this will end up being the identity assignment
7765 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
7766 a function call that has been inlined, the original PARM_DECL
7767 .OMP_DATA_I may have been converted into a different local
7768 variable. In which case, we need to keep the assignment. */
7769 if (gimple_omp_target_data_arg (entry_stmt))
7770 {
7771 basic_block entry_succ_bb = single_succ (entry_bb);
7772 gimple_stmt_iterator gsi;
7773 tree arg;
7774 gimple tgtcopy_stmt = NULL;
7775 tree sender
7776 = TREE_VEC_ELT (gimple_omp_target_data_arg (entry_stmt), 0);
7777
7778 for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
7779 {
7780 gcc_assert (!gsi_end_p (gsi));
7781 stmt = gsi_stmt (gsi);
7782 if (gimple_code (stmt) != GIMPLE_ASSIGN)
7783 continue;
7784
7785 if (gimple_num_ops (stmt) == 2)
7786 {
7787 tree arg = gimple_assign_rhs1 (stmt);
7788
7789 /* We're ignoring the subcode because we're
7790 effectively doing a STRIP_NOPS. */
7791
7792 if (TREE_CODE (arg) == ADDR_EXPR
7793 && TREE_OPERAND (arg, 0) == sender)
7794 {
7795 tgtcopy_stmt = stmt;
7796 break;
7797 }
7798 }
7799 }
7800
7801 gcc_assert (tgtcopy_stmt != NULL);
7802 arg = DECL_ARGUMENTS (child_fn);
7803
7804 gcc_assert (gimple_assign_lhs (tgtcopy_stmt) == arg);
7805 gsi_remove (&gsi, true);
7806 }
7807
7808 /* Declare local variables needed in CHILD_CFUN. */
7809 block = DECL_INITIAL (child_fn);
7810 BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
7811 /* The gimplifier could record temporaries in target block
7812 rather than in containing function's local_decls chain,
7813 which would mean cgraph missed finalizing them. Do it now. */
7814 for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
7815 if (TREE_CODE (t) == VAR_DECL
7816 && TREE_STATIC (t)
7817 && !DECL_EXTERNAL (t))
7818 varpool_finalize_decl (t);
7819 DECL_SAVED_TREE (child_fn) = NULL;
7820 /* We'll create a CFG for child_fn, so no gimple body is needed. */
7821 gimple_set_body (child_fn, NULL);
7822 TREE_USED (block) = 1;
7823
7824 /* Reset DECL_CONTEXT on function arguments. */
7825 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7826 DECL_CONTEXT (t) = child_fn;
7827
7828 /* Split ENTRY_BB at GIMPLE_OMP_TARGET,
7829 so that it can be moved to the child function. */
7830 gsi = gsi_last_bb (entry_bb);
7831 stmt = gsi_stmt (gsi);
7832 gcc_assert (stmt && gimple_code (stmt) == GIMPLE_OMP_TARGET
7833 && gimple_omp_target_kind (stmt)
7834 == GF_OMP_TARGET_KIND_REGION);
7835 gsi_remove (&gsi, true);
7836 e = split_block (entry_bb, stmt);
7837 entry_bb = e->dest;
7838 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7839
7840 /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR. */
7841 if (exit_bb)
7842 {
7843 gsi = gsi_last_bb (exit_bb);
7844 gcc_assert (!gsi_end_p (gsi)
7845 && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
7846 stmt = gimple_build_return (NULL);
7847 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
7848 gsi_remove (&gsi, true);
7849 }
7850
7851 /* Move the target region into CHILD_CFUN. */
7852
7853 block = gimple_block (entry_stmt);
7854
7855 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
7856 if (exit_bb)
7857 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
7858 /* When the OMP expansion process cannot guarantee an up-to-date
7859 loop tree arrange for the child function to fixup loops. */
7860 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
7861 child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;
7862
7863 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
7864 num = vec_safe_length (child_cfun->local_decls);
7865 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
7866 {
7867 t = (*child_cfun->local_decls)[srcidx];
7868 if (DECL_CONTEXT (t) == cfun->decl)
7869 continue;
7870 if (srcidx != dstidx)
7871 (*child_cfun->local_decls)[dstidx] = t;
7872 dstidx++;
7873 }
7874 if (dstidx != num)
7875 vec_safe_truncate (child_cfun->local_decls, dstidx);
7876
7877 /* Inform the callgraph about the new function. */
7878 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
7879 cgraph_add_new_function (child_fn, true);
7880
7881 /* Fix the callgraph edges for child_cfun. Those for cfun will be
7882 fixed in a following pass. */
7883 push_cfun (child_cfun);
7884 rebuild_cgraph_edges ();
7885
7886 /* Some EH regions might become dead, see PR34608. If
7887 pass_cleanup_cfg isn't the first pass to happen with the
7888 new child, these dead EH edges might cause problems.
7889 Clean them up now. */
7890 if (flag_exceptions)
7891 {
7892 basic_block bb;
7893 bool changed = false;
7894
7895 FOR_EACH_BB (bb)
7896 changed |= gimple_purge_dead_eh_edges (bb);
7897 if (changed)
7898 cleanup_tree_cfg ();
7899 }
7900 pop_cfun ();
7901 }
7902
7903 /* Emit a library call to launch the target region, or do data
7904 transfers. */
7905 tree t1, t2, t3, t4, device, cond, c, clauses;
7906 enum built_in_function start_ix;
7907 location_t clause_loc;
7908
7909 clauses = gimple_omp_target_clauses (entry_stmt);
7910
7911 if (kind == GF_OMP_TARGET_KIND_REGION)
7912 start_ix = BUILT_IN_GOMP_TARGET;
7913 else if (kind == GF_OMP_TARGET_KIND_DATA)
7914 start_ix = BUILT_IN_GOMP_TARGET_DATA;
7915 else
7916 start_ix = BUILT_IN_GOMP_TARGET_UPDATE;
7917
7918 /* By default, the value of DEVICE is -1 (let runtime library choose)
7919 and there is no conditional. */
7920 cond = NULL_TREE;
7921 device = build_int_cst (integer_type_node, -1);
7922
7923 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
7924 if (c)
7925 cond = OMP_CLAUSE_IF_EXPR (c);
7926
7927 c = find_omp_clause (clauses, OMP_CLAUSE_DEVICE);
7928 if (c)
7929 {
7930 device = OMP_CLAUSE_DEVICE_ID (c);
7931 clause_loc = OMP_CLAUSE_LOCATION (c);
7932 }
7933 else
7934 clause_loc = gimple_location (entry_stmt);
7935
7936 /* Ensure 'device' is of the correct type. */
7937 device = fold_convert_loc (clause_loc, integer_type_node, device);
7938
7939 /* If we found the clause 'if (cond)', build
7940 (cond ? device : -2). */
7941 if (cond)
7942 {
7943 cond = gimple_boolify (cond);
7944
7945 basic_block cond_bb, then_bb, else_bb;
7946 edge e;
7947 tree tmp_var;
7948
7949 tmp_var = create_tmp_var (TREE_TYPE (device), NULL);
7950 if (kind != GF_OMP_TARGET_KIND_REGION)
7951 {
7952 gsi = gsi_last_bb (new_bb);
7953 gsi_prev (&gsi);
7954 e = split_block (new_bb, gsi_stmt (gsi));
7955 }
7956 else
7957 e = split_block (new_bb, NULL);
7958 cond_bb = e->src;
7959 new_bb = e->dest;
7960 remove_edge (e);
7961
7962 then_bb = create_empty_bb (cond_bb);
7963 else_bb = create_empty_bb (then_bb);
7964 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
7965 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
7966
7967 stmt = gimple_build_cond_empty (cond);
7968 gsi = gsi_last_bb (cond_bb);
7969 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7970
7971 gsi = gsi_start_bb (then_bb);
7972 stmt = gimple_build_assign (tmp_var, device);
7973 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7974
7975 gsi = gsi_start_bb (else_bb);
7976 stmt = gimple_build_assign (tmp_var,
7977 build_int_cst (integer_type_node, -2));
7978 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7979
7980 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
7981 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
7982 if (current_loops)
7983 {
7984 add_bb_to_loop (then_bb, cond_bb->loop_father);
7985 add_bb_to_loop (else_bb, cond_bb->loop_father);
7986 }
7987 make_edge (then_bb, new_bb, EDGE_FALLTHRU);
7988 make_edge (else_bb, new_bb, EDGE_FALLTHRU);
7989
7990 device = tmp_var;
7991 }
7992
7993 gsi = gsi_last_bb (new_bb);
7994 t = gimple_omp_target_data_arg (entry_stmt);
7995 if (t == NULL)
7996 {
7997 t1 = size_zero_node;
7998 t2 = build_zero_cst (ptr_type_node);
7999 t3 = t2;
8000 t4 = t2;
8001 }
8002 else
8003 {
8004 t1 = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (TREE_VEC_ELT (t, 1))));
8005 t1 = size_binop (PLUS_EXPR, t1, size_int (1));
8006 t2 = build_fold_addr_expr (TREE_VEC_ELT (t, 0));
8007 t3 = build_fold_addr_expr (TREE_VEC_ELT (t, 1));
8008 t4 = build_fold_addr_expr (TREE_VEC_ELT (t, 2));
8009 }
8010
8011 gimple g;
8012 /* FIXME: This will be address of
8013 extern char __OPENMP_TARGET__[] __attribute__((visibility ("hidden")))
8014 symbol, as soon as the linker plugin is able to create it for us. */
8015 tree openmp_target = build_zero_cst (ptr_type_node);
8016 if (kind == GF_OMP_TARGET_KIND_REGION)
8017 {
8018 tree fnaddr = build_fold_addr_expr (child_fn);
8019 g = gimple_build_call (builtin_decl_explicit (start_ix), 7,
8020 device, fnaddr, openmp_target, t1, t2, t3, t4);
8021 }
8022 else
8023 g = gimple_build_call (builtin_decl_explicit (start_ix), 6,
8024 device, openmp_target, t1, t2, t3, t4);
8025 gimple_set_location (g, gimple_location (entry_stmt));
8026 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
8027 if (kind != GF_OMP_TARGET_KIND_REGION)
8028 {
8029 g = gsi_stmt (gsi);
8030 gcc_assert (g && gimple_code (g) == GIMPLE_OMP_TARGET);
8031 gsi_remove (&gsi, true);
8032 }
8033 if (kind == GF_OMP_TARGET_KIND_DATA && region->exit)
8034 {
8035 gsi = gsi_last_bb (region->exit);
8036 g = gsi_stmt (gsi);
8037 gcc_assert (g && gimple_code (g) == GIMPLE_OMP_RETURN);
8038 gsi_remove (&gsi, true);
8039 }
8040}
8041
8042
8043/* Expand the parallel region tree rooted at REGION. Expansion
8044 proceeds in depth-first order. Innermost regions are expanded
8045 first. This way, parallel regions that require a new function to
726a989a 8046 be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
50674e96
DN
8047 internal dependencies in their body. */
8048
8049static void
8050expand_omp (struct omp_region *region)
8051{
8052 while (region)
8053 {
b357f682 8054 location_t saved_location;
acf0174b 8055 gimple inner_stmt = NULL;
b357f682 8056
068e1875
ZD
8057 /* First, determine whether this is a combined parallel+workshare
8058 region. */
726a989a 8059 if (region->type == GIMPLE_OMP_PARALLEL)
068e1875
ZD
8060 determine_parallel_type (region);
8061
acf0174b
JJ
8062 if (region->type == GIMPLE_OMP_FOR
8063 && gimple_omp_for_combined_p (last_stmt (region->entry)))
8064 inner_stmt = last_stmt (region->inner->entry);
8065
50674e96
DN
8066 if (region->inner)
8067 expand_omp (region->inner);
8068
b357f682 8069 saved_location = input_location;
726a989a
RB
8070 if (gimple_has_location (last_stmt (region->entry)))
8071 input_location = gimple_location (last_stmt (region->entry));
b357f682 8072
777f7f9a 8073 switch (region->type)
50674e96 8074 {
726a989a
RB
8075 case GIMPLE_OMP_PARALLEL:
8076 case GIMPLE_OMP_TASK:
a68ab351
JJ
8077 expand_omp_taskreg (region);
8078 break;
8079
726a989a 8080 case GIMPLE_OMP_FOR:
acf0174b 8081 expand_omp_for (region, inner_stmt);
777f7f9a 8082 break;
50674e96 8083
726a989a 8084 case GIMPLE_OMP_SECTIONS:
777f7f9a
RH
8085 expand_omp_sections (region);
8086 break;
50674e96 8087
726a989a 8088 case GIMPLE_OMP_SECTION:
777f7f9a 8089 /* Individual omp sections are handled together with their
726a989a 8090 parent GIMPLE_OMP_SECTIONS region. */
777f7f9a 8091 break;
50674e96 8092
726a989a 8093 case GIMPLE_OMP_SINGLE:
777f7f9a
RH
8094 expand_omp_single (region);
8095 break;
50674e96 8096
726a989a 8097 case GIMPLE_OMP_MASTER:
acf0174b 8098 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
8099 case GIMPLE_OMP_ORDERED:
8100 case GIMPLE_OMP_CRITICAL:
acf0174b 8101 case GIMPLE_OMP_TEAMS:
777f7f9a
RH
8102 expand_omp_synch (region);
8103 break;
50674e96 8104
726a989a 8105 case GIMPLE_OMP_ATOMIC_LOAD:
a509ebb5
RL
8106 expand_omp_atomic (region);
8107 break;
8108
acf0174b
JJ
8109 case GIMPLE_OMP_TARGET:
8110 expand_omp_target (region);
8111 break;
8112
777f7f9a
RH
8113 default:
8114 gcc_unreachable ();
8115 }
8d9c1aec 8116
b357f682 8117 input_location = saved_location;
50674e96
DN
8118 region = region->next;
8119 }
8120}
8121
8122
8123/* Helper for build_omp_regions. Scan the dominator tree starting at
5f40b3cb
ZD
8124 block BB. PARENT is the region that contains BB. If SINGLE_TREE is
8125 true, the function ends once a single tree is built (otherwise, whole
8126 forest of OMP constructs may be built). */
50674e96
DN
8127
8128static void
5f40b3cb
ZD
8129build_omp_regions_1 (basic_block bb, struct omp_region *parent,
8130 bool single_tree)
50674e96 8131{
726a989a
RB
8132 gimple_stmt_iterator gsi;
8133 gimple stmt;
50674e96
DN
8134 basic_block son;
8135
726a989a
RB
8136 gsi = gsi_last_bb (bb);
8137 if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
50674e96
DN
8138 {
8139 struct omp_region *region;
726a989a 8140 enum gimple_code code;
50674e96 8141
726a989a
RB
8142 stmt = gsi_stmt (gsi);
8143 code = gimple_code (stmt);
8144 if (code == GIMPLE_OMP_RETURN)
50674e96
DN
8145 {
8146 /* STMT is the return point out of region PARENT. Mark it
8147 as the exit point and make PARENT the immediately
8148 enclosing region. */
8149 gcc_assert (parent);
8150 region = parent;
777f7f9a 8151 region->exit = bb;
50674e96 8152 parent = parent->outer;
50674e96 8153 }
726a989a 8154 else if (code == GIMPLE_OMP_ATOMIC_STORE)
a509ebb5 8155 {
726a989a
RB
8156 /* GIMPLE_OMP_ATOMIC_STORE is analoguous to
8157 GIMPLE_OMP_RETURN, but matches with
8158 GIMPLE_OMP_ATOMIC_LOAD. */
a509ebb5 8159 gcc_assert (parent);
726a989a 8160 gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
a509ebb5
RL
8161 region = parent;
8162 region->exit = bb;
8163 parent = parent->outer;
8164 }
8165
726a989a 8166 else if (code == GIMPLE_OMP_CONTINUE)
777f7f9a
RH
8167 {
8168 gcc_assert (parent);
8169 parent->cont = bb;
8170 }
726a989a 8171 else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
e5c95afe 8172 {
726a989a
RB
8173 /* GIMPLE_OMP_SECTIONS_SWITCH is part of
8174 GIMPLE_OMP_SECTIONS, and we do nothing for it. */
8175 ;
e5c95afe 8176 }
acf0174b
JJ
8177 else if (code == GIMPLE_OMP_TARGET
8178 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_UPDATE)
8179 new_omp_region (bb, code, parent);
50674e96
DN
8180 else
8181 {
8182 /* Otherwise, this directive becomes the parent for a new
8183 region. */
777f7f9a 8184 region = new_omp_region (bb, code, parent);
50674e96
DN
8185 parent = region;
8186 }
50674e96
DN
8187 }
8188
5f40b3cb
ZD
8189 if (single_tree && !parent)
8190 return;
8191
50674e96
DN
8192 for (son = first_dom_son (CDI_DOMINATORS, bb);
8193 son;
8194 son = next_dom_son (CDI_DOMINATORS, son))
5f40b3cb
ZD
8195 build_omp_regions_1 (son, parent, single_tree);
8196}
8197
8198/* Builds the tree of OMP regions rooted at ROOT, storing it to
8199 root_omp_region. */
8200
8201static void
8202build_omp_regions_root (basic_block root)
8203{
8204 gcc_assert (root_omp_region == NULL);
8205 build_omp_regions_1 (root, NULL, true);
8206 gcc_assert (root_omp_region != NULL);
50674e96
DN
8207}
8208
5f40b3cb
ZD
8209/* Expands omp construct (and its subconstructs) starting in HEAD. */
8210
8211void
8212omp_expand_local (basic_block head)
8213{
8214 build_omp_regions_root (head);
8215 if (dump_file && (dump_flags & TDF_DETAILS))
8216 {
8217 fprintf (dump_file, "\nOMP region tree\n\n");
8218 dump_omp_region (dump_file, root_omp_region, 0);
8219 fprintf (dump_file, "\n");
8220 }
8221
8222 remove_exit_barriers (root_omp_region);
8223 expand_omp (root_omp_region);
8224
8225 free_omp_regions ();
8226}
50674e96
DN
8227
8228/* Scan the CFG and build a tree of OMP regions. Return the root of
8229 the OMP region tree. */
8230
8231static void
8232build_omp_regions (void)
8233{
777f7f9a 8234 gcc_assert (root_omp_region == NULL);
50674e96 8235 calculate_dominance_info (CDI_DOMINATORS);
5f40b3cb 8236 build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
50674e96
DN
8237}
8238
50674e96
DN
8239/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
8240
c2924966 8241static unsigned int
50674e96
DN
8242execute_expand_omp (void)
8243{
8244 build_omp_regions ();
8245
777f7f9a
RH
8246 if (!root_omp_region)
8247 return 0;
50674e96 8248
777f7f9a
RH
8249 if (dump_file)
8250 {
8251 fprintf (dump_file, "\nOMP region tree\n\n");
8252 dump_omp_region (dump_file, root_omp_region, 0);
8253 fprintf (dump_file, "\n");
50674e96 8254 }
777f7f9a
RH
8255
8256 remove_exit_barriers (root_omp_region);
8257
8258 expand_omp (root_omp_region);
8259
777f7f9a
RH
8260 cleanup_tree_cfg ();
8261
8262 free_omp_regions ();
8263
c2924966 8264 return 0;
50674e96
DN
8265}
8266
917948d3
ZD
8267/* OMP expansion -- the default pass, run before creation of SSA form. */
8268
50674e96
DN
8269static bool
8270gate_expand_omp (void)
8271{
c02065fc
AH
8272 return ((flag_openmp != 0 || flag_openmp_simd != 0
8273 || flag_enable_cilkplus != 0) && !seen_error ());
50674e96
DN
8274}
8275
27a4cd48
DM
8276namespace {
8277
8278const pass_data pass_data_expand_omp =
8279{
8280 GIMPLE_PASS, /* type */
8281 "ompexp", /* name */
8282 OPTGROUP_NONE, /* optinfo_flags */
8283 true, /* has_gate */
8284 true, /* has_execute */
8285 TV_NONE, /* tv_id */
8286 PROP_gimple_any, /* properties_required */
8287 0, /* properties_provided */
8288 0, /* properties_destroyed */
8289 0, /* todo_flags_start */
8290 0, /* todo_flags_finish */
50674e96 8291};
27a4cd48
DM
8292
8293class pass_expand_omp : public gimple_opt_pass
8294{
8295public:
c3284718
RS
8296 pass_expand_omp (gcc::context *ctxt)
8297 : gimple_opt_pass (pass_data_expand_omp, ctxt)
27a4cd48
DM
8298 {}
8299
8300 /* opt_pass methods: */
8301 bool gate () { return gate_expand_omp (); }
8302 unsigned int execute () { return execute_expand_omp (); }
8303
8304}; // class pass_expand_omp
8305
8306} // anon namespace
8307
8308gimple_opt_pass *
8309make_pass_expand_omp (gcc::context *ctxt)
8310{
8311 return new pass_expand_omp (ctxt);
8312}
50674e96
DN
8313\f
8314/* Routines to lower OpenMP directives into OMP-GIMPLE. */
8315
acf0174b
JJ
8316/* If ctx is a worksharing context inside of a cancellable parallel
8317 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8318 and conditional branch to parallel's cancel_label to handle
8319 cancellation in the implicit barrier. */
8320
8321static void
8322maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
8323{
8324 gimple omp_return = gimple_seq_last_stmt (*body);
8325 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8326 if (gimple_omp_return_nowait_p (omp_return))
8327 return;
8328 if (ctx->outer
8329 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
8330 && ctx->outer->cancellable)
8331 {
8332 tree lhs = create_tmp_var (boolean_type_node, NULL);
8333 gimple_omp_return_set_lhs (omp_return, lhs);
8334 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8335 gimple g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
8336 ctx->outer->cancel_label, fallthru_label);
8337 gimple_seq_add_stmt (body, g);
8338 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8339 }
8340}
8341
726a989a
RB
8342/* Lower the OpenMP sections directive in the current statement in GSI_P.
8343 CTX is the enclosing OMP context for the current statement. */
50674e96
DN
8344
8345static void
726a989a 8346lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 8347{
726a989a
RB
8348 tree block, control;
8349 gimple_stmt_iterator tgsi;
726a989a 8350 gimple stmt, new_stmt, bind, t;
355a7673 8351 gimple_seq ilist, dlist, olist, new_body;
d406b663 8352 struct gimplify_ctx gctx;
50674e96 8353
726a989a 8354 stmt = gsi_stmt (*gsi_p);
50674e96 8355
d406b663 8356 push_gimplify_context (&gctx);
50674e96
DN
8357
8358 dlist = NULL;
8359 ilist = NULL;
726a989a 8360 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
acf0174b 8361 &ilist, &dlist, ctx, NULL);
50674e96 8362
355a7673
MM
8363 new_body = gimple_omp_body (stmt);
8364 gimple_omp_set_body (stmt, NULL);
8365 tgsi = gsi_start (new_body);
8366 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
50674e96
DN
8367 {
8368 omp_context *sctx;
726a989a 8369 gimple sec_start;
50674e96 8370
726a989a 8371 sec_start = gsi_stmt (tgsi);
50674e96
DN
8372 sctx = maybe_lookup_ctx (sec_start);
8373 gcc_assert (sctx);
8374
355a7673
MM
8375 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8376 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8377 GSI_CONTINUE_LINKING);
726a989a 8378 gimple_omp_set_body (sec_start, NULL);
50674e96 8379
355a7673 8380 if (gsi_one_before_end_p (tgsi))
50674e96 8381 {
726a989a
RB
8382 gimple_seq l = NULL;
8383 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
50674e96 8384 &l, ctx);
355a7673 8385 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
726a989a 8386 gimple_omp_section_set_last (sec_start);
50674e96 8387 }
b8698a0f 8388
355a7673
MM
8389 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8390 GSI_CONTINUE_LINKING);
50674e96 8391 }
953ff289
DN
8392
8393 block = make_node (BLOCK);
355a7673 8394 bind = gimple_build_bind (NULL, new_body, block);
953ff289 8395
726a989a
RB
8396 olist = NULL;
8397 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 8398
b357f682 8399 block = make_node (BLOCK);
726a989a 8400 new_stmt = gimple_build_bind (NULL, NULL, block);
355a7673 8401 gsi_replace (gsi_p, new_stmt, true);
50674e96 8402
b357f682 8403 pop_gimplify_context (new_stmt);
726a989a
RB
8404 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8405 BLOCK_VARS (block) = gimple_bind_vars (bind);
b357f682
JJ
8406 if (BLOCK_VARS (block))
8407 TREE_USED (block) = 1;
8408
726a989a
RB
8409 new_body = NULL;
8410 gimple_seq_add_seq (&new_body, ilist);
8411 gimple_seq_add_stmt (&new_body, stmt);
8412 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8413 gimple_seq_add_stmt (&new_body, bind);
777f7f9a 8414
e5c95afe 8415 control = create_tmp_var (unsigned_type_node, ".section");
726a989a
RB
8416 t = gimple_build_omp_continue (control, control);
8417 gimple_omp_sections_set_control (stmt, control);
8418 gimple_seq_add_stmt (&new_body, t);
777f7f9a 8419
726a989a 8420 gimple_seq_add_seq (&new_body, olist);
acf0174b
JJ
8421 if (ctx->cancellable)
8422 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
726a989a 8423 gimple_seq_add_seq (&new_body, dlist);
50674e96 8424
726a989a 8425 new_body = maybe_catch_exception (new_body);
4a31b7ee 8426
726a989a
RB
8427 t = gimple_build_omp_return
8428 (!!find_omp_clause (gimple_omp_sections_clauses (stmt),
8429 OMP_CLAUSE_NOWAIT));
8430 gimple_seq_add_stmt (&new_body, t);
acf0174b 8431 maybe_add_implicit_barrier_cancel (ctx, &new_body);
777f7f9a 8432
726a989a 8433 gimple_bind_set_body (new_stmt, new_body);
953ff289
DN
8434}
8435
8436
50674e96 8437/* A subroutine of lower_omp_single. Expand the simple form of
726a989a 8438 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
953ff289
DN
8439
8440 if (GOMP_single_start ())
8441 BODY;
8442 [ GOMP_barrier (); ] -> unless 'nowait' is present.
50674e96
DN
8443
8444 FIXME. It may be better to delay expanding the logic of this until
8445 pass_expand_omp. The expanded logic may make the job more difficult
8446 to a synchronization analysis pass. */
953ff289
DN
8447
8448static void
726a989a 8449lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
953ff289 8450{
c2255bc4
AH
8451 location_t loc = gimple_location (single_stmt);
8452 tree tlabel = create_artificial_label (loc);
8453 tree flabel = create_artificial_label (loc);
726a989a
RB
8454 gimple call, cond;
8455 tree lhs, decl;
8456
e79983f4 8457 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
726a989a
RB
8458 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
8459 call = gimple_build_call (decl, 0);
8460 gimple_call_set_lhs (call, lhs);
8461 gimple_seq_add_stmt (pre_p, call);
8462
8463 cond = gimple_build_cond (EQ_EXPR, lhs,
db3927fb
AH
8464 fold_convert_loc (loc, TREE_TYPE (lhs),
8465 boolean_true_node),
726a989a
RB
8466 tlabel, flabel);
8467 gimple_seq_add_stmt (pre_p, cond);
8468 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8469 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8470 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
953ff289
DN
8471}
8472
50674e96
DN
8473
8474/* A subroutine of lower_omp_single. Expand the simple form of
726a989a 8475 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
953ff289
DN
8476
8477 #pragma omp single copyprivate (a, b, c)
8478
8479 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8480
8481 {
8482 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8483 {
8484 BODY;
8485 copyout.a = a;
8486 copyout.b = b;
8487 copyout.c = c;
8488 GOMP_single_copy_end (&copyout);
8489 }
8490 else
8491 {
8492 a = copyout_p->a;
8493 b = copyout_p->b;
8494 c = copyout_p->c;
8495 }
8496 GOMP_barrier ();
8497 }
50674e96
DN
8498
8499 FIXME. It may be better to delay expanding the logic of this until
8500 pass_expand_omp. The expanded logic may make the job more difficult
8501 to a synchronization analysis pass. */
953ff289
DN
8502
8503static void
726a989a 8504lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
953ff289 8505{
e79983f4 8506 tree ptr_type, t, l0, l1, l2, bfn_decl;
726a989a 8507 gimple_seq copyin_seq;
c2255bc4 8508 location_t loc = gimple_location (single_stmt);
953ff289
DN
8509
8510 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8511
8512 ptr_type = build_pointer_type (ctx->record_type);
8513 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8514
c2255bc4
AH
8515 l0 = create_artificial_label (loc);
8516 l1 = create_artificial_label (loc);
8517 l2 = create_artificial_label (loc);
953ff289 8518
e79983f4
MM
8519 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8520 t = build_call_expr_loc (loc, bfn_decl, 0);
db3927fb 8521 t = fold_convert_loc (loc, ptr_type, t);
726a989a 8522 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289
DN
8523
8524 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8525 build_int_cst (ptr_type, 0));
8526 t = build3 (COND_EXPR, void_type_node, t,
8527 build_and_jump (&l0), build_and_jump (&l1));
8528 gimplify_and_add (t, pre_p);
8529
726a989a 8530 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 8531
726a989a 8532 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289
DN
8533
8534 copyin_seq = NULL;
726a989a 8535 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
953ff289
DN
8536 &copyin_seq, ctx);
8537
db3927fb 8538 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
e79983f4
MM
8539 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8540 t = build_call_expr_loc (loc, bfn_decl, 1, t);
953ff289
DN
8541 gimplify_and_add (t, pre_p);
8542
8543 t = build_and_jump (&l2);
8544 gimplify_and_add (t, pre_p);
8545
726a989a 8546 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 8547
726a989a 8548 gimple_seq_add_seq (pre_p, copyin_seq);
953ff289 8549
726a989a 8550 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
953ff289
DN
8551}
8552
50674e96 8553
953ff289
DN
8554/* Expand code for an OpenMP single directive. */
8555
8556static void
726a989a 8557lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8558{
726a989a
RB
8559 tree block;
8560 gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
acf0174b 8561 gimple_seq bind_body, bind_body_tail = NULL, dlist;
d406b663 8562 struct gimplify_ctx gctx;
953ff289 8563
d406b663 8564 push_gimplify_context (&gctx);
953ff289 8565
355a7673
MM
8566 block = make_node (BLOCK);
8567 bind = gimple_build_bind (NULL, NULL, block);
8568 gsi_replace (gsi_p, bind, true);
726a989a 8569 bind_body = NULL;
355a7673 8570 dlist = NULL;
726a989a 8571 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
acf0174b 8572 &bind_body, &dlist, ctx, NULL);
355a7673 8573 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
953ff289 8574
726a989a 8575 gimple_seq_add_stmt (&bind_body, single_stmt);
953ff289
DN
8576
8577 if (ctx->record_type)
726a989a 8578 lower_omp_single_copy (single_stmt, &bind_body, ctx);
953ff289 8579 else
726a989a
RB
8580 lower_omp_single_simple (single_stmt, &bind_body);
8581
8582 gimple_omp_set_body (single_stmt, NULL);
953ff289 8583
726a989a 8584 gimple_seq_add_seq (&bind_body, dlist);
777f7f9a 8585
726a989a 8586 bind_body = maybe_catch_exception (bind_body);
777f7f9a 8587
b8698a0f 8588 t = gimple_build_omp_return
726a989a
RB
8589 (!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
8590 OMP_CLAUSE_NOWAIT));
acf0174b
JJ
8591 gimple_seq_add_stmt (&bind_body_tail, t);
8592 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
8593 if (ctx->record_type)
8594 {
8595 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8596 tree clobber = build_constructor (ctx->record_type, NULL);
8597 TREE_THIS_VOLATILE (clobber) = 1;
8598 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8599 clobber), GSI_SAME_STMT);
8600 }
8601 gimple_seq_add_seq (&bind_body, bind_body_tail);
355a7673 8602 gimple_bind_set_body (bind, bind_body);
777f7f9a 8603
953ff289 8604 pop_gimplify_context (bind);
50674e96 8605
726a989a
RB
8606 gimple_bind_append_vars (bind, ctx->block_vars);
8607 BLOCK_VARS (block) = ctx->block_vars;
b357f682
JJ
8608 if (BLOCK_VARS (block))
8609 TREE_USED (block) = 1;
953ff289
DN
8610}
8611
50674e96 8612
953ff289
DN
8613/* Expand code for an OpenMP master directive. */
8614
8615static void
726a989a 8616lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8617{
e79983f4 8618 tree block, lab = NULL, x, bfn_decl;
726a989a 8619 gimple stmt = gsi_stmt (*gsi_p), bind;
db3927fb 8620 location_t loc = gimple_location (stmt);
726a989a 8621 gimple_seq tseq;
d406b663 8622 struct gimplify_ctx gctx;
953ff289 8623
d406b663 8624 push_gimplify_context (&gctx);
953ff289
DN
8625
8626 block = make_node (BLOCK);
355a7673
MM
8627 bind = gimple_build_bind (NULL, NULL, block);
8628 gsi_replace (gsi_p, bind, true);
8629 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8630
e79983f4
MM
8631 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8632 x = build_call_expr_loc (loc, bfn_decl, 0);
953ff289
DN
8633 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8634 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
726a989a
RB
8635 tseq = NULL;
8636 gimplify_and_add (x, &tseq);
8637 gimple_bind_add_seq (bind, tseq);
953ff289 8638
355a7673 8639 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8640 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8641 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8642 gimple_omp_set_body (stmt, NULL);
953ff289 8643
726a989a 8644 gimple_bind_add_stmt (bind, gimple_build_label (lab));
777f7f9a 8645
726a989a 8646 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
777f7f9a 8647
953ff289 8648 pop_gimplify_context (bind);
50674e96 8649
726a989a
RB
8650 gimple_bind_append_vars (bind, ctx->block_vars);
8651 BLOCK_VARS (block) = ctx->block_vars;
953ff289
DN
8652}
8653
50674e96 8654
acf0174b
JJ
8655/* Expand code for an OpenMP taskgroup directive. */
8656
8657static void
8658lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8659{
8660 gimple stmt = gsi_stmt (*gsi_p), bind, x;
8661 tree block = make_node (BLOCK);
8662
8663 bind = gimple_build_bind (NULL, NULL, block);
8664 gsi_replace (gsi_p, bind, true);
8665 gimple_bind_add_stmt (bind, stmt);
8666
8667 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8668 0);
8669 gimple_bind_add_stmt (bind, x);
8670
8671 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8672 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8673 gimple_omp_set_body (stmt, NULL);
8674
8675 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8676
8677 gimple_bind_append_vars (bind, ctx->block_vars);
8678 BLOCK_VARS (block) = ctx->block_vars;
8679}
8680
8681
953ff289
DN
8682/* Expand code for an OpenMP ordered directive. */
8683
8684static void
726a989a 8685lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8686{
726a989a
RB
8687 tree block;
8688 gimple stmt = gsi_stmt (*gsi_p), bind, x;
d406b663 8689 struct gimplify_ctx gctx;
953ff289 8690
d406b663 8691 push_gimplify_context (&gctx);
953ff289
DN
8692
8693 block = make_node (BLOCK);
355a7673
MM
8694 bind = gimple_build_bind (NULL, NULL, block);
8695 gsi_replace (gsi_p, bind, true);
8696 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8697
e79983f4
MM
8698 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8699 0);
726a989a 8700 gimple_bind_add_stmt (bind, x);
953ff289 8701
355a7673 8702 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8703 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8704 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8705 gimple_omp_set_body (stmt, NULL);
953ff289 8706
e79983f4 8707 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
726a989a 8708 gimple_bind_add_stmt (bind, x);
777f7f9a 8709
726a989a 8710 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
777f7f9a 8711
953ff289 8712 pop_gimplify_context (bind);
50674e96 8713
726a989a
RB
8714 gimple_bind_append_vars (bind, ctx->block_vars);
8715 BLOCK_VARS (block) = gimple_bind_vars (bind);
953ff289
DN
8716}
8717
953ff289 8718
726a989a 8719/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
953ff289
DN
8720 substitution of a couple of function calls. But in the NAMED case,
8721 requires that languages coordinate a symbol name. It is therefore
8722 best put here in common code. */
8723
8724static GTY((param1_is (tree), param2_is (tree)))
8725 splay_tree critical_name_mutexes;
8726
8727static void
726a989a 8728lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8729{
726a989a
RB
8730 tree block;
8731 tree name, lock, unlock;
8732 gimple stmt = gsi_stmt (*gsi_p), bind;
db3927fb 8733 location_t loc = gimple_location (stmt);
726a989a 8734 gimple_seq tbody;
d406b663 8735 struct gimplify_ctx gctx;
953ff289 8736
726a989a 8737 name = gimple_omp_critical_name (stmt);
953ff289
DN
8738 if (name)
8739 {
5039610b 8740 tree decl;
953ff289
DN
8741 splay_tree_node n;
8742
8743 if (!critical_name_mutexes)
8744 critical_name_mutexes
a9429e29
LB
8745 = splay_tree_new_ggc (splay_tree_compare_pointers,
8746 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
8747 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);
953ff289
DN
8748
8749 n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
8750 if (n == NULL)
8751 {
8752 char *new_str;
8753
8754 decl = create_tmp_var_raw (ptr_type_node, NULL);
8755
8756 new_str = ACONCAT ((".gomp_critical_user_",
8757 IDENTIFIER_POINTER (name), NULL));
8758 DECL_NAME (decl) = get_identifier (new_str);
8759 TREE_PUBLIC (decl) = 1;
8760 TREE_STATIC (decl) = 1;
8761 DECL_COMMON (decl) = 1;
8762 DECL_ARTIFICIAL (decl) = 1;
8763 DECL_IGNORED_P (decl) = 1;
8a4a83ed 8764 varpool_finalize_decl (decl);
953ff289
DN
8765
8766 splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
8767 (splay_tree_value) decl);
8768 }
8769 else
8770 decl = (tree) n->value;
8771
e79983f4 8772 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
db3927fb 8773 lock = build_call_expr_loc (loc, lock, 1, build_fold_addr_expr_loc (loc, decl));
953ff289 8774
e79983f4 8775 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
db3927fb
AH
8776 unlock = build_call_expr_loc (loc, unlock, 1,
8777 build_fold_addr_expr_loc (loc, decl));
953ff289
DN
8778 }
8779 else
8780 {
e79983f4 8781 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
db3927fb 8782 lock = build_call_expr_loc (loc, lock, 0);
953ff289 8783
e79983f4 8784 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
db3927fb 8785 unlock = build_call_expr_loc (loc, unlock, 0);
953ff289
DN
8786 }
8787
d406b663 8788 push_gimplify_context (&gctx);
953ff289
DN
8789
8790 block = make_node (BLOCK);
355a7673
MM
8791 bind = gimple_build_bind (NULL, NULL, block);
8792 gsi_replace (gsi_p, bind, true);
8793 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8794
726a989a
RB
8795 tbody = gimple_bind_body (bind);
8796 gimplify_and_add (lock, &tbody);
8797 gimple_bind_set_body (bind, tbody);
953ff289 8798
355a7673 8799 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8800 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8801 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8802 gimple_omp_set_body (stmt, NULL);
953ff289 8803
726a989a
RB
8804 tbody = gimple_bind_body (bind);
8805 gimplify_and_add (unlock, &tbody);
8806 gimple_bind_set_body (bind, tbody);
777f7f9a 8807
726a989a 8808 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
953ff289
DN
8809
8810 pop_gimplify_context (bind);
726a989a
RB
8811 gimple_bind_append_vars (bind, ctx->block_vars);
8812 BLOCK_VARS (block) = gimple_bind_vars (bind);
50674e96
DN
8813}
8814
8815
8816/* A subroutine of lower_omp_for. Generate code to emit the predicate
8817 for a lastprivate clause. Given a loop control predicate of (V
8818 cond N2), we gate the clause on (!(V cond N2)). The lowered form
3d55c64b
JJ
8819 is appended to *DLIST, iterator initialization is appended to
8820 *BODY_P. */
50674e96
DN
8821
8822static void
726a989a
RB
8823lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8824 gimple_seq *dlist, struct omp_context *ctx)
50674e96 8825{
726a989a 8826 tree clauses, cond, vinit;
50674e96 8827 enum tree_code cond_code;
726a989a 8828 gimple_seq stmts;
b8698a0f 8829
a68ab351 8830 cond_code = fd->loop.cond_code;
50674e96
DN
8831 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
8832
8833 /* When possible, use a strict equality expression. This can let VRP
8834 type optimizations deduce the value and remove a copy. */
9541ffee 8835 if (tree_fits_shwi_p (fd->loop.step))
50674e96 8836 {
a68ab351 8837 HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
50674e96
DN
8838 if (step == 1 || step == -1)
8839 cond_code = EQ_EXPR;
8840 }
8841
a68ab351 8842 cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
50674e96 8843
726a989a 8844 clauses = gimple_omp_for_clauses (fd->for_stmt);
3d55c64b
JJ
8845 stmts = NULL;
8846 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
726a989a 8847 if (!gimple_seq_empty_p (stmts))
3d55c64b 8848 {
726a989a 8849 gimple_seq_add_seq (&stmts, *dlist);
a68ab351 8850 *dlist = stmts;
3d55c64b
JJ
8851
8852 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
a68ab351 8853 vinit = fd->loop.n1;
3d55c64b 8854 if (cond_code == EQ_EXPR
9541ffee 8855 && tree_fits_shwi_p (fd->loop.n2)
a68ab351
JJ
8856 && ! integer_zerop (fd->loop.n2))
8857 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
74bf76ed
JJ
8858 else
8859 vinit = unshare_expr (vinit);
3d55c64b
JJ
8860
8861 /* Initialize the iterator variable, so that threads that don't execute
8862 any iterations don't execute the lastprivate clauses by accident. */
726a989a 8863 gimplify_assign (fd->loop.v, vinit, body_p);
3d55c64b 8864 }
50674e96
DN
8865}
8866
8867
8868/* Lower code for an OpenMP loop directive. */
8869
8870static void
726a989a 8871lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 8872{
726a989a 8873 tree *rhs_p, block;
acf0174b 8874 struct omp_for_data fd, *fdp = NULL;
726a989a 8875 gimple stmt = gsi_stmt (*gsi_p), new_stmt;
0f900dfa 8876 gimple_seq omp_for_body, body, dlist;
726a989a 8877 size_t i;
d406b663 8878 struct gimplify_ctx gctx;
50674e96 8879
d406b663 8880 push_gimplify_context (&gctx);
50674e96 8881
355a7673 8882 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
50674e96 8883
b357f682 8884 block = make_node (BLOCK);
726a989a 8885 new_stmt = gimple_build_bind (NULL, NULL, block);
355a7673
MM
8886 /* Replace at gsi right away, so that 'stmt' is no member
8887 of a sequence anymore as we're going to add to to a different
8888 one below. */
8889 gsi_replace (gsi_p, new_stmt, true);
b357f682 8890
50674e96
DN
8891 /* Move declaration of temporaries in the loop body before we make
8892 it go away. */
726a989a
RB
8893 omp_for_body = gimple_omp_body (stmt);
8894 if (!gimple_seq_empty_p (omp_for_body)
8895 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8896 {
8897 tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
8898 gimple_bind_append_vars (new_stmt, vars);
8899 }
50674e96 8900
acf0174b
JJ
8901 if (gimple_omp_for_combined_into_p (stmt))
8902 {
8903 extract_omp_for_data (stmt, &fd, NULL);
8904 fdp = &fd;
8905
8906 /* We need two temporaries with fd.loop.v type (istart/iend)
8907 and then (fd.collapse - 1) temporaries with the same
8908 type for count2 ... countN-1 vars if not constant. */
8909 size_t count = 2;
8910 tree type = fd.iter_type;
8911 if (fd.collapse > 1
8912 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8913 count += fd.collapse - 1;
8914 bool parallel_for = gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR;
8915 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8916 tree clauses = *pc;
8917 if (parallel_for)
8918 outerc
8919 = find_omp_clause (gimple_omp_parallel_clauses (ctx->outer->stmt),
8920 OMP_CLAUSE__LOOPTEMP_);
8921 for (i = 0; i < count; i++)
8922 {
8923 tree temp;
8924 if (parallel_for)
8925 {
8926 gcc_assert (outerc);
8927 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8928 outerc = find_omp_clause (OMP_CLAUSE_CHAIN (outerc),
8929 OMP_CLAUSE__LOOPTEMP_);
8930 }
8931 else
8932 temp = create_tmp_var (type, NULL);
8933 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8934 OMP_CLAUSE_DECL (*pc) = temp;
8935 pc = &OMP_CLAUSE_CHAIN (*pc);
8936 }
8937 *pc = clauses;
8938 }
8939
726a989a 8940 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
50674e96 8941 dlist = NULL;
726a989a 8942 body = NULL;
acf0174b
JJ
8943 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8944 fdp);
726a989a 8945 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
50674e96 8946
74bf76ed
JJ
8947 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8948
50674e96
DN
8949 /* Lower the header expressions. At this point, we can assume that
8950 the header is of the form:
8951
8952 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8953
8954 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8955 using the .omp_data_s mapping, if needed. */
726a989a 8956 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 8957 {
726a989a 8958 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
a68ab351 8959 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8960 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8961
726a989a 8962 rhs_p = gimple_omp_for_final_ptr (stmt, i);
a68ab351 8963 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8964 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8965
726a989a 8966 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
a68ab351 8967 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8968 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8969 }
50674e96
DN
8970
8971 /* Once lowered, extract the bounds and clauses. */
a68ab351 8972 extract_omp_for_data (stmt, &fd, NULL);
50674e96 8973
726a989a 8974 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
50674e96 8975
726a989a
RB
8976 gimple_seq_add_stmt (&body, stmt);
8977 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
777f7f9a 8978
726a989a
RB
8979 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8980 fd.loop.v));
777f7f9a 8981
50674e96 8982 /* After the loop, add exit clauses. */
726a989a 8983 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
acf0174b
JJ
8984
8985 if (ctx->cancellable)
8986 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8987
726a989a 8988 gimple_seq_add_seq (&body, dlist);
50674e96 8989
726a989a 8990 body = maybe_catch_exception (body);
4a31b7ee 8991
777f7f9a 8992 /* Region exit marker goes at the end of the loop body. */
726a989a 8993 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
acf0174b 8994 maybe_add_implicit_barrier_cancel (ctx, &body);
b357f682 8995 pop_gimplify_context (new_stmt);
726a989a
RB
8996
8997 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8998 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
b357f682
JJ
8999 if (BLOCK_VARS (block))
9000 TREE_USED (block) = 1;
50674e96 9001
726a989a
RB
9002 gimple_bind_set_body (new_stmt, body);
9003 gimple_omp_set_body (stmt, NULL);
9004 gimple_omp_for_set_pre_body (stmt, NULL);
953ff289
DN
9005}
9006
b8698a0f 9007/* Callback for walk_stmts. Check if the current statement only contains
726a989a 9008 GIMPLE_OMP_FOR or GIMPLE_OMP_PARALLEL. */
69f1837b
JJ
9009
9010static tree
726a989a
RB
9011check_combined_parallel (gimple_stmt_iterator *gsi_p,
9012 bool *handled_ops_p,
9013 struct walk_stmt_info *wi)
69f1837b 9014{
d3bfe4de 9015 int *info = (int *) wi->info;
726a989a 9016 gimple stmt = gsi_stmt (*gsi_p);
69f1837b 9017
726a989a
RB
9018 *handled_ops_p = true;
9019 switch (gimple_code (stmt))
69f1837b 9020 {
726a989a
RB
9021 WALK_SUBSTMTS;
9022
9023 case GIMPLE_OMP_FOR:
9024 case GIMPLE_OMP_SECTIONS:
69f1837b
JJ
9025 *info = *info == 0 ? 1 : -1;
9026 break;
9027 default:
9028 *info = -1;
9029 break;
9030 }
9031 return NULL;
9032}
50674e96 9033
a68ab351
JJ
9034struct omp_taskcopy_context
9035{
9036 /* This field must be at the beginning, as we do "inheritance": Some
9037 callback functions for tree-inline.c (e.g., omp_copy_decl)
9038 receive a copy_body_data pointer that is up-casted to an
9039 omp_context pointer. */
9040 copy_body_data cb;
9041 omp_context *ctx;
9042};
9043
9044static tree
9045task_copyfn_copy_decl (tree var, copy_body_data *cb)
9046{
9047 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9048
9049 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
9050 return create_tmp_var (TREE_TYPE (var), NULL);
9051
9052 return var;
9053}
9054
9055static tree
9056task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9057{
9058 tree name, new_fields = NULL, type, f;
9059
9060 type = lang_hooks.types.make_type (RECORD_TYPE);
9061 name = DECL_NAME (TYPE_NAME (orig_type));
c2255bc4
AH
9062 name = build_decl (gimple_location (tcctx->ctx->stmt),
9063 TYPE_DECL, name, type);
a68ab351
JJ
9064 TYPE_NAME (type) = name;
9065
9066 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9067 {
9068 tree new_f = copy_node (f);
9069 DECL_CONTEXT (new_f) = type;
9070 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
9071 TREE_CHAIN (new_f) = new_fields;
726a989a
RB
9072 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9073 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9074 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
9075 &tcctx->cb, NULL);
a68ab351
JJ
9076 new_fields = new_f;
9077 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
9078 }
9079 TYPE_FIELDS (type) = nreverse (new_fields);
9080 layout_type (type);
9081 return type;
9082}
9083
9084/* Create task copyfn. */
9085
9086static void
726a989a 9087create_task_copyfn (gimple task_stmt, omp_context *ctx)
a68ab351
JJ
9088{
9089 struct function *child_cfun;
9090 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
9091 tree record_type, srecord_type, bind, list;
9092 bool record_needs_remap = false, srecord_needs_remap = false;
9093 splay_tree_node n;
9094 struct omp_taskcopy_context tcctx;
d406b663 9095 struct gimplify_ctx gctx;
db3927fb 9096 location_t loc = gimple_location (task_stmt);
a68ab351 9097
726a989a 9098 child_fn = gimple_omp_task_copy_fn (task_stmt);
a68ab351
JJ
9099 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
9100 gcc_assert (child_cfun->cfg == NULL);
a68ab351
JJ
9101 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9102
9103 /* Reset DECL_CONTEXT on function arguments. */
910ad8de 9104 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
a68ab351
JJ
9105 DECL_CONTEXT (t) = child_fn;
9106
9107 /* Populate the function. */
d406b663 9108 push_gimplify_context (&gctx);
af16bc76 9109 push_cfun (child_cfun);
a68ab351
JJ
9110
9111 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
9112 TREE_SIDE_EFFECTS (bind) = 1;
9113 list = NULL;
9114 DECL_SAVED_TREE (child_fn) = bind;
726a989a 9115 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
a68ab351
JJ
9116
9117 /* Remap src and dst argument types if needed. */
9118 record_type = ctx->record_type;
9119 srecord_type = ctx->srecord_type;
910ad8de 9120 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
9121 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9122 {
9123 record_needs_remap = true;
9124 break;
9125 }
910ad8de 9126 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
9127 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9128 {
9129 srecord_needs_remap = true;
9130 break;
9131 }
9132
9133 if (record_needs_remap || srecord_needs_remap)
9134 {
9135 memset (&tcctx, '\0', sizeof (tcctx));
9136 tcctx.cb.src_fn = ctx->cb.src_fn;
9137 tcctx.cb.dst_fn = child_fn;
fe660d7b
MJ
9138 tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
9139 gcc_checking_assert (tcctx.cb.src_node);
a68ab351
JJ
9140 tcctx.cb.dst_node = tcctx.cb.src_node;
9141 tcctx.cb.src_cfun = ctx->cb.src_cfun;
9142 tcctx.cb.copy_decl = task_copyfn_copy_decl;
1d65f45c 9143 tcctx.cb.eh_lp_nr = 0;
a68ab351
JJ
9144 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
9145 tcctx.cb.decl_map = pointer_map_create ();
9146 tcctx.ctx = ctx;
9147
9148 if (record_needs_remap)
9149 record_type = task_copyfn_remap_type (&tcctx, record_type);
9150 if (srecord_needs_remap)
9151 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9152 }
9153 else
9154 tcctx.cb.decl_map = NULL;
9155
a68ab351
JJ
9156 arg = DECL_ARGUMENTS (child_fn);
9157 TREE_TYPE (arg) = build_pointer_type (record_type);
910ad8de 9158 sarg = DECL_CHAIN (arg);
a68ab351
JJ
9159 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9160
9161 /* First pass: initialize temporaries used in record_type and srecord_type
9162 sizes and field offsets. */
9163 if (tcctx.cb.decl_map)
726a989a 9164 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9165 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9166 {
9167 tree *p;
9168
9169 decl = OMP_CLAUSE_DECL (c);
9170 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
9171 if (p == NULL)
9172 continue;
9173 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9174 sf = (tree) n->value;
9175 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9176 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9177 src = omp_build_component_ref (src, sf);
726a989a 9178 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
a68ab351
JJ
9179 append_to_statement_list (t, &list);
9180 }
9181
9182 /* Second pass: copy shared var pointers and copy construct non-VLA
9183 firstprivate vars. */
726a989a 9184 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9185 switch (OMP_CLAUSE_CODE (c))
9186 {
9187 case OMP_CLAUSE_SHARED:
9188 decl = OMP_CLAUSE_DECL (c);
9189 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9190 if (n == NULL)
9191 break;
9192 f = (tree) n->value;
9193 if (tcctx.cb.decl_map)
9194 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9195 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9196 sf = (tree) n->value;
9197 if (tcctx.cb.decl_map)
9198 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9199 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9200 src = omp_build_component_ref (src, sf);
70f34814 9201 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9202 dst = omp_build_component_ref (dst, f);
726a989a 9203 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
9204 append_to_statement_list (t, &list);
9205 break;
9206 case OMP_CLAUSE_FIRSTPRIVATE:
9207 decl = OMP_CLAUSE_DECL (c);
9208 if (is_variable_sized (decl))
9209 break;
9210 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9211 if (n == NULL)
9212 break;
9213 f = (tree) n->value;
9214 if (tcctx.cb.decl_map)
9215 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9216 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9217 if (n != NULL)
9218 {
9219 sf = (tree) n->value;
9220 if (tcctx.cb.decl_map)
9221 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9222 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9223 src = omp_build_component_ref (src, sf);
a68ab351 9224 if (use_pointer_for_field (decl, NULL) || is_reference (decl))
70f34814 9225 src = build_simple_mem_ref_loc (loc, src);
a68ab351
JJ
9226 }
9227 else
9228 src = decl;
70f34814 9229 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9230 dst = omp_build_component_ref (dst, f);
a68ab351
JJ
9231 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9232 append_to_statement_list (t, &list);
9233 break;
9234 case OMP_CLAUSE_PRIVATE:
9235 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
9236 break;
9237 decl = OMP_CLAUSE_DECL (c);
9238 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9239 f = (tree) n->value;
9240 if (tcctx.cb.decl_map)
9241 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9242 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9243 if (n != NULL)
9244 {
9245 sf = (tree) n->value;
9246 if (tcctx.cb.decl_map)
9247 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9248 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9249 src = omp_build_component_ref (src, sf);
a68ab351 9250 if (use_pointer_for_field (decl, NULL))
70f34814 9251 src = build_simple_mem_ref_loc (loc, src);
a68ab351
JJ
9252 }
9253 else
9254 src = decl;
70f34814 9255 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9256 dst = omp_build_component_ref (dst, f);
726a989a 9257 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
9258 append_to_statement_list (t, &list);
9259 break;
9260 default:
9261 break;
9262 }
9263
9264 /* Last pass: handle VLA firstprivates. */
9265 if (tcctx.cb.decl_map)
726a989a 9266 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9267 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9268 {
9269 tree ind, ptr, df;
9270
9271 decl = OMP_CLAUSE_DECL (c);
9272 if (!is_variable_sized (decl))
9273 continue;
9274 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9275 if (n == NULL)
9276 continue;
9277 f = (tree) n->value;
9278 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9279 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
9280 ind = DECL_VALUE_EXPR (decl);
9281 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
9282 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
9283 n = splay_tree_lookup (ctx->sfield_map,
9284 (splay_tree_key) TREE_OPERAND (ind, 0));
9285 sf = (tree) n->value;
9286 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9287 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9288 src = omp_build_component_ref (src, sf);
70f34814
RG
9289 src = build_simple_mem_ref_loc (loc, src);
9290 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9291 dst = omp_build_component_ref (dst, f);
a68ab351
JJ
9292 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9293 append_to_statement_list (t, &list);
9294 n = splay_tree_lookup (ctx->field_map,
9295 (splay_tree_key) TREE_OPERAND (ind, 0));
9296 df = (tree) n->value;
9297 df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
70f34814 9298 ptr = build_simple_mem_ref_loc (loc, arg);
a9a58711 9299 ptr = omp_build_component_ref (ptr, df);
726a989a 9300 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
db3927fb 9301 build_fold_addr_expr_loc (loc, dst));
a68ab351
JJ
9302 append_to_statement_list (t, &list);
9303 }
9304
9305 t = build1 (RETURN_EXPR, void_type_node, NULL);
9306 append_to_statement_list (t, &list);
9307
9308 if (tcctx.cb.decl_map)
9309 pointer_map_destroy (tcctx.cb.decl_map);
9310 pop_gimplify_context (NULL);
9311 BIND_EXPR_BODY (bind) = list;
9312 pop_cfun ();
a68ab351
JJ
9313}
9314
acf0174b
JJ
9315static void
9316lower_depend_clauses (gimple stmt, gimple_seq *iseq, gimple_seq *oseq)
9317{
9318 tree c, clauses;
9319 gimple g;
9320 size_t n_in = 0, n_out = 0, idx = 2, i;
9321
9322 clauses = find_omp_clause (gimple_omp_task_clauses (stmt),
9323 OMP_CLAUSE_DEPEND);
9324 gcc_assert (clauses);
9325 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9326 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9327 switch (OMP_CLAUSE_DEPEND_KIND (c))
9328 {
9329 case OMP_CLAUSE_DEPEND_IN:
9330 n_in++;
9331 break;
9332 case OMP_CLAUSE_DEPEND_OUT:
9333 case OMP_CLAUSE_DEPEND_INOUT:
9334 n_out++;
9335 break;
9336 default:
9337 gcc_unreachable ();
9338 }
9339 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
9340 tree array = create_tmp_var (type, NULL);
9341 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
9342 NULL_TREE);
9343 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
9344 gimple_seq_add_stmt (iseq, g);
9345 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
9346 NULL_TREE);
9347 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
9348 gimple_seq_add_stmt (iseq, g);
9349 for (i = 0; i < 2; i++)
9350 {
9351 if ((i ? n_in : n_out) == 0)
9352 continue;
9353 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9354 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9355 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
9356 {
9357 tree t = OMP_CLAUSE_DECL (c);
9358 t = fold_convert (ptr_type_node, t);
9359 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
9360 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
9361 NULL_TREE, NULL_TREE);
9362 g = gimple_build_assign (r, t);
9363 gimple_seq_add_stmt (iseq, g);
9364 }
9365 }
9366 tree *p = gimple_omp_task_clauses_ptr (stmt);
9367 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9368 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9369 OMP_CLAUSE_CHAIN (c) = *p;
9370 *p = c;
9371 tree clobber = build_constructor (type, NULL);
9372 TREE_THIS_VOLATILE (clobber) = 1;
9373 g = gimple_build_assign (array, clobber);
9374 gimple_seq_add_stmt (oseq, g);
9375}
9376
726a989a
RB
9377/* Lower the OpenMP parallel or task directive in the current statement
9378 in GSI_P. CTX holds context information for the directive. */
50674e96
DN
9379
9380static void
726a989a 9381lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 9382{
726a989a
RB
9383 tree clauses;
9384 tree child_fn, t;
9385 gimple stmt = gsi_stmt (*gsi_p);
acf0174b
JJ
9386 gimple par_bind, bind, dep_bind = NULL;
9387 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
9388 struct gimplify_ctx gctx, dep_gctx;
db3927fb 9389 location_t loc = gimple_location (stmt);
50674e96 9390
726a989a
RB
9391 clauses = gimple_omp_taskreg_clauses (stmt);
9392 par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
9393 par_body = gimple_bind_body (par_bind);
50674e96 9394 child_fn = ctx->cb.dst_fn;
726a989a
RB
9395 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9396 && !gimple_omp_parallel_combined_p (stmt))
69f1837b
JJ
9397 {
9398 struct walk_stmt_info wi;
9399 int ws_num = 0;
9400
9401 memset (&wi, 0, sizeof (wi));
69f1837b
JJ
9402 wi.info = &ws_num;
9403 wi.val_only = true;
726a989a 9404 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
69f1837b 9405 if (ws_num == 1)
726a989a 9406 gimple_omp_parallel_set_combined_p (stmt, true);
69f1837b 9407 }
acf0174b
JJ
9408 gimple_seq dep_ilist = NULL;
9409 gimple_seq dep_olist = NULL;
9410 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9411 && find_omp_clause (clauses, OMP_CLAUSE_DEPEND))
9412 {
9413 push_gimplify_context (&dep_gctx);
9414 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9415 lower_depend_clauses (stmt, &dep_ilist, &dep_olist);
9416 }
9417
a68ab351
JJ
9418 if (ctx->srecord_type)
9419 create_task_copyfn (stmt, ctx);
50674e96 9420
d406b663 9421 push_gimplify_context (&gctx);
50674e96 9422
726a989a
RB
9423 par_olist = NULL;
9424 par_ilist = NULL;
acf0174b
JJ
9425 par_rlist = NULL;
9426 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
355a7673 9427 lower_omp (&par_body, ctx);
726a989a 9428 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
acf0174b 9429 lower_reduction_clauses (clauses, &par_rlist, ctx);
50674e96
DN
9430
9431 /* Declare all the variables created by mapping and the variables
9432 declared in the scope of the parallel body. */
9433 record_vars_into (ctx->block_vars, child_fn);
726a989a 9434 record_vars_into (gimple_bind_vars (par_bind), child_fn);
50674e96
DN
9435
9436 if (ctx->record_type)
9437 {
a68ab351
JJ
9438 ctx->sender_decl
9439 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
9440 : ctx->record_type, ".omp_data_o");
cd3f04c8 9441 DECL_NAMELESS (ctx->sender_decl) = 1;
628c189e 9442 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
726a989a 9443 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
50674e96
DN
9444 }
9445
726a989a
RB
9446 olist = NULL;
9447 ilist = NULL;
50674e96
DN
9448 lower_send_clauses (clauses, &ilist, &olist, ctx);
9449 lower_send_shared_vars (&ilist, &olist, ctx);
9450
acf0174b
JJ
9451 if (ctx->record_type)
9452 {
9453 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9454 TREE_THIS_VOLATILE (clobber) = 1;
9455 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9456 clobber));
9457 }
9458
50674e96 9459 /* Once all the expansions are done, sequence all the different
726a989a 9460 fragments inside gimple_omp_body. */
50674e96 9461
726a989a 9462 new_body = NULL;
50674e96
DN
9463
9464 if (ctx->record_type)
9465 {
db3927fb 9466 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
018b899b 9467 /* fixup_child_record_type might have changed receiver_decl's type. */
db3927fb 9468 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
726a989a
RB
9469 gimple_seq_add_stmt (&new_body,
9470 gimple_build_assign (ctx->receiver_decl, t));
50674e96
DN
9471 }
9472
726a989a
RB
9473 gimple_seq_add_seq (&new_body, par_ilist);
9474 gimple_seq_add_seq (&new_body, par_body);
acf0174b
JJ
9475 gimple_seq_add_seq (&new_body, par_rlist);
9476 if (ctx->cancellable)
9477 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
726a989a
RB
9478 gimple_seq_add_seq (&new_body, par_olist);
9479 new_body = maybe_catch_exception (new_body);
9480 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9481 gimple_omp_set_body (stmt, new_body);
50674e96 9482
726a989a 9483 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
acf0174b
JJ
9484 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9485 gimple_bind_add_seq (bind, ilist);
9486 gimple_bind_add_stmt (bind, stmt);
9487 gimple_bind_add_seq (bind, olist);
9488
9489 pop_gimplify_context (NULL);
9490
9491 if (dep_bind)
9492 {
9493 gimple_bind_add_seq (dep_bind, dep_ilist);
9494 gimple_bind_add_stmt (dep_bind, bind);
9495 gimple_bind_add_seq (dep_bind, dep_olist);
9496 pop_gimplify_context (dep_bind);
9497 }
9498}
9499
9500/* Lower the OpenMP target directive in the current statement
9501 in GSI_P. CTX holds context information for the directive. */
9502
9503static void
9504lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9505{
9506 tree clauses;
9507 tree child_fn, t, c;
9508 gimple stmt = gsi_stmt (*gsi_p);
9509 gimple tgt_bind = NULL, bind;
9510 gimple_seq tgt_body = NULL, olist, ilist, new_body;
9511 struct gimplify_ctx gctx;
9512 location_t loc = gimple_location (stmt);
9513 int kind = gimple_omp_target_kind (stmt);
9514 unsigned int map_cnt = 0;
9515
9516 clauses = gimple_omp_target_clauses (stmt);
9517 if (kind == GF_OMP_TARGET_KIND_REGION)
9518 {
9519 tgt_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
9520 tgt_body = gimple_bind_body (tgt_bind);
9521 }
9522 else if (kind == GF_OMP_TARGET_KIND_DATA)
9523 tgt_body = gimple_omp_body (stmt);
9524 child_fn = ctx->cb.dst_fn;
9525
9526 push_gimplify_context (&gctx);
9527
9528 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9529 switch (OMP_CLAUSE_CODE (c))
9530 {
9531 tree var, x;
9532
9533 default:
9534 break;
9535 case OMP_CLAUSE_MAP:
9536 case OMP_CLAUSE_TO:
9537 case OMP_CLAUSE_FROM:
9538 var = OMP_CLAUSE_DECL (c);
9539 if (!DECL_P (var))
9540 {
9541 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9542 || !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9543 map_cnt++;
9544 continue;
9545 }
9546
9547 if (DECL_SIZE (var)
9548 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9549 {
9550 tree var2 = DECL_VALUE_EXPR (var);
9551 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9552 var2 = TREE_OPERAND (var2, 0);
9553 gcc_assert (DECL_P (var2));
9554 var = var2;
9555 }
9556
9557 if (!maybe_lookup_field (var, ctx))
9558 continue;
9559
9560 if (kind == GF_OMP_TARGET_KIND_REGION)
9561 {
9562 x = build_receiver_ref (var, true, ctx);
9563 tree new_var = lookup_decl (var, ctx);
9564 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9565 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
9566 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9567 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9568 x = build_simple_mem_ref (x);
9569 SET_DECL_VALUE_EXPR (new_var, x);
9570 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9571 }
9572 map_cnt++;
9573 }
9574
9575 if (kind == GF_OMP_TARGET_KIND_REGION)
9576 {
9577 target_nesting_level++;
9578 lower_omp (&tgt_body, ctx);
9579 target_nesting_level--;
9580 }
9581 else if (kind == GF_OMP_TARGET_KIND_DATA)
9582 lower_omp (&tgt_body, ctx);
9583
9584 if (kind == GF_OMP_TARGET_KIND_REGION)
9585 {
9586 /* Declare all the variables created by mapping and the variables
9587 declared in the scope of the target body. */
9588 record_vars_into (ctx->block_vars, child_fn);
9589 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9590 }
9591
9592 olist = NULL;
9593 ilist = NULL;
9594 if (ctx->record_type)
9595 {
9596 ctx->sender_decl
9597 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9598 DECL_NAMELESS (ctx->sender_decl) = 1;
9599 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9600 t = make_tree_vec (3);
9601 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9602 TREE_VEC_ELT (t, 1)
9603 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9604 ".omp_data_sizes");
9605 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9606 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9607 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9608 TREE_VEC_ELT (t, 2)
9609 = create_tmp_var (build_array_type_nelts (unsigned_char_type_node,
9610 map_cnt),
9611 ".omp_data_kinds");
9612 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9613 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9614 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9615 gimple_omp_target_set_data_arg (stmt, t);
9616
9617 vec<constructor_elt, va_gc> *vsize;
9618 vec<constructor_elt, va_gc> *vkind;
9619 vec_alloc (vsize, map_cnt);
9620 vec_alloc (vkind, map_cnt);
9621 unsigned int map_idx = 0;
9622
9623 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9624 switch (OMP_CLAUSE_CODE (c))
9625 {
9626 tree ovar, nc;
9627
9628 default:
9629 break;
9630 case OMP_CLAUSE_MAP:
9631 case OMP_CLAUSE_TO:
9632 case OMP_CLAUSE_FROM:
9633 nc = c;
9634 ovar = OMP_CLAUSE_DECL (c);
9635 if (!DECL_P (ovar))
9636 {
9637 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9638 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9639 {
9640 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9641 == get_base_address (ovar));
9642 nc = OMP_CLAUSE_CHAIN (c);
9643 ovar = OMP_CLAUSE_DECL (nc);
9644 }
9645 else
9646 {
9647 tree x = build_sender_ref (ovar, ctx);
9648 tree v
9649 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9650 gimplify_assign (x, v, &ilist);
9651 nc = NULL_TREE;
9652 }
9653 }
9654 else
9655 {
9656 if (DECL_SIZE (ovar)
9657 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9658 {
9659 tree ovar2 = DECL_VALUE_EXPR (ovar);
9660 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9661 ovar2 = TREE_OPERAND (ovar2, 0);
9662 gcc_assert (DECL_P (ovar2));
9663 ovar = ovar2;
9664 }
9665 if (!maybe_lookup_field (ovar, ctx))
9666 continue;
9667 }
9668
9669 if (nc)
9670 {
9671 tree var = lookup_decl_in_outer_ctx (ovar, ctx);
9672 tree x = build_sender_ref (ovar, ctx);
9673 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9674 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
9675 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9676 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9677 {
9678 gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
9679 tree avar
9680 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)), NULL);
9681 mark_addressable (avar);
9682 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9683 avar = build_fold_addr_expr (avar);
9684 gimplify_assign (x, avar, &ilist);
9685 }
9686 else if (is_gimple_reg (var))
9687 {
9688 gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
9689 tree avar = create_tmp_var (TREE_TYPE (var), NULL);
9690 mark_addressable (avar);
9691 if (OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_ALLOC
9692 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_FROM)
9693 gimplify_assign (avar, var, &ilist);
9694 avar = build_fold_addr_expr (avar);
9695 gimplify_assign (x, avar, &ilist);
9696 if ((OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_FROM
9697 || OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_TOFROM)
9698 && !TYPE_READONLY (TREE_TYPE (var)))
9699 {
9700 x = build_sender_ref (ovar, ctx);
9701 x = build_simple_mem_ref (x);
9702 gimplify_assign (var, x, &olist);
9703 }
9704 }
9705 else
9706 {
9707 var = build_fold_addr_expr (var);
9708 gimplify_assign (x, var, &ilist);
9709 }
9710 }
9711 tree s = OMP_CLAUSE_SIZE (c);
9712 if (s == NULL_TREE)
9713 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9714 s = fold_convert (size_type_node, s);
9715 tree purpose = size_int (map_idx++);
9716 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9717 if (TREE_CODE (s) != INTEGER_CST)
9718 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9719
9720 unsigned char tkind = 0;
9721 switch (OMP_CLAUSE_CODE (c))
9722 {
9723 case OMP_CLAUSE_MAP:
9724 tkind = OMP_CLAUSE_MAP_KIND (c);
9725 break;
9726 case OMP_CLAUSE_TO:
9727 tkind = OMP_CLAUSE_MAP_TO;
9728 break;
9729 case OMP_CLAUSE_FROM:
9730 tkind = OMP_CLAUSE_MAP_FROM;
9731 break;
9732 default:
9733 gcc_unreachable ();
9734 }
9735 unsigned int talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9736 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9737 talign = DECL_ALIGN_UNIT (ovar);
9738 talign = ceil_log2 (talign);
9739 tkind |= talign << 3;
9740 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9741 build_int_cst (unsigned_char_type_node,
9742 tkind));
9743 if (nc && nc != c)
9744 c = nc;
9745 }
9746
9747 gcc_assert (map_idx == map_cnt);
9748
9749 DECL_INITIAL (TREE_VEC_ELT (t, 1))
9750 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9751 DECL_INITIAL (TREE_VEC_ELT (t, 2))
9752 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9753 if (!TREE_STATIC (TREE_VEC_ELT (t, 1)))
9754 {
9755 gimple_seq initlist = NULL;
9756 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9757 TREE_VEC_ELT (t, 1)),
9758 &initlist, true, NULL_TREE);
9759 gimple_seq_add_seq (&ilist, initlist);
9760 }
9761
9762 tree clobber = build_constructor (ctx->record_type, NULL);
9763 TREE_THIS_VOLATILE (clobber) = 1;
9764 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9765 clobber));
9766 }
9767
9768 /* Once all the expansions are done, sequence all the different
9769 fragments inside gimple_omp_body. */
9770
9771 new_body = NULL;
9772
9773 if (ctx->record_type && kind == GF_OMP_TARGET_KIND_REGION)
9774 {
9775 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9776 /* fixup_child_record_type might have changed receiver_decl's type. */
9777 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9778 gimple_seq_add_stmt (&new_body,
9779 gimple_build_assign (ctx->receiver_decl, t));
9780 }
9781
9782 if (kind == GF_OMP_TARGET_KIND_REGION)
9783 {
9784 gimple_seq_add_seq (&new_body, tgt_body);
9785 new_body = maybe_catch_exception (new_body);
9786 }
9787 else if (kind == GF_OMP_TARGET_KIND_DATA)
9788 new_body = tgt_body;
9789 if (kind != GF_OMP_TARGET_KIND_UPDATE)
9790 {
9791 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9792 gimple_omp_set_body (stmt, new_body);
9793 }
9794
9795 bind = gimple_build_bind (NULL, NULL,
9796 tgt_bind ? gimple_bind_block (tgt_bind)
9797 : NULL_TREE);
726a989a 9798 gsi_replace (gsi_p, bind, true);
355a7673
MM
9799 gimple_bind_add_seq (bind, ilist);
9800 gimple_bind_add_stmt (bind, stmt);
9801 gimple_bind_add_seq (bind, olist);
50674e96 9802
726a989a 9803 pop_gimplify_context (NULL);
50674e96
DN
9804}
9805
acf0174b
JJ
9806/* Expand code for an OpenMP teams directive. */
9807
9808static void
9809lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9810{
9811 gimple teams_stmt = gsi_stmt (*gsi_p);
9812 struct gimplify_ctx gctx;
9813 push_gimplify_context (&gctx);
9814
9815 tree block = make_node (BLOCK);
9816 gimple bind = gimple_build_bind (NULL, NULL, block);
9817 gsi_replace (gsi_p, bind, true);
9818 gimple_seq bind_body = NULL;
9819 gimple_seq dlist = NULL;
9820 gimple_seq olist = NULL;
9821
9822 tree num_teams = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
9823 OMP_CLAUSE_NUM_TEAMS);
9824 if (num_teams == NULL_TREE)
9825 num_teams = build_int_cst (unsigned_type_node, 0);
9826 else
9827 {
9828 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
9829 num_teams = fold_convert (unsigned_type_node, num_teams);
9830 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
9831 }
9832 tree thread_limit = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
9833 OMP_CLAUSE_THREAD_LIMIT);
9834 if (thread_limit == NULL_TREE)
9835 thread_limit = build_int_cst (unsigned_type_node, 0);
9836 else
9837 {
9838 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
9839 thread_limit = fold_convert (unsigned_type_node, thread_limit);
9840 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
9841 fb_rvalue);
9842 }
9843
9844 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
9845 &bind_body, &dlist, ctx, NULL);
9846 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
9847 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
9848 gimple_seq_add_stmt (&bind_body, teams_stmt);
9849
9850 location_t loc = gimple_location (teams_stmt);
9851 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
9852 gimple call = gimple_build_call (decl, 2, num_teams, thread_limit);
9853 gimple_set_location (call, loc);
9854 gimple_seq_add_stmt (&bind_body, call);
9855
9856 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
9857 gimple_omp_set_body (teams_stmt, NULL);
9858 gimple_seq_add_seq (&bind_body, olist);
9859 gimple_seq_add_seq (&bind_body, dlist);
9860 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
9861 gimple_bind_set_body (bind, bind_body);
9862
9863 pop_gimplify_context (bind);
9864
9865 gimple_bind_append_vars (bind, ctx->block_vars);
9866 BLOCK_VARS (block) = ctx->block_vars;
9867 if (BLOCK_VARS (block))
9868 TREE_USED (block) = 1;
9869}
9870
9871
d0fb20be 9872/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
726a989a
RB
9873 regimplified. If DATA is non-NULL, lower_omp_1 is outside
9874 of OpenMP context, but with task_shared_vars set. */
75a4c3c1
AP
9875
9876static tree
726a989a
RB
9877lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
9878 void *data)
75a4c3c1 9879{
d0fb20be 9880 tree t = *tp;
75a4c3c1 9881
d0fb20be 9882 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
726a989a 9883 if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
b826efd9
JJ
9884 return t;
9885
9886 if (task_shared_vars
9887 && DECL_P (t)
9888 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
d0fb20be 9889 return t;
75a4c3c1 9890
d0fb20be
JJ
9891 /* If a global variable has been privatized, TREE_CONSTANT on
9892 ADDR_EXPR might be wrong. */
726a989a 9893 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
d0fb20be 9894 recompute_tree_invariant_for_addr_expr (t);
75a4c3c1 9895
d0fb20be
JJ
9896 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
9897 return NULL_TREE;
75a4c3c1 9898}
50674e96 9899
d0fb20be 9900static void
726a989a 9901lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 9902{
726a989a
RB
9903 gimple stmt = gsi_stmt (*gsi_p);
9904 struct walk_stmt_info wi;
953ff289 9905
726a989a
RB
9906 if (gimple_has_location (stmt))
9907 input_location = gimple_location (stmt);
d0fb20be 9908
726a989a
RB
9909 if (task_shared_vars)
9910 memset (&wi, '\0', sizeof (wi));
d0fb20be 9911
50674e96
DN
9912 /* If we have issued syntax errors, avoid doing any heavy lifting.
9913 Just replace the OpenMP directives with a NOP to avoid
9914 confusing RTL expansion. */
1da2ed5f 9915 if (seen_error () && is_gimple_omp (stmt))
50674e96 9916 {
726a989a 9917 gsi_replace (gsi_p, gimple_build_nop (), true);
d0fb20be 9918 return;
50674e96
DN
9919 }
9920
726a989a 9921 switch (gimple_code (stmt))
953ff289 9922 {
726a989a 9923 case GIMPLE_COND:
a68ab351 9924 if ((ctx || task_shared_vars)
726a989a
RB
9925 && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
9926 ctx ? NULL : &wi, NULL)
9927 || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
9928 ctx ? NULL : &wi, NULL)))
9929 gimple_regimplify_operands (stmt, gsi_p);
d0fb20be 9930 break;
726a989a 9931 case GIMPLE_CATCH:
355a7673 9932 lower_omp (gimple_catch_handler_ptr (stmt), ctx);
d0fb20be 9933 break;
726a989a 9934 case GIMPLE_EH_FILTER:
355a7673 9935 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
d0fb20be 9936 break;
726a989a 9937 case GIMPLE_TRY:
355a7673
MM
9938 lower_omp (gimple_try_eval_ptr (stmt), ctx);
9939 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
d0fb20be 9940 break;
d88513ea
PM
9941 case GIMPLE_TRANSACTION:
9942 lower_omp (gimple_transaction_body_ptr (stmt), ctx);
9943 break;
726a989a 9944 case GIMPLE_BIND:
355a7673 9945 lower_omp (gimple_bind_body_ptr (stmt), ctx);
d0fb20be 9946 break;
726a989a
RB
9947 case GIMPLE_OMP_PARALLEL:
9948 case GIMPLE_OMP_TASK:
9949 ctx = maybe_lookup_ctx (stmt);
acf0174b
JJ
9950 gcc_assert (ctx);
9951 if (ctx->cancellable)
9952 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
726a989a 9953 lower_omp_taskreg (gsi_p, ctx);
d0fb20be 9954 break;
726a989a
RB
9955 case GIMPLE_OMP_FOR:
9956 ctx = maybe_lookup_ctx (stmt);
953ff289 9957 gcc_assert (ctx);
acf0174b
JJ
9958 if (ctx->cancellable)
9959 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
726a989a 9960 lower_omp_for (gsi_p, ctx);
953ff289 9961 break;
726a989a
RB
9962 case GIMPLE_OMP_SECTIONS:
9963 ctx = maybe_lookup_ctx (stmt);
953ff289 9964 gcc_assert (ctx);
acf0174b
JJ
9965 if (ctx->cancellable)
9966 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
726a989a 9967 lower_omp_sections (gsi_p, ctx);
953ff289 9968 break;
726a989a
RB
9969 case GIMPLE_OMP_SINGLE:
9970 ctx = maybe_lookup_ctx (stmt);
953ff289 9971 gcc_assert (ctx);
726a989a 9972 lower_omp_single (gsi_p, ctx);
953ff289 9973 break;
726a989a
RB
9974 case GIMPLE_OMP_MASTER:
9975 ctx = maybe_lookup_ctx (stmt);
953ff289 9976 gcc_assert (ctx);
726a989a 9977 lower_omp_master (gsi_p, ctx);
953ff289 9978 break;
acf0174b
JJ
9979 case GIMPLE_OMP_TASKGROUP:
9980 ctx = maybe_lookup_ctx (stmt);
9981 gcc_assert (ctx);
9982 lower_omp_taskgroup (gsi_p, ctx);
9983 break;
726a989a
RB
9984 case GIMPLE_OMP_ORDERED:
9985 ctx = maybe_lookup_ctx (stmt);
953ff289 9986 gcc_assert (ctx);
726a989a 9987 lower_omp_ordered (gsi_p, ctx);
953ff289 9988 break;
726a989a
RB
9989 case GIMPLE_OMP_CRITICAL:
9990 ctx = maybe_lookup_ctx (stmt);
953ff289 9991 gcc_assert (ctx);
726a989a
RB
9992 lower_omp_critical (gsi_p, ctx);
9993 break;
9994 case GIMPLE_OMP_ATOMIC_LOAD:
9995 if ((ctx || task_shared_vars)
9996 && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
9997 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
9998 gimple_regimplify_operands (stmt, gsi_p);
953ff289 9999 break;
acf0174b
JJ
10000 case GIMPLE_OMP_TARGET:
10001 ctx = maybe_lookup_ctx (stmt);
10002 gcc_assert (ctx);
10003 lower_omp_target (gsi_p, ctx);
10004 break;
10005 case GIMPLE_OMP_TEAMS:
10006 ctx = maybe_lookup_ctx (stmt);
10007 gcc_assert (ctx);
10008 lower_omp_teams (gsi_p, ctx);
10009 break;
10010 case GIMPLE_CALL:
10011 tree fndecl;
10012 fndecl = gimple_call_fndecl (stmt);
10013 if (fndecl
10014 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10015 switch (DECL_FUNCTION_CODE (fndecl))
10016 {
10017 case BUILT_IN_GOMP_BARRIER:
10018 if (ctx == NULL)
10019 break;
10020 /* FALLTHRU */
10021 case BUILT_IN_GOMP_CANCEL:
10022 case BUILT_IN_GOMP_CANCELLATION_POINT:
10023 omp_context *cctx;
10024 cctx = ctx;
10025 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
10026 cctx = cctx->outer;
10027 gcc_assert (gimple_call_lhs (stmt) == NULL_TREE);
10028 if (!cctx->cancellable)
10029 {
10030 if (DECL_FUNCTION_CODE (fndecl)
10031 == BUILT_IN_GOMP_CANCELLATION_POINT)
10032 {
10033 stmt = gimple_build_nop ();
10034 gsi_replace (gsi_p, stmt, false);
10035 }
10036 break;
10037 }
10038 tree lhs;
10039 lhs = create_tmp_var (boolean_type_node, NULL);
10040 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
10041 {
10042 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
10043 gimple_call_set_fndecl (stmt, fndecl);
10044 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
10045 }
10046 gimple_call_set_lhs (stmt, lhs);
10047 tree fallthru_label;
10048 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
10049 gimple g;
10050 g = gimple_build_label (fallthru_label);
10051 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10052 g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
10053 cctx->cancel_label, fallthru_label);
10054 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10055 break;
10056 default:
10057 break;
10058 }
10059 /* FALLTHRU */
d0fb20be 10060 default:
a68ab351 10061 if ((ctx || task_shared_vars)
726a989a
RB
10062 && walk_gimple_op (stmt, lower_omp_regimplify_p,
10063 ctx ? NULL : &wi))
10064 gimple_regimplify_operands (stmt, gsi_p);
953ff289 10065 break;
953ff289 10066 }
953ff289
DN
10067}
10068
10069static void
355a7673 10070lower_omp (gimple_seq *body, omp_context *ctx)
953ff289 10071{
b357f682 10072 location_t saved_location = input_location;
355a7673
MM
10073 gimple_stmt_iterator gsi;
10074 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
726a989a 10075 lower_omp_1 (&gsi, ctx);
acf0174b
JJ
10076 /* Inside target region we haven't called fold_stmt during gimplification,
10077 because it can break code by adding decl references that weren't in the
10078 source. Call fold_stmt now. */
10079 if (target_nesting_level)
10080 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10081 fold_stmt (&gsi);
b357f682 10082 input_location = saved_location;
953ff289
DN
10083}
10084\f
10085/* Main entry point. */
10086
c2924966 10087static unsigned int
953ff289
DN
10088execute_lower_omp (void)
10089{
726a989a
RB
10090 gimple_seq body;
10091
535b544a
SB
10092 /* This pass always runs, to provide PROP_gimple_lomp.
10093 But there is nothing to do unless -fopenmp is given. */
c02065fc 10094 if (flag_openmp == 0 && flag_openmp_simd == 0 && flag_enable_cilkplus == 0)
535b544a
SB
10095 return 0;
10096
953ff289
DN
10097 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
10098 delete_omp_context);
10099
726a989a 10100 body = gimple_body (current_function_decl);
26127932 10101 scan_omp (&body, NULL);
a68ab351 10102 gcc_assert (taskreg_nesting_level == 0);
953ff289
DN
10103
10104 if (all_contexts->root)
a68ab351 10105 {
d406b663
JJ
10106 struct gimplify_ctx gctx;
10107
a68ab351 10108 if (task_shared_vars)
d406b663 10109 push_gimplify_context (&gctx);
355a7673 10110 lower_omp (&body, NULL);
a68ab351
JJ
10111 if (task_shared_vars)
10112 pop_gimplify_context (NULL);
10113 }
953ff289 10114
50674e96
DN
10115 if (all_contexts)
10116 {
10117 splay_tree_delete (all_contexts);
10118 all_contexts = NULL;
10119 }
a68ab351 10120 BITMAP_FREE (task_shared_vars);
c2924966 10121 return 0;
953ff289
DN
10122}
10123
27a4cd48
DM
10124namespace {
10125
10126const pass_data pass_data_lower_omp =
10127{
10128 GIMPLE_PASS, /* type */
10129 "omplower", /* name */
10130 OPTGROUP_NONE, /* optinfo_flags */
10131 false, /* has_gate */
10132 true, /* has_execute */
10133 TV_NONE, /* tv_id */
10134 PROP_gimple_any, /* properties_required */
10135 PROP_gimple_lomp, /* properties_provided */
10136 0, /* properties_destroyed */
10137 0, /* todo_flags_start */
10138 0, /* todo_flags_finish */
953ff289 10139};
27a4cd48
DM
10140
10141class pass_lower_omp : public gimple_opt_pass
10142{
10143public:
c3284718
RS
10144 pass_lower_omp (gcc::context *ctxt)
10145 : gimple_opt_pass (pass_data_lower_omp, ctxt)
27a4cd48
DM
10146 {}
10147
10148 /* opt_pass methods: */
10149 unsigned int execute () { return execute_lower_omp (); }
10150
10151}; // class pass_lower_omp
10152
10153} // anon namespace
10154
10155gimple_opt_pass *
10156make_pass_lower_omp (gcc::context *ctxt)
10157{
10158 return new pass_lower_omp (ctxt);
10159}
953ff289
DN
10160\f
10161/* The following is a utility to diagnose OpenMP structured block violations.
777f7f9a
RH
10162 It is not part of the "omplower" pass, as that's invoked too late. It
10163 should be invoked by the respective front ends after gimplification. */
953ff289
DN
10164
10165static splay_tree all_labels;
10166
10167/* Check for mismatched contexts and generate an error if needed. Return
10168 true if an error is detected. */
10169
10170static bool
726a989a
RB
10171diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10172 gimple branch_ctx, gimple label_ctx)
953ff289 10173{
726a989a 10174 if (label_ctx == branch_ctx)
953ff289
DN
10175 return false;
10176
b8698a0f 10177
726a989a
RB
10178 /*
10179 Previously we kept track of the label's entire context in diagnose_sb_[12]
10180 so we could traverse it and issue a correct "exit" or "enter" error
10181 message upon a structured block violation.
10182
10183 We built the context by building a list with tree_cons'ing, but there is
10184 no easy counterpart in gimple tuples. It seems like far too much work
10185 for issuing exit/enter error messages. If someone really misses the
10186 distinct error message... patches welcome.
10187 */
b8698a0f 10188
726a989a 10189#if 0
953ff289 10190 /* Try to avoid confusing the user by producing and error message
fa10beec 10191 with correct "exit" or "enter" verbiage. We prefer "exit"
953ff289
DN
10192 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
10193 if (branch_ctx == NULL)
10194 exit_p = false;
10195 else
10196 {
10197 while (label_ctx)
10198 {
10199 if (TREE_VALUE (label_ctx) == branch_ctx)
10200 {
10201 exit_p = false;
10202 break;
10203 }
10204 label_ctx = TREE_CHAIN (label_ctx);
10205 }
10206 }
10207
10208 if (exit_p)
10209 error ("invalid exit from OpenMP structured block");
10210 else
10211 error ("invalid entry to OpenMP structured block");
726a989a 10212#endif
953ff289 10213
c02065fc
AH
10214 bool cilkplus_block = false;
10215 if (flag_enable_cilkplus)
10216 {
10217 if ((branch_ctx
10218 && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
10219 && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
10220 || (gimple_code (label_ctx) == GIMPLE_OMP_FOR
10221 && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
10222 cilkplus_block = true;
10223 }
10224
726a989a
RB
10225 /* If it's obvious we have an invalid entry, be specific about the error. */
10226 if (branch_ctx == NULL)
c02065fc
AH
10227 {
10228 if (cilkplus_block)
10229 error ("invalid entry to Cilk Plus structured block");
10230 else
10231 error ("invalid entry to OpenMP structured block");
10232 }
726a989a 10233 else
c02065fc
AH
10234 {
10235 /* Otherwise, be vague and lazy, but efficient. */
10236 if (cilkplus_block)
10237 error ("invalid branch to/from a Cilk Plus structured block");
10238 else
10239 error ("invalid branch to/from an OpenMP structured block");
10240 }
726a989a
RB
10241
10242 gsi_replace (gsi_p, gimple_build_nop (), false);
953ff289
DN
10243 return true;
10244}
10245
10246/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
726a989a 10247 where each label is found. */
953ff289
DN
10248
10249static tree
726a989a
RB
10250diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10251 struct walk_stmt_info *wi)
953ff289 10252{
726a989a
RB
10253 gimple context = (gimple) wi->info;
10254 gimple inner_context;
10255 gimple stmt = gsi_stmt (*gsi_p);
953ff289 10256
726a989a
RB
10257 *handled_ops_p = true;
10258
10259 switch (gimple_code (stmt))
953ff289 10260 {
726a989a 10261 WALK_SUBSTMTS;
b8698a0f 10262
726a989a
RB
10263 case GIMPLE_OMP_PARALLEL:
10264 case GIMPLE_OMP_TASK:
10265 case GIMPLE_OMP_SECTIONS:
10266 case GIMPLE_OMP_SINGLE:
10267 case GIMPLE_OMP_SECTION:
10268 case GIMPLE_OMP_MASTER:
10269 case GIMPLE_OMP_ORDERED:
10270 case GIMPLE_OMP_CRITICAL:
acf0174b
JJ
10271 case GIMPLE_OMP_TARGET:
10272 case GIMPLE_OMP_TEAMS:
10273 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
10274 /* The minimal context here is just the current OMP construct. */
10275 inner_context = stmt;
953ff289 10276 wi->info = inner_context;
726a989a 10277 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
953ff289
DN
10278 wi->info = context;
10279 break;
10280
726a989a
RB
10281 case GIMPLE_OMP_FOR:
10282 inner_context = stmt;
953ff289 10283 wi->info = inner_context;
726a989a
RB
10284 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10285 walk them. */
10286 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10287 diagnose_sb_1, NULL, wi);
10288 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
953ff289
DN
10289 wi->info = context;
10290 break;
10291
726a989a
RB
10292 case GIMPLE_LABEL:
10293 splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
953ff289
DN
10294 (splay_tree_value) context);
10295 break;
10296
10297 default:
10298 break;
10299 }
10300
10301 return NULL_TREE;
10302}
10303
10304/* Pass 2: Check each branch and see if its context differs from that of
10305 the destination label's context. */
10306
10307static tree
726a989a
RB
10308diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10309 struct walk_stmt_info *wi)
953ff289 10310{
726a989a 10311 gimple context = (gimple) wi->info;
953ff289 10312 splay_tree_node n;
726a989a 10313 gimple stmt = gsi_stmt (*gsi_p);
953ff289 10314
726a989a
RB
10315 *handled_ops_p = true;
10316
10317 switch (gimple_code (stmt))
953ff289 10318 {
726a989a
RB
10319 WALK_SUBSTMTS;
10320
10321 case GIMPLE_OMP_PARALLEL:
10322 case GIMPLE_OMP_TASK:
10323 case GIMPLE_OMP_SECTIONS:
10324 case GIMPLE_OMP_SINGLE:
10325 case GIMPLE_OMP_SECTION:
10326 case GIMPLE_OMP_MASTER:
10327 case GIMPLE_OMP_ORDERED:
10328 case GIMPLE_OMP_CRITICAL:
acf0174b
JJ
10329 case GIMPLE_OMP_TARGET:
10330 case GIMPLE_OMP_TEAMS:
10331 case GIMPLE_OMP_TASKGROUP:
726a989a 10332 wi->info = stmt;
355a7673 10333 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
953ff289
DN
10334 wi->info = context;
10335 break;
10336
726a989a
RB
10337 case GIMPLE_OMP_FOR:
10338 wi->info = stmt;
10339 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10340 walk them. */
355a7673
MM
10341 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
10342 diagnose_sb_2, NULL, wi);
10343 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
953ff289
DN
10344 wi->info = context;
10345 break;
10346
ca50f84a
L
10347 case GIMPLE_COND:
10348 {
10349 tree lab = gimple_cond_true_label (stmt);
10350 if (lab)
10351 {
10352 n = splay_tree_lookup (all_labels,
10353 (splay_tree_key) lab);
10354 diagnose_sb_0 (gsi_p, context,
10355 n ? (gimple) n->value : NULL);
10356 }
10357 lab = gimple_cond_false_label (stmt);
10358 if (lab)
10359 {
10360 n = splay_tree_lookup (all_labels,
10361 (splay_tree_key) lab);
10362 diagnose_sb_0 (gsi_p, context,
10363 n ? (gimple) n->value : NULL);
10364 }
10365 }
10366 break;
10367
726a989a 10368 case GIMPLE_GOTO:
953ff289 10369 {
726a989a 10370 tree lab = gimple_goto_dest (stmt);
953ff289
DN
10371 if (TREE_CODE (lab) != LABEL_DECL)
10372 break;
10373
10374 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
726a989a 10375 diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
953ff289
DN
10376 }
10377 break;
10378
726a989a 10379 case GIMPLE_SWITCH:
953ff289 10380 {
726a989a
RB
10381 unsigned int i;
10382 for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
953ff289 10383 {
726a989a 10384 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
953ff289 10385 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
726a989a 10386 if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
953ff289
DN
10387 break;
10388 }
10389 }
10390 break;
10391
726a989a
RB
10392 case GIMPLE_RETURN:
10393 diagnose_sb_0 (gsi_p, context, NULL);
953ff289
DN
10394 break;
10395
10396 default:
10397 break;
10398 }
10399
10400 return NULL_TREE;
10401}
10402
0645c1a2
AM
10403/* Called from tree-cfg.c::make_edges to create cfg edges for all GIMPLE_OMP
10404 codes. */
10405bool
10406make_gimple_omp_edges (basic_block bb, struct omp_region **region)
10407{
10408 gimple last = last_stmt (bb);
10409 enum gimple_code code = gimple_code (last);
10410 struct omp_region *cur_region = *region;
10411 bool fallthru = false;
10412
10413 switch (code)
10414 {
10415 case GIMPLE_OMP_PARALLEL:
10416 case GIMPLE_OMP_TASK:
10417 case GIMPLE_OMP_FOR:
10418 case GIMPLE_OMP_SINGLE:
10419 case GIMPLE_OMP_TEAMS:
10420 case GIMPLE_OMP_MASTER:
10421 case GIMPLE_OMP_TASKGROUP:
10422 case GIMPLE_OMP_ORDERED:
10423 case GIMPLE_OMP_CRITICAL:
10424 case GIMPLE_OMP_SECTION:
10425 cur_region = new_omp_region (bb, code, cur_region);
10426 fallthru = true;
10427 break;
10428
10429 case GIMPLE_OMP_TARGET:
10430 cur_region = new_omp_region (bb, code, cur_region);
10431 fallthru = true;
10432 if (gimple_omp_target_kind (last) == GF_OMP_TARGET_KIND_UPDATE)
10433 cur_region = cur_region->outer;
10434 break;
10435
10436 case GIMPLE_OMP_SECTIONS:
10437 cur_region = new_omp_region (bb, code, cur_region);
10438 fallthru = true;
10439 break;
10440
10441 case GIMPLE_OMP_SECTIONS_SWITCH:
10442 fallthru = false;
10443 break;
10444
10445 case GIMPLE_OMP_ATOMIC_LOAD:
10446 case GIMPLE_OMP_ATOMIC_STORE:
10447 fallthru = true;
10448 break;
10449
10450 case GIMPLE_OMP_RETURN:
10451 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
10452 somewhere other than the next block. This will be
10453 created later. */
10454 cur_region->exit = bb;
10455 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
10456 cur_region = cur_region->outer;
10457 break;
10458
10459 case GIMPLE_OMP_CONTINUE:
10460 cur_region->cont = bb;
10461 switch (cur_region->type)
10462 {
10463 case GIMPLE_OMP_FOR:
10464 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
10465 succs edges as abnormal to prevent splitting
10466 them. */
10467 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
10468 /* Make the loopback edge. */
10469 make_edge (bb, single_succ (cur_region->entry),
10470 EDGE_ABNORMAL);
10471
10472 /* Create an edge from GIMPLE_OMP_FOR to exit, which
10473 corresponds to the case that the body of the loop
10474 is not executed at all. */
10475 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
10476 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
10477 fallthru = false;
10478 break;
10479
10480 case GIMPLE_OMP_SECTIONS:
10481 /* Wire up the edges into and out of the nested sections. */
10482 {
10483 basic_block switch_bb = single_succ (cur_region->entry);
10484
10485 struct omp_region *i;
10486 for (i = cur_region->inner; i ; i = i->next)
10487 {
10488 gcc_assert (i->type == GIMPLE_OMP_SECTION);
10489 make_edge (switch_bb, i->entry, 0);
10490 make_edge (i->exit, bb, EDGE_FALLTHRU);
10491 }
10492
10493 /* Make the loopback edge to the block with
10494 GIMPLE_OMP_SECTIONS_SWITCH. */
10495 make_edge (bb, switch_bb, 0);
10496
10497 /* Make the edge from the switch to exit. */
10498 make_edge (switch_bb, bb->next_bb, 0);
10499 fallthru = false;
10500 }
10501 break;
10502
10503 default:
10504 gcc_unreachable ();
10505 }
10506 break;
10507
10508 default:
10509 gcc_unreachable ();
10510 }
10511
10512 if (*region != cur_region)
10513 *region = cur_region;
10514
10515 return fallthru;
10516}
10517
a406865a
RG
10518static unsigned int
10519diagnose_omp_structured_block_errors (void)
953ff289 10520{
953ff289 10521 struct walk_stmt_info wi;
a406865a 10522 gimple_seq body = gimple_body (current_function_decl);
953ff289
DN
10523
10524 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10525
10526 memset (&wi, 0, sizeof (wi));
726a989a 10527 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
953ff289
DN
10528
10529 memset (&wi, 0, sizeof (wi));
953ff289 10530 wi.want_locations = true;
355a7673
MM
10531 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10532
10533 gimple_set_body (current_function_decl, body);
953ff289
DN
10534
10535 splay_tree_delete (all_labels);
10536 all_labels = NULL;
10537
a406865a 10538 return 0;
953ff289
DN
10539}
10540
a406865a
RG
10541static bool
10542gate_diagnose_omp_blocks (void)
10543{
c02065fc 10544 return flag_openmp || flag_enable_cilkplus;
a406865a
RG
10545}
10546
27a4cd48
DM
10547namespace {
10548
10549const pass_data pass_data_diagnose_omp_blocks =
10550{
10551 GIMPLE_PASS, /* type */
10552 "*diagnose_omp_blocks", /* name */
10553 OPTGROUP_NONE, /* optinfo_flags */
10554 true, /* has_gate */
10555 true, /* has_execute */
10556 TV_NONE, /* tv_id */
10557 PROP_gimple_any, /* properties_required */
10558 0, /* properties_provided */
10559 0, /* properties_destroyed */
10560 0, /* todo_flags_start */
10561 0, /* todo_flags_finish */
a406865a
RG
10562};
10563
27a4cd48
DM
10564class pass_diagnose_omp_blocks : public gimple_opt_pass
10565{
10566public:
c3284718
RS
10567 pass_diagnose_omp_blocks (gcc::context *ctxt)
10568 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
27a4cd48
DM
10569 {}
10570
10571 /* opt_pass methods: */
10572 bool gate () { return gate_diagnose_omp_blocks (); }
10573 unsigned int execute () {
10574 return diagnose_omp_structured_block_errors ();
10575 }
10576
10577}; // class pass_diagnose_omp_blocks
10578
10579} // anon namespace
10580
10581gimple_opt_pass *
10582make_pass_diagnose_omp_blocks (gcc::context *ctxt)
10583{
10584 return new pass_diagnose_omp_blocks (ctxt);
10585}
10586
953ff289 10587#include "gt-omp-low.h"