/* Lowering pass for OpenMP directives.  Converts OpenMP directives
   into explicit calls to the runtime library (libgomp) and data
   marshalling to implement data sharing and copying clauses.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "gimplify.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "tree-pass.h"
#include "ggc.h"
#include "except.h"
#include "splay-tree.h"
#include "optabs.h"
#include "cfgloop.h"
#include "target.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "tree-cfgcleanup.h"

/* Lowering of OpenMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for parallel regions which are then moved to a new
   function, to be invoked by the thread library.  */
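
/* For illustration only: a sketch (not the exact GIMPLE, and with the
   libgomp entry point varying between releases) of what the two phases
   produce for a construct such as

	#pragma omp parallel shared (a)
	  body;

   The body is outlined into a child function taking the data-sharing
   record built by the scan phase, and the directive becomes a library
   call, roughly:

	static void subfn (struct .omp_data_s *.omp_data_i) { body; }
	...
	.omp_data_o.a = a;
	GOMP_parallel (subfn, &.omp_data_o, 0, 0);

   (older libgomp versions use GOMP_parallel_start/GOMP_parallel_end
   around a direct call).  The .omp_data_* names follow the examples
   used in comments later in this file.  */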

/* Parallel region information.  Every parallel and workshare
   directive is enclosed between two markers, the OMP_* directive
   and a corresponding OMP_RETURN statement.  */

struct omp_region
{
  /* The enclosing region.  */
  struct omp_region *outer;

  /* First child region.  */
  struct omp_region *inner;

  /* Next peer region.  */
  struct omp_region *next;

  /* Block containing the omp directive as its last stmt.  */
  basic_block entry;

  /* Block containing the OMP_RETURN as its last stmt.  */
  basic_block exit;

  /* Block containing the OMP_CONTINUE as its last stmt.  */
  basic_block cont;

  /* If this is a combined parallel+workshare region, this is a list
     of additional arguments needed by the combined parallel+workshare
     library call.  */
  vec<tree, va_gc> *ws_args;

  /* The code for the omp directive of this region.  */
  enum gimple_code type;

  /* Schedule kind, only used for OMP_FOR type regions.  */
  enum omp_clause_schedule_kind sched_kind;

  /* True if this is a combined parallel+workshare region.  */
  bool is_combined_parallel;
};

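/* For illustration only: a sketch of the region tree these links form.
   Given

	#pragma omp parallel
	  {
	    #pragma omp for ...
	    #pragma omp single ...
	  }

   the parallel directive yields the root region, INNER points at the
   region for the for, the single region is that region's NEXT peer,
   and both point back to the parallel region through OUTER.  Walking
   all peers at one level is then simply:

	struct omp_region *r;
	for (r = region->inner; r; r = r->next)
	  ...  */
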
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

typedef struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
} omp_context;

struct omp_for_data_loop
{
  tree v, n1, n2, step;
  enum tree_code cond_code;
};

/* A structure describing the main elements of a parallel loop.  */

struct omp_for_data
{
  struct omp_for_data_loop loop;
  tree chunk_size;
  gimple for_stmt;
  tree pre, iter_type;
  int collapse;
  bool have_nowait, have_ordered;
  enum omp_clause_schedule_kind sched_kind;
  struct omp_for_data_loop *loops;
};

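/* For illustration only: for a collapsed loop nest such as

	#pragma omp for collapse (2)
	for (i = 0; i < N; i++)
	  for (j = 0; j < M; j++)

   extract_omp_for_data below fills LOOPS[0] and LOOPS[1] with the two
   original nests and, as a sketch of the end result, rewrites LOOP as
   the single logical iteration space

	.iter = 0; .iter < N * M; .iter += 1

   i.e. loop.n1 = 0, loop.n2 = the collapsed trip count, loop.step = 1
   and loop.cond_code = LT_EXPR.  */
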

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static struct omp_region *root_omp_region;
static bitmap task_shared_vars;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Find an OpenMP clause of type KIND within CLAUSES.  */

tree
find_omp_clause (tree clauses, enum omp_clause_code kind)
{
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == kind)
      return clauses;

  return NULL_TREE;
}

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
	 || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if REGION is a combined parallel+workshare region.  */

static inline bool
is_combined_parallel (struct omp_region *region)
{
  return region->is_combined_parallel;
}


/* Extract the header elements of parallel loop FOR_STMT and store
   them into *FD.  */

static void
extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
		      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;
  location_t loc = gimple_location (for_stmt);
  bool simd = gimple_omp_for_kind (for_stmt) == GF_OMP_FOR_KIND_SIMD;
  bool distribute = gimple_omp_for_kind (for_stmt)
		    == GF_OMP_FOR_KIND_DISTRIBUTE;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  fd->collapse = gimple_omp_for_collapse (for_stmt);
  if (fd->collapse > 1)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  fd->have_nowait = distribute || simd;
  fd->have_ordered = false;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->chunk_size = NULL_TREE;
  collapse_iter = NULL;
  collapse_count = NULL;

  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
	fd->have_nowait = true;
	break;
      case OMP_CLAUSE_ORDERED:
	fd->have_ordered = true;
	break;
      case OMP_CLAUSE_SCHEDULE:
	gcc_assert (!distribute);
	fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
	fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
	break;
      case OMP_CLAUSE_DIST_SCHEDULE:
	gcc_assert (distribute);
	fd->chunk_size = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t);
	break;
      case OMP_CLAUSE_COLLAPSE:
	if (fd->collapse > 1)
	  {
	    collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
	    collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
	  }
      default:
	break;
      }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert (fd->collapse == 1 || collapse_iter != NULL);
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
	 static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered)
	fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
			 ? integer_zero_node : integer_one_node;
    }

  for (i = 0; i < fd->collapse; i++)
    {
      if (fd->collapse == 1)
	loop = &fd->loop;
      else if (loops != NULL)
	loop = loops + i;
      else
	loop = &dummy_loop;

      loop->v = gimple_omp_for_index (for_stmt, i);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = gimple_omp_for_initial (for_stmt, i);

      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
      loop->n2 = gimple_omp_for_final (for_stmt, i);
      switch (loop->cond_code)
	{
	case LT_EXPR:
	case GT_EXPR:
	  break;
	case LE_EXPR:
	  if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
	    loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, 1);
	  else
	    loop->n2 = fold_build2_loc (loc,
					PLUS_EXPR, TREE_TYPE (loop->n2),
					loop->n2,
					build_int_cst (TREE_TYPE (loop->n2),
						       1));
	  loop->cond_code = LT_EXPR;
	  break;
	case GE_EXPR:
	  if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
	    loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, -1);
	  else
	    loop->n2 = fold_build2_loc (loc,
					MINUS_EXPR, TREE_TYPE (loop->n2),
					loop->n2,
					build_int_cst (TREE_TYPE (loop->n2),
						       1));
	  loop->cond_code = GT_EXPR;
	  break;
	default:
	  gcc_unreachable ();
	}

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      switch (TREE_CODE (t))
	{
	case PLUS_EXPR:
	  loop->step = TREE_OPERAND (t, 1);
	  break;
	case POINTER_PLUS_EXPR:
	  loop->step = fold_convert (ssizetype, TREE_OPERAND (t, 1));
	  break;
	case MINUS_EXPR:
	  loop->step = TREE_OPERAND (t, 1);
	  loop->step = fold_build1_loc (loc,
					NEGATE_EXPR, TREE_TYPE (loop->step),
					loop->step);
	  break;
	default:
	  gcc_unreachable ();
	}

      if (simd
	  || (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
	      && !fd->have_ordered))
	{
	  if (fd->collapse == 1)
	    iter_type = TREE_TYPE (loop->v);
	  else if (i == 0
		   || TYPE_PRECISION (iter_type)
		      < TYPE_PRECISION (TREE_TYPE (loop->v)))
	    iter_type
	      = build_nonstandard_integer_type
		  (TYPE_PRECISION (TREE_TYPE (loop->v)), 1);
	}
      else if (iter_type != long_long_unsigned_type_node)
	{
	  if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
	    iter_type = long_long_unsigned_type_node;
	  else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
		   && TYPE_PRECISION (TREE_TYPE (loop->v))
		      >= TYPE_PRECISION (iter_type))
	    {
	      tree n;

	      if (loop->cond_code == LT_EXPR)
		n = fold_build2_loc (loc,
				     PLUS_EXPR, TREE_TYPE (loop->v),
				     loop->n2, loop->step);
	      else
		n = loop->n1;
	      if (TREE_CODE (n) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
		iter_type = long_long_unsigned_type_node;
	    }
	  else if (TYPE_PRECISION (TREE_TYPE (loop->v))
		   > TYPE_PRECISION (iter_type))
	    {
	      tree n1, n2;

	      if (loop->cond_code == LT_EXPR)
		{
		  n1 = loop->n1;
		  n2 = fold_build2_loc (loc,
					PLUS_EXPR, TREE_TYPE (loop->v),
					loop->n2, loop->step);
		}
	      else
		{
		  n1 = fold_build2_loc (loc,
					MINUS_EXPR, TREE_TYPE (loop->v),
					loop->n2, loop->step);
		  n2 = loop->n1;
		}
	      if (TREE_CODE (n1) != INTEGER_CST
		  || TREE_CODE (n2) != INTEGER_CST
		  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
		  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
		iter_type = long_long_unsigned_type_node;
	    }
	}

      if (collapse_count && *collapse_count == NULL)
	{
	  t = fold_binary (loop->cond_code, boolean_type_node,
			   fold_convert (TREE_TYPE (loop->v), loop->n1),
			   fold_convert (TREE_TYPE (loop->v), loop->n2));
	  if (t && integer_zerop (t))
	    count = build_zero_cst (long_long_unsigned_type_node);
	  else if ((i == 0 || count != NULL_TREE)
		   && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
		   && TREE_CONSTANT (loop->n1)
		   && TREE_CONSTANT (loop->n2)
		   && TREE_CODE (loop->step) == INTEGER_CST)
	    {
	      tree itype = TREE_TYPE (loop->v);

	      if (POINTER_TYPE_P (itype))
		itype = signed_type_for (itype);
	      t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
	      t = fold_build2_loc (loc,
				   PLUS_EXPR, itype,
				   fold_convert_loc (loc, itype, loop->step),
				   t);
	      t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
				   fold_convert_loc (loc, itype, loop->n2));
	      t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
				   fold_convert_loc (loc, itype, loop->n1));
	      if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
				     fold_build1_loc (loc, NEGATE_EXPR,
						      itype, t),
				     fold_build1_loc (loc, NEGATE_EXPR, itype,
						      fold_convert_loc (loc,
									itype,
									loop->step)));
	      else
		t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
				     fold_convert_loc (loc, itype,
						       loop->step));
	      t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
	      if (count != NULL_TREE)
		count = fold_build2_loc (loc,
					 MULT_EXPR,
					 long_long_unsigned_type_node,
					 count, t);
	      else
		count = t;
	      if (TREE_CODE (count) != INTEGER_CST)
		count = NULL_TREE;
	    }
	  else if (count && !integer_zerop (count))
	    count = NULL_TREE;
	}
    }

  if (count
      && !simd
      && (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
	  || fd->have_ordered))
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
	iter_type = long_long_unsigned_type_node;
      else
	iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
	*collapse_count = fold_convert_loc (loc, iter_type, count);
      else
	*collapse_count = create_tmp_var (iter_type, ".count");
    }

  if (fd->collapse > 1)
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
}

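/* For illustration only, a summary of the normalization performed
   above: i <= n2 is rewritten as i < n2 + 1 and i >= n2 as i > n2 - 1,
   so only LT_EXPR and GT_EXPR survive.  For a normalized loop

	for (i = n1; i < n2; i += step)

   the constant trip count built by the fold_build2_loc chain in the
   collapse handling amounts to

	count = (n2 - n1 + step - 1) / step

   evaluated in the iteration variable's type (signed_type_for it when
   it is a pointer); GT_EXPR loops are handled by negating both the
   numerator and the step before the division.  */
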
/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
   is the immediate dominator of PAR_ENTRY_BB, return true if there
   are no data dependencies that would prevent expanding the parallel
   directive at PAR_ENTRY_BB as a combined parallel+workshare region.

   When expanding a combined parallel+workshare region, the call to
   the child function may need additional arguments in the case of
   GIMPLE_OMP_FOR regions.  In some cases, these arguments are
   computed out of variables passed in from the parent to the child
   via 'struct .omp_data_s'.  For instance:

	#pragma omp parallel for schedule (guided, i * 4)
	for (j ...)

   Is lowered into:

	# BLOCK 2 (PAR_ENTRY_BB)
	.omp_data_o.i = i;
	#pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)

	# BLOCK 3 (WS_ENTRY_BB)
	.omp_data_i = &.omp_data_o;
	D.1667 = .omp_data_i->i;
	D.1598 = D.1667 * 4;
	#pragma omp for schedule (guided, D.1598)

   When we outline the parallel region, the call to the child function
   'bar.omp_fn.0' will need the value D.1598 in its argument list, but
   that value is computed *after* the call site.  So, in principle we
   cannot do the transformation.

   To see whether the code in WS_ENTRY_BB blocks the combined
   parallel+workshare call, we collect all the variables used in the
   GIMPLE_OMP_FOR header and check whether they appear on the LHS of
   any statement in WS_ENTRY_BB.  If so, then we cannot emit the
   combined call.

   FIXME.  If we had the SSA form built at this point, we could merely
   hoist the code in block 3 into block 2 and be done with it.  But at
   this point we don't have dataflow information and though we could
   hack something up here, it is really not worth the aggravation.  */

static bool
workshare_safe_to_combine_p (basic_block ws_entry_bb)
{
  struct omp_for_data fd;
  gimple ws_stmt = last_stmt (ws_entry_bb);

  if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    return true;

  gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);

  extract_omp_for_data (ws_stmt, &fd, NULL);

  if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
    return false;
  if (fd.iter_type != long_integer_type_node)
    return false;

  /* FIXME.  We give up too easily here.  If any of these arguments
     are not constants, they will likely involve variables that have
     been mapped into fields of .omp_data_s for sharing with the child
     function.  With appropriate data flow, it would be possible to
     see through this.  */
  if (!is_gimple_min_invariant (fd.loop.n1)
      || !is_gimple_min_invariant (fd.loop.n2)
      || !is_gimple_min_invariant (fd.loop.step)
      || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
    return false;

  return true;
}

/* Collect additional arguments needed to emit a combined
   parallel+workshare call.  WS_STMT is the workshare directive being
   expanded.  */

static vec<tree, va_gc> *
get_ws_args_for (gimple par_stmt, gimple ws_stmt)
{
  tree t;
  location_t loc = gimple_location (ws_stmt);
  vec<tree, va_gc> *ws_args;

  if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
    {
      struct omp_for_data fd;
      tree n1, n2;

      extract_omp_for_data (ws_stmt, &fd, NULL);
      n1 = fd.loop.n1;
      n2 = fd.loop.n2;

      if (gimple_omp_for_combined_into_p (ws_stmt))
	{
	  tree innerc
	    = find_omp_clause (gimple_omp_parallel_clauses (par_stmt),
			       OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  n1 = OMP_CLAUSE_DECL (innerc);
	  innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  n2 = OMP_CLAUSE_DECL (innerc);
	}

      vec_alloc (ws_args, 3 + (fd.chunk_size != 0));

      t = fold_convert_loc (loc, long_integer_type_node, n1);
      ws_args->quick_push (t);

      t = fold_convert_loc (loc, long_integer_type_node, n2);
      ws_args->quick_push (t);

      t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
      ws_args->quick_push (t);

      if (fd.chunk_size)
	{
	  t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
	  ws_args->quick_push (t);
	}

      return ws_args;
    }
  else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
    {
      /* The number of sections is equal to the number of edges from the
	 GIMPLE_OMP_SECTIONS_SWITCH statement, not counting the edge to
	 the exit of the sections region.  */
      basic_block bb = single_succ (gimple_bb (ws_stmt));
      t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
      vec_alloc (ws_args, 1);
      ws_args->quick_push (t);
      return ws_args;
    }

  gcc_unreachable ();
}

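/* For illustration only: for

	#pragma omp parallel for schedule (dynamic, 4)
	for (i = 0; i < n; i++)

   the vector built above holds the loop header values converted to
   long, roughly { (long) 0, (long) n, (long) 1, (long) 4 }, ready to
   be appended to the combined parallel+workshare library call.  */
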
/* Discover whether REGION is a combined parallel+workshare region.  */

static void
determine_parallel_type (struct omp_region *region)
{
  basic_block par_entry_bb, par_exit_bb;
  basic_block ws_entry_bb, ws_exit_bb;

  if (region == NULL || region->inner == NULL
      || region->exit == NULL || region->inner->exit == NULL
      || region->inner->cont == NULL)
    return;

  /* We only support parallel+for and parallel+sections.  */
  if (region->type != GIMPLE_OMP_PARALLEL
      || (region->inner->type != GIMPLE_OMP_FOR
	  && region->inner->type != GIMPLE_OMP_SECTIONS))
    return;

  /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
     WS_EXIT_BB -> PAR_EXIT_BB.  */
  par_entry_bb = region->entry;
  par_exit_bb = region->exit;
  ws_entry_bb = region->inner->entry;
  ws_exit_bb = region->inner->exit;

  if (single_succ (par_entry_bb) == ws_entry_bb
      && single_succ (ws_exit_bb) == par_exit_bb
      && workshare_safe_to_combine_p (ws_entry_bb)
      && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
	  || (last_and_only_stmt (ws_entry_bb)
	      && last_and_only_stmt (par_exit_bb))))
    {
      gimple par_stmt = last_stmt (par_entry_bb);
      gimple ws_stmt = last_stmt (ws_entry_bb);

      if (region->inner->type == GIMPLE_OMP_FOR)
	{
	  /* If this is a combined parallel loop, we need to determine
	     whether or not to use the combined library calls.  There
	     are two cases where we do not apply the transformation:
	     static loops and any kind of ordered loop.  In the first
	     case, we already open code the loop so there is no need
	     to do anything else.  In the latter case, the combined
	     parallel loop call would still need extra synchronization
	     to implement ordered semantics, so there would not be any
	     gain in using the combined call.  */
	  tree clauses = gimple_omp_for_clauses (ws_stmt);
	  tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
	  if (c == NULL
	      || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
	      || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
	    {
	      region->is_combined_parallel = false;
	      region->inner->is_combined_parallel = false;
	      return;
	    }
	}

      region->is_combined_parallel = true;
      region->inner->is_combined_parallel = true;
      region->ws_args = get_ws_args_for (par_stmt, ws_stmt);
    }
}

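/* For illustration only: once a region is marked combined here, the
   expansion phase can emit a single libgomp entry point that both
   starts the team and sets up the loop -- e.g. something of the shape

	GOMP_parallel_loop_dynamic (fn, data, num_threads,
				    start, end, incr, chunk, flags);

   in the GCC 4.9 era runtime -- instead of a GOMP_parallel plus
   per-thread GOMP_loop_*_start calls.  The exact entry point names
   and signatures are libgomp version specific; this is a sketch.  */
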

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Return true if DECL is a reference type.  */

static inline bool
is_reference (tree decl)
{
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}

/* Look up variables in the decl or field splay trees.  The "maybe" form
   allows the variable not to have been entered; otherwise we assert
   that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n;
  n = (tree *) pointer_map_contains (ctx->cb.decl_map, var);
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map,
			 (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return n ? (tree) n->value : NULL_TREE;
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

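/* For illustration only, a summary of the predicate above: given

	int s;
	int a[10];
	#pragma omp parallel shared (s, a)

   A is an aggregate and is always passed by pointer, while S is a
   local scalar whose address is not otherwise visible and may use
   copy-in/copy-out; the nesting and task checks above can still force
   S to be made addressable and passed by pointer.  */
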
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  x = omp_build_component_ref (x, field);
  if (by_ref)
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (ctx->outer)
    x = lookup_decl (var, ctx->outer);
  else if (is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else
    gcc_unreachable ();

  if (is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  tree field = lookup_sfield (var, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

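/* For illustration only: for a shared variable I passed by value, the
   two helpers above produce the matching halves of the hand-off shown
   in the combined-parallel example earlier in this file:

	.omp_data_o.i = i;	   <-- build_sender_ref, in the parent
	... = .omp_data_i->i;	   <-- build_receiver_ref, in the child
 */
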
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;

  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, (splay_tree_key) var));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, (splay_tree_key) var));

  type = TREE_TYPE (var);
  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      DECL_ALIGN (field) = DECL_ALIGN (var);
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    DECL_ALIGN (field) = TYPE_ALIGN (type);

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  DECL_ALIGN (sfield) = DECL_ALIGN (field);
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (var),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, (splay_tree_key) var,
		       (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, (splay_tree_key) var,
		       (splay_tree_value) sfield);
}

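/* For illustration only, a summary of the MASK bits handled above:
   bit 0 requests a field in the receiver record (CTX->record_type),
   bit 1 one in the sender record (CTX->srecord_type, used by task
   constructs), so the common MASK of 3 installs both; bit 2 (MASK of
   7 at the call site for certain mapped arrays) additionally wraps an
   ARRAY_TYPE in a double pointer.  A typical call for a shared
   variable is install_var_field (decl, by_ref, 3, ctx).  */
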
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}


/* Debugging dumps for parallel regions.  */
void dump_omp_region (FILE *, struct omp_region *, int);
void debug_omp_region (struct omp_region *);
void debug_all_omp_regions (void);

/* Dump the parallel region tree rooted at REGION.  */

void
dump_omp_region (FILE *file, struct omp_region *region, int indent)
{
  fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
	   gimple_code_name[region->type]);

  if (region->inner)
    dump_omp_region (file, region->inner, indent + 4);

  if (region->cont)
    {
      fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
	       region->cont->index);
    }

  if (region->exit)
    fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
	     region->exit->index);
  else
    fprintf (file, "%*s[no exit marker]\n", indent, "");

  if (region->next)
    dump_omp_region (file, region->next, indent);
}

DEBUG_FUNCTION void
debug_omp_region (struct omp_region *region)
{
  dump_omp_region (stderr, region, 0);
}

DEBUG_FUNCTION void
debug_all_omp_regions (void)
{
  dump_omp_region (stderr, root_omp_region, 0);
}


/* Create a new parallel region starting at STMT inside region PARENT.  */

static struct omp_region *
new_omp_region (basic_block bb, enum gimple_code type,
		struct omp_region *parent)
{
  struct omp_region *region = XCNEW (struct omp_region);

  region->outer = parent;
  region->entry = bb;
  region->type = type;

  if (parent)
    {
      /* This is a nested region.  Add it to the list of inner
	 regions in PARENT.  */
      region->next = parent->inner;
      parent->inner = region;
    }
  else
    {
      /* This is a toplevel region.  Add it to the list of toplevel
	 regions in ROOT_OMP_REGION.  */
      region->next = root_omp_region;
      root_omp_region = region;
    }

  return region;
}

/* Release the memory associated with the region tree rooted at REGION.  */

static void
free_omp_region_1 (struct omp_region *region)
{
  struct omp_region *i, *n;

  for (i = region->inner; i ; i = n)
    {
      n = i->next;
      free_omp_region_1 (i);
    }

  free (region);
}

/* Release the memory for the entire omp region tree.  */

void
free_omp_regions (void)
{
  struct omp_region *r, *n;
  for (r = root_omp_region; r ; r = n)
    {
      n = r->next;
      free_omp_region_1 (r);
    }
  root_omp_region = NULL;
}


/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_get_node (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = pointer_map_create ();

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gimple task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gimple bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  pointer_map_destroy (ctx->cb.decl_map);

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (ctx->stmt);

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
}

1468
1469/* Instantiate decls as necessary in CTX to satisfy the data sharing
1470 specified by CLAUSES. */
1471
1472static void
1473scan_sharing_clauses (tree clauses, omp_context *ctx)
1474{
1475 tree c, decl;
1476 bool scan_array_reductions = false;
1477
1478 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1479 {
1480 bool by_ref;
1481
aaf46ef9 1482 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1483 {
1484 case OMP_CLAUSE_PRIVATE:
1485 decl = OMP_CLAUSE_DECL (c);
a68ab351
JJ
1486 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1487 goto do_private;
1488 else if (!is_variable_sized (decl))
953ff289
DN
1489 install_var_local (decl, ctx);
1490 break;
1491
1492 case OMP_CLAUSE_SHARED:
acf0174b
JJ
1493 /* Ignore shared directives in teams construct. */
1494 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1495 break;
a68ab351 1496 gcc_assert (is_taskreg_ctx (ctx));
953ff289 1497 decl = OMP_CLAUSE_DECL (c);
5da250fc
JJ
1498 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1499 || !is_variable_sized (decl));
8ca5b2a2
JJ
1500 /* Global variables don't need to be copied,
1501 the receiver side will use them directly. */
1502 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1503 break;
a68ab351 1504 by_ref = use_pointer_for_field (decl, ctx);
953ff289
DN
1505 if (! TREE_READONLY (decl)
1506 || TREE_ADDRESSABLE (decl)
1507 || by_ref
1508 || is_reference (decl))
1509 {
a68ab351 1510 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1511 install_var_local (decl, ctx);
1512 break;
1513 }
1514 /* We don't need to copy const scalar vars back. */
aaf46ef9 1515 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
953ff289
DN
1516 goto do_private;
1517
1518 case OMP_CLAUSE_LASTPRIVATE:
1519 /* Let the corresponding firstprivate clause create
1520 the variable. */
1521 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1522 break;
1523 /* FALLTHRU */
1524
1525 case OMP_CLAUSE_FIRSTPRIVATE:
1526 case OMP_CLAUSE_REDUCTION:
74bf76ed 1527 case OMP_CLAUSE_LINEAR:
953ff289
DN
1528 decl = OMP_CLAUSE_DECL (c);
1529 do_private:
1530 if (is_variable_sized (decl))
953ff289 1531 {
a68ab351
JJ
1532 if (is_task_ctx (ctx))
1533 install_var_field (decl, false, 1, ctx);
1534 break;
1535 }
1536 else if (is_taskreg_ctx (ctx))
1537 {
1538 bool global
1539 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
7c8f7639 1540 by_ref = use_pointer_for_field (decl, NULL);
a68ab351
JJ
1541
1542 if (is_task_ctx (ctx)
1543 && (global || by_ref || is_reference (decl)))
1544 {
1545 install_var_field (decl, false, 1, ctx);
1546 if (!global)
1547 install_var_field (decl, by_ref, 2, ctx);
1548 }
1549 else if (!global)
1550 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1551 }
1552 install_var_local (decl, ctx);
1553 break;
1554
acf0174b
JJ
1555 case OMP_CLAUSE__LOOPTEMP_:
1556 gcc_assert (is_parallel_ctx (ctx));
1557 decl = OMP_CLAUSE_DECL (c);
1558 install_var_field (decl, false, 3, ctx);
1559 install_var_local (decl, ctx);
1560 break;
1561
953ff289 1562 case OMP_CLAUSE_COPYPRIVATE:
953ff289
DN
1563 case OMP_CLAUSE_COPYIN:
1564 decl = OMP_CLAUSE_DECL (c);
7c8f7639 1565 by_ref = use_pointer_for_field (decl, NULL);
a68ab351 1566 install_var_field (decl, by_ref, 3, ctx);
953ff289
DN
1567 break;
1568
1569 case OMP_CLAUSE_DEFAULT:
1570 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
1571 break;
1572
20906c66 1573 case OMP_CLAUSE_FINAL:
953ff289
DN
1574 case OMP_CLAUSE_IF:
1575 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1576 case OMP_CLAUSE_NUM_TEAMS:
1577 case OMP_CLAUSE_THREAD_LIMIT:
1578 case OMP_CLAUSE_DEVICE:
953ff289 1579 case OMP_CLAUSE_SCHEDULE:
acf0174b
JJ
1580 case OMP_CLAUSE_DIST_SCHEDULE:
1581 case OMP_CLAUSE_DEPEND:
953ff289 1582 if (ctx->outer)
726a989a 1583 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
953ff289
DN
1584 break;
1585
acf0174b
JJ
1586 case OMP_CLAUSE_TO:
1587 case OMP_CLAUSE_FROM:
1588 case OMP_CLAUSE_MAP:
1589 if (ctx->outer)
1590 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1591 decl = OMP_CLAUSE_DECL (c);
1592 /* Global variables with "omp declare target" attribute
1593 don't need to be copied, the receiver side will use them
1594 directly. */
1595 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1596 && DECL_P (decl)
1597 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1598 && lookup_attribute ("omp declare target",
1599 DECL_ATTRIBUTES (decl)))
1600 break;
1601 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1602 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER)
1603 {
1604 /* Ignore OMP_CLAUSE_MAP_POINTER kind for arrays in
1605 #pragma omp target data, there is nothing to map for
1606 those. */
1607 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA
1608 && !POINTER_TYPE_P (TREE_TYPE (decl)))
1609 break;
1610 }
1611 if (DECL_P (decl))
1612 {
1613 if (DECL_SIZE (decl)
1614 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1615 {
1616 tree decl2 = DECL_VALUE_EXPR (decl);
1617 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1618 decl2 = TREE_OPERAND (decl2, 0);
1619 gcc_assert (DECL_P (decl2));
1620 install_var_field (decl2, true, 3, ctx);
1621 install_var_local (decl2, ctx);
1622 install_var_local (decl, ctx);
1623 }
1624 else
1625 {
1626 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1627 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1628 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1629 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1630 install_var_field (decl, true, 7, ctx);
1631 else
1632 install_var_field (decl, true, 3, ctx);
1633 if (gimple_omp_target_kind (ctx->stmt)
1634 == GF_OMP_TARGET_KIND_REGION)
1635 install_var_local (decl, ctx);
1636 }
1637 }
1638 else
1639 {
1640 tree base = get_base_address (decl);
1641 tree nc = OMP_CLAUSE_CHAIN (c);
1642 if (DECL_P (base)
1643 && nc != NULL_TREE
1644 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1645 && OMP_CLAUSE_DECL (nc) == base
1646 && OMP_CLAUSE_MAP_KIND (nc) == OMP_CLAUSE_MAP_POINTER
1647 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1648 {
1649 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1650 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1651 }
1652 else
1653 {
1654 gcc_assert (!splay_tree_lookup (ctx->field_map,
1655 (splay_tree_key) decl));
1656 tree field
1657 = build_decl (OMP_CLAUSE_LOCATION (c),
1658 FIELD_DECL, NULL_TREE, ptr_type_node);
1659 DECL_ALIGN (field) = TYPE_ALIGN (ptr_type_node);
1660 insert_field_into_struct (ctx->record_type, field);
1661 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1662 (splay_tree_value) field);
1663 }
1664 }
1665 break;
1666
953ff289
DN
1667 case OMP_CLAUSE_NOWAIT:
1668 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1669 case OMP_CLAUSE_COLLAPSE:
1670 case OMP_CLAUSE_UNTIED:
20906c66 1671 case OMP_CLAUSE_MERGEABLE:
acf0174b 1672 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1673 case OMP_CLAUSE_SAFELEN:
953ff289
DN
1674 break;
1675
acf0174b
JJ
1676 case OMP_CLAUSE_ALIGNED:
1677 decl = OMP_CLAUSE_DECL (c);
1678 if (is_global_var (decl)
1679 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1680 install_var_local (decl, ctx);
1681 break;
1682
953ff289
DN
1683 default:
1684 gcc_unreachable ();
1685 }
1686 }
1687
1688 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1689 {
aaf46ef9 1690 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1691 {
1692 case OMP_CLAUSE_LASTPRIVATE:
1693 /* Let the corresponding firstprivate clause create
1694 the variable. */
726a989a 1695 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
a68ab351 1696 scan_array_reductions = true;
953ff289
DN
1697 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1698 break;
1699 /* FALLTHRU */
1700
1701 case OMP_CLAUSE_PRIVATE:
1702 case OMP_CLAUSE_FIRSTPRIVATE:
1703 case OMP_CLAUSE_REDUCTION:
74bf76ed 1704 case OMP_CLAUSE_LINEAR:
953ff289
DN
1705 decl = OMP_CLAUSE_DECL (c);
1706 if (is_variable_sized (decl))
1707 install_var_local (decl, ctx);
1708 fixup_remapped_decl (decl, ctx,
aaf46ef9 1709 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
953ff289 1710 && OMP_CLAUSE_PRIVATE_DEBUG (c));
aaf46ef9 1711 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
1712 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1713 scan_array_reductions = true;
1714 break;
1715
1716 case OMP_CLAUSE_SHARED:
acf0174b
JJ
1717 /* Ignore shared directives in teams construct. */
1718 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
1719 break;
953ff289 1720 decl = OMP_CLAUSE_DECL (c);
8ca5b2a2
JJ
1721 if (! is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1722 fixup_remapped_decl (decl, ctx, false);
953ff289
DN
1723 break;
1724
acf0174b
JJ
1725 case OMP_CLAUSE_MAP:
1726 if (gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_DATA)
1727 break;
1728 decl = OMP_CLAUSE_DECL (c);
1729 if (DECL_P (decl)
1730 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1731 && lookup_attribute ("omp declare target",
1732 DECL_ATTRIBUTES (decl)))
1733 break;
1734 if (DECL_P (decl))
1735 {
1736 if (OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
1737 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1738 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1739 {
1740 tree new_decl = lookup_decl (decl, ctx);
1741 TREE_TYPE (new_decl)
1742 = remap_type (TREE_TYPE (decl), &ctx->cb);
1743 }
1744 else if (DECL_SIZE (decl)
1745 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1746 {
1747 tree decl2 = DECL_VALUE_EXPR (decl);
1748 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1749 decl2 = TREE_OPERAND (decl2, 0);
1750 gcc_assert (DECL_P (decl2));
1751 fixup_remapped_decl (decl2, ctx, false);
1752 fixup_remapped_decl (decl, ctx, true);
1753 }
1754 else
1755 fixup_remapped_decl (decl, ctx, false);
1756 }
1757 break;
1758
953ff289
DN
1759 case OMP_CLAUSE_COPYPRIVATE:
1760 case OMP_CLAUSE_COPYIN:
1761 case OMP_CLAUSE_DEFAULT:
1762 case OMP_CLAUSE_IF:
1763 case OMP_CLAUSE_NUM_THREADS:
acf0174b
JJ
1764 case OMP_CLAUSE_NUM_TEAMS:
1765 case OMP_CLAUSE_THREAD_LIMIT:
1766 case OMP_CLAUSE_DEVICE:
953ff289 1767 case OMP_CLAUSE_SCHEDULE:
acf0174b 1768 case OMP_CLAUSE_DIST_SCHEDULE:
953ff289
DN
1769 case OMP_CLAUSE_NOWAIT:
1770 case OMP_CLAUSE_ORDERED:
a68ab351
JJ
1771 case OMP_CLAUSE_COLLAPSE:
1772 case OMP_CLAUSE_UNTIED:
20906c66
JJ
1773 case OMP_CLAUSE_FINAL:
1774 case OMP_CLAUSE_MERGEABLE:
acf0174b 1775 case OMP_CLAUSE_PROC_BIND:
74bf76ed 1776 case OMP_CLAUSE_SAFELEN:
acf0174b
JJ
1777 case OMP_CLAUSE_ALIGNED:
1778 case OMP_CLAUSE_DEPEND:
1779 case OMP_CLAUSE__LOOPTEMP_:
1780 case OMP_CLAUSE_TO:
1781 case OMP_CLAUSE_FROM:
953ff289
DN
1782 break;
1783
1784 default:
1785 gcc_unreachable ();
1786 }
1787 }
1788
1789 if (scan_array_reductions)
1790 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
aaf46ef9 1791 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
953ff289
DN
1792 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1793 {
26127932
JJ
1794 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1795 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
953ff289 1796 }
a68ab351 1797 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
726a989a 1798 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
26127932 1799 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
953ff289
DN
1800}
1801
1802/* Create a new name for an omp child function. Returns an identifier. */
1803
1804static GTY(()) unsigned int tmp_ompfn_id_num;
1805
1806static tree
a68ab351 1807create_omp_child_function_name (bool task_copy)
953ff289 1808{
036546e5
JH
1809 return (clone_function_name (current_function_decl,
1810 task_copy ? "_omp_cpyfn" : "_omp_fn"));
953ff289
DN
1811}
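/* As an illustrative sketch (exact numbering depends on how many
   clones of the enclosing function already exist), outlining the
   parallel regions of a function foo yields identifiers such as
   "foo._omp_fn.0" and "foo._omp_fn.1", while task copy functions
   get names like "foo._omp_cpyfn.2".  */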
1812
1813/* Build a decl for the omp child function. It will not contain a body
1814 yet, just the bare decl. */
1815
1816static void
a68ab351 1817create_omp_child_function (omp_context *ctx, bool task_copy)
953ff289
DN
1818{
1819 tree decl, type, name, t;
1820
a68ab351
JJ
1821 name = create_omp_child_function_name (task_copy);
1822 if (task_copy)
1823 type = build_function_type_list (void_type_node, ptr_type_node,
1824 ptr_type_node, NULL_TREE);
1825 else
1826 type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
953ff289 1827
c2255bc4
AH
1828 decl = build_decl (gimple_location (ctx->stmt),
1829 FUNCTION_DECL, name, type);
953ff289 1830
a68ab351
JJ
1831 if (!task_copy)
1832 ctx->cb.dst_fn = decl;
1833 else
726a989a 1834 gimple_omp_task_set_copy_fn (ctx->stmt, decl);
953ff289
DN
1835
1836 TREE_STATIC (decl) = 1;
1837 TREE_USED (decl) = 1;
1838 DECL_ARTIFICIAL (decl) = 1;
cd3f04c8 1839 DECL_NAMELESS (decl) = 1;
953ff289
DN
1840 DECL_IGNORED_P (decl) = 0;
1841 TREE_PUBLIC (decl) = 0;
1842 DECL_UNINLINABLE (decl) = 1;
1843 DECL_EXTERNAL (decl) = 0;
1844 DECL_CONTEXT (decl) = NULL_TREE;
50674e96 1845 DECL_INITIAL (decl) = make_node (BLOCK);
acf0174b
JJ
1846 bool target_p = false;
1847 if (lookup_attribute ("omp declare target",
1848 DECL_ATTRIBUTES (current_function_decl)))
1849 target_p = true;
1850 else
1851 {
1852 omp_context *octx;
1853 for (octx = ctx; octx; octx = octx->outer)
1854 if (gimple_code (octx->stmt) == GIMPLE_OMP_TARGET
1855 && gimple_omp_target_kind (octx->stmt)
1856 == GF_OMP_TARGET_KIND_REGION)
1857 {
1858 target_p = true;
1859 break;
1860 }
1861 }
1862 if (target_p)
1863 DECL_ATTRIBUTES (decl)
1864 = tree_cons (get_identifier ("omp declare target"),
1865 NULL_TREE, DECL_ATTRIBUTES (decl));
953ff289 1866
c2255bc4
AH
1867 t = build_decl (DECL_SOURCE_LOCATION (decl),
1868 RESULT_DECL, NULL_TREE, void_type_node);
953ff289
DN
1869 DECL_ARTIFICIAL (t) = 1;
1870 DECL_IGNORED_P (t) = 1;
07485407 1871 DECL_CONTEXT (t) = decl;
953ff289
DN
1872 DECL_RESULT (decl) = t;
1873
c2255bc4
AH
1874 t = build_decl (DECL_SOURCE_LOCATION (decl),
1875 PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
953ff289 1876 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1877 DECL_NAMELESS (t) = 1;
953ff289 1878 DECL_ARG_TYPE (t) = ptr_type_node;
50674e96 1879 DECL_CONTEXT (t) = current_function_decl;
953ff289
DN
1880 TREE_USED (t) = 1;
1881 DECL_ARGUMENTS (decl) = t;
a68ab351
JJ
1882 if (!task_copy)
1883 ctx->receiver_decl = t;
1884 else
1885 {
c2255bc4
AH
1886 t = build_decl (DECL_SOURCE_LOCATION (decl),
1887 PARM_DECL, get_identifier (".omp_data_o"),
a68ab351
JJ
1888 ptr_type_node);
1889 DECL_ARTIFICIAL (t) = 1;
cd3f04c8 1890 DECL_NAMELESS (t) = 1;
a68ab351
JJ
1891 DECL_ARG_TYPE (t) = ptr_type_node;
1892 DECL_CONTEXT (t) = current_function_decl;
1893 TREE_USED (t) = 1;
628c189e 1894 TREE_ADDRESSABLE (t) = 1;
910ad8de 1895 DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
a68ab351
JJ
1896 DECL_ARGUMENTS (decl) = t;
1897 }
953ff289 1898
b8698a0f 1899 /* Allocate memory for the function structure. The call to
50674e96 1900 allocate_struct_function clobbers CFUN, so we need to restore
953ff289 1901 it afterward. */
db2960f4 1902 push_struct_function (decl);
726a989a 1903 cfun->function_end_locus = gimple_location (ctx->stmt);
db2960f4 1904 pop_cfun ();
953ff289
DN
1905}
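/* A minimal sketch of the decl built above, written as C for
   illustration (the names are the artificial ones created here; the
   body stays empty until pass_expand_omp fills it in):

     static void
     foo._omp_fn.0 (void *.omp_data_i)
     {
     }

   For a task copy function an additional pointer argument,
   .omp_data_o, is prepended to the argument list.  */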
1906
acf0174b
JJ
1907/* Callback for walk_gimple_seq. Check whether a combined parallel
1908 contains an OMP_FOR marked gimple_omp_for_combined_into_p. */
1909
1910static tree
1911find_combined_for (gimple_stmt_iterator *gsi_p,
1912 bool *handled_ops_p,
1913 struct walk_stmt_info *wi)
1914{
1915 gimple stmt = gsi_stmt (*gsi_p);
1916
1917 *handled_ops_p = true;
1918 switch (gimple_code (stmt))
1919 {
1920 WALK_SUBSTMTS;
1921
1922 case GIMPLE_OMP_FOR:
1923 if (gimple_omp_for_combined_into_p (stmt)
1924 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
1925 {
1926 wi->info = stmt;
1927 return integer_zero_node;
1928 }
1929 break;
1930 default:
1931 break;
1932 }
1933 return NULL;
1934}
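/* For example (illustrative), in a combined construct such as

     #pragma omp parallel for
     for (i = 0; i < n; i++)
       a[i] = 2 * a[i];

   the inner GIMPLE_OMP_FOR has gimple_omp_for_combined_into_p set,
   so this callback stores it in wi->info and stops the walk by
   returning a non-NULL value.  */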
1935
953ff289
DN
1936/* Scan an OpenMP parallel directive. */
1937
1938static void
726a989a 1939scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
953ff289
DN
1940{
1941 omp_context *ctx;
1942 tree name;
726a989a 1943 gimple stmt = gsi_stmt (*gsi);
953ff289
DN
1944
1945 /* Ignore parallel directives with empty bodies, unless there
1946 are copyin clauses. */
1947 if (optimize > 0
726a989a
RB
1948 && empty_body_p (gimple_omp_body (stmt))
1949 && find_omp_clause (gimple_omp_parallel_clauses (stmt),
1950 OMP_CLAUSE_COPYIN) == NULL)
953ff289 1951 {
726a989a 1952 gsi_replace (gsi, gimple_build_nop (), false);
953ff289
DN
1953 return;
1954 }
1955
acf0174b
JJ
1956 if (gimple_omp_parallel_combined_p (stmt))
1957 {
1958 gimple for_stmt;
1959 struct walk_stmt_info wi;
1960
1961 memset (&wi, 0, sizeof (wi));
1962 wi.val_only = true;
1963 walk_gimple_seq (gimple_omp_body (stmt),
1964 find_combined_for, NULL, &wi);
1965 for_stmt = (gimple) wi.info;
1966 if (for_stmt)
1967 {
1968 struct omp_for_data fd;
1969 extract_omp_for_data (for_stmt, &fd, NULL);
1970 /* We need two temporaries with fd.loop.v type (istart/iend)
1971 and then (fd.collapse - 1) temporaries with the same
1972 type for count2 ... countN-1 vars if not constant. */
1973 size_t count = 2, i;
1974 tree type = fd.iter_type;
1975 if (fd.collapse > 1
1976 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1977 count += fd.collapse - 1;
1978 for (i = 0; i < count; i++)
1979 {
1980 tree temp = create_tmp_var (type, NULL);
1981 tree c = build_omp_clause (UNKNOWN_LOCATION,
1982 OMP_CLAUSE__LOOPTEMP_);
1983 OMP_CLAUSE_DECL (c) = temp;
1984 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1985 gimple_omp_parallel_set_clauses (stmt, c);
1986 }
1987 }
1988 }
1989
726a989a 1990 ctx = new_omp_context (stmt, outer_ctx);
a68ab351 1991 if (taskreg_nesting_level > 1)
50674e96 1992 ctx->is_nested = true;
953ff289 1993 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
953ff289
DN
1994 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1995 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
953ff289 1996 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
1997 name = build_decl (gimple_location (stmt),
1998 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
1999 DECL_ARTIFICIAL (name) = 1;
2000 DECL_NAMELESS (name) = 1;
953ff289 2001 TYPE_NAME (ctx->record_type) = name;
a68ab351 2002 create_omp_child_function (ctx, false);
726a989a 2003 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
953ff289 2004
726a989a 2005 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
26127932 2006 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2007
2008 if (TYPE_FIELDS (ctx->record_type) == NULL)
2009 ctx->record_type = ctx->receiver_decl = NULL;
2010 else
2011 {
2012 layout_type (ctx->record_type);
2013 fixup_child_record_type (ctx);
2014 }
2015}
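/* Two effects of the code above, shown at user level (illustrative
   examples): with optimization enabled, an empty region such as

     #pragma omp parallel
     { }

   is replaced by a GIMPLE_NOP unless a copyin clause is present; and
   for a combined "#pragma omp parallel for", _looptemp_ clauses are
   added so the istart/iend temporaries (plus extra counters for
   collapsed loops with a non-constant iteration count) can be passed
   into the child function.  */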
2016
a68ab351
JJ
2017/* Scan an OpenMP task directive. */
2018
2019static void
726a989a 2020scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
a68ab351
JJ
2021{
2022 omp_context *ctx;
726a989a
RB
2023 tree name, t;
2024 gimple stmt = gsi_stmt (*gsi);
db3927fb 2025 location_t loc = gimple_location (stmt);
a68ab351
JJ
2026
2027 /* Ignore task directives with empty bodies. */
2028 if (optimize > 0
726a989a 2029 && empty_body_p (gimple_omp_body (stmt)))
a68ab351 2030 {
726a989a 2031 gsi_replace (gsi, gimple_build_nop (), false);
a68ab351
JJ
2032 return;
2033 }
2034
726a989a 2035 ctx = new_omp_context (stmt, outer_ctx);
a68ab351
JJ
2036 if (taskreg_nesting_level > 1)
2037 ctx->is_nested = true;
2038 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2039 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2040 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2041 name = create_tmp_var_name (".omp_data_s");
c2255bc4
AH
2042 name = build_decl (gimple_location (stmt),
2043 TYPE_DECL, name, ctx->record_type);
cd3f04c8
JJ
2044 DECL_ARTIFICIAL (name) = 1;
2045 DECL_NAMELESS (name) = 1;
a68ab351
JJ
2046 TYPE_NAME (ctx->record_type) = name;
2047 create_omp_child_function (ctx, false);
726a989a 2048 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
a68ab351 2049
726a989a 2050 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
a68ab351
JJ
2051
2052 if (ctx->srecord_type)
2053 {
2054 name = create_tmp_var_name (".omp_data_a");
c2255bc4
AH
2055 name = build_decl (gimple_location (stmt),
2056 TYPE_DECL, name, ctx->srecord_type);
cd3f04c8
JJ
2057 DECL_ARTIFICIAL (name) = 1;
2058 DECL_NAMELESS (name) = 1;
a68ab351
JJ
2059 TYPE_NAME (ctx->srecord_type) = name;
2060 create_omp_child_function (ctx, true);
2061 }
2062
26127932 2063 scan_omp (gimple_omp_body_ptr (stmt), ctx);
a68ab351
JJ
2064
2065 if (TYPE_FIELDS (ctx->record_type) == NULL)
2066 {
2067 ctx->record_type = ctx->receiver_decl = NULL;
726a989a
RB
2068 t = build_int_cst (long_integer_type_node, 0);
2069 gimple_omp_task_set_arg_size (stmt, t);
2070 t = build_int_cst (long_integer_type_node, 1);
2071 gimple_omp_task_set_arg_align (stmt, t);
a68ab351
JJ
2072 }
2073 else
2074 {
2075 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2076 /* Move VLA fields to the end. */
2077 p = &TYPE_FIELDS (ctx->record_type);
2078 while (*p)
2079 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2080 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2081 {
2082 *q = *p;
2083 *p = TREE_CHAIN (*p);
2084 TREE_CHAIN (*q) = NULL_TREE;
2085 q = &TREE_CHAIN (*q);
2086 }
2087 else
910ad8de 2088 p = &DECL_CHAIN (*p);
a68ab351
JJ
2089 *p = vla_fields;
2090 layout_type (ctx->record_type);
2091 fixup_child_record_type (ctx);
2092 if (ctx->srecord_type)
2093 layout_type (ctx->srecord_type);
db3927fb 2094 t = fold_convert_loc (loc, long_integer_type_node,
a68ab351 2095 TYPE_SIZE_UNIT (ctx->record_type));
726a989a
RB
2096 gimple_omp_task_set_arg_size (stmt, t);
2097 t = build_int_cst (long_integer_type_node,
a68ab351 2098 TYPE_ALIGN_UNIT (ctx->record_type));
726a989a 2099 gimple_omp_task_set_arg_align (stmt, t);
a68ab351
JJ
2100 }
2101}
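/* An illustrative example of the VLA-field reordering above:

     void f (int n)
     {
       int x = 0;
       int vla[n];
     #pragma omp task firstprivate (x, vla)
       x += vla[0];
     }

   Here the constant-sized field for x is laid out first and the
   variable-sized copy of vla is moved to the end of .omp_data_s, so
   the record can be laid out and arg_size/arg_align computed from
   it.  */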
2102
953ff289 2103
50674e96 2104/* Scan an OpenMP loop directive. */
953ff289
DN
2105
2106static void
726a989a 2107scan_omp_for (gimple stmt, omp_context *outer_ctx)
953ff289 2108{
50674e96 2109 omp_context *ctx;
726a989a 2110 size_t i;
953ff289 2111
50674e96 2112 ctx = new_omp_context (stmt, outer_ctx);
953ff289 2113
726a989a 2114 scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);
953ff289 2115
26127932 2116 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
726a989a 2117 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 2118 {
726a989a
RB
2119 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2120 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2121 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2122 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
a68ab351 2123 }
26127932 2124 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2125}
2126
2127/* Scan an OpenMP sections directive. */
2128
2129static void
726a989a 2130scan_omp_sections (gimple stmt, omp_context *outer_ctx)
953ff289 2131{
953ff289
DN
2132 omp_context *ctx;
2133
2134 ctx = new_omp_context (stmt, outer_ctx);
726a989a 2135 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
26127932 2136 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2137}
2138
2139/* Scan an OpenMP single directive. */
2140
2141static void
726a989a 2142scan_omp_single (gimple stmt, omp_context *outer_ctx)
953ff289 2143{
953ff289
DN
2144 omp_context *ctx;
2145 tree name;
2146
2147 ctx = new_omp_context (stmt, outer_ctx);
2148 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2149 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2150 name = create_tmp_var_name (".omp_copy_s");
c2255bc4
AH
2151 name = build_decl (gimple_location (stmt),
2152 TYPE_DECL, name, ctx->record_type);
953ff289
DN
2153 TYPE_NAME (ctx->record_type) = name;
2154
726a989a 2155 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
26127932 2156 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2157
2158 if (TYPE_FIELDS (ctx->record_type) == NULL)
2159 ctx->record_type = NULL;
2160 else
2161 layout_type (ctx->record_type);
2162}
2163
acf0174b
JJ
2164/* Scan an OpenMP target{, data, update} directive. */
2165
2166static void
2167scan_omp_target (gimple stmt, omp_context *outer_ctx)
2168{
2169 omp_context *ctx;
2170 tree name;
2171 int kind = gimple_omp_target_kind (stmt);
2172
2173 ctx = new_omp_context (stmt, outer_ctx);
2174 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2175 ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2176 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2177 name = create_tmp_var_name (".omp_data_t");
2178 name = build_decl (gimple_location (stmt),
2179 TYPE_DECL, name, ctx->record_type);
2180 DECL_ARTIFICIAL (name) = 1;
2181 DECL_NAMELESS (name) = 1;
2182 TYPE_NAME (ctx->record_type) = name;
2183 if (kind == GF_OMP_TARGET_KIND_REGION)
2184 {
2185 create_omp_child_function (ctx, false);
2186 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2187 }
2188
2189 scan_sharing_clauses (gimple_omp_target_clauses (stmt), ctx);
2190 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2191
2192 if (TYPE_FIELDS (ctx->record_type) == NULL)
2193 ctx->record_type = ctx->receiver_decl = NULL;
2194 else
2195 {
2196 TYPE_FIELDS (ctx->record_type)
2197 = nreverse (TYPE_FIELDS (ctx->record_type));
2198#ifdef ENABLE_CHECKING
2199 tree field;
2200 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2201 for (field = TYPE_FIELDS (ctx->record_type);
2202 field;
2203 field = DECL_CHAIN (field))
2204 gcc_assert (DECL_ALIGN (field) == align);
2205#endif
2206 layout_type (ctx->record_type);
2207 if (kind == GF_OMP_TARGET_KIND_REGION)
2208 fixup_child_record_type (ctx);
2209 }
2210}
2211
2212/* Scan an OpenMP teams directive. */
2213
2214static void
2215scan_omp_teams (gimple stmt, omp_context *outer_ctx)
2216{
2217 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2218 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2219 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2220}
953ff289 2221
a6fc8e21 2222/* Check OpenMP nesting restrictions. */
26127932
JJ
2223static bool
2224check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
a6fc8e21 2225{
74bf76ed
JJ
2226 if (ctx != NULL)
2227 {
2228 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2229 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2230 {
2231 error_at (gimple_location (stmt),
2232 "OpenMP constructs may not be nested inside simd region");
2233 return false;
2234 }
acf0174b
JJ
2235 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2236 {
2237 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2238 || (gimple_omp_for_kind (stmt)
2239 != GF_OMP_FOR_KIND_DISTRIBUTE))
2240 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2241 {
2242 error_at (gimple_location (stmt),
2243 "only distribute or parallel constructs are allowed to "
2244 "be closely nested inside teams construct");
2245 return false;
2246 }
2247 }
74bf76ed 2248 }
726a989a 2249 switch (gimple_code (stmt))
a6fc8e21 2250 {
726a989a 2251 case GIMPLE_OMP_FOR:
74bf76ed
JJ
2252 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2253 return true;
acf0174b
JJ
2254 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2255 {
2256 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2257 {
2258 error_at (gimple_location (stmt),
2259 "distribute construct must be closely nested inside "
2260 "teams construct");
2261 return false;
2262 }
2263 return true;
2264 }
2265 /* FALLTHRU */
2266 case GIMPLE_CALL:
2267 if (is_gimple_call (stmt)
2268 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2269 == BUILT_IN_GOMP_CANCEL
2270 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2271 == BUILT_IN_GOMP_CANCELLATION_POINT))
2272 {
2273 const char *bad = NULL;
2274 const char *kind = NULL;
2275 if (ctx == NULL)
2276 {
2277 error_at (gimple_location (stmt), "orphaned %qs construct",
2278 DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2279 == BUILT_IN_GOMP_CANCEL
2280 ? "#pragma omp cancel"
2281 : "#pragma omp cancellation point");
2282 return false;
2283 }
2284 switch (host_integerp (gimple_call_arg (stmt, 0), 0)
2285 ? tree_low_cst (gimple_call_arg (stmt, 0), 0)
2286 : 0)
2287 {
2288 case 1:
2289 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2290 bad = "#pragma omp parallel";
2291 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2292 == BUILT_IN_GOMP_CANCEL
2293 && !integer_zerop (gimple_call_arg (stmt, 1)))
2294 ctx->cancellable = true;
2295 kind = "parallel";
2296 break;
2297 case 2:
2298 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2299 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2300 bad = "#pragma omp for";
2301 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2302 == BUILT_IN_GOMP_CANCEL
2303 && !integer_zerop (gimple_call_arg (stmt, 1)))
2304 {
2305 ctx->cancellable = true;
2306 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2307 OMP_CLAUSE_NOWAIT))
2308 warning_at (gimple_location (stmt), 0,
2309 "%<#pragma omp cancel for%> inside "
2310 "%<nowait%> for construct");
2311 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2312 OMP_CLAUSE_ORDERED))
2313 warning_at (gimple_location (stmt), 0,
2314 "%<#pragma omp cancel for%> inside "
2315 "%<ordered%> for construct");
2316 }
2317 kind = "for";
2318 break;
2319 case 4:
2320 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2321 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2322 bad = "#pragma omp sections";
2323 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2324 == BUILT_IN_GOMP_CANCEL
2325 && !integer_zerop (gimple_call_arg (stmt, 1)))
2326 {
2327 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2328 {
2329 ctx->cancellable = true;
2330 if (find_omp_clause (gimple_omp_sections_clauses
2331 (ctx->stmt),
2332 OMP_CLAUSE_NOWAIT))
2333 warning_at (gimple_location (stmt), 0,
2334 "%<#pragma omp cancel sections%> inside "
2335 "%<nowait%> sections construct");
2336 }
2337 else
2338 {
2339 gcc_assert (ctx->outer
2340 && gimple_code (ctx->outer->stmt)
2341 == GIMPLE_OMP_SECTIONS);
2342 ctx->outer->cancellable = true;
2343 if (find_omp_clause (gimple_omp_sections_clauses
2344 (ctx->outer->stmt),
2345 OMP_CLAUSE_NOWAIT))
2346 warning_at (gimple_location (stmt), 0,
2347 "%<#pragma omp cancel sections%> inside "
2348 "%<nowait%> sections construct");
2349 }
2350 }
2351 kind = "sections";
2352 break;
2353 case 8:
2354 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2355 bad = "#pragma omp task";
2356 else
2357 ctx->cancellable = true;
2358 kind = "taskgroup";
2359 break;
2360 default:
2361 error_at (gimple_location (stmt), "invalid arguments");
2362 return false;
2363 }
2364 if (bad)
2365 {
2366 error_at (gimple_location (stmt),
2367 "%<%s %s%> construct not closely nested inside of %qs",
2368 DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2369 == BUILT_IN_GOMP_CANCEL
2370 ? "#pragma omp cancel"
2371 : "#pragma omp cancellation point", kind, bad);
2372 return false;
2373 }
2374 }
74bf76ed 2375 /* FALLTHRU */
726a989a
RB
2376 case GIMPLE_OMP_SECTIONS:
2377 case GIMPLE_OMP_SINGLE:
a6fc8e21 2378 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2379 switch (gimple_code (ctx->stmt))
a6fc8e21 2380 {
726a989a
RB
2381 case GIMPLE_OMP_FOR:
2382 case GIMPLE_OMP_SECTIONS:
2383 case GIMPLE_OMP_SINGLE:
2384 case GIMPLE_OMP_ORDERED:
2385 case GIMPLE_OMP_MASTER:
2386 case GIMPLE_OMP_TASK:
acf0174b 2387 case GIMPLE_OMP_CRITICAL:
726a989a 2388 if (is_gimple_call (stmt))
a68ab351 2389 {
acf0174b
JJ
2390 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2391 != BUILT_IN_GOMP_BARRIER)
2392 return true;
26127932
JJ
2393 error_at (gimple_location (stmt),
2394 "barrier region may not be closely nested inside "
2395 "of work-sharing, critical, ordered, master or "
2396 "explicit task region");
2397 return false;
a68ab351 2398 }
26127932
JJ
2399 error_at (gimple_location (stmt),
2400 "work-sharing region may not be closely nested inside "
2401 "of work-sharing, critical, ordered, master or explicit "
2402 "task region");
2403 return false;
726a989a 2404 case GIMPLE_OMP_PARALLEL:
26127932 2405 return true;
a6fc8e21
JJ
2406 default:
2407 break;
2408 }
2409 break;
726a989a 2410 case GIMPLE_OMP_MASTER:
a6fc8e21 2411 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2412 switch (gimple_code (ctx->stmt))
a6fc8e21 2413 {
726a989a
RB
2414 case GIMPLE_OMP_FOR:
2415 case GIMPLE_OMP_SECTIONS:
2416 case GIMPLE_OMP_SINGLE:
2417 case GIMPLE_OMP_TASK:
26127932
JJ
2418 error_at (gimple_location (stmt),
2419 "master region may not be closely nested inside "
2420 "of work-sharing or explicit task region");
2421 return false;
726a989a 2422 case GIMPLE_OMP_PARALLEL:
26127932 2423 return true;
a6fc8e21
JJ
2424 default:
2425 break;
2426 }
2427 break;
726a989a 2428 case GIMPLE_OMP_ORDERED:
a6fc8e21 2429 for (; ctx != NULL; ctx = ctx->outer)
726a989a 2430 switch (gimple_code (ctx->stmt))
a6fc8e21 2431 {
726a989a
RB
2432 case GIMPLE_OMP_CRITICAL:
2433 case GIMPLE_OMP_TASK:
26127932
JJ
2434 error_at (gimple_location (stmt),
2435 "ordered region may not be closely nested inside "
2436 "of critical or explicit task region");
2437 return false;
726a989a
RB
2438 case GIMPLE_OMP_FOR:
2439 if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
a6fc8e21 2440 OMP_CLAUSE_ORDERED) == NULL)
26127932
JJ
2441 {
2442 error_at (gimple_location (stmt),
2443 "ordered region must be closely nested inside "
a6fc8e21 2444 "a loop region with an ordered clause");
26127932
JJ
2445 return false;
2446 }
2447 return true;
726a989a 2448 case GIMPLE_OMP_PARALLEL:
acf0174b
JJ
2449 error_at (gimple_location (stmt),
2450 "ordered region must be closely nested inside "
2451 "a loop region with an ordered clause");
2452 return false;
a6fc8e21
JJ
2453 default:
2454 break;
2455 }
2456 break;
726a989a 2457 case GIMPLE_OMP_CRITICAL:
a6fc8e21 2458 for (; ctx != NULL; ctx = ctx->outer)
726a989a
RB
2459 if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
2460 && (gimple_omp_critical_name (stmt)
2461 == gimple_omp_critical_name (ctx->stmt)))
a6fc8e21 2462 {
26127932
JJ
2463 error_at (gimple_location (stmt),
2464 "critical region may not be nested inside a critical "
2465 "region with the same name");
2466 return false;
a6fc8e21
JJ
2467 }
2468 break;
acf0174b
JJ
2469 case GIMPLE_OMP_TEAMS:
2470 if (ctx == NULL
2471 || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2472 || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2473 {
2474 error_at (gimple_location (stmt),
2475 "teams construct not closely nested inside of target "
2476 "region");
2477 return false;
2478 }
2479 break;
a6fc8e21
JJ
2480 default:
2481 break;
2482 }
26127932 2483 return true;
a6fc8e21
JJ
2484}
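/* Two nestings rejected above, as user-level examples (illustrative
   only).  An ordered region closely nested in a critical region:

     #pragma omp critical
     {
     #pragma omp ordered
       ;
     }

   is diagnosed, as is anything other than distribute or parallel
   closely nested in teams:

     #pragma omp teams
     #pragma omp single
       ;

   Valid nestings simply return true and scanning continues.  */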
2485
2486
726a989a
RB
2487/* Helper function for scan_omp.
2488
2489 Callback for walk_tree, and for operands in walk_gimple_stmt, used
2490 to scan for OpenMP directives in TP. */
953ff289
DN
2491
2492static tree
726a989a 2493scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
953ff289 2494{
d3bfe4de
KG
2495 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
2496 omp_context *ctx = (omp_context *) wi->info;
953ff289
DN
2497 tree t = *tp;
2498
726a989a
RB
2499 switch (TREE_CODE (t))
2500 {
2501 case VAR_DECL:
2502 case PARM_DECL:
2503 case LABEL_DECL:
2504 case RESULT_DECL:
2505 if (ctx)
2506 *tp = remap_decl (t, &ctx->cb);
2507 break;
2508
2509 default:
2510 if (ctx && TYPE_P (t))
2511 *tp = remap_type (t, &ctx->cb);
2512 else if (!DECL_P (t))
a900ae6b
JJ
2513 {
2514 *walk_subtrees = 1;
2515 if (ctx)
70f34814
RG
2516 {
2517 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
2518 if (tem != TREE_TYPE (t))
2519 {
2520 if (TREE_CODE (t) == INTEGER_CST)
2521 *tp = build_int_cst_wide (tem,
2522 TREE_INT_CST_LOW (t),
2523 TREE_INT_CST_HIGH (t));
2524 else
2525 TREE_TYPE (t) = tem;
2526 }
2527 }
a900ae6b 2528 }
726a989a
RB
2529 break;
2530 }
2531
2532 return NULL_TREE;
2533}
2534
2535
2536/* Helper function for scan_omp.
2537
2538 Callback for walk_gimple_stmt used to scan for OpenMP directives in
2539 the current statement in GSI. */
2540
2541static tree
2542scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2543 struct walk_stmt_info *wi)
2544{
2545 gimple stmt = gsi_stmt (*gsi);
2546 omp_context *ctx = (omp_context *) wi->info;
2547
2548 if (gimple_has_location (stmt))
2549 input_location = gimple_location (stmt);
953ff289 2550
a6fc8e21 2551 /* Check the OpenMP nesting restrictions. */
acf0174b
JJ
2552 bool remove = false;
2553 if (is_gimple_omp (stmt))
2554 remove = !check_omp_nesting_restrictions (stmt, ctx);
2555 else if (is_gimple_call (stmt))
2556 {
2557 tree fndecl = gimple_call_fndecl (stmt);
2558 if (fndecl
2559 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2560 switch (DECL_FUNCTION_CODE (fndecl))
2561 {
2562 case BUILT_IN_GOMP_BARRIER:
2563 case BUILT_IN_GOMP_CANCEL:
2564 case BUILT_IN_GOMP_CANCELLATION_POINT:
2565 case BUILT_IN_GOMP_TASKYIELD:
2566 case BUILT_IN_GOMP_TASKWAIT:
2567 case BUILT_IN_GOMP_TASKGROUP_START:
2568 case BUILT_IN_GOMP_TASKGROUP_END:
26127932 2569 remove = !check_omp_nesting_restrictions (stmt, ctx);
acf0174b
JJ
2570 break;
2571 default:
2572 break;
2573 }
2574 }
2575 if (remove)
2576 {
2577 stmt = gimple_build_nop ();
2578 gsi_replace (gsi, stmt, false);
a68ab351 2579 }
a6fc8e21 2580
726a989a
RB
2581 *handled_ops_p = true;
2582
2583 switch (gimple_code (stmt))
953ff289 2584 {
726a989a 2585 case GIMPLE_OMP_PARALLEL:
a68ab351 2586 taskreg_nesting_level++;
726a989a 2587 scan_omp_parallel (gsi, ctx);
a68ab351
JJ
2588 taskreg_nesting_level--;
2589 break;
2590
726a989a 2591 case GIMPLE_OMP_TASK:
a68ab351 2592 taskreg_nesting_level++;
726a989a 2593 scan_omp_task (gsi, ctx);
a68ab351 2594 taskreg_nesting_level--;
953ff289
DN
2595 break;
2596
726a989a
RB
2597 case GIMPLE_OMP_FOR:
2598 scan_omp_for (stmt, ctx);
953ff289
DN
2599 break;
2600
726a989a
RB
2601 case GIMPLE_OMP_SECTIONS:
2602 scan_omp_sections (stmt, ctx);
953ff289
DN
2603 break;
2604
726a989a
RB
2605 case GIMPLE_OMP_SINGLE:
2606 scan_omp_single (stmt, ctx);
953ff289
DN
2607 break;
2608
726a989a
RB
2609 case GIMPLE_OMP_SECTION:
2610 case GIMPLE_OMP_MASTER:
acf0174b 2611 case GIMPLE_OMP_TASKGROUP:
726a989a
RB
2612 case GIMPLE_OMP_ORDERED:
2613 case GIMPLE_OMP_CRITICAL:
2614 ctx = new_omp_context (stmt, ctx);
26127932 2615 scan_omp (gimple_omp_body_ptr (stmt), ctx);
953ff289
DN
2616 break;
2617
acf0174b
JJ
2618 case GIMPLE_OMP_TARGET:
2619 scan_omp_target (stmt, ctx);
2620 break;
2621
2622 case GIMPLE_OMP_TEAMS:
2623 scan_omp_teams (stmt, ctx);
2624 break;
2625
726a989a 2626 case GIMPLE_BIND:
953ff289
DN
2627 {
2628 tree var;
953ff289 2629
726a989a
RB
2630 *handled_ops_p = false;
2631 if (ctx)
910ad8de 2632 for (var = gimple_bind_vars (stmt); var ; var = DECL_CHAIN (var))
726a989a 2633 insert_decl_map (&ctx->cb, var, var);
953ff289
DN
2634 }
2635 break;
953ff289 2636 default:
726a989a 2637 *handled_ops_p = false;
953ff289
DN
2638 break;
2639 }
2640
2641 return NULL_TREE;
2642}
2643
2644
726a989a
RB
2645/* Scan all the statements starting at the current statement. CTX
2646 contains context information about the OpenMP directives and
2647 clauses found during the scan. */
953ff289
DN
2648
2649static void
26127932 2650scan_omp (gimple_seq *body_p, omp_context *ctx)
953ff289
DN
2651{
2652 location_t saved_location;
2653 struct walk_stmt_info wi;
2654
2655 memset (&wi, 0, sizeof (wi));
953ff289 2656 wi.info = ctx;
953ff289
DN
2657 wi.want_locations = true;
2658
2659 saved_location = input_location;
26127932 2660 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
953ff289
DN
2661 input_location = saved_location;
2662}
2663\f
2664/* Re-gimplification and code generation routines. */
2665
2666/* Build a call to GOMP_barrier. */
2667
acf0174b
JJ
2668static gimple
2669build_omp_barrier (tree lhs)
2670{
2671 tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
2672 : BUILT_IN_GOMP_BARRIER);
2673 gimple g = gimple_build_call (fndecl, 0);
2674 if (lhs)
2675 gimple_call_set_lhs (g, lhs);
2676 return g;
953ff289
DN
2677}
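/* Sketch of the resulting calls (the libgomp entry points correspond
   to the two builtins above):

     GOMP_barrier ();                           // LHS == NULL_TREE
     bool cancelled = GOMP_barrier_cancel ();   // LHS present

   The cancellable form returns a flag that the generated code can
   test when cancellation is in effect.  */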
2678
2679/* If a context was created for STMT when it was scanned, return it. */
2680
2681static omp_context *
726a989a 2682maybe_lookup_ctx (gimple stmt)
953ff289
DN
2683{
2684 splay_tree_node n;
2685 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
2686 return n ? (omp_context *) n->value : NULL;
2687}
2688
50674e96
DN
2689
2690/* Find the mapping for DECL in CTX or the immediately enclosing
2691 context that has a mapping for DECL.
2692
2693 If CTX is a nested parallel directive, we may have to use the decl
2694 mappings created in CTX's parent context. Suppose that we have the
2695 following parallel nesting (variable UIDs showed for clarity):
2696
2697 iD.1562 = 0;
2698 #omp parallel shared(iD.1562) -> outer parallel
2699 iD.1562 = iD.1562 + 1;
2700
2701 #omp parallel shared (iD.1562) -> inner parallel
2702 iD.1562 = iD.1562 - 1;
2703
2704 Each parallel structure will create a distinct .omp_data_s structure
2705 for copying iD.1562 in/out of the directive:
2706
2707 outer parallel .omp_data_s.1.i -> iD.1562
2708 inner parallel .omp_data_s.2.i -> iD.1562
2709
2710 A shared variable mapping will produce a copy-out operation before
2711 the parallel directive and a copy-in operation after it. So, in
2712 this case we would have:
2713
2714 iD.1562 = 0;
2715 .omp_data_o.1.i = iD.1562;
2716 #omp parallel shared(iD.1562) -> outer parallel
2717 .omp_data_i.1 = &.omp_data_o.1
2718 .omp_data_i.1->i = .omp_data_i.1->i + 1;
2719
2720 .omp_data_o.2.i = iD.1562; -> **
2721 #omp parallel shared(iD.1562) -> inner parallel
2722 .omp_data_i.2 = &.omp_data_o.2
2723 .omp_data_i.2->i = .omp_data_i.2->i - 1;
2724
2725
2726 ** This is a problem. The symbol iD.1562 cannot be referenced
2727 inside the body of the outer parallel region. But since we are
2728 emitting this copy operation while expanding the inner parallel
2729 directive, we need to access the CTX structure of the outer
2730 parallel directive to get the correct mapping:
2731
2732 .omp_data_o.2.i = .omp_data_i.1->i
2733
2734 Since there may be other workshare or parallel directives enclosing
2735 the parallel directive, it may be necessary to walk up the context
2736 parent chain. This is not a problem in general because nested
2737 parallelism happens only rarely. */
2738
2739static tree
2740lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2741{
2742 tree t;
2743 omp_context *up;
2744
50674e96
DN
2745 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2746 t = maybe_lookup_decl (decl, up);
2747
d2dda7fe 2748 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
50674e96 2749
64964499 2750 return t ? t : decl;
50674e96
DN
2751}
2752
2753
8ca5b2a2
JJ
2754/* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
2755 in outer contexts. */
2756
2757static tree
2758maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
2759{
2760 tree t = NULL;
2761 omp_context *up;
2762
d2dda7fe
JJ
2763 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
2764 t = maybe_lookup_decl (decl, up);
8ca5b2a2
JJ
2765
2766 return t ? t : decl;
2767}
2768
2769
953ff289
DN
2770/* Construct the initialization value for reduction CLAUSE. */
2771
2772tree
2773omp_reduction_init (tree clause, tree type)
2774{
db3927fb 2775 location_t loc = OMP_CLAUSE_LOCATION (clause);
953ff289
DN
2776 switch (OMP_CLAUSE_REDUCTION_CODE (clause))
2777 {
2778 case PLUS_EXPR:
2779 case MINUS_EXPR:
2780 case BIT_IOR_EXPR:
2781 case BIT_XOR_EXPR:
2782 case TRUTH_OR_EXPR:
2783 case TRUTH_ORIF_EXPR:
2784 case TRUTH_XOR_EXPR:
2785 case NE_EXPR:
e8160c9a 2786 return build_zero_cst (type);
953ff289
DN
2787
2788 case MULT_EXPR:
2789 case TRUTH_AND_EXPR:
2790 case TRUTH_ANDIF_EXPR:
2791 case EQ_EXPR:
db3927fb 2792 return fold_convert_loc (loc, type, integer_one_node);
953ff289
DN
2793
2794 case BIT_AND_EXPR:
db3927fb 2795 return fold_convert_loc (loc, type, integer_minus_one_node);
953ff289
DN
2796
2797 case MAX_EXPR:
2798 if (SCALAR_FLOAT_TYPE_P (type))
2799 {
2800 REAL_VALUE_TYPE max, min;
2801 if (HONOR_INFINITIES (TYPE_MODE (type)))
2802 {
2803 real_inf (&max);
2804 real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
2805 }
2806 else
2807 real_maxval (&min, 1, TYPE_MODE (type));
2808 return build_real (type, min);
2809 }
2810 else
2811 {
2812 gcc_assert (INTEGRAL_TYPE_P (type));
2813 return TYPE_MIN_VALUE (type);
2814 }
2815
2816 case MIN_EXPR:
2817 if (SCALAR_FLOAT_TYPE_P (type))
2818 {
2819 REAL_VALUE_TYPE max;
2820 if (HONOR_INFINITIES (TYPE_MODE (type)))
2821 real_inf (&max);
2822 else
2823 real_maxval (&max, 0, TYPE_MODE (type));
2824 return build_real (type, max);
2825 }
2826 else
2827 {
2828 gcc_assert (INTEGRAL_TYPE_P (type));
2829 return TYPE_MAX_VALUE (type);
2830 }
2831
2832 default:
2833 gcc_unreachable ();
2834 }
2835}
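/* The identity values chosen above, summarized for common cases
   (illustrative):

     reduction(+:x), (-:x), (|:x), (^:x), (||:x)  ->  0
     reduction(*:x), (&&:x)                       ->  1
     reduction(&:x)                               ->  ~0 (all bits set)
     reduction(max:x)  ->  minimum value of the type (or -inf)
     reduction(min:x)  ->  maximum value of the type (or +inf)

   Combining a partial result with its identity value leaves the
   result unchanged.  */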
2836
acf0174b
JJ
2837/* Return the alignment to be assumed for the variable in CLAUSE, which
2838 should be OMP_CLAUSE_ALIGNED. */
2839
2840static tree
2841omp_clause_aligned_alignment (tree clause)
2842{
2843 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
2844 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
2845
2846 /* Otherwise return implementation defined alignment. */
2847 unsigned int al = 1;
2848 enum machine_mode mode, vmode;
2849 int vs = targetm.vectorize.autovectorize_vector_sizes ();
2850 if (vs)
2851 vs = 1 << floor_log2 (vs);
2852 static enum mode_class classes[]
2853 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
2854 for (int i = 0; i < 4; i += 2)
2855 for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
2856 mode != VOIDmode;
2857 mode = GET_MODE_WIDER_MODE (mode))
2858 {
2859 vmode = targetm.vectorize.preferred_simd_mode (mode);
2860 if (GET_MODE_CLASS (vmode) != classes[i + 1])
2861 continue;
2862 while (vs
2863 && GET_MODE_SIZE (vmode) < vs
2864 && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
2865 vmode = GET_MODE_2XWIDER_MODE (vmode);
2866
2867 tree type = lang_hooks.types.type_for_mode (mode, 1);
2868 if (type == NULL_TREE || TYPE_MODE (type) != mode)
2869 continue;
2870 type = build_vector_type (type, GET_MODE_SIZE (vmode)
2871 / GET_MODE_SIZE (mode));
2872 if (TYPE_MODE (type) != vmode)
2873 continue;
2874 if (TYPE_ALIGN_UNIT (type) > al)
2875 al = TYPE_ALIGN_UNIT (type);
2876 }
2877 return build_int_cst (integer_type_node, al);
2878}
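/* For example (illustrative): with "#pragma omp simd aligned (p : 32)"
   the explicit alignment 32 is returned directly; with a bare
   "aligned (p)" the loop above walks the integer and float modes,
   widens them toward the target's preferred vector modes, and
   returns the largest unit alignment among those vector types.  */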
2879
74bf76ed
JJ
2880/* Return maximum possible vectorization factor for the target. */
2881
2882static int
2883omp_max_vf (void)
2884{
2885 if (!optimize
2886 || optimize_debug
ea0f3e87
XDL
2887 || (!flag_tree_loop_vectorize
2888 && (global_options_set.x_flag_tree_loop_vectorize
2889 || global_options_set.x_flag_tree_vectorize)))
74bf76ed
JJ
2890 return 1;
2891
2892 int vs = targetm.vectorize.autovectorize_vector_sizes ();
2893 if (vs)
2894 {
2895 vs = 1 << floor_log2 (vs);
2896 return vs;
2897 }
2898 enum machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
2899 if (GET_MODE_CLASS (vqimode) == MODE_VECTOR_INT)
2900 return GET_MODE_NUNITS (vqimode);
2901 return 1;
2902}
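/* Worked example (illustrative): if the target reports the vector
   size bitmask 16 | 8 == 24, then 1 << floor_log2 (24) == 16, so the
   largest power-of-two size wins.  If no sizes are reported, the
   fallback is GET_MODE_NUNITS of the preferred vector mode for
   QImode, or 1 if even that is not a vector mode.  */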
2903
2904/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
2905 privatization. */
2906
2907static bool
2908lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, int &max_vf,
2909 tree &idx, tree &lane, tree &ivar, tree &lvar)
2910{
2911 if (max_vf == 0)
2912 {
2913 max_vf = omp_max_vf ();
2914 if (max_vf > 1)
2915 {
2916 tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
2917 OMP_CLAUSE_SAFELEN);
2918 if (c
2919 && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c), max_vf) == -1)
2920 max_vf = tree_low_cst (OMP_CLAUSE_SAFELEN_EXPR (c), 0);
2921 }
2922 if (max_vf > 1)
2923 {
2924 idx = create_tmp_var (unsigned_type_node, NULL);
2925 lane = create_tmp_var (unsigned_type_node, NULL);
2926 }
2927 }
2928 if (max_vf == 1)
2929 return false;
2930
2931 tree atype = build_array_type_nelts (TREE_TYPE (new_var), max_vf);
2932 tree avar = create_tmp_var_raw (atype, NULL);
2933 if (TREE_ADDRESSABLE (new_var))
2934 TREE_ADDRESSABLE (avar) = 1;
2935 DECL_ATTRIBUTES (avar)
2936 = tree_cons (get_identifier ("omp simd array"), NULL,
2937 DECL_ATTRIBUTES (avar));
2938 gimple_add_tmp_var (avar);
2939 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, idx,
2940 NULL_TREE, NULL_TREE);
2941 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, lane,
2942 NULL_TREE, NULL_TREE);
acf0174b
JJ
2943 if (DECL_P (new_var))
2944 {
2945 SET_DECL_VALUE_EXPR (new_var, lvar);
2946 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
2947 }
74bf76ed
JJ
2948 return true;
2949}
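/* Roughly, for a privatized scalar T t in a simd loop, this routine
   creates (a sketch, with made-up names):

     T t_array[max_vf];   // avar, carrying the "omp simd array" attribute

   Uses of t in the loop body are redirected to t_array[lane] (LVAR)
   through DECL_VALUE_EXPR, while t_array[idx] (IVAR) is used by the
   per-element constructor/destructor sequences built by the caller.
   The vectorizer can later turn the array into a vector register
   keyed off the loop's simduid.  */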
2950
953ff289
DN
2951/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
2952 from the receiver (aka child) side and initializers for REFERENCE_TYPE
2953 private variables. Initialization statements go in ILIST, while calls
2954 to destructors go in DLIST. */
2955
2956static void
726a989a 2957lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
acf0174b 2958 omp_context *ctx, struct omp_for_data *fd)
953ff289 2959{
5039610b 2960 tree c, dtor, copyin_seq, x, ptr;
953ff289 2961 bool copyin_by_ref = false;
8ca5b2a2 2962 bool lastprivate_firstprivate = false;
acf0174b 2963 bool reduction_omp_orig_ref = false;
953ff289 2964 int pass;
74bf76ed
JJ
2965 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2966 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
2967 int max_vf = 0;
2968 tree lane = NULL_TREE, idx = NULL_TREE;
2969 tree ivar = NULL_TREE, lvar = NULL_TREE;
2970 gimple_seq llist[2] = { NULL, NULL };
953ff289 2971
953ff289
DN
2972 copyin_seq = NULL;
2973
74bf76ed
JJ
2974 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
2975 with data sharing clauses referencing variable sized vars. That
2976 is unnecessarily hard to support and very unlikely to result in
2977 vectorized code anyway. */
2978 if (is_simd)
2979 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
2980 switch (OMP_CLAUSE_CODE (c))
2981 {
2982 case OMP_CLAUSE_REDUCTION:
74bf76ed
JJ
2983 case OMP_CLAUSE_PRIVATE:
2984 case OMP_CLAUSE_FIRSTPRIVATE:
2985 case OMP_CLAUSE_LASTPRIVATE:
2986 case OMP_CLAUSE_LINEAR:
2987 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
2988 max_vf = 1;
2989 break;
2990 default:
2991 continue;
2992 }
2993
953ff289
DN
2994 /* Do all the fixed sized types in the first pass, and the variable sized
2995 types in the second pass. This makes sure that the scalar arguments to
b8698a0f 2996 the variable sized types are processed before we use them in the
953ff289
DN
2997 variable sized operations. */
2998 for (pass = 0; pass < 2; ++pass)
2999 {
3000 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3001 {
aaf46ef9 3002 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
953ff289
DN
3003 tree var, new_var;
3004 bool by_ref;
db3927fb 3005 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289
DN
3006
3007 switch (c_kind)
3008 {
3009 case OMP_CLAUSE_PRIVATE:
3010 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3011 continue;
3012 break;
3013 case OMP_CLAUSE_SHARED:
acf0174b
JJ
3014 /* Ignore shared directives in teams construct. */
3015 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3016 continue;
8ca5b2a2
JJ
3017 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3018 {
3019 gcc_assert (is_global_var (OMP_CLAUSE_DECL (c)));
3020 continue;
3021 }
953ff289 3022 case OMP_CLAUSE_FIRSTPRIVATE:
953ff289 3023 case OMP_CLAUSE_COPYIN:
acf0174b
JJ
3024 case OMP_CLAUSE_LINEAR:
3025 break;
953ff289 3026 case OMP_CLAUSE_REDUCTION:
acf0174b
JJ
3027 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3028 reduction_omp_orig_ref = true;
953ff289 3029 break;
acf0174b
JJ
3030 case OMP_CLAUSE__LOOPTEMP_:
3031 /* Handle _looptemp_ clauses only on parallel. */
3032 if (fd)
3033 continue;
74bf76ed 3034 break;
077b0dfb 3035 case OMP_CLAUSE_LASTPRIVATE:
8ca5b2a2
JJ
3036 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3037 {
3038 lastprivate_firstprivate = true;
3039 if (pass != 0)
3040 continue;
3041 }
077b0dfb 3042 break;
acf0174b
JJ
3043 case OMP_CLAUSE_ALIGNED:
3044 if (pass == 0)
3045 continue;
3046 var = OMP_CLAUSE_DECL (c);
3047 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3048 && !is_global_var (var))
3049 {
3050 new_var = maybe_lookup_decl (var, ctx);
3051 if (new_var == NULL_TREE)
3052 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3053 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3054 x = build_call_expr_loc (clause_loc, x, 2, new_var,
3055 omp_clause_aligned_alignment (c));
3056 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3057 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3058 gimplify_and_add (x, ilist);
3059 }
3060 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3061 && is_global_var (var))
3062 {
3063 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3064 new_var = lookup_decl (var, ctx);
3065 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3066 t = build_fold_addr_expr_loc (clause_loc, t);
3067 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3068 t = build_call_expr_loc (clause_loc, t2, 2, t,
3069 omp_clause_aligned_alignment (c));
3070 t = fold_convert_loc (clause_loc, ptype, t);
3071 x = create_tmp_var (ptype, NULL);
3072 t = build2 (MODIFY_EXPR, ptype, x, t);
3073 gimplify_and_add (t, ilist);
3074 t = build_simple_mem_ref_loc (clause_loc, x);
3075 SET_DECL_VALUE_EXPR (new_var, t);
3076 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3077 }
3078 continue;
953ff289
DN
3079 default:
3080 continue;
3081 }
3082
3083 new_var = var = OMP_CLAUSE_DECL (c);
3084 if (c_kind != OMP_CLAUSE_COPYIN)
3085 new_var = lookup_decl (var, ctx);
3086
3087 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3088 {
3089 if (pass != 0)
3090 continue;
3091 }
953ff289
DN
3092 else if (is_variable_sized (var))
3093 {
50674e96
DN
3094 /* For variable sized types, we need to allocate the
3095 actual storage here. Call alloca and store the
3096 result in the pointer decl that we created elsewhere. */
953ff289
DN
3097 if (pass == 0)
3098 continue;
3099
a68ab351
JJ
3100 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
3101 {
726a989a 3102 gimple stmt;
e79983f4 3103 tree tmp, atmp;
726a989a 3104
a68ab351
JJ
3105 ptr = DECL_VALUE_EXPR (new_var);
3106 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
3107 ptr = TREE_OPERAND (ptr, 0);
3108 gcc_assert (DECL_P (ptr));
3109 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
726a989a
RB
3110
3111 /* void *tmp = __builtin_alloca (x); */
e79983f4
MM
3112 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
3113 stmt = gimple_build_call (atmp, 1, x);
726a989a
RB
3114 tmp = create_tmp_var_raw (ptr_type_node, NULL);
3115 gimple_add_tmp_var (tmp);
3116 gimple_call_set_lhs (stmt, tmp);
3117
3118 gimple_seq_add_stmt (ilist, stmt);
3119
db3927fb 3120 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
726a989a 3121 gimplify_assign (ptr, x, ilist);
a68ab351 3122 }
953ff289 3123 }
953ff289
DN
3124 else if (is_reference (var))
3125 {
50674e96
DN
3126 /* For references that are being privatized for Fortran,
3127 allocate new backing storage for the new pointer
3128 variable. This lets us avoid rewriting all the code
3129 that expects a pointer so that it instead expects
acf0174b 3130 a direct variable. */
953ff289
DN
3131 if (pass == 0)
3132 continue;
3133
3134 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
a68ab351
JJ
3135 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
3136 {
3137 x = build_receiver_ref (var, false, ctx);
db3927fb 3138 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
3139 }
3140 else if (TREE_CONSTANT (x))
953ff289
DN
3141 {
3142 const char *name = NULL;
3143 if (DECL_NAME (var))
3144 name = IDENTIFIER_POINTER (DECL_NAME (new_var));
3145
077b0dfb
JJ
3146 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
3147 name);
3148 gimple_add_tmp_var (x);
628c189e 3149 TREE_ADDRESSABLE (x) = 1;
db3927fb 3150 x = build_fold_addr_expr_loc (clause_loc, x);
953ff289
DN
3151 }
3152 else
3153 {
e79983f4
MM
3154 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA);
3155 x = build_call_expr_loc (clause_loc, atmp, 1, x);
953ff289
DN
3156 }
3157
db3927fb 3158 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
726a989a 3159 gimplify_assign (new_var, x, ilist);
953ff289 3160
70f34814 3161 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289
DN
3162 }
3163 else if (c_kind == OMP_CLAUSE_REDUCTION
3164 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3165 {
3166 if (pass == 0)
3167 continue;
3168 }
3169 else if (pass != 0)
3170 continue;
3171
aaf46ef9 3172 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
3173 {
3174 case OMP_CLAUSE_SHARED:
acf0174b
JJ
3175 /* Ignore shared directives in teams construct. */
3176 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3177 continue;
8ca5b2a2
JJ
3178 /* Shared global vars are just accessed directly. */
3179 if (is_global_var (new_var))
3180 break;
953ff289
DN
3181 /* Set up the DECL_VALUE_EXPR for shared variables now. This
3182 needs to be delayed until after fixup_child_record_type so
3183 that we get the correct type during the dereference. */
7c8f7639 3184 by_ref = use_pointer_for_field (var, ctx);
953ff289
DN
3185 x = build_receiver_ref (var, by_ref, ctx);
3186 SET_DECL_VALUE_EXPR (new_var, x);
3187 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3188
3189 /* ??? If VAR is not passed by reference, and the variable
3190 hasn't been initialized yet, then we'll get a warning for
3191 the store into the omp_data_s structure. Ideally, we'd be
b8698a0f 3192 able to notice this and not store anything at all, but
953ff289
DN
3193 we're generating code too early. Suppress the warning. */
3194 if (!by_ref)
3195 TREE_NO_WARNING (var) = 1;
3196 break;
3197
3198 case OMP_CLAUSE_LASTPRIVATE:
3199 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3200 break;
3201 /* FALLTHRU */
3202
3203 case OMP_CLAUSE_PRIVATE:
a68ab351
JJ
3204 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
3205 x = build_outer_var_ref (var, ctx);
3206 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
3207 {
3208 if (is_task_ctx (ctx))
3209 x = build_receiver_ref (var, false, ctx);
3210 else
3211 x = build_outer_var_ref (var, ctx);
3212 }
3213 else
3214 x = NULL;
74bf76ed 3215 do_private:
acf0174b
JJ
3216 tree nx;
3217 nx = lang_hooks.decls.omp_clause_default_ctor (c, new_var, x);
74bf76ed
JJ
3218 if (is_simd)
3219 {
3220 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
acf0174b 3221 if ((TREE_ADDRESSABLE (new_var) || nx || y
74bf76ed
JJ
3222 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
3223 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3224 idx, lane, ivar, lvar))
3225 {
acf0174b 3226 if (nx)
74bf76ed
JJ
3227 x = lang_hooks.decls.omp_clause_default_ctor
3228 (c, unshare_expr (ivar), x);
acf0174b 3229 if (nx && x)
74bf76ed
JJ
3230 gimplify_and_add (x, &llist[0]);
3231 if (y)
3232 {
3233 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
3234 if (y)
3235 {
3236 gimple_seq tseq = NULL;
3237
3238 dtor = y;
3239 gimplify_stmt (&dtor, &tseq);
3240 gimple_seq_add_seq (&llist[1], tseq);
3241 }
3242 }
3243 break;
3244 }
3245 }
acf0174b
JJ
3246 if (nx)
3247 gimplify_and_add (nx, ilist);
953ff289
DN
3248 /* FALLTHRU */
3249
3250 do_dtor:
3251 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
3252 if (x)
3253 {
726a989a
RB
3254 gimple_seq tseq = NULL;
3255
953ff289 3256 dtor = x;
726a989a 3257 gimplify_stmt (&dtor, &tseq);
355a7673 3258 gimple_seq_add_seq (dlist, tseq);
953ff289
DN
3259 }
3260 break;
3261
74bf76ed
JJ
3262 case OMP_CLAUSE_LINEAR:
3263 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
3264 goto do_firstprivate;
3265 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3266 x = NULL;
3267 else
3268 x = build_outer_var_ref (var, ctx);
3269 goto do_private;
3270
953ff289 3271 case OMP_CLAUSE_FIRSTPRIVATE:
a68ab351
JJ
3272 if (is_task_ctx (ctx))
3273 {
3274 if (is_reference (var) || is_variable_sized (var))
3275 goto do_dtor;
3276 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
3277 ctx))
3278 || use_pointer_for_field (var, NULL))
3279 {
3280 x = build_receiver_ref (var, false, ctx);
3281 SET_DECL_VALUE_EXPR (new_var, x);
3282 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3283 goto do_dtor;
3284 }
3285 }
74bf76ed 3286 do_firstprivate:
953ff289 3287 x = build_outer_var_ref (var, ctx);
74bf76ed
JJ
3288 if (is_simd)
3289 {
acf0174b
JJ
3290 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3291 && gimple_omp_for_combined_into_p (ctx->stmt))
3292 {
3293 tree stept = POINTER_TYPE_P (TREE_TYPE (x))
3294 ? sizetype : TREE_TYPE (x);
3295 tree t = fold_convert (stept,
3296 OMP_CLAUSE_LINEAR_STEP (c));
3297 tree c = find_omp_clause (clauses,
3298 OMP_CLAUSE__LOOPTEMP_);
3299 gcc_assert (c);
3300 tree l = OMP_CLAUSE_DECL (c);
3301 if (fd->collapse == 1)
3302 {
3303 tree n1 = fd->loop.n1;
3304 tree step = fd->loop.step;
3305 tree itype = TREE_TYPE (l);
3306 if (POINTER_TYPE_P (itype))
3307 itype = signed_type_for (itype);
3308 l = fold_build2 (MINUS_EXPR, itype, l, n1);
3309 if (TYPE_UNSIGNED (itype)
3310 && fd->loop.cond_code == GT_EXPR)
3311 l = fold_build2 (TRUNC_DIV_EXPR, itype,
3312 fold_build1 (NEGATE_EXPR,
3313 itype, l),
3314 fold_build1 (NEGATE_EXPR,
3315 itype, step));
3316 else
3317 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
3318 }
3319 t = fold_build2 (MULT_EXPR, stept,
3320 fold_convert (stept, l), t);
3321 if (POINTER_TYPE_P (TREE_TYPE (x)))
3322 x = fold_build2 (POINTER_PLUS_EXPR,
3323 TREE_TYPE (x), x, t);
3324 else
3325 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
3326 }
3327
74bf76ed
JJ
3328 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
3329 || TREE_ADDRESSABLE (new_var))
3330 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3331 idx, lane, ivar, lvar))
3332 {
3333 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
3334 {
3335 tree iv = create_tmp_var (TREE_TYPE (new_var), NULL);
3336 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
3337 gimplify_and_add (x, ilist);
3338 gimple_stmt_iterator gsi
3339 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
3340 gimple g
3341 = gimple_build_assign (unshare_expr (lvar), iv);
3342 gsi_insert_before_without_update (&gsi, g,
3343 GSI_SAME_STMT);
3344 tree stept = POINTER_TYPE_P (TREE_TYPE (x))
3345 ? sizetype : TREE_TYPE (x);
3346 tree t = fold_convert (stept,
3347 OMP_CLAUSE_LINEAR_STEP (c));
3348 enum tree_code code = PLUS_EXPR;
3349 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
3350 code = POINTER_PLUS_EXPR;
3351 g = gimple_build_assign_with_ops (code, iv, iv, t);
3352 gsi_insert_before_without_update (&gsi, g,
3353 GSI_SAME_STMT);
3354 break;
3355 }
3356 x = lang_hooks.decls.omp_clause_copy_ctor
3357 (c, unshare_expr (ivar), x);
3358 gimplify_and_add (x, &llist[0]);
3359 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
3360 if (x)
3361 {
3362 gimple_seq tseq = NULL;
3363
3364 dtor = x;
3365 gimplify_stmt (&dtor, &tseq);
3366 gimple_seq_add_seq (&llist[1], tseq);
3367 }
3368 break;
3369 }
3370 }
953ff289
DN
3371 x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
3372 gimplify_and_add (x, ilist);
3373 goto do_dtor;
953ff289 3374
acf0174b
JJ
3375 case OMP_CLAUSE__LOOPTEMP_:
3376 gcc_assert (is_parallel_ctx (ctx));
3377 x = build_outer_var_ref (var, ctx);
3378 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3379 gimplify_and_add (x, ilist);
3380 break;
3381
953ff289 3382 case OMP_CLAUSE_COPYIN:
7c8f7639 3383 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
3384 x = build_receiver_ref (var, by_ref, ctx);
3385 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
3386 append_to_statement_list (x, &copyin_seq);
3387 copyin_by_ref |= by_ref;
3388 break;
3389
3390 case OMP_CLAUSE_REDUCTION:
3391 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3392 {
a68ab351 3393 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
acf0174b 3394 gimple tseq;
a68ab351
JJ
3395 x = build_outer_var_ref (var, ctx);
3396
acf0174b
JJ
3397 if (is_reference (var)
3398 && !useless_type_conversion_p (TREE_TYPE (placeholder),
3399 TREE_TYPE (x)))
db3927fb 3400 x = build_fold_addr_expr_loc (clause_loc, x);
a68ab351
JJ
3401 SET_DECL_VALUE_EXPR (placeholder, x);
3402 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
acf0174b
JJ
3403 tree new_vard = new_var;
3404 if (is_reference (var))
3405 {
3406 gcc_assert (TREE_CODE (new_var) == MEM_REF);
3407 new_vard = TREE_OPERAND (new_var, 0);
3408 gcc_assert (DECL_P (new_vard));
3409 }
74bf76ed
JJ
3410 if (is_simd
3411 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3412 idx, lane, ivar, lvar))
3413 {
acf0174b
JJ
3414 if (new_vard == new_var)
3415 {
3416 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
3417 SET_DECL_VALUE_EXPR (new_var, ivar);
3418 }
3419 else
3420 {
3421 SET_DECL_VALUE_EXPR (new_vard,
3422 build_fold_addr_expr (ivar));
3423 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
3424 }
3425 x = lang_hooks.decls.omp_clause_default_ctor
3426 (c, unshare_expr (ivar),
3427 build_outer_var_ref (var, ctx));
3428 if (x)
3429 gimplify_and_add (x, &llist[0]);
3430 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3431 {
3432 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3433 lower_omp (&tseq, ctx);
3434 gimple_seq_add_seq (&llist[0], tseq);
3435 }
3436 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3437 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3438 lower_omp (&tseq, ctx);
3439 gimple_seq_add_seq (&llist[1], tseq);
3440 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3441 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3442 if (new_vard == new_var)
3443 SET_DECL_VALUE_EXPR (new_var, lvar);
3444 else
3445 SET_DECL_VALUE_EXPR (new_vard,
3446 build_fold_addr_expr (lvar));
3447 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
3448 if (x)
3449 {
3450 tseq = NULL;
3451 dtor = x;
3452 gimplify_stmt (&dtor, &tseq);
3453 gimple_seq_add_seq (&llist[1], tseq);
3454 }
3455 break;
3456 }
3457 x = lang_hooks.decls.omp_clause_default_ctor
3458 (c, new_var, unshare_expr (x));
3459 if (x)
3460 gimplify_and_add (x, ilist);
3461 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3462 {
3463 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3464 lower_omp (&tseq, ctx);
3465 gimple_seq_add_seq (ilist, tseq);
3466 }
3467 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3468 if (is_simd)
3469 {
3470 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3471 lower_omp (&tseq, ctx);
3472 gimple_seq_add_seq (dlist, tseq);
3473 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3474 }
3475 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3476 goto do_dtor;
3477 }
3478 else
3479 {
3480 x = omp_reduction_init (c, TREE_TYPE (new_var));
3481 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
3482 if (is_simd
3483 && lower_rec_simd_input_clauses (new_var, ctx, max_vf,
3484 idx, lane, ivar, lvar))
3485 {
3486 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3487 tree ref = build_outer_var_ref (var, ctx);
3488
3489 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
3490
3491 /* reduction(-:var) sums up the partial results, so it
3492 acts identically to reduction(+:var). */
3493 if (code == MINUS_EXPR)
3494 code = PLUS_EXPR;
3495
3496 x = build2 (code, TREE_TYPE (ref), ref, ivar);
74bf76ed
JJ
3497 ref = build_outer_var_ref (var, ctx);
3498 gimplify_assign (ref, x, &llist[1]);
3499 }
3500 else
3501 {
3502 gimplify_assign (new_var, x, ilist);
3503 if (is_simd)
3504 gimplify_assign (build_outer_var_ref (var, ctx),
3505 new_var, dlist);
3506 }
953ff289
DN
3507 }
3508 break;
3509
3510 default:
3511 gcc_unreachable ();
3512 }
3513 }
3514 }
3515
74bf76ed
JJ
3516 if (lane)
3517 {
3518 tree uid = create_tmp_var (ptr_type_node, "simduid");
8928eff3
JJ
3519 /* We don't want uninit warnings on simduid; it is always uninitialized,
3520 since we use it only for its DECL_UID, never for its value. */
3521 TREE_NO_WARNING (uid) = 1;
74bf76ed
JJ
3522 gimple g
3523 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
3524 gimple_call_set_lhs (g, lane);
3525 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
3526 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
3527 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
3528 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
3529 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
3530 gimple_omp_for_set_clauses (ctx->stmt, c);
3531 g = gimple_build_assign_with_ops (INTEGER_CST, lane,
3532 build_int_cst (unsigned_type_node, 0),
3533 NULL_TREE);
3534 gimple_seq_add_stmt (ilist, g);
3535 for (int i = 0; i < 2; i++)
3536 if (llist[i])
3537 {
3538 tree vf = create_tmp_var (unsigned_type_node, NULL);
3539 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
3540 gimple_call_set_lhs (g, vf);
3541 gimple_seq *seq = i == 0 ? ilist : dlist;
3542 gimple_seq_add_stmt (seq, g);
3543 tree t = build_int_cst (unsigned_type_node, 0);
3544 g = gimple_build_assign_with_ops (INTEGER_CST, idx, t, NULL_TREE);
3545 gimple_seq_add_stmt (seq, g);
3546 tree body = create_artificial_label (UNKNOWN_LOCATION);
3547 tree header = create_artificial_label (UNKNOWN_LOCATION);
3548 tree end = create_artificial_label (UNKNOWN_LOCATION);
3549 gimple_seq_add_stmt (seq, gimple_build_goto (header));
3550 gimple_seq_add_stmt (seq, gimple_build_label (body));
3551 gimple_seq_add_seq (seq, llist[i]);
3552 t = build_int_cst (unsigned_type_node, 1);
3553 g = gimple_build_assign_with_ops (PLUS_EXPR, idx, idx, t);
3554 gimple_seq_add_stmt (seq, g);
3555 gimple_seq_add_stmt (seq, gimple_build_label (header));
3556 g = gimple_build_cond (LT_EXPR, idx, vf, body, end);
3557 gimple_seq_add_stmt (seq, g);
3558 gimple_seq_add_stmt (seq, gimple_build_label (end));
3559 }
3560 }
3561
953ff289
DN
3562 /* The copyin sequence is not to be executed by the main thread, since
3563 that would result in self-copies. A self-copy may be unobservable
3564 for scalars, but it certainly is observable for C++ operator=. */
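 /* A sketch (illustrative, not from the source): for "copyin (x)" on a
    threadprivate x, the sequence guarded below is conceptually

      if (omp_get_thread_num () != 0)
        x = x_of_master;

    where x_of_master stands for the master thread's copy, so the
    master never performs the self-copy. */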
3565 if (copyin_seq)
3566 {
e79983f4
MM
3567 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
3568 0);
953ff289
DN
3569 x = build2 (NE_EXPR, boolean_type_node, x,
3570 build_int_cst (TREE_TYPE (x), 0));
3571 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
3572 gimplify_and_add (x, ilist);
3573 }
3574
3575 /* If any copyin variable is passed by reference, we must ensure the
3576 master thread doesn't modify it before it is copied over in all
8ca5b2a2
JJ
3577 threads. Similarly for variables in both firstprivate and
3578 lastprivate clauses we need to ensure the lastprivate copying
acf0174b
JJ
3579 happens after firstprivate copying in all threads. And similarly
3580 for UDRs if the initializer expression refers to omp_orig. */
3581 if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
74bf76ed
JJ
3582 {
3583 /* Don't add any barrier for #pragma omp simd or
3584 #pragma omp distribute. */
3585 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3586 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
acf0174b 3587 gimple_seq_add_stmt (ilist, build_omp_barrier (NULL_TREE));
74bf76ed
JJ
3588 }
3589
3590 /* If max_vf is non-zero, then we can use only a vectorization factor
3591 up to the max_vf we chose. So stick it into the safelen clause. */
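 /* E.g. (an illustrative sketch): if the user wrote safelen (16) but
    max_vf was chosen as 8, a safelen (8) clause is added so the loop
    is never vectorized with a factor above 8. */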
3592 if (max_vf)
3593 {
3594 tree c = find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
3595 OMP_CLAUSE_SAFELEN);
3596 if (c == NULL_TREE
3597 || compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3598 max_vf) == 1)
3599 {
3600 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
3601 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
3602 max_vf);
3603 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
3604 gimple_omp_for_set_clauses (ctx->stmt, c);
3605 }
3606 }
953ff289
DN
3607}
3608
50674e96 3609
953ff289
DN
3610/* Generate code to implement the LASTPRIVATE clauses. This is used for
3611 both parallel and workshare constructs. PREDICATE may be NULL if it's
3612 always true. */
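/* A sketch of the intent (illustrative, not from the source): for
   "#pragma omp for lastprivate (x)" the code emitted here is
   conceptually

     if (PREDICATE)
       x_outer = x_private;

   where PREDICATE tests whether this thread ran the last iteration,
   x_outer/x_private are illustrative names, and the assignment is
   built via the omp_clause_assign_op langhook below. */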
3613
3614static void
726a989a 3615lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
acf0174b 3616 omp_context *ctx)
953ff289 3617{
74bf76ed 3618 tree x, c, label = NULL, orig_clauses = clauses;
a68ab351 3619 bool par_clauses = false;
74bf76ed 3620 tree simduid = NULL, lastlane = NULL;
953ff289 3621
74bf76ed
JJ
3622 /* Early exit if there are no lastprivate or linear clauses. */
3623 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
3624 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
3625 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
3626 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
3627 break;
953ff289
DN
3628 if (clauses == NULL)
3629 {
3630 /* If this was a workshare clause, see if it had been combined
3631 with its parallel. In that case, look for the clauses on the
3632 parallel statement itself. */
3633 if (is_parallel_ctx (ctx))
3634 return;
3635
3636 ctx = ctx->outer;
3637 if (ctx == NULL || !is_parallel_ctx (ctx))
3638 return;
3639
726a989a 3640 clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
953ff289
DN
3641 OMP_CLAUSE_LASTPRIVATE);
3642 if (clauses == NULL)
3643 return;
a68ab351 3644 par_clauses = true;
953ff289
DN
3645 }
3646
726a989a
RB
3647 if (predicate)
3648 {
3649 gimple stmt;
3650 tree label_true, arm1, arm2;
3651
c2255bc4
AH
3652 label = create_artificial_label (UNKNOWN_LOCATION);
3653 label_true = create_artificial_label (UNKNOWN_LOCATION);
726a989a
RB
3654 arm1 = TREE_OPERAND (predicate, 0);
3655 arm2 = TREE_OPERAND (predicate, 1);
3656 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
3657 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
3658 stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
3659 label_true, label);
3660 gimple_seq_add_stmt (stmt_list, stmt);
3661 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
3662 }
953ff289 3663
74bf76ed
JJ
3664 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3665 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3666 {
3667 simduid = find_omp_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
3668 if (simduid)
3669 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
3670 }
3671
a68ab351 3672 for (c = clauses; c ;)
953ff289
DN
3673 {
3674 tree var, new_var;
db3927fb 3675 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 3676
74bf76ed
JJ
3677 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3678 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3679 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
a68ab351
JJ
3680 {
3681 var = OMP_CLAUSE_DECL (c);
3682 new_var = lookup_decl (var, ctx);
953ff289 3683
74bf76ed
JJ
3684 if (simduid && DECL_HAS_VALUE_EXPR_P (new_var))
3685 {
3686 tree val = DECL_VALUE_EXPR (new_var);
3687 if (TREE_CODE (val) == ARRAY_REF
3688 && VAR_P (TREE_OPERAND (val, 0))
3689 && lookup_attribute ("omp simd array",
3690 DECL_ATTRIBUTES (TREE_OPERAND (val,
3691 0))))
3692 {
3693 if (lastlane == NULL)
3694 {
3695 lastlane = create_tmp_var (unsigned_type_node, NULL);
3696 gimple g
3697 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
3698 2, simduid,
3699 TREE_OPERAND (val, 1));
3700 gimple_call_set_lhs (g, lastlane);
3701 gimple_seq_add_stmt (stmt_list, g);
3702 }
3703 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
3704 TREE_OPERAND (val, 0), lastlane,
3705 NULL_TREE, NULL_TREE);
3706 }
3707 }
3708
3709 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
3710 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
726a989a 3711 {
355a7673 3712 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
726a989a
RB
3713 gimple_seq_add_seq (stmt_list,
3714 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
74bf76ed 3715 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
726a989a 3716 }
953ff289 3717
a68ab351
JJ
3718 x = build_outer_var_ref (var, ctx);
3719 if (is_reference (var))
70f34814 3720 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
a68ab351 3721 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
726a989a 3722 gimplify_and_add (x, stmt_list);
a68ab351
JJ
3723 }
3724 c = OMP_CLAUSE_CHAIN (c);
3725 if (c == NULL && !par_clauses)
3726 {
3727 /* If this was a workshare clause, see if it had been combined
3728 with its parallel. In that case, continue looking for the
3729 clauses also on the parallel statement itself. */
3730 if (is_parallel_ctx (ctx))
3731 break;
3732
3733 ctx = ctx->outer;
3734 if (ctx == NULL || !is_parallel_ctx (ctx))
3735 break;
3736
726a989a 3737 c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
a68ab351
JJ
3738 OMP_CLAUSE_LASTPRIVATE);
3739 par_clauses = true;
3740 }
953ff289
DN
3741 }
3742
726a989a
RB
3743 if (label)
3744 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
953ff289
DN
3745}
3746
50674e96 3747
953ff289
DN
3748/* Generate code to implement the REDUCTION clauses. */
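/* A sketch of the two strategies below (illustrative, not from the
   source). With exactly one scalar clause such as "reduction (+:s)"
   the merge is a single atomic update, conceptually

     #pragma omp atomic
     s_outer = s_outer + s_private;

   whereas with several clauses, or an array/UDR reduction, all the
   merges are wrapped in one critical region:

     GOMP_atomic_start ();
     s_outer = s_outer + s_private;
     ... the remaining merges ...
     GOMP_atomic_end ();  */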
3749
3750static void
726a989a 3751lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
953ff289 3752{
726a989a
RB
3753 gimple_seq sub_seq = NULL;
3754 gimple stmt;
3755 tree x, c;
953ff289
DN
3756 int count = 0;
3757
74bf76ed
JJ
3758 /* SIMD reductions are handled in lower_rec_input_clauses. */
3759 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3760 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3761 return;
3762
953ff289
DN
3763 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
3764 update in that case, otherwise use a lock. */
3765 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
aaf46ef9 3766 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
953ff289
DN
3767 {
3768 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3769 {
acf0174b 3770 /* Never use OMP_ATOMIC for array reductions or UDRs. */
953ff289
DN
3771 count = -1;
3772 break;
3773 }
3774 count++;
3775 }
3776
3777 if (count == 0)
3778 return;
3779
3780 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3781 {
3782 tree var, ref, new_var;
3783 enum tree_code code;
db3927fb 3784 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 3785
aaf46ef9 3786 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
953ff289
DN
3787 continue;
3788
3789 var = OMP_CLAUSE_DECL (c);
3790 new_var = lookup_decl (var, ctx);
3791 if (is_reference (var))
70f34814 3792 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289
DN
3793 ref = build_outer_var_ref (var, ctx);
3794 code = OMP_CLAUSE_REDUCTION_CODE (c);
50674e96
DN
3795
3796 /* reduction(-:var) sums up the partial results, so it acts
3797 identically to reduction(+:var). */
953ff289
DN
3798 if (code == MINUS_EXPR)
3799 code = PLUS_EXPR;
3800
3801 if (count == 1)
3802 {
db3927fb 3803 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
3804
3805 addr = save_expr (addr);
3806 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
db3927fb 3807 x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
953ff289 3808 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
726a989a 3809 gimplify_and_add (x, stmt_seqp);
953ff289
DN
3810 return;
3811 }
3812
3813 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3814 {
3815 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3816
acf0174b
JJ
3817 if (is_reference (var)
3818 && !useless_type_conversion_p (TREE_TYPE (placeholder),
3819 TREE_TYPE (ref)))
db3927fb 3820 ref = build_fold_addr_expr_loc (clause_loc, ref);
953ff289
DN
3821 SET_DECL_VALUE_EXPR (placeholder, ref);
3822 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
355a7673 3823 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
726a989a
RB
3824 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
3825 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
953ff289
DN
3826 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
3827 }
3828 else
3829 {
3830 x = build2 (code, TREE_TYPE (ref), ref, new_var);
3831 ref = build_outer_var_ref (var, ctx);
726a989a 3832 gimplify_assign (ref, x, &sub_seq);
953ff289
DN
3833 }
3834 }
3835
e79983f4
MM
3836 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
3837 0);
726a989a 3838 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289 3839
726a989a 3840 gimple_seq_add_seq (stmt_seqp, sub_seq);
953ff289 3841
e79983f4
MM
3842 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
3843 0);
726a989a 3844 gimple_seq_add_stmt (stmt_seqp, stmt);
953ff289
DN
3845}
3846
50674e96 3847
953ff289
DN
3848/* Generate code to implement the COPYPRIVATE clauses. */
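/* A sketch (illustrative, not from the source): for

     #pragma omp single copyprivate (x)

   the thread that executed the single region publishes its value
   through the sender record and every thread then reads it back,
   conceptually

     .omp_data_o.x = x;          in SLIST (or &x when by reference)
     x = .omp_data_i->x;         in RLIST, executed by all threads

   using build_sender_ref and build_receiver_ref as below. */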
3849
3850static void
726a989a 3851lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
953ff289
DN
3852 omp_context *ctx)
3853{
3854 tree c;
3855
3856 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3857 {
78db7d92 3858 tree var, new_var, ref, x;
953ff289 3859 bool by_ref;
db3927fb 3860 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 3861
aaf46ef9 3862 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
953ff289
DN
3863 continue;
3864
3865 var = OMP_CLAUSE_DECL (c);
7c8f7639 3866 by_ref = use_pointer_for_field (var, NULL);
953ff289
DN
3867
3868 ref = build_sender_ref (var, ctx);
78db7d92
JJ
3869 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
3870 if (by_ref)
3871 {
3872 x = build_fold_addr_expr_loc (clause_loc, new_var);
3873 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
3874 }
726a989a 3875 gimplify_assign (ref, x, slist);
953ff289 3876
78db7d92
JJ
3877 ref = build_receiver_ref (var, false, ctx);
3878 if (by_ref)
3879 {
3880 ref = fold_convert_loc (clause_loc,
3881 build_pointer_type (TREE_TYPE (new_var)),
3882 ref);
3883 ref = build_fold_indirect_ref_loc (clause_loc, ref);
3884 }
953ff289
DN
3885 if (is_reference (var))
3886 {
78db7d92 3887 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
70f34814
RG
3888 ref = build_simple_mem_ref_loc (clause_loc, ref);
3889 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
953ff289 3890 }
78db7d92 3891 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
953ff289
DN
3892 gimplify_and_add (x, rlist);
3893 }
3894}
3895
50674e96 3896
953ff289
DN
3897/* Generate code to implement the clauses FIRSTPRIVATE, COPYIN, LASTPRIVATE,
3898 and REDUCTION from the sender (aka parent) side. */
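/* A sketch (illustrative, not from the source): for

     #pragma omp parallel firstprivate (a) lastprivate (b)

   the parent conceptually emits

     .omp_data_o.a = a;          before the fork (ILIST)
     b = .omp_data_o.b;          after the join (OLIST)

   sending &a instead of the value when use_pointer_for_field says
   the field is a pointer. */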
3899
3900static void
726a989a
RB
3901lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
3902 omp_context *ctx)
953ff289
DN
3903{
3904 tree c;
3905
3906 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3907 {
50674e96 3908 tree val, ref, x, var;
953ff289 3909 bool by_ref, do_in = false, do_out = false;
db3927fb 3910 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
953ff289 3911
aaf46ef9 3912 switch (OMP_CLAUSE_CODE (c))
953ff289 3913 {
a68ab351
JJ
3914 case OMP_CLAUSE_PRIVATE:
3915 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
3916 break;
3917 continue;
953ff289
DN
3918 case OMP_CLAUSE_FIRSTPRIVATE:
3919 case OMP_CLAUSE_COPYIN:
3920 case OMP_CLAUSE_LASTPRIVATE:
3921 case OMP_CLAUSE_REDUCTION:
acf0174b 3922 case OMP_CLAUSE__LOOPTEMP_:
953ff289
DN
3923 break;
3924 default:
3925 continue;
3926 }
3927
d2dda7fe
JJ
3928 val = OMP_CLAUSE_DECL (c);
3929 var = lookup_decl_in_outer_ctx (val, ctx);
50674e96 3930
8ca5b2a2
JJ
3931 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
3932 && is_global_var (var))
3933 continue;
953ff289
DN
3934 if (is_variable_sized (val))
3935 continue;
7c8f7639 3936 by_ref = use_pointer_for_field (val, NULL);
953ff289 3937
aaf46ef9 3938 switch (OMP_CLAUSE_CODE (c))
953ff289 3939 {
a68ab351 3940 case OMP_CLAUSE_PRIVATE:
953ff289
DN
3941 case OMP_CLAUSE_FIRSTPRIVATE:
3942 case OMP_CLAUSE_COPYIN:
acf0174b 3943 case OMP_CLAUSE__LOOPTEMP_:
953ff289
DN
3944 do_in = true;
3945 break;
3946
3947 case OMP_CLAUSE_LASTPRIVATE:
3948 if (by_ref || is_reference (val))
3949 {
3950 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3951 continue;
3952 do_in = true;
3953 }
3954 else
a68ab351
JJ
3955 {
3956 do_out = true;
3957 if (lang_hooks.decls.omp_private_outer_ref (val))
3958 do_in = true;
3959 }
953ff289
DN
3960 break;
3961
3962 case OMP_CLAUSE_REDUCTION:
3963 do_in = true;
3964 do_out = !(by_ref || is_reference (val));
3965 break;
3966
3967 default:
3968 gcc_unreachable ();
3969 }
3970
3971 if (do_in)
3972 {
3973 ref = build_sender_ref (val, ctx);
db3927fb 3974 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
726a989a 3975 gimplify_assign (ref, x, ilist);
a68ab351
JJ
3976 if (is_task_ctx (ctx))
3977 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
953ff289 3978 }
50674e96 3979
953ff289
DN
3980 if (do_out)
3981 {
3982 ref = build_sender_ref (val, ctx);
726a989a 3983 gimplify_assign (var, ref, olist);
953ff289
DN
3984 }
3985 }
3986}
3987
726a989a
RB
3988/* Generate code to implement SHARED from the sender (aka parent)
3989 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
3990 list things that got automatically shared. */
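/* A sketch (illustrative): a shared variable X whose field is a
   pointer is sent as

     .omp_data_o.x = &x;

   while a by-value field is copied in and, unless X is read-only,
   copied back out after the region:

     .omp_data_o.x = x;   ...   x = .omp_data_o.x;  */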
953ff289
DN
3991
3992static void
726a989a 3993lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
953ff289 3994{
a68ab351 3995 tree var, ovar, nvar, f, x, record_type;
953ff289
DN
3996
3997 if (ctx->record_type == NULL)
3998 return;
50674e96 3999
a68ab351 4000 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
910ad8de 4001 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
953ff289
DN
4002 {
4003 ovar = DECL_ABSTRACT_ORIGIN (f);
4004 nvar = maybe_lookup_decl (ovar, ctx);
4005 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
4006 continue;
4007
50674e96
DN
4008 /* If CTX is a nested parallel directive, find the immediately
4009 enclosing parallel or workshare construct that contains a
4010 mapping for OVAR. */
d2dda7fe 4011 var = lookup_decl_in_outer_ctx (ovar, ctx);
50674e96 4012
7c8f7639 4013 if (use_pointer_for_field (ovar, ctx))
953ff289
DN
4014 {
4015 x = build_sender_ref (ovar, ctx);
50674e96 4016 var = build_fold_addr_expr (var);
726a989a 4017 gimplify_assign (x, var, ilist);
953ff289
DN
4018 }
4019 else
4020 {
4021 x = build_sender_ref (ovar, ctx);
726a989a 4022 gimplify_assign (x, var, ilist);
953ff289 4023
14e5b285
RG
4024 if (!TREE_READONLY (var)
4025 /* We don't need to receive a new reference to a result
4026 or parm decl. In fact we may not store to it as we will
4027 invalidate any pending RSO and generate wrong gimple
4028 during inlining. */
4029 && !((TREE_CODE (var) == RESULT_DECL
4030 || TREE_CODE (var) == PARM_DECL)
4031 && DECL_BY_REFERENCE (var)))
a68ab351
JJ
4032 {
4033 x = build_sender_ref (ovar, ctx);
726a989a 4034 gimplify_assign (var, x, olist);
a68ab351 4035 }
953ff289
DN
4036 }
4037 }
4038}
4039
726a989a
RB
4040
4041/* A convenience function to build an empty GIMPLE_COND with just the
4042 condition. */
4043
4044static gimple
4045gimple_build_cond_empty (tree cond)
4046{
4047 enum tree_code pred_code;
4048 tree lhs, rhs;
4049
4050 gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
4051 return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
4052}
4053
4054
b8698a0f 4055/* Build the call to GOMP_parallel (or a combined variant such as
50674e96
DN
4056 GOMP_parallel_loop_static) to actually generate the parallel
4057 operation. REGION is the parallel region being expanded. BB is the
4058 block in which to insert the code. WS_ARGS will be set if this is a
4059 call to a combined parallel+workshare construct; it contains the
4060 list of additional arguments needed by the workshare construct. */
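/* A sketch of the emitted call (illustrative; the four-argument form
   matches the GCC 4.9-era libgomp entry point):

     GOMP_parallel (child_fn, &.omp_data_o, num_threads, flags);

   where num_threads is 0 (select at run time) unless a num_threads
   clause was given, and an if clause folds into either (cond != 0)
   or (cond ? val : 1u) as described further below. */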
953ff289
DN
4061
4062static void
777f7f9a 4063expand_parallel_call (struct omp_region *region, basic_block bb,
9771b263 4064 gimple entry_stmt, vec<tree, va_gc> *ws_args)
953ff289 4065{
acf0174b 4066 tree t, t1, t2, val, cond, c, clauses, flags;
726a989a
RB
4067 gimple_stmt_iterator gsi;
4068 gimple stmt;
e79983f4
MM
4069 enum built_in_function start_ix;
4070 int start_ix2;
db3927fb 4071 location_t clause_loc;
9771b263 4072 vec<tree, va_gc> *args;
50674e96 4073
726a989a 4074 clauses = gimple_omp_parallel_clauses (entry_stmt);
50674e96 4075
acf0174b 4076 /* Determine what flavor of GOMP_parallel we will be
50674e96 4077 emitting. */
acf0174b 4078 start_ix = BUILT_IN_GOMP_PARALLEL;
50674e96
DN
4079 if (is_combined_parallel (region))
4080 {
777f7f9a 4081 switch (region->inner->type)
50674e96 4082 {
726a989a 4083 case GIMPLE_OMP_FOR:
a68ab351 4084 gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
acf0174b 4085 start_ix2 = ((int)BUILT_IN_GOMP_PARALLEL_LOOP_STATIC
e79983f4
MM
4086 + (region->inner->sched_kind
4087 == OMP_CLAUSE_SCHEDULE_RUNTIME
4088 ? 3 : region->inner->sched_kind));
4089 start_ix = (enum built_in_function)start_ix2;
777f7f9a 4090 break;
726a989a 4091 case GIMPLE_OMP_SECTIONS:
acf0174b 4092 start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS;
777f7f9a
RH
4093 break;
4094 default:
4095 gcc_unreachable ();
50674e96 4096 }
50674e96 4097 }
953ff289
DN
4098
4099 /* By default, the value of NUM_THREADS is zero (selected at run time)
4100 and there is no conditional. */
4101 cond = NULL_TREE;
4102 val = build_int_cst (unsigned_type_node, 0);
acf0174b 4103 flags = build_int_cst (unsigned_type_node, 0);
953ff289
DN
4104
4105 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
4106 if (c)
4107 cond = OMP_CLAUSE_IF_EXPR (c);
4108
4109 c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
4110 if (c)
db3927fb
AH
4111 {
4112 val = OMP_CLAUSE_NUM_THREADS_EXPR (c);
4113 clause_loc = OMP_CLAUSE_LOCATION (c);
4114 }
4115 else
4116 clause_loc = gimple_location (entry_stmt);
953ff289 4117
acf0174b
JJ
4118 c = find_omp_clause (clauses, OMP_CLAUSE_PROC_BIND);
4119 if (c)
4120 flags = build_int_cst (unsigned_type_node, OMP_CLAUSE_PROC_BIND_KIND (c));
4121
953ff289 4122 /* Ensure 'val' is of the correct type. */
db3927fb 4123 val = fold_convert_loc (clause_loc, unsigned_type_node, val);
953ff289
DN
4124
4125 /* If we found the clause 'if (cond)', build either
4126 (cond != 0) or (cond ? val : 1u). */
4127 if (cond)
4128 {
726a989a 4129 gimple_stmt_iterator gsi;
50674e96
DN
4130
4131 cond = gimple_boolify (cond);
4132
953ff289 4133 if (integer_zerop (val))
db3927fb
AH
4134 val = fold_build2_loc (clause_loc,
4135 EQ_EXPR, unsigned_type_node, cond,
917948d3 4136 build_int_cst (TREE_TYPE (cond), 0));
953ff289 4137 else
50674e96
DN
4138 {
4139 basic_block cond_bb, then_bb, else_bb;
917948d3 4140 edge e, e_then, e_else;
726a989a 4141 tree tmp_then, tmp_else, tmp_join, tmp_var;
917948d3
ZD
4142
4143 tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
4144 if (gimple_in_ssa_p (cfun))
4145 {
726a989a
RB
4146 tmp_then = make_ssa_name (tmp_var, NULL);
4147 tmp_else = make_ssa_name (tmp_var, NULL);
4148 tmp_join = make_ssa_name (tmp_var, NULL);
917948d3
ZD
4149 }
4150 else
4151 {
4152 tmp_then = tmp_var;
4153 tmp_else = tmp_var;
4154 tmp_join = tmp_var;
4155 }
50674e96 4156
50674e96
DN
4157 e = split_block (bb, NULL);
4158 cond_bb = e->src;
4159 bb = e->dest;
4160 remove_edge (e);
4161
4162 then_bb = create_empty_bb (cond_bb);
4163 else_bb = create_empty_bb (then_bb);
917948d3
ZD
4164 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
4165 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
50674e96 4166
726a989a
RB
4167 stmt = gimple_build_cond_empty (cond);
4168 gsi = gsi_start_bb (cond_bb);
4169 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
50674e96 4170
726a989a
RB
4171 gsi = gsi_start_bb (then_bb);
4172 stmt = gimple_build_assign (tmp_then, val);
4173 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
50674e96 4174
726a989a
RB
4175 gsi = gsi_start_bb (else_bb);
4176 stmt = gimple_build_assign
4177 (tmp_else, build_int_cst (unsigned_type_node, 1));
4178 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
50674e96
DN
4179
4180 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
4181 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
a9e0d843
RB
4182 if (current_loops)
4183 {
4184 add_bb_to_loop (then_bb, cond_bb->loop_father);
4185 add_bb_to_loop (else_bb, cond_bb->loop_father);
4186 }
917948d3
ZD
4187 e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
4188 e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);
50674e96 4189
917948d3
ZD
4190 if (gimple_in_ssa_p (cfun))
4191 {
726a989a 4192 gimple phi = create_phi_node (tmp_join, bb);
9e227d60
DC
4193 add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
4194 add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
917948d3
ZD
4195 }
4196
4197 val = tmp_join;
50674e96
DN
4198 }
4199
726a989a
RB
4200 gsi = gsi_start_bb (bb);
4201 val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
4202 false, GSI_CONTINUE_LINKING);
953ff289
DN
4203 }
4204
726a989a
RB
4205 gsi = gsi_last_bb (bb);
4206 t = gimple_omp_parallel_data_arg (entry_stmt);
953ff289 4207 if (t == NULL)
5039610b 4208 t1 = null_pointer_node;
953ff289 4209 else
5039610b 4210 t1 = build_fold_addr_expr (t);
726a989a 4211 t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));
50674e96 4212
acf0174b 4213 vec_alloc (args, 4 + vec_safe_length (ws_args));
9771b263
DN
4214 args->quick_push (t2);
4215 args->quick_push (t1);
4216 args->quick_push (val);
4217 if (ws_args)
4218 args->splice (*ws_args);
acf0174b 4219 args->quick_push (flags);
3bb06db4
NF
4220
4221 t = build_call_expr_loc_vec (UNKNOWN_LOCATION,
e79983f4 4222 builtin_decl_explicit (start_ix), args);
50674e96 4223
726a989a
RB
4224 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4225 false, GSI_CONTINUE_LINKING);
953ff289
DN
4226}
4227
50674e96 4228
a68ab351
JJ
4229/* Build the function call to GOMP_task to actually
4230 generate the task operation. BB is the block in which to insert the code. */
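/* A sketch of the emitted call (illustrative; GCC 4.9-era libgomp):

     GOMP_task (child_fn, &.omp_data_o, cpyfn, arg_size, arg_align,
                if_cond, flags, depend);

   where FLAGS packs untied (1), final (2, possibly computed at run
   time), mergeable (4) and depend (8). */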
4231
4232static void
726a989a 4233expand_task_call (basic_block bb, gimple entry_stmt)
a68ab351 4234{
acf0174b 4235 tree t, t1, t2, t3, flags, cond, c, c2, clauses, depend;
726a989a 4236 gimple_stmt_iterator gsi;
db3927fb 4237 location_t loc = gimple_location (entry_stmt);
a68ab351 4238
726a989a 4239 clauses = gimple_omp_task_clauses (entry_stmt);
a68ab351 4240
a68ab351
JJ
4241 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
4242 if (c)
4243 cond = gimple_boolify (OMP_CLAUSE_IF_EXPR (c));
4244 else
4245 cond = boolean_true_node;
4246
4247 c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
20906c66 4248 c2 = find_omp_clause (clauses, OMP_CLAUSE_MERGEABLE);
acf0174b 4249 depend = find_omp_clause (clauses, OMP_CLAUSE_DEPEND);
20906c66 4250 flags = build_int_cst (unsigned_type_node,
acf0174b 4251 (c ? 1 : 0) + (c2 ? 4 : 0) + (depend ? 8 : 0));
20906c66
JJ
4252
4253 c = find_omp_clause (clauses, OMP_CLAUSE_FINAL);
4254 if (c)
4255 {
4256 c = gimple_boolify (OMP_CLAUSE_FINAL_EXPR (c));
4257 c = fold_build3_loc (loc, COND_EXPR, unsigned_type_node, c,
4258 build_int_cst (unsigned_type_node, 2),
4259 build_int_cst (unsigned_type_node, 0));
4260 flags = fold_build2_loc (loc, PLUS_EXPR, unsigned_type_node, flags, c);
4261 }
acf0174b
JJ
4262 if (depend)
4263 depend = OMP_CLAUSE_DECL (depend);
4264 else
4265 depend = build_int_cst (ptr_type_node, 0);
a68ab351 4266
726a989a
RB
4267 gsi = gsi_last_bb (bb);
4268 t = gimple_omp_task_data_arg (entry_stmt);
a68ab351
JJ
4269 if (t == NULL)
4270 t2 = null_pointer_node;
4271 else
db3927fb
AH
4272 t2 = build_fold_addr_expr_loc (loc, t);
4273 t1 = build_fold_addr_expr_loc (loc, gimple_omp_task_child_fn (entry_stmt));
726a989a 4274 t = gimple_omp_task_copy_fn (entry_stmt);
a68ab351
JJ
4275 if (t == NULL)
4276 t3 = null_pointer_node;
4277 else
db3927fb 4278 t3 = build_fold_addr_expr_loc (loc, t);
a68ab351 4279
e79983f4 4280 t = build_call_expr (builtin_decl_explicit (BUILT_IN_GOMP_TASK),
acf0174b 4281 8, t1, t2, t3,
726a989a 4282 gimple_omp_task_arg_size (entry_stmt),
acf0174b
JJ
4283 gimple_omp_task_arg_align (entry_stmt), cond, flags,
4284 depend);
a68ab351 4285
726a989a
RB
4286 force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
4287 false, GSI_CONTINUE_LINKING);
a68ab351
JJ
4288}
4289
4290
726a989a
RB
4291/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
4292 catch handler and return it. This prevents programs from violating the
4293 structured block semantics with throws. */
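/* Conceptually (an illustrative sketch) the result is

     try { BODY } catch { MUST_NOT_THROW -> __builtin_trap () }

   with the frontend's eh_protect_cleanup_actions callback used in
   place of __builtin_trap when one is provided. */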
953ff289 4294
726a989a
RB
4295static gimple_seq
4296maybe_catch_exception (gimple_seq body)
953ff289 4297{
1d65f45c
RH
4298 gimple g;
4299 tree decl;
953ff289
DN
4300
4301 if (!flag_exceptions)
726a989a 4302 return body;
953ff289 4303
3b06d379
SB
4304 if (lang_hooks.eh_protect_cleanup_actions != NULL)
4305 decl = lang_hooks.eh_protect_cleanup_actions ();
953ff289 4306 else
e79983f4 4307 decl = builtin_decl_explicit (BUILT_IN_TRAP);
726a989a 4308
1d65f45c
RH
4309 g = gimple_build_eh_must_not_throw (decl);
4310 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
726a989a 4311 GIMPLE_TRY_CATCH);
953ff289 4312
1d65f45c 4313 return gimple_seq_alloc_with_stmt (g);
953ff289
DN
4314}
4315
50674e96 4316/* Chain all the DECLs in LIST by their TREE_CHAIN fields. */
953ff289 4317
50674e96 4318static tree
9771b263 4319vec2chain (vec<tree, va_gc> *v)
953ff289 4320{
c021f10b
NF
4321 tree chain = NULL_TREE, t;
4322 unsigned ix;
953ff289 4323
9771b263 4324 FOR_EACH_VEC_SAFE_ELT_REVERSE (v, ix, t)
50674e96 4325 {
910ad8de 4326 DECL_CHAIN (t) = chain;
c021f10b 4327 chain = t;
50674e96 4328 }
953ff289 4329
c021f10b 4330 return chain;
50674e96 4331}
953ff289 4332
953ff289 4333
50674e96 4334/* Remove barriers in REGION->EXIT's block. Note that this is only
726a989a
RB
4335 valid for GIMPLE_OMP_PARALLEL regions. Since the end of a parallel region
4336 is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
4337 left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
50674e96 4338 removed. */
953ff289 4339
50674e96
DN
4340static void
4341remove_exit_barrier (struct omp_region *region)
4342{
726a989a 4343 gimple_stmt_iterator gsi;
50674e96 4344 basic_block exit_bb;
777f7f9a
RH
4345 edge_iterator ei;
4346 edge e;
726a989a 4347 gimple stmt;
03742a9b 4348 int any_addressable_vars = -1;
953ff289 4349
777f7f9a 4350 exit_bb = region->exit;
953ff289 4351
2aee3e57
JJ
4352 /* If the parallel region doesn't return, we don't have REGION->EXIT
4353 block at all. */
4354 if (! exit_bb)
4355 return;
4356
726a989a
RB
4357 /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN. The
4358 workshare's GIMPLE_OMP_RETURN will be in a preceding block. The kinds of
777f7f9a
RH
4359 statements that can appear in between are extremely limited -- no
4360 memory operations at all. Here we are stricter still, so the
726a989a
RB
4361 only thing we allow to precede this GIMPLE_OMP_RETURN is a label. */
4362 gsi = gsi_last_bb (exit_bb);
4363 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
4364 gsi_prev (&gsi);
4365 if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
50674e96 4366 return;
953ff289 4367
777f7f9a
RH
4368 FOR_EACH_EDGE (e, ei, exit_bb->preds)
4369 {
726a989a
RB
4370 gsi = gsi_last_bb (e->src);
4371 if (gsi_end_p (gsi))
777f7f9a 4372 continue;
726a989a 4373 stmt = gsi_stmt (gsi);
03742a9b
JJ
4374 if (gimple_code (stmt) == GIMPLE_OMP_RETURN
4375 && !gimple_omp_return_nowait_p (stmt))
4376 {
4377 /* OpenMP 3.0 tasks unfortunately prevent this optimization
4378 in many cases. If there could be tasks queued, the barrier
4379 might be needed to let the tasks run before some local
4380 variable of the parallel that the task uses as shared
4381 runs out of scope. The task can be spawned either
4382 from within current function (this would be easy to check)
4383 or from some function it calls and gets passed an address
4384 of such a variable. */
4385 if (any_addressable_vars < 0)
4386 {
4387 gimple parallel_stmt = last_stmt (region->entry);
4388 tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
c021f10b
NF
4389 tree local_decls, block, decl;
4390 unsigned ix;
03742a9b
JJ
4391
4392 any_addressable_vars = 0;
c021f10b
NF
4393 FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (child_fun), ix, decl)
4394 if (TREE_ADDRESSABLE (decl))
03742a9b
JJ
4395 {
4396 any_addressable_vars = 1;
4397 break;
4398 }
4399 for (block = gimple_block (stmt);
4400 !any_addressable_vars
4401 && block
4402 && TREE_CODE (block) == BLOCK;
4403 block = BLOCK_SUPERCONTEXT (block))
4404 {
4405 for (local_decls = BLOCK_VARS (block);
4406 local_decls;
910ad8de 4407 local_decls = DECL_CHAIN (local_decls))
03742a9b
JJ
4408 if (TREE_ADDRESSABLE (local_decls))
4409 {
4410 any_addressable_vars = 1;
4411 break;
4412 }
4413 if (block == gimple_block (parallel_stmt))
4414 break;
4415 }
4416 }
4417 if (!any_addressable_vars)
4418 gimple_omp_return_set_nowait (stmt);
4419 }
777f7f9a 4420 }
953ff289
DN
4421}
4422
777f7f9a
RH
4423static void
4424remove_exit_barriers (struct omp_region *region)
4425{
726a989a 4426 if (region->type == GIMPLE_OMP_PARALLEL)
777f7f9a
RH
4427 remove_exit_barrier (region);
4428
4429 if (region->inner)
4430 {
4431 region = region->inner;
4432 remove_exit_barriers (region);
4433 while (region->next)
4434 {
4435 region = region->next;
4436 remove_exit_barriers (region);
4437 }
4438 }
4439}
50674e96 4440
2b4cf991
JJ
4441/* Optimize omp_get_thread_num () and omp_get_num_threads ()
4442 calls. These can't be declared as const functions, but
4443 within one parallel body they are constant, so they can be
4444 transformed there into __builtin_omp_get_{thread_num,num_threads} ()
a68ab351
JJ
4445 which are declared const. Similarly for a task body, except
4446 that in an untied task omp_get_thread_num () can change at any task
4447 scheduling point. */
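/* A sketch (illustrative): within one parallel body

     t1 = omp_get_thread_num ();
     t2 = omp_get_thread_num ();

   both calls are redirected to the const
   __builtin_omp_get_thread_num (), letting later passes CSE the
   second call into the first. */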
2b4cf991
JJ
4448
4449static void
726a989a 4450optimize_omp_library_calls (gimple entry_stmt)
2b4cf991
JJ
4451{
4452 basic_block bb;
726a989a 4453 gimple_stmt_iterator gsi;
e79983f4
MM
4454 tree thr_num_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4455 tree thr_num_id = DECL_ASSEMBLER_NAME (thr_num_tree);
4456 tree num_thr_tree = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
4457 tree num_thr_id = DECL_ASSEMBLER_NAME (num_thr_tree);
726a989a
RB
4458 bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
4459 && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
a68ab351 4460 OMP_CLAUSE_UNTIED) != NULL);
2b4cf991
JJ
4461
4462 FOR_EACH_BB (bb)
726a989a 4463 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2b4cf991 4464 {
726a989a 4465 gimple call = gsi_stmt (gsi);
2b4cf991
JJ
4466 tree decl;
4467
726a989a
RB
4468 if (is_gimple_call (call)
4469 && (decl = gimple_call_fndecl (call))
2b4cf991
JJ
4470 && DECL_EXTERNAL (decl)
4471 && TREE_PUBLIC (decl)
4472 && DECL_INITIAL (decl) == NULL)
4473 {
4474 tree built_in;
4475
4476 if (DECL_NAME (decl) == thr_num_id)
a68ab351
JJ
4477 {
4478 /* In #pragma omp task untied omp_get_thread_num () can change
4479 during the execution of the task region. */
4480 if (untied_task)
4481 continue;
e79983f4 4482 built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
a68ab351 4483 }
2b4cf991 4484 else if (DECL_NAME (decl) == num_thr_id)
e79983f4 4485 built_in = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
2b4cf991
JJ
4486 else
4487 continue;
4488
4489 if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
726a989a 4490 || gimple_call_num_args (call) != 0)
2b4cf991
JJ
4491 continue;
4492
4493 if (flag_exceptions && !TREE_NOTHROW (decl))
4494 continue;
4495
4496 if (TREE_CODE (TREE_TYPE (decl)) != FUNCTION_TYPE
9600efe1
MM
4497 || !types_compatible_p (TREE_TYPE (TREE_TYPE (decl)),
4498 TREE_TYPE (TREE_TYPE (built_in))))
2b4cf991
JJ
4499 continue;
4500
7c9577be 4501 gimple_call_set_fndecl (call, built_in);
2b4cf991
JJ
4502 }
4503 }
4504}
4505
5a0f4dd3
JJ
4506/* Callback for expand_omp_build_assign. Return non-NULL if *tp needs to be
4507 regimplified. */
4508
4509static tree
4510expand_omp_regimplify_p (tree *tp, int *walk_subtrees, void *)
4511{
4512 tree t = *tp;
4513
4514 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
4515 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
4516 return t;
4517
4518 if (TREE_CODE (t) == ADDR_EXPR)
4519 recompute_tree_invariant_for_addr_expr (t);
4520
4521 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
4522 return NULL_TREE;
4523}
4524
74bf76ed
JJ
4525/* Prepend TO = FROM assignment before *GSI_P. */
4526
4527static void
4528expand_omp_build_assign (gimple_stmt_iterator *gsi_p, tree to, tree from)
4529{
4530 bool simple_p = DECL_P (to) && TREE_ADDRESSABLE (to);
4531 from = force_gimple_operand_gsi (gsi_p, from, simple_p, NULL_TREE,
4532 true, GSI_SAME_STMT);
4533 gimple stmt = gimple_build_assign (to, from);
4534 gsi_insert_before (gsi_p, stmt, GSI_SAME_STMT);
4535 if (walk_tree (&from, expand_omp_regimplify_p, NULL, NULL)
4536 || walk_tree (&to, expand_omp_regimplify_p, NULL, NULL))
4537 {
4538 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
4539 gimple_regimplify_operands (stmt, &gsi);
4540 }
4541}
4542
a68ab351 4543/* Expand the OpenMP parallel or task directive starting at REGION. */
953ff289
DN
4544
4545static void
a68ab351 4546expand_omp_taskreg (struct omp_region *region)
953ff289 4547{
50674e96 4548 basic_block entry_bb, exit_bb, new_bb;
db2960f4 4549 struct function *child_cfun;
3bb06db4 4550 tree child_fn, block, t;
726a989a
RB
4551 gimple_stmt_iterator gsi;
4552 gimple entry_stmt, stmt;
50674e96 4553 edge e;
9771b263 4554 vec<tree, va_gc> *ws_args;
50674e96 4555
777f7f9a 4556 entry_stmt = last_stmt (region->entry);
726a989a 4557 child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
50674e96 4558 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
50674e96 4559
777f7f9a
RH
4560 entry_bb = region->entry;
4561 exit_bb = region->exit;
50674e96 4562
50674e96 4563 if (is_combined_parallel (region))
777f7f9a 4564 ws_args = region->ws_args;
50674e96 4565 else
3bb06db4 4566 ws_args = NULL;
953ff289 4567
777f7f9a 4568 if (child_cfun->cfg)
953ff289 4569 {
50674e96
DN
4570 /* Due to inlining, it may happen that we have already outlined
4571 the region, in which case all we need to do is make the
4572 sub-graph unreachable and emit the parallel call. */
4573 edge entry_succ_e, exit_succ_e;
726a989a 4574 gimple_stmt_iterator gsi;
50674e96
DN
4575
4576 entry_succ_e = single_succ_edge (entry_bb);
50674e96 4577
726a989a
RB
4578 gsi = gsi_last_bb (entry_bb);
4579 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
4580 || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
4581 gsi_remove (&gsi, true);
50674e96
DN
4582
4583 new_bb = entry_bb;
d3c673c7
JJ
4584 if (exit_bb)
4585 {
4586 exit_succ_e = single_succ_edge (exit_bb);
4587 make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
4588 }
917948d3 4589 remove_edge_and_dominated_blocks (entry_succ_e);
953ff289 4590 }
50674e96
DN
4591 else
4592 {
2fed2012 4593 unsigned srcidx, dstidx, num;
c021f10b 4594
50674e96 4595 /* If the parallel region needs data sent from the parent
b570947c
JJ
4596 function, then the very first statement (except possible
4597 tree profile counter updates) of the parallel body
50674e96
DN
4598 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
4599 &.OMP_DATA_O is passed as an argument to the child function,
4600 we need to replace it with the argument as seen by the child
4601 function.
4602
4603 In most cases, this will end up being the identity assignment
4604 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
4605 a function call that has been inlined, the original PARM_DECL
4606 .OMP_DATA_I may have been converted into a different local
4607 variable. In which case, we need to keep the assignment. */
726a989a 4608 if (gimple_omp_taskreg_data_arg (entry_stmt))
50674e96
DN
4609 {
4610 basic_block entry_succ_bb = single_succ (entry_bb);
726a989a
RB
4611 gimple_stmt_iterator gsi;
4612 tree arg, narg;
4613 gimple parcopy_stmt = NULL;
953ff289 4614
726a989a 4615 for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
b570947c 4616 {
726a989a 4617 gimple stmt;
b570947c 4618
726a989a
RB
4619 gcc_assert (!gsi_end_p (gsi));
4620 stmt = gsi_stmt (gsi);
4621 if (gimple_code (stmt) != GIMPLE_ASSIGN)
018b899b
JJ
4622 continue;
4623
726a989a 4624 if (gimple_num_ops (stmt) == 2)
b570947c 4625 {
726a989a
RB
4626 tree arg = gimple_assign_rhs1 (stmt);
4627
4628 /* We're ignoring the subcode because we're
4629 effectively doing a STRIP_NOPS. */
4630
4631 if (TREE_CODE (arg) == ADDR_EXPR
4632 && TREE_OPERAND (arg, 0)
4633 == gimple_omp_taskreg_data_arg (entry_stmt))
4634 {
4635 parcopy_stmt = stmt;
4636 break;
4637 }
b570947c
JJ
4638 }
4639 }
917948d3 4640
726a989a 4641 gcc_assert (parcopy_stmt != NULL);
917948d3
ZD
4642 arg = DECL_ARGUMENTS (child_fn);
4643
4644 if (!gimple_in_ssa_p (cfun))
4645 {
726a989a
RB
4646 if (gimple_assign_lhs (parcopy_stmt) == arg)
4647 gsi_remove (&gsi, true);
917948d3 4648 else
726a989a
RB
4649 {
4650 /* ?? Is setting the subcode really necessary ?? */
4651 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
4652 gimple_assign_set_rhs1 (parcopy_stmt, arg);
4653 }
917948d3
ZD
4654 }
4655 else
4656 {
4657 /* If we are in ssa form, we must load the value from the default
4658 definition of the argument. That should not be defined now,
4659 since the argument is not used uninitialized. */
32244553 4660 gcc_assert (ssa_default_def (cfun, arg) == NULL);
726a989a 4661 narg = make_ssa_name (arg, gimple_build_nop ());
32244553 4662 set_ssa_default_def (cfun, arg, narg);
726a989a
RB
4663 /* ?? Is setting the subcode really necessary ?? */
4664 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
4665 gimple_assign_set_rhs1 (parcopy_stmt, narg);
917948d3
ZD
4666 update_stmt (parcopy_stmt);
4667 }
50674e96
DN
4668 }
4669
4670 /* Declare local variables needed in CHILD_CFUN. */
4671 block = DECL_INITIAL (child_fn);
c021f10b 4672 BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
4f0ae266
JJ
4673 /* The gimplifier could record temporaries in parallel/task block
4674 rather than in containing function's local_decls chain,
4675 which would mean cgraph missed finalizing them. Do it now. */
910ad8de 4676 for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
4f0ae266
JJ
4677 if (TREE_CODE (t) == VAR_DECL
4678 && TREE_STATIC (t)
4679 && !DECL_EXTERNAL (t))
4680 varpool_finalize_decl (t);
726a989a 4681 DECL_SAVED_TREE (child_fn) = NULL;
355a7673
MM
4682 /* We'll create a CFG for child_fn, so no gimple body is needed. */
4683 gimple_set_body (child_fn, NULL);
b357f682 4684 TREE_USED (block) = 1;
50674e96 4685
917948d3 4686 /* Reset DECL_CONTEXT on function arguments. */
910ad8de 4687 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
50674e96
DN
4688 DECL_CONTEXT (t) = child_fn;
4689
726a989a
RB
4690 /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
4691 so that it can be moved to the child function. */
4692 gsi = gsi_last_bb (entry_bb);
4693 stmt = gsi_stmt (gsi);
4694 gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
4695 || gimple_code (stmt) == GIMPLE_OMP_TASK));
4696 gsi_remove (&gsi, true);
4697 e = split_block (entry_bb, stmt);
50674e96
DN
4698 entry_bb = e->dest;
4699 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
4700
726a989a 4701 /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR. */
2aee3e57
JJ
4702 if (exit_bb)
4703 {
726a989a
RB
4704 gsi = gsi_last_bb (exit_bb);
4705 gcc_assert (!gsi_end_p (gsi)
4706 && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
4707 stmt = gimple_build_return (NULL);
4708 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
4709 gsi_remove (&gsi, true);
2aee3e57 4710 }
917948d3
ZD
4711
4712 /* Move the parallel region into CHILD_CFUN. */
b8698a0f 4713
917948d3
ZD
4714 if (gimple_in_ssa_p (cfun))
4715 {
5db9ba0c 4716 init_tree_ssa (child_cfun);
3828719a
RG
4717 init_ssa_operands (child_cfun);
4718 child_cfun->gimple_df->in_ssa_p = true;
b357f682 4719 block = NULL_TREE;
917948d3 4720 }
b357f682 4721 else
726a989a 4722 block = gimple_block (entry_stmt);
b357f682
JJ
4723
4724 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
917948d3
ZD
4725 if (exit_bb)
4726 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
6093bc06
RB
4727 /* When the OMP expansion process cannot guarantee an up-to-date
4728 loop tree, arrange for the child function to fix up loops. */
4729 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
4730 child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;
917948d3 4731
b357f682 4732 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
9771b263 4733 num = vec_safe_length (child_cfun->local_decls);
2fed2012
JJ
4734 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
4735 {
9771b263 4736 t = (*child_cfun->local_decls)[srcidx];
2fed2012
JJ
4737 if (DECL_CONTEXT (t) == cfun->decl)
4738 continue;
4739 if (srcidx != dstidx)
9771b263 4740 (*child_cfun->local_decls)[dstidx] = t;
2fed2012
JJ
4741 dstidx++;
4742 }
4743 if (dstidx != num)
9771b263 4744 vec_safe_truncate (child_cfun->local_decls, dstidx);
b357f682 4745
917948d3 4746 /* Inform the callgraph about the new function. */
d7ed20db 4747 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
917948d3
ZD
4748 cgraph_add_new_function (child_fn, true);
4749
4750 /* Fix the callgraph edges for child_cfun. Those for cfun will be
4751 fixed in a following pass. */
4752 push_cfun (child_cfun);
2b4cf991 4753 if (optimize)
a68ab351 4754 optimize_omp_library_calls (entry_stmt);
917948d3 4755 rebuild_cgraph_edges ();
99819c63
JJ
4756
4757 /* Some EH regions might become dead, see PR34608. If
4758 pass_cleanup_cfg isn't the first pass to happen with the
4759 new child, these dead EH edges might cause problems.
4760 Clean them up now. */
4761 if (flag_exceptions)
4762 {
4763 basic_block bb;
99819c63
JJ
4764 bool changed = false;
4765
99819c63 4766 FOR_EACH_BB (bb)
726a989a 4767 changed |= gimple_purge_dead_eh_edges (bb);
99819c63
JJ
4768 if (changed)
4769 cleanup_tree_cfg ();
99819c63 4770 }
5006671f
RG
4771 if (gimple_in_ssa_p (cfun))
4772 update_ssa (TODO_update_ssa);
917948d3 4773 pop_cfun ();
50674e96 4774 }
b8698a0f 4775
50674e96 4776 /* Emit a library call to launch the children threads. */
726a989a 4777 if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
a68ab351
JJ
4778 expand_parallel_call (region, new_bb, entry_stmt, ws_args);
4779 else
4780 expand_task_call (new_bb, entry_stmt);
a5efada7
RG
4781 if (gimple_in_ssa_p (cfun))
4782 update_ssa (TODO_update_ssa_only_virtuals);
953ff289
DN
4783}
4784
50674e96 4785
74bf76ed
JJ
4786/* Helper function for expand_omp_{for_*,simd}. If this is the outermost
4787 of the combined collapse > 1 loop constructs, generate code like:
4788 if (__builtin_expect (N32 cond3 N31, 0)) goto ZERO_ITER_BB;
4789 if (cond3 is <)
4790 adj = STEP3 - 1;
4791 else
4792 adj = STEP3 + 1;
4793 count3 = (adj + N32 - N31) / STEP3;
4794 if (__builtin_expect (N22 cond2 N21, 0)) goto ZERO_ITER_BB;
4795 if (cond2 is <)
4796 adj = STEP2 - 1;
4797 else
4798 adj = STEP2 + 1;
4799 count2 = (adj + N22 - N21) / STEP2;
4800 if (__builtin_expect (N12 cond1 N11, 0)) goto ZERO_ITER_BB;
4801 if (cond1 is <)
4802 adj = STEP1 - 1;
4803 else
4804 adj = STEP1 + 1;
4805 count1 = (adj + N12 - N11) / STEP1;
4806 count = count1 * count2 * count3;
4807 Furthermore, if ZERO_ITER_BB is NULL, create a BB which does:
4808 count = 0;
acf0174b
JJ
4809 and set ZERO_ITER_BB to that bb. If this isn't the outermost
4810 of the combined loop constructs, just initialize COUNTS array
4811 from the _looptemp_ clauses. */
74bf76ed
JJ
4812
4813/* NOTE: It *could* be better to moosh all of the BBs together,
4814 creating one larger BB with all the computation and the unexpected
4815 jump at the end. I.e.
4816
4817 bool zero3, zero2, zero1, zero;
4818
4819 zero3 = N32 c3 N31;
4820 count3 = (N32 - N31) /[cl] STEP3;
4821 zero2 = N22 c2 N21;
4822 count2 = (N22 - N21) /[cl] STEP2;
4823 zero1 = N12 c1 N11;
4824 count1 = (N12 - N11) /[cl] STEP1;
4825 zero = zero3 || zero2 || zero1;
4826 count = count1 * count2 * count3;
4827 if (__builtin_expect(zero, false)) goto zero_iter_bb;
4828
4829 After all, we expect the zero=false, and thus we expect to have to
4830 evaluate all of the comparison expressions, so short-circuiting
4831 oughtn't be a win. Since the condition isn't protecting a
4832 denominator, we're not concerned about divide-by-zero, so we can
4833 fully evaluate count even if a numerator turned out to be wrong.
4834
4835 It seems like putting this all together would create much better
4836 scheduling opportunities, and less pressure on the chip's branch
4837 predictor. */
4838
4839static void
4840expand_omp_for_init_counts (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
4841 basic_block &entry_bb, tree *counts,
4842 basic_block &zero_iter_bb, int &first_zero_iter,
4843 basic_block &l2_dom_bb)
4844{
4845 tree t, type = TREE_TYPE (fd->loop.v);
4846 gimple stmt;
4847 edge e, ne;
4848 int i;
4849
4850 /* Collapsed loops need work for expansion into SSA form. */
4851 gcc_assert (!gimple_in_ssa_p (cfun));
4852
acf0174b
JJ
4853 if (gimple_omp_for_combined_into_p (fd->for_stmt)
4854 && TREE_CODE (fd->loop.n2) != INTEGER_CST)
4855 {
4856 /* The first two _looptemp_ clauses are for istart/iend; counts[0]
4857 isn't supposed to be handled, as the inner loop doesn't
4858 use it. */
4859 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
4860 OMP_CLAUSE__LOOPTEMP_);
4861 gcc_assert (innerc);
4862 for (i = 0; i < fd->collapse; i++)
4863 {
4864 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
4865 OMP_CLAUSE__LOOPTEMP_);
4866 gcc_assert (innerc);
4867 if (i)
4868 counts[i] = OMP_CLAUSE_DECL (innerc);
4869 else
4870 counts[0] = NULL_TREE;
4871 }
4872 return;
4873 }
4874
74bf76ed
JJ
4875 for (i = 0; i < fd->collapse; i++)
4876 {
4877 tree itype = TREE_TYPE (fd->loops[i].v);
4878
4879 if (SSA_VAR_P (fd->loop.n2)
4880 && ((t = fold_binary (fd->loops[i].cond_code, boolean_type_node,
4881 fold_convert (itype, fd->loops[i].n1),
4882 fold_convert (itype, fd->loops[i].n2)))
4883 == NULL_TREE || !integer_onep (t)))
4884 {
4885 tree n1, n2;
4886 n1 = fold_convert (itype, unshare_expr (fd->loops[i].n1));
4887 n1 = force_gimple_operand_gsi (gsi, n1, true, NULL_TREE,
4888 true, GSI_SAME_STMT);
4889 n2 = fold_convert (itype, unshare_expr (fd->loops[i].n2));
4890 n2 = force_gimple_operand_gsi (gsi, n2, true, NULL_TREE,
4891 true, GSI_SAME_STMT);
4892 stmt = gimple_build_cond (fd->loops[i].cond_code, n1, n2,
4893 NULL_TREE, NULL_TREE);
4894 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
4895 if (walk_tree (gimple_cond_lhs_ptr (stmt),
4896 expand_omp_regimplify_p, NULL, NULL)
4897 || walk_tree (gimple_cond_rhs_ptr (stmt),
4898 expand_omp_regimplify_p, NULL, NULL))
4899 {
4900 *gsi = gsi_for_stmt (stmt);
4901 gimple_regimplify_operands (stmt, gsi);
4902 }
4903 e = split_block (entry_bb, stmt);
4904 if (zero_iter_bb == NULL)
4905 {
4906 first_zero_iter = i;
4907 zero_iter_bb = create_empty_bb (entry_bb);
4908 if (current_loops)
4909 add_bb_to_loop (zero_iter_bb, entry_bb->loop_father);
4910 *gsi = gsi_after_labels (zero_iter_bb);
4911 stmt = gimple_build_assign (fd->loop.n2,
4912 build_zero_cst (type));
4913 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
4914 set_immediate_dominator (CDI_DOMINATORS, zero_iter_bb,
4915 entry_bb);
4916 }
4917 ne = make_edge (entry_bb, zero_iter_bb, EDGE_FALSE_VALUE);
4918 ne->probability = REG_BR_PROB_BASE / 2000 - 1;
4919 e->flags = EDGE_TRUE_VALUE;
4920 e->probability = REG_BR_PROB_BASE - ne->probability;
4921 if (l2_dom_bb == NULL)
4922 l2_dom_bb = entry_bb;
4923 entry_bb = e->dest;
4924 *gsi = gsi_last_bb (entry_bb);
4925 }
4926
4927 if (POINTER_TYPE_P (itype))
4928 itype = signed_type_for (itype);
4929 t = build_int_cst (itype, (fd->loops[i].cond_code == LT_EXPR
4930 ? -1 : 1));
4931 t = fold_build2 (PLUS_EXPR, itype,
4932 fold_convert (itype, fd->loops[i].step), t);
4933 t = fold_build2 (PLUS_EXPR, itype, t,
4934 fold_convert (itype, fd->loops[i].n2));
4935 t = fold_build2 (MINUS_EXPR, itype, t,
4936 fold_convert (itype, fd->loops[i].n1));
4937 /* ?? We could probably use CEIL_DIV_EXPR instead of
4938 TRUNC_DIV_EXPR and adjust by hand, unless we can't
4939 generate the same code in the end because generically we
4940 don't know that the values involved must be negative for
4941 GT?? */
4942 if (TYPE_UNSIGNED (itype) && fd->loops[i].cond_code == GT_EXPR)
4943 t = fold_build2 (TRUNC_DIV_EXPR, itype,
4944 fold_build1 (NEGATE_EXPR, itype, t),
4945 fold_build1 (NEGATE_EXPR, itype,
4946 fold_convert (itype,
4947 fd->loops[i].step)));
4948 else
4949 t = fold_build2 (TRUNC_DIV_EXPR, itype, t,
4950 fold_convert (itype, fd->loops[i].step));
4951 t = fold_convert (type, t);
4952 if (TREE_CODE (t) == INTEGER_CST)
4953 counts[i] = t;
4954 else
4955 {
4956 counts[i] = create_tmp_reg (type, ".count");
4957 expand_omp_build_assign (gsi, counts[i], t);
4958 }
4959 if (SSA_VAR_P (fd->loop.n2))
4960 {
4961 if (i == 0)
4962 t = counts[0];
4963 else
4964 t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
4965 expand_omp_build_assign (gsi, fd->loop.n2, t);
4966 }
4967 }
4968}
4969
4970
4971/* Helper function for expand_omp_{for_*,simd}. Generate code like:
4972 T = V;
4973 V3 = N31 + (T % count3) * STEP3;
4974 T = T / count3;
4975 V2 = N21 + (T % count2) * STEP2;
4976 T = T / count2;
4977 V1 = N11 + T * STEP1;
acf0174b
JJ
4978 if this loop doesn't have an inner loop construct combined with it.
4979 If it does have an inner loop construct combined with it and the
4980 iteration count isn't known constant, store values from counts array
4981 into its _looptemp_ temporaries instead. */
74bf76ed
JJ
4982
4983static void
4984expand_omp_for_init_vars (struct omp_for_data *fd, gimple_stmt_iterator *gsi,
acf0174b 4985 tree *counts, gimple inner_stmt, tree startvar)
74bf76ed
JJ
4986{
4987 int i;
acf0174b
JJ
4988 if (gimple_omp_for_combined_p (fd->for_stmt))
4989 {
4990 /* If fd->loop.n2 is constant, then no propagation of the counts
4991 is needed, they are constant. */
4992 if (TREE_CODE (fd->loop.n2) == INTEGER_CST)
4993 return;
4994
4995 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
4996 ? gimple_omp_parallel_clauses (inner_stmt)
4997 : gimple_omp_for_clauses (inner_stmt);
4998 /* The first two _looptemp_ clauses are for istart/iend; counts[0]
4999 isn't supposed to be handled, as the inner loop doesn't
5000 use it. */
5001 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
5002 gcc_assert (innerc);
5003 for (i = 0; i < fd->collapse; i++)
5004 {
5005 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5006 OMP_CLAUSE__LOOPTEMP_);
5007 gcc_assert (innerc);
5008 if (i)
5009 {
5010 tree tem = OMP_CLAUSE_DECL (innerc);
5011 tree t = fold_convert (TREE_TYPE (tem), counts[i]);
5012 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
5013 false, GSI_CONTINUE_LINKING);
5014 gimple stmt = gimple_build_assign (tem, t);
5015 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5016 }
5017 }
5018 return;
5019 }
5020
74bf76ed
JJ
5021 tree type = TREE_TYPE (fd->loop.v);
5022 tree tem = create_tmp_reg (type, ".tem");
5023 gimple stmt = gimple_build_assign (tem, startvar);
5024 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5025
5026 for (i = fd->collapse - 1; i >= 0; i--)
5027 {
5028 tree vtype = TREE_TYPE (fd->loops[i].v), itype, t;
5029 itype = vtype;
5030 if (POINTER_TYPE_P (vtype))
5031 itype = signed_type_for (vtype);
5032 if (i != 0)
5033 t = fold_build2 (TRUNC_MOD_EXPR, type, tem, counts[i]);
5034 else
5035 t = tem;
5036 t = fold_convert (itype, t);
5037 t = fold_build2 (MULT_EXPR, itype, t,
5038 fold_convert (itype, fd->loops[i].step));
5039 if (POINTER_TYPE_P (vtype))
5040 t = fold_build_pointer_plus (fd->loops[i].n1, t);
5041 else
5042 t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
5043 t = force_gimple_operand_gsi (gsi, t,
5044 DECL_P (fd->loops[i].v)
5045 && TREE_ADDRESSABLE (fd->loops[i].v),
5046 NULL_TREE, false,
5047 GSI_CONTINUE_LINKING);
5048 stmt = gimple_build_assign (fd->loops[i].v, t);
5049 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5050 if (i != 0)
5051 {
5052 t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
5053 t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
5054 false, GSI_CONTINUE_LINKING);
5055 stmt = gimple_build_assign (tem, t);
5056 gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
5057 }
5058 }
5059}
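The mod/div cascade documented in the comment above can be checked with a small standalone program; the extents, lower bounds, and steps below are hypothetical:

#include <stdio.h>

int main (void)
{
  const long count2 = 4, count3 = 5;
  const long n1[3]   = {0, 10, 100};   /* N11, N21, N31 */
  const long step[3] = {1, 2, 3};      /* STEP1, STEP2, STEP3 */

  for (long T = 0; T < 3 * 4 * 5; T += 7)
    {
      long t = T;
      long v3 = n1[2] + (t % count3) * step[2];  /* V3 = N31 + (T % count3) * STEP3 */
      t /= count3;                               /* T = T / count3 */
      long v2 = n1[1] + (t % count2) * step[1];  /* V2 = N21 + (T % count2) * STEP2 */
      t /= count2;                               /* T = T / count2 */
      long v1 = n1[0] + t * step[0];             /* V1 = N11 + T * STEP1 */
      printf ("T=%2ld -> V1=%ld V2=%2ld V3=%3ld\n", T, v1, v2, v3);
    }
  return 0;
}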
5060
5061
5062/* Helper function for expand_omp_for_*. Generate code like:
5063 L10:
5064 V3 += STEP3;
5065 if (V3 cond3 N32) goto BODY_BB; else goto L11;
5066 L11:
5067 V3 = N31;
5068 V2 += STEP2;
5069 if (V2 cond2 N22) goto BODY_BB; else goto L12;
5070 L12:
5071 V2 = N21;
5072 V1 += STEP1;
5073 goto BODY_BB; */
5074
5075static basic_block
5076extract_omp_for_update_vars (struct omp_for_data *fd, basic_block cont_bb,
5077 basic_block body_bb)
5078{
5079 basic_block last_bb, bb, collapse_bb = NULL;
5080 int i;
5081 gimple_stmt_iterator gsi;
5082 edge e;
5083 tree t;
5084 gimple stmt;
5085
5086 last_bb = cont_bb;
5087 for (i = fd->collapse - 1; i >= 0; i--)
5088 {
5089 tree vtype = TREE_TYPE (fd->loops[i].v);
5090
5091 bb = create_empty_bb (last_bb);
5092 if (current_loops)
5093 add_bb_to_loop (bb, last_bb->loop_father);
5094 gsi = gsi_start_bb (bb);
5095
5096 if (i < fd->collapse - 1)
5097 {
5098 e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
5099 e->probability = REG_BR_PROB_BASE / 8;
5100
5101 t = fd->loops[i + 1].n1;
5102 t = force_gimple_operand_gsi (&gsi, t,
5103 DECL_P (fd->loops[i + 1].v)
5104 && TREE_ADDRESSABLE (fd->loops[i
5105 + 1].v),
5106 NULL_TREE, false,
5107 GSI_CONTINUE_LINKING);
5108 stmt = gimple_build_assign (fd->loops[i + 1].v, t);
5109 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5110 }
5111 else
5112 collapse_bb = bb;
5113
5114 set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
5115
5116 if (POINTER_TYPE_P (vtype))
5117 t = fold_build_pointer_plus (fd->loops[i].v, fd->loops[i].step);
5118 else
5119 t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v, fd->loops[i].step);
5120 t = force_gimple_operand_gsi (&gsi, t,
5121 DECL_P (fd->loops[i].v)
5122 && TREE_ADDRESSABLE (fd->loops[i].v),
5123 NULL_TREE, false, GSI_CONTINUE_LINKING);
5124 stmt = gimple_build_assign (fd->loops[i].v, t);
5125 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5126
5127 if (i > 0)
5128 {
5129 t = fd->loops[i].n2;
5130 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5131 false, GSI_CONTINUE_LINKING);
5132 tree v = fd->loops[i].v;
5133 if (DECL_P (v) && TREE_ADDRESSABLE (v))
5134 v = force_gimple_operand_gsi (&gsi, v, true, NULL_TREE,
5135 false, GSI_CONTINUE_LINKING);
5136 t = fold_build2 (fd->loops[i].cond_code, boolean_type_node, v, t);
5137 stmt = gimple_build_cond_empty (t);
5138 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5139 e = make_edge (bb, body_bb, EDGE_TRUE_VALUE);
5140 e->probability = REG_BR_PROB_BASE * 7 / 8;
5141 }
5142 else
5143 make_edge (bb, body_bb, EDGE_FALLTHRU);
5144 last_bb = bb;
5145 }
5146
5147 return collapse_bb;
5148}
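Rendered as plain C, the generated L10/L11/L12 cascade is an odometer: the innermost variable steps every iteration, and each wrap resets it and carries into the next-outer variable. The exit test on v1 below exists only to make the sketch terminate; in the generated code the flattened V < count test elsewhere controls exit. Bounds and steps are hypothetical:

#include <stdio.h>

int main (void)
{
  long v1 = 0, v2 = 0, v3 = 0;           /* start at N11, N21, N31 */
  const long n12 = 2, n22 = 2, n32 = 2;  /* upper bounds, cond is < */

  for (;;)
    {
      printf ("body: %ld %ld %ld\n", v1, v2, v3);
      v3 += 1;                 /* V3 += STEP3 */
      if (v3 < n32)            /* if (V3 cond3 N32) goto BODY_BB */
        continue;
      v3 = 0;                  /* V3 = N31 */
      v2 += 1;                 /* V2 += STEP2 */
      if (v2 < n22)            /* if (V2 cond2 N22) goto BODY_BB */
        continue;
      v2 = 0;                  /* V2 = N21 */
      v1 += 1;                 /* V1 += STEP1; generated code then jumps
                                  straight back to BODY_BB */
      if (v1 >= n12)           /* sketch-only exit; real code relies on
                                  the flattened counter test instead */
        break;
    }
  return 0;
}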
5149
5150
50674e96 5151/* A subroutine of expand_omp_for. Generate code for a parallel
953ff289
DN
5152 loop with any schedule. Given parameters:
5153
5154 for (V = N1; V cond N2; V += STEP) BODY;
5155
5156 where COND is "<" or ">", we generate pseudocode
5157
5158 more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
50674e96 5159 if (more) goto L0; else goto L3;
953ff289
DN
5160 L0:
5161 V = istart0;
5162 iend = iend0;
5163 L1:
5164 BODY;
5165 V += STEP;
50674e96 5166 if (V cond iend) goto L1; else goto L2;
953ff289 5167 L2:
50674e96
DN
5168 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5169 L3:
953ff289 5170
50674e96 5171 If this is a combined omp parallel loop, instead of the call to
a68ab351 5172 GOMP_loop_foo_start, we call GOMP_loop_foo_next.
acf0174b
JJ
5173 If this is gimple_omp_for_combined_p loop, then instead of assigning
5174 V and iend in L0 we assign the first two _looptemp_ clause decls of the
5175 inner GIMPLE_OMP_FOR and V += STEP; and
5176 if (V cond iend) goto L1; else goto L2; are removed.
a68ab351
JJ
5177
5178 For collapsed loops, given parameters:
5179 collapse(3)
5180 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
5181 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
5182 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
5183 BODY;
5184
5185 we generate pseudocode
5186
5a0f4dd3 5187 if (__builtin_expect (N32 cond3 N31, 0)) goto Z0;
a68ab351
JJ
5188 if (cond3 is <)
5189 adj = STEP3 - 1;
5190 else
5191 adj = STEP3 + 1;
5192 count3 = (adj + N32 - N31) / STEP3;
5a0f4dd3 5193 if (__builtin_expect (N22 cond2 N21, 0)) goto Z0;
a68ab351
JJ
5194 if (cond2 is <)
5195 adj = STEP2 - 1;
5196 else
5197 adj = STEP2 + 1;
5198 count2 = (adj + N22 - N21) / STEP2;
5a0f4dd3 5199 if (__builtin_expect (N12 cond1 N11, 0)) goto Z0;
a68ab351
JJ
5200 if (cond1 is <)
5201 adj = STEP1 - 1;
5202 else
5203 adj = STEP1 + 1;
5204 count1 = (adj + N12 - N11) / STEP1;
5205 count = count1 * count2 * count3;
5a0f4dd3
JJ
5206 goto Z1;
5207 Z0:
5208 count = 0;
5209 Z1:
a68ab351
JJ
5210 more = GOMP_loop_foo_start (0, count, 1, CHUNK, &istart0, &iend0);
5211 if (more) goto L0; else goto L3;
5212 L0:
5213 V = istart0;
5214 T = V;
5215 V3 = N31 + (T % count3) * STEP3;
5216 T = T / count3;
5217 V2 = N21 + (T % count2) * STEP2;
5218 T = T / count2;
5219 V1 = N11 + T * STEP1;
5220 iend = iend0;
5221 L1:
5222 BODY;
5223 V += 1;
5224 if (V < iend) goto L10; else goto L2;
5225 L10:
5226 V3 += STEP3;
5227 if (V3 cond3 N32) goto L1; else goto L11;
5228 L11:
5229 V3 = N31;
5230 V2 += STEP2;
5231 if (V2 cond2 N22) goto L1; else goto L12;
5232 L12:
5233 V2 = N21;
5234 V1 += STEP1;
5235 goto L1;
5236 L2:
5237 if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
5238 L3:
5239
5240 */
953ff289 5241
777f7f9a 5242static void
50674e96
DN
5243expand_omp_for_generic (struct omp_region *region,
5244 struct omp_for_data *fd,
953ff289 5245 enum built_in_function start_fn,
acf0174b
JJ
5246 enum built_in_function next_fn,
5247 gimple inner_stmt)
953ff289 5248{
726a989a 5249 tree type, istart0, iend0, iend;
a68ab351
JJ
5250 tree t, vmain, vback, bias = NULL_TREE;
5251 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
d3c673c7 5252 basic_block l2_bb = NULL, l3_bb = NULL;
726a989a
RB
5253 gimple_stmt_iterator gsi;
5254 gimple stmt;
50674e96 5255 bool in_combined_parallel = is_combined_parallel (region);
e5c95afe 5256 bool broken_loop = region->cont == NULL;
917948d3 5257 edge e, ne;
a68ab351
JJ
5258 tree *counts = NULL;
5259 int i;
e5c95afe
ZD
5260
5261 gcc_assert (!broken_loop || !in_combined_parallel);
a68ab351
JJ
5262 gcc_assert (fd->iter_type == long_integer_type_node
5263 || !in_combined_parallel);
953ff289 5264
a68ab351
JJ
5265 type = TREE_TYPE (fd->loop.v);
5266 istart0 = create_tmp_var (fd->iter_type, ".istart0");
5267 iend0 = create_tmp_var (fd->iter_type, ".iend0");
5b4fc8fb
JJ
5268 TREE_ADDRESSABLE (istart0) = 1;
5269 TREE_ADDRESSABLE (iend0) = 1;
953ff289 5270
a68ab351
JJ
5271 /* See if we need to bias by LLONG_MIN. */
5272 if (fd->iter_type == long_long_unsigned_type_node
5273 && TREE_CODE (type) == INTEGER_TYPE
5274 && !TYPE_UNSIGNED (type))
5275 {
5276 tree n1, n2;
5277
5278 if (fd->loop.cond_code == LT_EXPR)
5279 {
5280 n1 = fd->loop.n1;
5281 n2 = fold_build2 (PLUS_EXPR, type, fd->loop.n2, fd->loop.step);
5282 }
5283 else
5284 {
5285 n1 = fold_build2 (MINUS_EXPR, type, fd->loop.n2, fd->loop.step);
5286 n2 = fd->loop.n1;
5287 }
5288 if (TREE_CODE (n1) != INTEGER_CST
5289 || TREE_CODE (n2) != INTEGER_CST
5290 || ((tree_int_cst_sgn (n1) < 0) ^ (tree_int_cst_sgn (n2) < 0)))
5291 bias = fold_convert (fd->iter_type, TYPE_MIN_VALUE (type));
5292 }
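  /* For instance (hypothetical bounds): a signed long long loop over
     [-5, 5) cannot be handed directly to the unsigned GOMP_loop_ull_*
     entry points, since -5 has no ordering-preserving unsigned
     representation.  Adding bias = (unsigned long long) LLONG_MIN
     shifts the range monotonically to [2^63 - 5, 2^63 + 5); the bias
     is subtracted again when istart0/iend0 are read back below.  */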
5293
777f7f9a 5294 entry_bb = region->entry;
d3c673c7 5295 cont_bb = region->cont;
a68ab351 5296 collapse_bb = NULL;
e5c95afe
ZD
5297 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
5298 gcc_assert (broken_loop
5299 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
5300 l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
5301 l1_bb = single_succ (l0_bb);
5302 if (!broken_loop)
d3c673c7
JJ
5303 {
5304 l2_bb = create_empty_bb (cont_bb);
e5c95afe
ZD
5305 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
5306 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
d3c673c7 5307 }
e5c95afe
ZD
5308 else
5309 l2_bb = NULL;
5310 l3_bb = BRANCH_EDGE (entry_bb)->dest;
5311 exit_bb = region->exit;
50674e96 5312
726a989a 5313 gsi = gsi_last_bb (entry_bb);
a68ab351 5314
726a989a 5315 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
a68ab351
JJ
5316 if (fd->collapse > 1)
5317 {
5a0f4dd3 5318 int first_zero_iter = -1;
74bf76ed 5319 basic_block zero_iter_bb = NULL, l2_dom_bb = NULL;
5a0f4dd3 5320
74bf76ed
JJ
5321 counts = XALLOCAVEC (tree, fd->collapse);
5322 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
5323 zero_iter_bb, first_zero_iter,
5324 l2_dom_bb);
a68ab351 5325
5a0f4dd3
JJ
5326 if (zero_iter_bb)
5327 {
5328 /* Some counts[i] vars might be uninitialized if
5329 some loop has zero iterations. But the body shouldn't
5330 be executed in that case, so just avoid uninit warnings. */
5331 for (i = first_zero_iter; i < fd->collapse; i++)
5332 if (SSA_VAR_P (counts[i]))
5333 TREE_NO_WARNING (counts[i]) = 1;
5334 gsi_prev (&gsi);
5335 e = split_block (entry_bb, gsi_stmt (gsi));
5336 entry_bb = e->dest;
5337 make_edge (zero_iter_bb, entry_bb, EDGE_FALLTHRU);
5338 gsi = gsi_last_bb (entry_bb);
5339 set_immediate_dominator (CDI_DOMINATORS, entry_bb,
5340 get_immediate_dominator (CDI_DOMINATORS,
5341 zero_iter_bb));
5342 }
a68ab351 5343 }
917948d3
ZD
5344 if (in_combined_parallel)
5345 {
5346 /* In a combined parallel loop, emit a call to
5347 GOMP_loop_foo_next. */
e79983f4 5348 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
917948d3
ZD
5349 build_fold_addr_expr (istart0),
5350 build_fold_addr_expr (iend0));
5351 }
5352 else
953ff289 5353 {
5039610b 5354 tree t0, t1, t2, t3, t4;
50674e96
DN
5355 /* If this is not a combined parallel loop, emit a call to
5356 GOMP_loop_foo_start in ENTRY_BB. */
5039610b
SL
5357 t4 = build_fold_addr_expr (iend0);
5358 t3 = build_fold_addr_expr (istart0);
a68ab351 5359 t2 = fold_convert (fd->iter_type, fd->loop.step);
74bf76ed
JJ
5360 t1 = fd->loop.n2;
5361 t0 = fd->loop.n1;
acf0174b
JJ
5362 if (gimple_omp_for_combined_into_p (fd->for_stmt))
5363 {
5364 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
5365 OMP_CLAUSE__LOOPTEMP_);
5366 gcc_assert (innerc);
5367 t0 = OMP_CLAUSE_DECL (innerc);
5368 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5369 OMP_CLAUSE__LOOPTEMP_);
5370 gcc_assert (innerc);
5371 t1 = OMP_CLAUSE_DECL (innerc);
5372 }
74bf76ed
JJ
5373 if (POINTER_TYPE_P (TREE_TYPE (t0))
5374 && TYPE_PRECISION (TREE_TYPE (t0))
5375 != TYPE_PRECISION (fd->iter_type))
c6ff4493
SE
5376 {
5377 /* Avoid casting pointers to integer of a different size. */
96f9265a 5378 tree itype = signed_type_for (type);
74bf76ed
JJ
5379 t1 = fold_convert (fd->iter_type, fold_convert (itype, t1));
5380 t0 = fold_convert (fd->iter_type, fold_convert (itype, t0));
c6ff4493
SE
5381 }
5382 else
5383 {
74bf76ed
JJ
5384 t1 = fold_convert (fd->iter_type, t1);
5385 t0 = fold_convert (fd->iter_type, t0);
c6ff4493 5386 }
a68ab351 5387 if (bias)
953ff289 5388 {
a68ab351
JJ
5389 t1 = fold_build2 (PLUS_EXPR, fd->iter_type, t1, bias);
5390 t0 = fold_build2 (PLUS_EXPR, fd->iter_type, t0, bias);
5391 }
5392 if (fd->iter_type == long_integer_type_node)
5393 {
5394 if (fd->chunk_size)
5395 {
5396 t = fold_convert (fd->iter_type, fd->chunk_size);
e79983f4
MM
5397 t = build_call_expr (builtin_decl_explicit (start_fn),
5398 6, t0, t1, t2, t, t3, t4);
a68ab351
JJ
5399 }
5400 else
e79983f4
MM
5401 t = build_call_expr (builtin_decl_explicit (start_fn),
5402 5, t0, t1, t2, t3, t4);
953ff289 5403 }
5039610b 5404 else
a68ab351
JJ
5405 {
5406 tree t5;
5407 tree c_bool_type;
e79983f4 5408 tree bfn_decl;
a68ab351
JJ
5409
5410	  /* The GOMP_loop_ull_*start functions have an additional boolean
5411 argument, true for < loops and false for > loops.
5412 In Fortran, the C bool type can be different from
5413 boolean_type_node. */
e79983f4
MM
5414 bfn_decl = builtin_decl_explicit (start_fn);
5415 c_bool_type = TREE_TYPE (TREE_TYPE (bfn_decl));
a68ab351
JJ
5416 t5 = build_int_cst (c_bool_type,
5417 fd->loop.cond_code == LT_EXPR ? 1 : 0);
5418 if (fd->chunk_size)
5419 {
e79983f4 5420 tree bfn_decl = builtin_decl_explicit (start_fn);
a68ab351 5421 t = fold_convert (fd->iter_type, fd->chunk_size);
e79983f4 5422 t = build_call_expr (bfn_decl, 7, t5, t0, t1, t2, t, t3, t4);
a68ab351
JJ
5423 }
5424 else
e79983f4
MM
5425 t = build_call_expr (builtin_decl_explicit (start_fn),
5426 6, t5, t0, t1, t2, t3, t4);
a68ab351 5427 }
953ff289 5428 }
a68ab351
JJ
5429 if (TREE_TYPE (t) != boolean_type_node)
5430 t = fold_build2 (NE_EXPR, boolean_type_node,
5431 t, build_int_cst (TREE_TYPE (t), 0));
726a989a
RB
5432 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5433 true, GSI_SAME_STMT);
5434 gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
917948d3 5435
726a989a
RB
5436 /* Remove the GIMPLE_OMP_FOR statement. */
5437 gsi_remove (&gsi, true);
953ff289 5438
50674e96 5439 /* Iteration setup for sequential loop goes in L0_BB. */
74bf76ed
JJ
5440 tree startvar = fd->loop.v;
5441 tree endvar = NULL_TREE;
5442
acf0174b
JJ
5443 if (gimple_omp_for_combined_p (fd->for_stmt))
5444 {
5445 gcc_assert (gimple_code (inner_stmt) == GIMPLE_OMP_FOR
5446 && gimple_omp_for_kind (inner_stmt)
5447 == GF_OMP_FOR_KIND_SIMD);
5448 tree innerc = find_omp_clause (gimple_omp_for_clauses (inner_stmt),
5449 OMP_CLAUSE__LOOPTEMP_);
5450 gcc_assert (innerc);
5451 startvar = OMP_CLAUSE_DECL (innerc);
5452 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5453 OMP_CLAUSE__LOOPTEMP_);
5454 gcc_assert (innerc);
5455 endvar = OMP_CLAUSE_DECL (innerc);
5456 }
5457
726a989a 5458 gsi = gsi_start_bb (l0_bb);
550918ca 5459 t = istart0;
a68ab351 5460 if (bias)
550918ca 5461 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
74bf76ed
JJ
5462 if (POINTER_TYPE_P (TREE_TYPE (startvar)))
5463 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
5464 t = fold_convert (TREE_TYPE (startvar), t);
ea3a0fde 5465 t = force_gimple_operand_gsi (&gsi, t,
74bf76ed
JJ
5466 DECL_P (startvar)
5467 && TREE_ADDRESSABLE (startvar),
ea3a0fde 5468 NULL_TREE, false, GSI_CONTINUE_LINKING);
74bf76ed 5469 stmt = gimple_build_assign (startvar, t);
726a989a 5470 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
953ff289 5471
550918ca 5472 t = iend0;
a68ab351 5473 if (bias)
550918ca 5474 t = fold_build2 (MINUS_EXPR, fd->iter_type, t, bias);
74bf76ed
JJ
5475 if (POINTER_TYPE_P (TREE_TYPE (startvar)))
5476 t = fold_convert (signed_type_for (TREE_TYPE (startvar)), t);
5477 t = fold_convert (TREE_TYPE (startvar), t);
726a989a
RB
5478 iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5479 false, GSI_CONTINUE_LINKING);
74bf76ed 5480 if (endvar)
a68ab351 5481 {
74bf76ed 5482 stmt = gimple_build_assign (endvar, iend);
726a989a 5483 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
a68ab351 5484 }
74bf76ed 5485 if (fd->collapse > 1)
acf0174b 5486 expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
50674e96 5487
e5c95afe 5488 if (!broken_loop)
d3c673c7 5489 {
e5c95afe
ZD
5490 /* Code to control the increment and predicate for the sequential
5491 loop goes in the CONT_BB. */
726a989a
RB
5492 gsi = gsi_last_bb (cont_bb);
5493 stmt = gsi_stmt (gsi);
5494 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
5495 vmain = gimple_omp_continue_control_use (stmt);
5496 vback = gimple_omp_continue_control_def (stmt);
917948d3 5497
acf0174b 5498 if (!gimple_omp_for_combined_p (fd->for_stmt))
74bf76ed
JJ
5499 {
5500 if (POINTER_TYPE_P (type))
5501 t = fold_build_pointer_plus (vmain, fd->loop.step);
5502 else
5503 t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
5504 t = force_gimple_operand_gsi (&gsi, t,
5505 DECL_P (vback)
5506 && TREE_ADDRESSABLE (vback),
5507 NULL_TREE, true, GSI_SAME_STMT);
5508 stmt = gimple_build_assign (vback, t);
5509 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5510
5511 t = build2 (fd->loop.cond_code, boolean_type_node,
5512 DECL_P (vback) && TREE_ADDRESSABLE (vback) ? t : vback,
5513 iend);
5514 stmt = gimple_build_cond_empty (t);
5515 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5516 }
50674e96 5517
726a989a
RB
5518 /* Remove GIMPLE_OMP_CONTINUE. */
5519 gsi_remove (&gsi, true);
50674e96 5520
acf0174b 5521 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
74bf76ed 5522 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, l1_bb);
a68ab351 5523
e5c95afe 5524 /* Emit code to get the next parallel iteration in L2_BB. */
726a989a 5525 gsi = gsi_start_bb (l2_bb);
50674e96 5526
e79983f4 5527 t = build_call_expr (builtin_decl_explicit (next_fn), 2,
e5c95afe
ZD
5528 build_fold_addr_expr (istart0),
5529 build_fold_addr_expr (iend0));
726a989a
RB
5530 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5531 false, GSI_CONTINUE_LINKING);
a68ab351
JJ
5532 if (TREE_TYPE (t) != boolean_type_node)
5533 t = fold_build2 (NE_EXPR, boolean_type_node,
5534 t, build_int_cst (TREE_TYPE (t), 0));
726a989a
RB
5535 stmt = gimple_build_cond_empty (t);
5536 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
e5c95afe 5537 }
953ff289 5538
777f7f9a 5539 /* Add the loop cleanup function. */
726a989a
RB
5540 gsi = gsi_last_bb (exit_bb);
5541 if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
e79983f4 5542 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_NOWAIT);
acf0174b
JJ
5543 else if (gimple_omp_return_lhs (gsi_stmt (gsi)))
5544 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_CANCEL);
777f7f9a 5545 else
e79983f4 5546 t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END);
726a989a 5547 stmt = gimple_build_call (t, 0);
acf0174b
JJ
5548 if (gimple_omp_return_lhs (gsi_stmt (gsi)))
5549 gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (gsi)));
726a989a
RB
5550 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
5551 gsi_remove (&gsi, true);
50674e96
DN
5552
5553 /* Connect the new blocks. */
917948d3
ZD
5554 find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
5555 find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
953ff289 5556
e5c95afe
ZD
5557 if (!broken_loop)
5558 {
726a989a
RB
5559 gimple_seq phis;
5560
917948d3
ZD
5561 e = find_edge (cont_bb, l3_bb);
5562 ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
5563
726a989a
RB
5564 phis = phi_nodes (l3_bb);
5565 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5566 {
5567 gimple phi = gsi_stmt (gsi);
5568 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
5569 PHI_ARG_DEF_FROM_EDGE (phi, e));
5570 }
917948d3
ZD
5571 remove_edge (e);
5572
e5c95afe 5573 make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
a9e0d843
RB
5574 if (current_loops)
5575 add_bb_to_loop (l2_bb, cont_bb->loop_father);
74bf76ed 5576 e = find_edge (cont_bb, l1_bb);
acf0174b
JJ
5577 if (gimple_omp_for_combined_p (fd->for_stmt))
5578 {
5579 remove_edge (e);
5580 e = NULL;
5581 }
74bf76ed 5582 else if (fd->collapse > 1)
a68ab351 5583 {
a68ab351
JJ
5584 remove_edge (e);
5585 e = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
5586 }
5587 else
74bf76ed
JJ
5588 e->flags = EDGE_TRUE_VALUE;
5589 if (e)
a68ab351 5590 {
74bf76ed
JJ
5591 e->probability = REG_BR_PROB_BASE * 7 / 8;
5592 find_edge (cont_bb, l2_bb)->probability = REG_BR_PROB_BASE / 8;
5593 }
5594 else
5595 {
5596 e = find_edge (cont_bb, l2_bb);
5597 e->flags = EDGE_FALLTHRU;
a68ab351 5598 }
e5c95afe 5599 make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
917948d3
ZD
5600
5601 set_immediate_dominator (CDI_DOMINATORS, l2_bb,
5602 recompute_dominator (CDI_DOMINATORS, l2_bb));
5603 set_immediate_dominator (CDI_DOMINATORS, l3_bb,
5604 recompute_dominator (CDI_DOMINATORS, l3_bb));
5605 set_immediate_dominator (CDI_DOMINATORS, l0_bb,
5606 recompute_dominator (CDI_DOMINATORS, l0_bb));
5607 set_immediate_dominator (CDI_DOMINATORS, l1_bb,
5608 recompute_dominator (CDI_DOMINATORS, l1_bb));
6093bc06
RB
5609
5610 struct loop *outer_loop = alloc_loop ();
5611 outer_loop->header = l0_bb;
5612 outer_loop->latch = l2_bb;
5613 add_loop (outer_loop, l0_bb->loop_father);
5614
acf0174b 5615 if (!gimple_omp_for_combined_p (fd->for_stmt))
74bf76ed
JJ
5616 {
5617 struct loop *loop = alloc_loop ();
5618 loop->header = l1_bb;
5619 /* The loop may have multiple latches. */
5620 add_loop (loop, outer_loop);
5621 }
e5c95afe 5622 }
953ff289
DN
5623}
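Taken together, the pseudocode this function implements is a start/next protocol: one call claims the first [istart0, iend0) range, the sequential loop consumes it, and further calls fetch more work until the runtime reports none left. A self-contained sketch with the libgomp calls replaced by a single-threaded mock (mock_loop_start/mock_loop_next are inventions for illustration; their argument order mirrors GOMP_loop_foo_start/GOMP_loop_foo_next in the comment above):

#include <stdbool.h>
#include <stdio.h>

static long next_start, loop_end, chunk_size;   /* mock scheduler state */

static bool
mock_loop_next (long *istart, long *iend)       /* stands in for GOMP_loop_foo_next */
{
  if (next_start >= loop_end)
    return false;
  *istart = next_start;
  *iend = next_start + chunk_size < loop_end
	  ? next_start + chunk_size : loop_end;
  next_start = *iend;
  return true;
}

static bool
mock_loop_start (long n1, long n2, long step, long chunk,
		 long *istart, long *iend)      /* stands in for GOMP_loop_foo_start */
{
  (void) step;                                  /* sketch assumes STEP == 1 */
  next_start = n1;
  loop_end = n2;
  chunk_size = chunk;
  return mock_loop_next (istart, iend);
}

int main (void)
{
  long istart0, iend0;
  if (mock_loop_start (0, 10, 1, 4, &istart0, &iend0))  /* more = ...start (...) */
    do
      {
	long v = istart0, iend = iend0;                 /* L0 */
	do
	  {
	    printf ("body: V=%ld\n", v);                /* L1: BODY */
	    v += 1;                                     /* V += STEP */
	  }
	while (v < iend);                               /* if (V cond iend) goto L1 */
      }
    while (mock_loop_next (&istart0, &iend0));          /* L2 */
  return 0;                                             /* L3 */
}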
5624
5625
50674e96
DN
5626/* A subroutine of expand_omp_for. Generate code for a parallel
5627 loop with static schedule and no specified chunk size. Given
5628 parameters:
953ff289
DN
5629
5630 for (V = N1; V cond N2; V += STEP) BODY;
5631
5632 where COND is "<" or ">", we generate pseudocode
5633
5a0f4dd3 5634 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
953ff289
DN
5635 if (cond is <)
5636 adj = STEP - 1;
5637 else
5638 adj = STEP + 1;
a68ab351
JJ
5639 if ((__typeof (V)) -1 > 0 && cond is >)
5640 n = -(adj + N2 - N1) / -STEP;
5641 else
5642 n = (adj + N2 - N1) / STEP;
953ff289 5643 q = n / nthreads;
fb79f500
JJ
5644 tt = n % nthreads;
5645 if (threadid < tt) goto L3; else goto L4;
5646 L3:
5647 tt = 0;
5648 q = q + 1;
5649 L4:
5650 s0 = q * threadid + tt;
5651 e0 = s0 + q;
917948d3 5652 V = s0 * STEP + N1;
953ff289
DN
5653 if (s0 >= e0) goto L2; else goto L0;
5654 L0:
953ff289
DN
5655 e = e0 * STEP + N1;
5656 L1:
5657 BODY;
5658 V += STEP;
5659 if (V cond e) goto L1;
953ff289
DN
5660 L2:
5661*/
5662
777f7f9a 5663static void
50674e96 5664expand_omp_for_static_nochunk (struct omp_region *region,
acf0174b
JJ
5665 struct omp_for_data *fd,
5666 gimple inner_stmt)
953ff289 5667{
fb79f500 5668 tree n, q, s0, e0, e, t, tt, nthreads, threadid;
a68ab351 5669 tree type, itype, vmain, vback;
fb79f500 5670 basic_block entry_bb, second_bb, third_bb, exit_bb, seq_start_bb;
acf0174b 5671 basic_block body_bb, cont_bb, collapse_bb = NULL;
777f7f9a 5672 basic_block fin_bb;
726a989a
RB
5673 gimple_stmt_iterator gsi;
5674 gimple stmt;
fb79f500 5675 edge ep;
acf0174b
JJ
5676 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
5677 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
5678 bool broken_loop = region->cont == NULL;
5679 tree *counts = NULL;
5680 tree n1, n2, step;
953ff289 5681
a68ab351
JJ
5682 itype = type = TREE_TYPE (fd->loop.v);
5683 if (POINTER_TYPE_P (type))
96f9265a 5684 itype = signed_type_for (type);
953ff289 5685
777f7f9a 5686 entry_bb = region->entry;
777f7f9a 5687 cont_bb = region->cont;
e5c95afe 5688 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
acf0174b
JJ
5689 fin_bb = BRANCH_EDGE (entry_bb)->dest;
5690 gcc_assert (broken_loop
5691 || (fin_bb == FALLTHRU_EDGE (cont_bb)->dest));
e5c95afe
ZD
5692 seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
5693 body_bb = single_succ (seq_start_bb);
acf0174b
JJ
5694 if (!broken_loop)
5695 {
5696 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
5697 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
5698 }
777f7f9a
RH
5699 exit_bb = region->exit;
5700
50674e96 5701 /* Iteration space partitioning goes in ENTRY_BB. */
726a989a
RB
5702 gsi = gsi_last_bb (entry_bb);
5703 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
777f7f9a 5704
acf0174b
JJ
5705 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
5706 {
5707 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
5708 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
5709 }
5710
5711 if (fd->collapse > 1)
5712 {
5713 int first_zero_iter = -1;
5714 basic_block l2_dom_bb = NULL;
5715
5716 counts = XALLOCAVEC (tree, fd->collapse);
5717 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
5718 fin_bb, first_zero_iter,
5719 l2_dom_bb);
5720 t = NULL_TREE;
5721 }
5722 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
5723 t = integer_one_node;
5724 else
5725 t = fold_binary (fd->loop.cond_code, boolean_type_node,
5726 fold_convert (type, fd->loop.n1),
5727 fold_convert (type, fd->loop.n2));
5728 if (fd->collapse == 1
5729 && TYPE_UNSIGNED (type)
5a0f4dd3
JJ
5730 && (t == NULL_TREE || !integer_onep (t)))
5731 {
5a0f4dd3
JJ
5732 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
5733 n1 = force_gimple_operand_gsi (&gsi, n1, true, NULL_TREE,
5734 true, GSI_SAME_STMT);
5735 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
5736 n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
5737 true, GSI_SAME_STMT);
5738 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
5739 NULL_TREE, NULL_TREE);
5740 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5741 if (walk_tree (gimple_cond_lhs_ptr (stmt),
5742 expand_omp_regimplify_p, NULL, NULL)
5743 || walk_tree (gimple_cond_rhs_ptr (stmt),
5744 expand_omp_regimplify_p, NULL, NULL))
5745 {
5746 gsi = gsi_for_stmt (stmt);
5747 gimple_regimplify_operands (stmt, &gsi);
5748 }
5749 ep = split_block (entry_bb, stmt);
5750 ep->flags = EDGE_TRUE_VALUE;
5751 entry_bb = ep->dest;
5752 ep->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
5753 ep = make_edge (ep->src, fin_bb, EDGE_FALSE_VALUE);
5754 ep->probability = REG_BR_PROB_BASE / 2000 - 1;
5755 if (gimple_in_ssa_p (cfun))
5756 {
5757 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
5758 for (gsi = gsi_start_phis (fin_bb);
5759 !gsi_end_p (gsi); gsi_next (&gsi))
5760 {
5761 gimple phi = gsi_stmt (gsi);
5762 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
5763 ep, UNKNOWN_LOCATION);
5764 }
5765 }
5766 gsi = gsi_last_bb (entry_bb);
5767 }
5768
acf0174b 5769 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
a68ab351 5770 t = fold_convert (itype, t);
726a989a
RB
5771 nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5772 true, GSI_SAME_STMT);
b8698a0f 5773
acf0174b 5774 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
a68ab351 5775 t = fold_convert (itype, t);
726a989a
RB
5776 threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5777 true, GSI_SAME_STMT);
953ff289 5778
acf0174b
JJ
5779 n1 = fd->loop.n1;
5780 n2 = fd->loop.n2;
5781 step = fd->loop.step;
5782 if (gimple_omp_for_combined_into_p (fd->for_stmt))
5783 {
5784 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
5785 OMP_CLAUSE__LOOPTEMP_);
5786 gcc_assert (innerc);
5787 n1 = OMP_CLAUSE_DECL (innerc);
5788 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5789 OMP_CLAUSE__LOOPTEMP_);
5790 gcc_assert (innerc);
5791 n2 = OMP_CLAUSE_DECL (innerc);
5792 }
5793 n1 = force_gimple_operand_gsi (&gsi, fold_convert (type, n1),
5794 true, NULL_TREE, true, GSI_SAME_STMT);
5795 n2 = force_gimple_operand_gsi (&gsi, fold_convert (itype, n2),
5796 true, NULL_TREE, true, GSI_SAME_STMT);
5797 step = force_gimple_operand_gsi (&gsi, fold_convert (itype, step),
5798 true, NULL_TREE, true, GSI_SAME_STMT);
a68ab351
JJ
5799
5800 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
acf0174b
JJ
5801 t = fold_build2 (PLUS_EXPR, itype, step, t);
5802 t = fold_build2 (PLUS_EXPR, itype, t, n2);
5803 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
a68ab351
JJ
5804 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
5805 t = fold_build2 (TRUNC_DIV_EXPR, itype,
5806 fold_build1 (NEGATE_EXPR, itype, t),
acf0174b 5807 fold_build1 (NEGATE_EXPR, itype, step));
a68ab351 5808 else
acf0174b 5809 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
a68ab351 5810 t = fold_convert (itype, t);
726a989a 5811 n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
953ff289 5812
7cc434a3 5813 q = create_tmp_reg (itype, "q");
a68ab351 5814 t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
fb79f500
JJ
5815 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
5816 gsi_insert_before (&gsi, gimple_build_assign (q, t), GSI_SAME_STMT);
5817
7cc434a3 5818 tt = create_tmp_reg (itype, "tt");
fb79f500
JJ
5819 t = fold_build2 (TRUNC_MOD_EXPR, itype, n, nthreads);
5820 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE, true, GSI_SAME_STMT);
5821 gsi_insert_before (&gsi, gimple_build_assign (tt, t), GSI_SAME_STMT);
953ff289 5822
fb79f500
JJ
5823 t = build2 (LT_EXPR, boolean_type_node, threadid, tt);
5824 stmt = gimple_build_cond_empty (t);
5825 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5826
5827 second_bb = split_block (entry_bb, stmt)->dest;
5828 gsi = gsi_last_bb (second_bb);
5829 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
5830
5831 gsi_insert_before (&gsi, gimple_build_assign (tt, build_int_cst (itype, 0)),
5832 GSI_SAME_STMT);
5833 stmt = gimple_build_assign_with_ops (PLUS_EXPR, q, q,
5834 build_int_cst (itype, 1));
5835 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
5836
5837 third_bb = split_block (second_bb, stmt)->dest;
5838 gsi = gsi_last_bb (third_bb);
5839 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
953ff289 5840
a68ab351 5841 t = build2 (MULT_EXPR, itype, q, threadid);
fb79f500 5842 t = build2 (PLUS_EXPR, itype, t, tt);
726a989a 5843 s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
953ff289 5844
a68ab351 5845 t = fold_build2 (PLUS_EXPR, itype, s0, q);
726a989a 5846 e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
917948d3 5847
953ff289 5848 t = build2 (GE_EXPR, boolean_type_node, s0, e0);
726a989a 5849 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
50674e96 5850
726a989a
RB
5851 /* Remove the GIMPLE_OMP_FOR statement. */
5852 gsi_remove (&gsi, true);
50674e96
DN
5853
5854 /* Setup code for sequential iteration goes in SEQ_START_BB. */
726a989a 5855 gsi = gsi_start_bb (seq_start_bb);
953ff289 5856
acf0174b
JJ
5857 tree startvar = fd->loop.v;
5858 tree endvar = NULL_TREE;
5859
5860 if (gimple_omp_for_combined_p (fd->for_stmt))
5861 {
5862 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
5863 ? gimple_omp_parallel_clauses (inner_stmt)
5864 : gimple_omp_for_clauses (inner_stmt);
5865 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
5866 gcc_assert (innerc);
5867 startvar = OMP_CLAUSE_DECL (innerc);
5868 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
5869 OMP_CLAUSE__LOOPTEMP_);
5870 gcc_assert (innerc);
5871 endvar = OMP_CLAUSE_DECL (innerc);
5872 }
a68ab351 5873 t = fold_convert (itype, s0);
acf0174b 5874 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 5875 if (POINTER_TYPE_P (type))
acf0174b 5876 t = fold_build_pointer_plus (n1, t);
a68ab351 5877 else
acf0174b
JJ
5878 t = fold_build2 (PLUS_EXPR, type, t, n1);
5879 t = fold_convert (TREE_TYPE (startvar), t);
ea3a0fde 5880 t = force_gimple_operand_gsi (&gsi, t,
acf0174b
JJ
5881 DECL_P (startvar)
5882 && TREE_ADDRESSABLE (startvar),
ea3a0fde 5883 NULL_TREE, false, GSI_CONTINUE_LINKING);
acf0174b 5884 stmt = gimple_build_assign (startvar, t);
726a989a 5885 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
b8698a0f 5886
a68ab351 5887 t = fold_convert (itype, e0);
acf0174b 5888 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 5889 if (POINTER_TYPE_P (type))
acf0174b 5890 t = fold_build_pointer_plus (n1, t);
a68ab351 5891 else
acf0174b
JJ
5892 t = fold_build2 (PLUS_EXPR, type, t, n1);
5893 t = fold_convert (TREE_TYPE (startvar), t);
726a989a
RB
5894 e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
5895 false, GSI_CONTINUE_LINKING);
acf0174b
JJ
5896 if (endvar)
5897 {
5898 stmt = gimple_build_assign (endvar, e);
5899 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
5900 }
5901 if (fd->collapse > 1)
5902 expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
953ff289 5903
acf0174b
JJ
5904 if (!broken_loop)
5905 {
5906 /* The code controlling the sequential loop replaces the
5907 GIMPLE_OMP_CONTINUE. */
5908 gsi = gsi_last_bb (cont_bb);
5909 stmt = gsi_stmt (gsi);
5910 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
5911 vmain = gimple_omp_continue_control_use (stmt);
5912 vback = gimple_omp_continue_control_def (stmt);
917948d3 5913
acf0174b
JJ
5914 if (!gimple_omp_for_combined_p (fd->for_stmt))
5915 {
5916 if (POINTER_TYPE_P (type))
5917 t = fold_build_pointer_plus (vmain, step);
5918 else
5919 t = fold_build2 (PLUS_EXPR, type, vmain, step);
5920 t = force_gimple_operand_gsi (&gsi, t,
5921 DECL_P (vback)
5922 && TREE_ADDRESSABLE (vback),
5923 NULL_TREE, true, GSI_SAME_STMT);
5924 stmt = gimple_build_assign (vback, t);
5925 gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
917948d3 5926
acf0174b
JJ
5927 t = build2 (fd->loop.cond_code, boolean_type_node,
5928 DECL_P (vback) && TREE_ADDRESSABLE (vback)
5929 ? t : vback, e);
5930 gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
5931 }
953ff289 5932
acf0174b
JJ
5933 /* Remove the GIMPLE_OMP_CONTINUE statement. */
5934 gsi_remove (&gsi, true);
5935
5936 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
5937 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
5938 }
50674e96 5939
726a989a
RB
5940 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
5941 gsi = gsi_last_bb (exit_bb);
5942 if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
acf0174b
JJ
5943 {
5944 t = gimple_omp_return_lhs (gsi_stmt (gsi));
5945 gsi_insert_after (&gsi, build_omp_barrier (t), GSI_SAME_STMT);
5946 }
726a989a 5947 gsi_remove (&gsi, true);
50674e96
DN
5948
5949 /* Connect all the blocks. */
fb79f500
JJ
5950 ep = make_edge (entry_bb, third_bb, EDGE_FALSE_VALUE);
5951 ep->probability = REG_BR_PROB_BASE / 4 * 3;
5952 ep = find_edge (entry_bb, second_bb);
5953 ep->flags = EDGE_TRUE_VALUE;
5954 ep->probability = REG_BR_PROB_BASE / 4;
5955 find_edge (third_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
5956 find_edge (third_bb, fin_bb)->flags = EDGE_TRUE_VALUE;
917948d3 5957
acf0174b
JJ
5958 if (!broken_loop)
5959 {
5960 ep = find_edge (cont_bb, body_bb);
5961 if (gimple_omp_for_combined_p (fd->for_stmt))
5962 {
5963 remove_edge (ep);
5964 ep = NULL;
5965 }
5966 else if (fd->collapse > 1)
5967 {
5968 remove_edge (ep);
5969 ep = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
5970 }
5971 else
5972 ep->flags = EDGE_TRUE_VALUE;
5973 find_edge (cont_bb, fin_bb)->flags
5974 = ep ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
5975 }
b8698a0f 5976
fb79f500
JJ
5977 set_immediate_dominator (CDI_DOMINATORS, second_bb, entry_bb);
5978 set_immediate_dominator (CDI_DOMINATORS, third_bb, entry_bb);
5979 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, third_bb);
acf0174b 5980
917948d3
ZD
5981 set_immediate_dominator (CDI_DOMINATORS, body_bb,
5982 recompute_dominator (CDI_DOMINATORS, body_bb));
5983 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
5984 recompute_dominator (CDI_DOMINATORS, fin_bb));
6093bc06 5985
acf0174b
JJ
5986 if (!broken_loop && !gimple_omp_for_combined_p (fd->for_stmt))
5987 {
5988 struct loop *loop = alloc_loop ();
5989 loop->header = body_bb;
5990 if (collapse_bb == NULL)
5991 loop->latch = cont_bb;
5992 add_loop (loop, body_bb->loop_father);
5993 }
953ff289
DN
5994}
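The q/tt computation above distributes n iterations as evenly as possible: every thread gets q = n / nthreads iterations, and the first n % nthreads threads take one extra, so blocks stay contiguous with no gaps. A standalone sketch of just that arithmetic, with hypothetical n and nthreads:

#include <stdio.h>

int main (void)
{
  const long n = 10, nthreads = 4;   /* hypothetical values */

  for (long threadid = 0; threadid < nthreads; threadid++)
    {
      long q = n / nthreads;
      long tt = n % nthreads;
      if (threadid < tt)             /* L3: first tt threads take one extra */
	{
	  tt = 0;
	  q = q + 1;
	}
      long s0 = q * threadid + tt;   /* L4 */
      long e0 = s0 + q;
      printf ("thread %ld: [%ld, %ld)\n", threadid, s0, e0);
    }
  return 0;
}

With n = 10 and nthreads = 4 this prints the ranges [0,3), [3,6), [6,8), [8,10): threads 0 and 1 carry the two leftover iterations.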
5995
50674e96
DN
5996
5997/* A subroutine of expand_omp_for. Generate code for a parallel
5998 loop with static schedule and a specified chunk size. Given
5999 parameters:
953ff289
DN
6000
6001 for (V = N1; V cond N2; V += STEP) BODY;
6002
6003 where COND is "<" or ">", we generate pseudocode
6004
5a0f4dd3 6005 if ((__typeof (V)) -1 > 0 && N2 cond N1) goto L2;
953ff289
DN
6006 if (cond is <)
6007 adj = STEP - 1;
6008 else
6009 adj = STEP + 1;
a68ab351
JJ
6010 if ((__typeof (V)) -1 > 0 && cond is >)
6011 n = -(adj + N2 - N1) / -STEP;
6012 else
6013 n = (adj + N2 - N1) / STEP;
953ff289 6014 trip = 0;
917948d3
ZD
6015 V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
6016 here so that V is defined
6017 if the loop is not entered
953ff289
DN
6018 L0:
6019 s0 = (trip * nthreads + threadid) * CHUNK;
6020 e0 = min(s0 + CHUNK, n);
6021 if (s0 < n) goto L1; else goto L4;
6022 L1:
6023 V = s0 * STEP + N1;
6024 e = e0 * STEP + N1;
6025 L2:
6026 BODY;
6027 V += STEP;
6028 if (V cond e) goto L2; else goto L3;
6029 L3:
6030 trip += 1;
6031 goto L0;
6032 L4:
953ff289
DN
6033*/
6034
777f7f9a 6035static void
acf0174b
JJ
6036expand_omp_for_static_chunk (struct omp_region *region,
6037 struct omp_for_data *fd, gimple inner_stmt)
953ff289 6038{
726a989a 6039 tree n, s0, e0, e, t;
917948d3 6040 tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
726a989a 6041 tree type, itype, v_main, v_back, v_extra;
50674e96 6042 basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
acf0174b 6043 basic_block trip_update_bb = NULL, cont_bb, collapse_bb = NULL, fin_bb;
726a989a
RB
6044 gimple_stmt_iterator si;
6045 gimple stmt;
6046 edge se;
acf0174b
JJ
6047 enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
6048 enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
6049 bool broken_loop = region->cont == NULL;
6050 tree *counts = NULL;
6051 tree n1, n2, step;
953ff289 6052
a68ab351
JJ
6053 itype = type = TREE_TYPE (fd->loop.v);
6054 if (POINTER_TYPE_P (type))
96f9265a 6055 itype = signed_type_for (type);
953ff289 6056
777f7f9a 6057 entry_bb = region->entry;
e5c95afe
ZD
6058 se = split_block (entry_bb, last_stmt (entry_bb));
6059 entry_bb = se->src;
6060 iter_part_bb = se->dest;
777f7f9a 6061 cont_bb = region->cont;
e5c95afe 6062 gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
acf0174b
JJ
6063 fin_bb = BRANCH_EDGE (iter_part_bb)->dest;
6064 gcc_assert (broken_loop
6065 || fin_bb == FALLTHRU_EDGE (cont_bb)->dest);
e5c95afe
ZD
6066 seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
6067 body_bb = single_succ (seq_start_bb);
acf0174b
JJ
6068 if (!broken_loop)
6069 {
6070 gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
6071 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6072 trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
6073 }
777f7f9a 6074 exit_bb = region->exit;
50674e96 6075
50674e96 6076 /* Trip and adjustment setup goes in ENTRY_BB. */
726a989a
RB
6077 si = gsi_last_bb (entry_bb);
6078 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);
50674e96 6079
acf0174b
JJ
6080 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
6081 {
6082 get_num_threads = BUILT_IN_OMP_GET_NUM_TEAMS;
6083 get_thread_num = BUILT_IN_OMP_GET_TEAM_NUM;
6084 }
6085
6086 if (fd->collapse > 1)
6087 {
6088 int first_zero_iter = -1;
6089 basic_block l2_dom_bb = NULL;
6090
6091 counts = XALLOCAVEC (tree, fd->collapse);
6092 expand_omp_for_init_counts (fd, &si, entry_bb, counts,
6093 fin_bb, first_zero_iter,
6094 l2_dom_bb);
6095 t = NULL_TREE;
6096 }
6097 else if (gimple_omp_for_combined_into_p (fd->for_stmt))
6098 t = integer_one_node;
6099 else
6100 t = fold_binary (fd->loop.cond_code, boolean_type_node,
6101 fold_convert (type, fd->loop.n1),
6102 fold_convert (type, fd->loop.n2));
6103 if (fd->collapse == 1
6104 && TYPE_UNSIGNED (type)
5a0f4dd3
JJ
6105 && (t == NULL_TREE || !integer_onep (t)))
6106 {
5a0f4dd3
JJ
6107 n1 = fold_convert (type, unshare_expr (fd->loop.n1));
6108 n1 = force_gimple_operand_gsi (&si, n1, true, NULL_TREE,
6109 true, GSI_SAME_STMT);
6110 n2 = fold_convert (type, unshare_expr (fd->loop.n2));
6111 n2 = force_gimple_operand_gsi (&si, n2, true, NULL_TREE,
6112 true, GSI_SAME_STMT);
6113 stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
6114 NULL_TREE, NULL_TREE);
6115 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
6116 if (walk_tree (gimple_cond_lhs_ptr (stmt),
6117 expand_omp_regimplify_p, NULL, NULL)
6118 || walk_tree (gimple_cond_rhs_ptr (stmt),
6119 expand_omp_regimplify_p, NULL, NULL))
6120 {
6121 si = gsi_for_stmt (stmt);
6122 gimple_regimplify_operands (stmt, &si);
6123 }
6124 se = split_block (entry_bb, stmt);
6125 se->flags = EDGE_TRUE_VALUE;
6126 entry_bb = se->dest;
6127 se->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
6128 se = make_edge (se->src, fin_bb, EDGE_FALSE_VALUE);
6129 se->probability = REG_BR_PROB_BASE / 2000 - 1;
6130 if (gimple_in_ssa_p (cfun))
6131 {
6132 int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
6133 for (si = gsi_start_phis (fin_bb);
6134 !gsi_end_p (si); gsi_next (&si))
6135 {
6136 gimple phi = gsi_stmt (si);
6137 add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
6138 se, UNKNOWN_LOCATION);
6139 }
6140 }
6141 si = gsi_last_bb (entry_bb);
6142 }
6143
acf0174b 6144 t = build_call_expr (builtin_decl_explicit (get_num_threads), 0);
a68ab351 6145 t = fold_convert (itype, t);
726a989a
RB
6146 nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6147 true, GSI_SAME_STMT);
b8698a0f 6148
acf0174b 6149 t = build_call_expr (builtin_decl_explicit (get_thread_num), 0);
a68ab351 6150 t = fold_convert (itype, t);
726a989a
RB
6151 threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6152 true, GSI_SAME_STMT);
917948d3 6153
acf0174b
JJ
6154 n1 = fd->loop.n1;
6155 n2 = fd->loop.n2;
6156 step = fd->loop.step;
6157 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6158 {
6159 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6160 OMP_CLAUSE__LOOPTEMP_);
6161 gcc_assert (innerc);
6162 n1 = OMP_CLAUSE_DECL (innerc);
6163 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6164 OMP_CLAUSE__LOOPTEMP_);
6165 gcc_assert (innerc);
6166 n2 = OMP_CLAUSE_DECL (innerc);
6167 }
6168 n1 = force_gimple_operand_gsi (&si, fold_convert (type, n1),
6169 true, NULL_TREE, true, GSI_SAME_STMT);
6170 n2 = force_gimple_operand_gsi (&si, fold_convert (itype, n2),
6171 true, NULL_TREE, true, GSI_SAME_STMT);
6172 step = force_gimple_operand_gsi (&si, fold_convert (itype, step),
6173 true, NULL_TREE, true, GSI_SAME_STMT);
917948d3 6174 fd->chunk_size
726a989a
RB
6175 = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
6176 true, NULL_TREE, true, GSI_SAME_STMT);
a68ab351
JJ
6177
6178 t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
acf0174b
JJ
6179 t = fold_build2 (PLUS_EXPR, itype, step, t);
6180 t = fold_build2 (PLUS_EXPR, itype, t, n2);
6181 t = fold_build2 (MINUS_EXPR, itype, t, fold_convert (itype, n1));
a68ab351
JJ
6182 if (TYPE_UNSIGNED (itype) && fd->loop.cond_code == GT_EXPR)
6183 t = fold_build2 (TRUNC_DIV_EXPR, itype,
6184 fold_build1 (NEGATE_EXPR, itype, t),
acf0174b 6185 fold_build1 (NEGATE_EXPR, itype, step));
a68ab351 6186 else
acf0174b 6187 t = fold_build2 (TRUNC_DIV_EXPR, itype, t, step);
a68ab351 6188 t = fold_convert (itype, t);
726a989a
RB
6189 n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6190 true, GSI_SAME_STMT);
917948d3 6191
a5efada7 6192 trip_var = create_tmp_reg (itype, ".trip");
917948d3
ZD
6193 if (gimple_in_ssa_p (cfun))
6194 {
726a989a
RB
6195 trip_init = make_ssa_name (trip_var, NULL);
6196 trip_main = make_ssa_name (trip_var, NULL);
6197 trip_back = make_ssa_name (trip_var, NULL);
917948d3 6198 }
953ff289 6199 else
917948d3
ZD
6200 {
6201 trip_init = trip_var;
6202 trip_main = trip_var;
6203 trip_back = trip_var;
6204 }
953ff289 6205
726a989a
RB
6206 stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
6207 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
50674e96 6208
a68ab351 6209 t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
acf0174b 6210 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 6211 if (POINTER_TYPE_P (type))
acf0174b 6212 t = fold_build_pointer_plus (n1, t);
a68ab351 6213 else
acf0174b 6214 t = fold_build2 (PLUS_EXPR, type, t, n1);
726a989a
RB
6215 v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6216 true, GSI_SAME_STMT);
917948d3 6217
726a989a
RB
6218 /* Remove the GIMPLE_OMP_FOR. */
6219 gsi_remove (&si, true);
50674e96
DN
6220
6221 /* Iteration space partitioning goes in ITER_PART_BB. */
726a989a 6222 si = gsi_last_bb (iter_part_bb);
953ff289 6223
a68ab351
JJ
6224 t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
6225 t = fold_build2 (PLUS_EXPR, itype, t, threadid);
6226 t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
726a989a
RB
6227 s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6228 false, GSI_CONTINUE_LINKING);
953ff289 6229
a68ab351
JJ
6230 t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
6231 t = fold_build2 (MIN_EXPR, itype, t, n);
726a989a
RB
6232 e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6233 false, GSI_CONTINUE_LINKING);
953ff289
DN
6234
6235 t = build2 (LT_EXPR, boolean_type_node, s0, n);
726a989a 6236 gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);
50674e96
DN
6237
6238 /* Setup code for sequential iteration goes in SEQ_START_BB. */
726a989a 6239 si = gsi_start_bb (seq_start_bb);
953ff289 6240
acf0174b
JJ
6241 tree startvar = fd->loop.v;
6242 tree endvar = NULL_TREE;
6243
6244 if (gimple_omp_for_combined_p (fd->for_stmt))
6245 {
6246 tree clauses = gimple_code (inner_stmt) == GIMPLE_OMP_PARALLEL
6247 ? gimple_omp_parallel_clauses (inner_stmt)
6248 : gimple_omp_for_clauses (inner_stmt);
6249 tree innerc = find_omp_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
6250 gcc_assert (innerc);
6251 startvar = OMP_CLAUSE_DECL (innerc);
6252 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6253 OMP_CLAUSE__LOOPTEMP_);
6254 gcc_assert (innerc);
6255 endvar = OMP_CLAUSE_DECL (innerc);
6256 }
6257
a68ab351 6258 t = fold_convert (itype, s0);
acf0174b 6259 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 6260 if (POINTER_TYPE_P (type))
acf0174b 6261 t = fold_build_pointer_plus (n1, t);
a68ab351 6262 else
acf0174b
JJ
6263 t = fold_build2 (PLUS_EXPR, type, t, n1);
6264 t = fold_convert (TREE_TYPE (startvar), t);
ea3a0fde 6265 t = force_gimple_operand_gsi (&si, t,
acf0174b
JJ
6266 DECL_P (startvar)
6267 && TREE_ADDRESSABLE (startvar),
ea3a0fde 6268 NULL_TREE, false, GSI_CONTINUE_LINKING);
acf0174b 6269 stmt = gimple_build_assign (startvar, t);
726a989a 6270 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
953ff289 6271
a68ab351 6272 t = fold_convert (itype, e0);
acf0174b 6273 t = fold_build2 (MULT_EXPR, itype, t, step);
a68ab351 6274 if (POINTER_TYPE_P (type))
acf0174b 6275 t = fold_build_pointer_plus (n1, t);
a68ab351 6276 else
acf0174b
JJ
6277 t = fold_build2 (PLUS_EXPR, type, t, n1);
6278 t = fold_convert (TREE_TYPE (startvar), t);
726a989a
RB
6279 e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6280 false, GSI_CONTINUE_LINKING);
acf0174b
JJ
6281 if (endvar)
6282 {
6283 stmt = gimple_build_assign (endvar, e);
6284 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6285 }
6286 if (fd->collapse > 1)
6287 expand_omp_for_init_vars (fd, &si, counts, inner_stmt, startvar);
6288
6289 if (!broken_loop)
6290 {
6291 /* The code controlling the sequential loop goes in CONT_BB,
6292 replacing the GIMPLE_OMP_CONTINUE. */
6293 si = gsi_last_bb (cont_bb);
6294 stmt = gsi_stmt (si);
6295 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6296 v_main = gimple_omp_continue_control_use (stmt);
6297 v_back = gimple_omp_continue_control_def (stmt);
953ff289 6298
acf0174b
JJ
6299 if (!gimple_omp_for_combined_p (fd->for_stmt))
6300 {
6301 if (POINTER_TYPE_P (type))
6302 t = fold_build_pointer_plus (v_main, step);
6303 else
6304 t = fold_build2 (PLUS_EXPR, type, v_main, step);
6305 if (DECL_P (v_back) && TREE_ADDRESSABLE (v_back))
6306 t = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
6307 true, GSI_SAME_STMT);
6308 stmt = gimple_build_assign (v_back, t);
6309 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
917948d3 6310
acf0174b
JJ
6311 t = build2 (fd->loop.cond_code, boolean_type_node,
6312 DECL_P (v_back) && TREE_ADDRESSABLE (v_back)
6313 ? t : v_back, e);
6314 gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);
6315 }
917948d3 6316
acf0174b
JJ
6317 /* Remove GIMPLE_OMP_CONTINUE. */
6318 gsi_remove (&si, true);
b8698a0f 6319
acf0174b
JJ
6320 if (fd->collapse > 1 && !gimple_omp_for_combined_p (fd->for_stmt))
6321 collapse_bb = extract_omp_for_update_vars (fd, cont_bb, body_bb);
50674e96 6322
acf0174b
JJ
6323 /* Trip update code goes into TRIP_UPDATE_BB. */
6324 si = gsi_start_bb (trip_update_bb);
953ff289 6325
acf0174b
JJ
6326 t = build_int_cst (itype, 1);
6327 t = build2 (PLUS_EXPR, itype, trip_main, t);
6328 stmt = gimple_build_assign (trip_back, t);
6329 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
6330 }
953ff289 6331
726a989a
RB
6332 /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
6333 si = gsi_last_bb (exit_bb);
6334 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
acf0174b
JJ
6335 {
6336 t = gimple_omp_return_lhs (gsi_stmt (si));
6337 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
6338 }
726a989a 6339 gsi_remove (&si, true);
953ff289 6340
50674e96 6341 /* Connect the new blocks. */
e5c95afe
ZD
6342 find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
6343 find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
917948d3 6344
acf0174b
JJ
6345 if (!broken_loop)
6346 {
6347 se = find_edge (cont_bb, body_bb);
6348 if (gimple_omp_for_combined_p (fd->for_stmt))
6349 {
6350 remove_edge (se);
6351 se = NULL;
6352 }
6353 else if (fd->collapse > 1)
6354 {
6355 remove_edge (se);
6356 se = make_edge (cont_bb, collapse_bb, EDGE_TRUE_VALUE);
6357 }
6358 else
6359 se->flags = EDGE_TRUE_VALUE;
6360 find_edge (cont_bb, trip_update_bb)->flags
6361 = se ? EDGE_FALSE_VALUE : EDGE_FALLTHRU;
917948d3 6362
acf0174b
JJ
6363 redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
6364 }
917948d3
ZD
6365
6366 if (gimple_in_ssa_p (cfun))
6367 {
726a989a
RB
6368 gimple_stmt_iterator psi;
6369 gimple phi;
6370 edge re, ene;
9771b263 6371 edge_var_map_vector *head;
726a989a
RB
6372 edge_var_map *vm;
6373 size_t i;
6374
acf0174b
JJ
6375 gcc_assert (fd->collapse == 1 && !broken_loop);
6376
917948d3
ZD
6377 /* When we redirect the edge from trip_update_bb to iter_part_bb, we
6378 remove arguments of the phi nodes in fin_bb. We need to create
6379 appropriate phi nodes in iter_part_bb instead. */
6380 se = single_pred_edge (fin_bb);
6381 re = single_succ_edge (trip_update_bb);
726a989a 6382 head = redirect_edge_var_map_vector (re);
917948d3
ZD
6383 ene = single_succ_edge (entry_bb);
6384
726a989a 6385 psi = gsi_start_phis (fin_bb);
9771b263 6386 for (i = 0; !gsi_end_p (psi) && head->iterate (i, &vm);
726a989a 6387 gsi_next (&psi), ++i)
917948d3 6388 {
726a989a 6389 gimple nphi;
f5045c96 6390 source_location locus;
726a989a
RB
6391
6392 phi = gsi_stmt (psi);
6393 t = gimple_phi_result (phi);
6394 gcc_assert (t == redirect_edge_var_map_result (vm));
917948d3 6395 nphi = create_phi_node (t, iter_part_bb);
917948d3
ZD
6396
6397 t = PHI_ARG_DEF_FROM_EDGE (phi, se);
f5045c96
AM
6398 locus = gimple_phi_arg_location_from_edge (phi, se);
6399
a68ab351
JJ
6400 /* A special case -- fd->loop.v is not yet computed in
6401	     iter_part_bb; we need to use v_extra instead. */
6402 if (t == fd->loop.v)
917948d3 6403 t = v_extra;
9e227d60 6404 add_phi_arg (nphi, t, ene, locus);
f5045c96 6405 locus = redirect_edge_var_map_location (vm);
9e227d60 6406 add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
726a989a 6407 }
9771b263 6408 gcc_assert (!gsi_end_p (psi) && i == head->length ());
726a989a
RB
6409 redirect_edge_var_map_clear (re);
6410 while (1)
6411 {
6412 psi = gsi_start_phis (fin_bb);
6413 if (gsi_end_p (psi))
6414 break;
6415 remove_phi_node (&psi, false);
917948d3 6416 }
917948d3
ZD
6417
6418 /* Make phi node for trip. */
6419 phi = create_phi_node (trip_main, iter_part_bb);
f5045c96 6420 add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb),
9e227d60 6421 UNKNOWN_LOCATION);
f5045c96 6422 add_phi_arg (phi, trip_init, single_succ_edge (entry_bb),
9e227d60 6423 UNKNOWN_LOCATION);
917948d3
ZD
6424 }
6425
acf0174b
JJ
6426 if (!broken_loop)
6427 set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
917948d3
ZD
6428 set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
6429 recompute_dominator (CDI_DOMINATORS, iter_part_bb));
6430 set_immediate_dominator (CDI_DOMINATORS, fin_bb,
6431 recompute_dominator (CDI_DOMINATORS, fin_bb));
6432 set_immediate_dominator (CDI_DOMINATORS, seq_start_bb,
6433 recompute_dominator (CDI_DOMINATORS, seq_start_bb));
6434 set_immediate_dominator (CDI_DOMINATORS, body_bb,
6435 recompute_dominator (CDI_DOMINATORS, body_bb));
6093bc06 6436
acf0174b
JJ
6437 if (!broken_loop)
6438 {
6439 struct loop *trip_loop = alloc_loop ();
6440 trip_loop->header = iter_part_bb;
6441 trip_loop->latch = trip_update_bb;
6442 add_loop (trip_loop, iter_part_bb->loop_father);
6093bc06 6443
acf0174b
JJ
6444 if (!gimple_omp_for_combined_p (fd->for_stmt))
6445 {
6446 struct loop *loop = alloc_loop ();
6447 loop->header = body_bb;
6448 loop->latch = cont_bb;
6449 add_loop (loop, trip_loop);
6450 }
6451 }
953ff289
DN
6452}
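Under this schedule each thread repeatedly claims chunk number trip * nthreads + threadid until its start index falls past n, so chunks are dealt round-robin. A standalone sketch of the s0/e0 arithmetic with hypothetical parameters:

#include <stdio.h>

int main (void)
{
  const long n = 10, nthreads = 2, chunk = 3;   /* hypothetical values */

  for (long threadid = 0; threadid < nthreads; threadid++)
    for (long trip = 0; ; trip++)
      {
	long s0 = (trip * nthreads + threadid) * chunk;
	long e0 = s0 + chunk < n ? s0 + chunk : n;  /* e0 = min (s0 + CHUNK, n) */
	if (s0 >= n)
	  break;                                    /* goto L4 */
	printf ("thread %ld trip %ld: [%ld, %ld)\n", threadid, trip, s0, e0);
      }
  return 0;
}

Thread 0 takes [0,3) then [6,9); thread 1 takes [3,6) then [9,10).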
6453
acf0174b 6454
74bf76ed
JJ
6455/* A subroutine of expand_omp_for. Generate code for a simd non-worksharing
6456 loop. Given parameters:
6457
6458 for (V = N1; V cond N2; V += STEP) BODY;
6459
6460 where COND is "<" or ">", we generate pseudocode
6461
6462 V = N1;
6463 goto L1;
6464 L0:
6465 BODY;
6466 V += STEP;
6467 L1:
6468 if (V cond N2) goto L0; else goto L2;
6469 L2:
6470
6471 For collapsed loops, given parameters:
6472 collapse(3)
6473 for (V1 = N11; V1 cond1 N12; V1 += STEP1)
6474 for (V2 = N21; V2 cond2 N22; V2 += STEP2)
6475 for (V3 = N31; V3 cond3 N32; V3 += STEP3)
6476 BODY;
6477
6478 we generate pseudocode
6479
6480 if (cond3 is <)
6481 adj = STEP3 - 1;
6482 else
6483 adj = STEP3 + 1;
6484 count3 = (adj + N32 - N31) / STEP3;
6485 if (cond2 is <)
6486 adj = STEP2 - 1;
6487 else
6488 adj = STEP2 + 1;
6489 count2 = (adj + N22 - N21) / STEP2;
6490 if (cond1 is <)
6491 adj = STEP1 - 1;
6492 else
6493 adj = STEP1 + 1;
6494 count1 = (adj + N12 - N11) / STEP1;
6495 count = count1 * count2 * count3;
6496 V = 0;
6497 V1 = N11;
6498 V2 = N21;
6499 V3 = N31;
6500 goto L1;
6501 L0:
6502 BODY;
6503 V += 1;
6504 V3 += STEP3;
6505 V2 += (V3 cond3 N32) ? 0 : STEP2;
6506 V3 = (V3 cond3 N32) ? V3 : N31;
6507 V1 += (V2 cond2 N22) ? 0 : STEP1;
6508 V2 = (V2 cond2 N22) ? V2 : N21;
6509 L1:
6510 if (V < count) goto L0; else goto L2;
6511 L2:
6512
6513 */
6514
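The V2/V3 updates in this pseudocode are written as selects (COND_EXPR) rather than branches, which keeps the loop body a single straight-line block that the vectorizer can handle. A plain-C sketch of the branchless carry for a collapse(2) nest, with hypothetical bounds:

#include <stdio.h>

int main (void)
{
  const long count1 = 2, count2 = 3;   /* hypothetical extents */
  const long count = count1 * count2;
  long v1 = 0, v2 = 0;                 /* N11, N21 */

  for (long v = 0; v < count; v++)     /* L1: if (V < count) goto L0 */
    {
      printf ("body: V1=%ld V2=%ld\n", v1, v2);
      v2 += 1;                         /* V2 += STEP2 */
      v1 += (v2 < count2) ? 0 : 1;     /* V1 += (V2 cond2 N22) ? 0 : STEP1 */
      v2 = (v2 < count2) ? v2 : 0;     /* V2 = (V2 cond2 N22) ? V2 : N21 */
    }
  return 0;
}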
6515static void
6516expand_omp_simd (struct omp_region *region, struct omp_for_data *fd)
6517{
6518 tree type, t;
6519 basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
6520 gimple_stmt_iterator gsi;
6521 gimple stmt;
6522 bool broken_loop = region->cont == NULL;
6523 edge e, ne;
6524 tree *counts = NULL;
6525 int i;
6526 tree safelen = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6527 OMP_CLAUSE_SAFELEN);
6528 tree simduid = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6529 OMP_CLAUSE__SIMDUID_);
acf0174b 6530 tree n1, n2;
74bf76ed
JJ
6531
6532 type = TREE_TYPE (fd->loop.v);
6533 entry_bb = region->entry;
6534 cont_bb = region->cont;
6535 gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
6536 gcc_assert (broken_loop
6537 || BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
6538 l0_bb = FALLTHRU_EDGE (entry_bb)->dest;
6539 if (!broken_loop)
6540 {
6541 gcc_assert (BRANCH_EDGE (cont_bb)->dest == l0_bb);
6542 gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
6543 l1_bb = split_block (cont_bb, last_stmt (cont_bb))->dest;
6544 l2_bb = BRANCH_EDGE (entry_bb)->dest;
6545 }
6546 else
6547 {
6548 BRANCH_EDGE (entry_bb)->flags &= ~EDGE_ABNORMAL;
6549 l1_bb = split_edge (BRANCH_EDGE (entry_bb));
6550 l2_bb = single_succ (l1_bb);
6551 }
6552 exit_bb = region->exit;
6553 l2_dom_bb = NULL;
6554
6555 gsi = gsi_last_bb (entry_bb);
6556
6557 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
6558 /* Not needed in SSA form right now. */
6559 gcc_assert (!gimple_in_ssa_p (cfun));
6560 if (fd->collapse > 1)
6561 {
6562 int first_zero_iter = -1;
6563 basic_block zero_iter_bb = l2_bb;
6564
6565 counts = XALLOCAVEC (tree, fd->collapse);
6566 expand_omp_for_init_counts (fd, &gsi, entry_bb, counts,
6567 zero_iter_bb, first_zero_iter,
6568 l2_dom_bb);
6569 }
6570 if (l2_dom_bb == NULL)
6571 l2_dom_bb = l1_bb;
6572
acf0174b 6573 n1 = fd->loop.n1;
74bf76ed 6574 n2 = fd->loop.n2;
acf0174b
JJ
6575 if (gimple_omp_for_combined_into_p (fd->for_stmt))
6576 {
6577 tree innerc = find_omp_clause (gimple_omp_for_clauses (fd->for_stmt),
6578 OMP_CLAUSE__LOOPTEMP_);
6579 gcc_assert (innerc);
6580 n1 = OMP_CLAUSE_DECL (innerc);
6581 innerc = find_omp_clause (OMP_CLAUSE_CHAIN (innerc),
6582 OMP_CLAUSE__LOOPTEMP_);
6583 gcc_assert (innerc);
6584 n2 = OMP_CLAUSE_DECL (innerc);
6585 expand_omp_build_assign (&gsi, fd->loop.v,
6586 fold_convert (type, n1));
6587 if (fd->collapse > 1)
6588 {
6589 gsi_prev (&gsi);
6590 expand_omp_for_init_vars (fd, &gsi, counts, NULL, n1);
6591 gsi_next (&gsi);
6592 }
6593 }
6594 else
6595 {
6596 expand_omp_build_assign (&gsi, fd->loop.v,
6597 fold_convert (type, fd->loop.n1));
6598 if (fd->collapse > 1)
6599 for (i = 0; i < fd->collapse; i++)
6600 {
6601 tree itype = TREE_TYPE (fd->loops[i].v);
6602 if (POINTER_TYPE_P (itype))
6603 itype = signed_type_for (itype);
6604 t = fold_convert (TREE_TYPE (fd->loops[i].v), fd->loops[i].n1);
6605 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6606 }
6607 }
6608
6609 /* Remove the GIMPLE_OMP_FOR statement. */
6610 gsi_remove (&gsi, true);
6611
6612 if (!broken_loop)
6613 {
6614 /* Code to control the increment goes in the CONT_BB. */
6615 gsi = gsi_last_bb (cont_bb);
6616 stmt = gsi_stmt (gsi);
6617 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
6618
6619 if (POINTER_TYPE_P (type))
6620 t = fold_build_pointer_plus (fd->loop.v, fd->loop.step);
6621 else
6622 t = fold_build2 (PLUS_EXPR, type, fd->loop.v, fd->loop.step);
6623 expand_omp_build_assign (&gsi, fd->loop.v, t);
6624
6625 if (fd->collapse > 1)
6626 {
6627 i = fd->collapse - 1;
6628 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i].v)))
6629 {
6630 t = fold_convert (sizetype, fd->loops[i].step);
6631 t = fold_build_pointer_plus (fd->loops[i].v, t);
6632 }
6633 else
6634 {
6635 t = fold_convert (TREE_TYPE (fd->loops[i].v),
6636 fd->loops[i].step);
6637 t = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->loops[i].v),
6638 fd->loops[i].v, t);
6639 }
6640 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6641
6642 for (i = fd->collapse - 1; i > 0; i--)
6643 {
6644 tree itype = TREE_TYPE (fd->loops[i].v);
6645 tree itype2 = TREE_TYPE (fd->loops[i - 1].v);
6646 if (POINTER_TYPE_P (itype2))
6647 itype2 = signed_type_for (itype2);
6648 t = build3 (COND_EXPR, itype2,
6649 build2 (fd->loops[i].cond_code, boolean_type_node,
6650 fd->loops[i].v,
6651 fold_convert (itype, fd->loops[i].n2)),
6652 build_int_cst (itype2, 0),
6653 fold_convert (itype2, fd->loops[i - 1].step));
6654 if (POINTER_TYPE_P (TREE_TYPE (fd->loops[i - 1].v)))
6655 t = fold_build_pointer_plus (fd->loops[i - 1].v, t);
6656 else
6657 t = fold_build2 (PLUS_EXPR, itype2, fd->loops[i - 1].v, t);
6658 expand_omp_build_assign (&gsi, fd->loops[i - 1].v, t);
6659
6660 t = build3 (COND_EXPR, itype,
6661 build2 (fd->loops[i].cond_code, boolean_type_node,
6662 fd->loops[i].v,
6663 fold_convert (itype, fd->loops[i].n2)),
6664 fd->loops[i].v,
6665 fold_convert (itype, fd->loops[i].n1));
6666 expand_omp_build_assign (&gsi, fd->loops[i].v, t);
6667 }
6668 }
6669
6670 /* Remove GIMPLE_OMP_CONTINUE. */
6671 gsi_remove (&gsi, true);
6672 }
6673
6674 /* Emit the condition in L1_BB. */
6675 gsi = gsi_start_bb (l1_bb);
6676
6677 t = fold_convert (type, n2);
6678 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
6679 false, GSI_CONTINUE_LINKING);
6680 t = build2 (fd->loop.cond_code, boolean_type_node, fd->loop.v, t);
6681 stmt = gimple_build_cond_empty (t);
6682 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
6683 if (walk_tree (gimple_cond_lhs_ptr (stmt), expand_omp_regimplify_p,
6684 NULL, NULL)
6685 || walk_tree (gimple_cond_rhs_ptr (stmt), expand_omp_regimplify_p,
6686 NULL, NULL))
6687 {
6688 gsi = gsi_for_stmt (stmt);
6689 gimple_regimplify_operands (stmt, &gsi);
6690 }
6691
6692 /* Remove GIMPLE_OMP_RETURN. */
6693 gsi = gsi_last_bb (exit_bb);
6694 gsi_remove (&gsi, true);
6695
6696 /* Connect the new blocks. */
6697 remove_edge (FALLTHRU_EDGE (entry_bb));
6698
6699 if (!broken_loop)
6700 {
6701 remove_edge (BRANCH_EDGE (entry_bb));
6702 make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);
6703
6704 e = BRANCH_EDGE (l1_bb);
6705 ne = FALLTHRU_EDGE (l1_bb);
6706 e->flags = EDGE_TRUE_VALUE;
6707 }
6708 else
6709 {
6710 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
6711
6712 ne = single_succ_edge (l1_bb);
6713 e = make_edge (l1_bb, l0_bb, EDGE_TRUE_VALUE);
6714
6715 }
6716 ne->flags = EDGE_FALSE_VALUE;
6717 e->probability = REG_BR_PROB_BASE * 7 / 8;
6718 ne->probability = REG_BR_PROB_BASE / 8;
6719
6720 set_immediate_dominator (CDI_DOMINATORS, l1_bb, entry_bb);
6721 set_immediate_dominator (CDI_DOMINATORS, l2_bb, l2_dom_bb);
6722 set_immediate_dominator (CDI_DOMINATORS, l0_bb, l1_bb);
6723
6724 if (!broken_loop)
6725 {
6726 struct loop *loop = alloc_loop ();
6727 loop->header = l1_bb;
6728 loop->latch = e->dest;
6729 add_loop (loop, l1_bb->loop_father);
6730 if (safelen == NULL_TREE)
6731 loop->safelen = INT_MAX;
6732 else
6733 {
6734 safelen = OMP_CLAUSE_SAFELEN_EXPR (safelen);
6735 if (!host_integerp (safelen, 1)
6736 || (unsigned HOST_WIDE_INT) tree_low_cst (safelen, 1)
6737 > INT_MAX)
6738 loop->safelen = INT_MAX;
6739 else
6740 loop->safelen = tree_low_cst (safelen, 1);
6741 if (loop->safelen == 1)
6742 loop->safelen = 0;
6743 }
6744 if (simduid)
6745 {
6746 loop->simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6747 cfun->has_simduid_loops = true;
6748 }
6749 /* If not -fno-tree-loop-vectorize, hint that we want to vectorize
6750 the loop. */
6751 if ((flag_tree_loop_vectorize
6752 || (!global_options_set.x_flag_tree_loop_vectorize
6753 && !global_options_set.x_flag_tree_vectorize))
6754 && loop->safelen > 1)
6755 {
6756 loop->force_vect = true;
6757 cfun->has_force_vect_loops = true;
6758 }
6759 }
6760}
6761
6762
6763/* Expand the OpenMP loop defined by REGION. */
6764
6765static void
6766expand_omp_for (struct omp_region *region, gimple inner_stmt)
6767{
6768 struct omp_for_data fd;
6769 struct omp_for_data_loop *loops;
6770
6771 loops
6772 = (struct omp_for_data_loop *)
6773 alloca (gimple_omp_for_collapse (last_stmt (region->entry))
6774 * sizeof (struct omp_for_data_loop));
6775 extract_omp_for_data (last_stmt (region->entry), &fd, loops);
6776 region->sched_kind = fd.sched_kind;
6777
6778 gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
6779 BRANCH_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
6780 FALLTHRU_EDGE (region->entry)->flags &= ~EDGE_ABNORMAL;
6781 if (region->cont)
6782 {
6783 gcc_assert (EDGE_COUNT (region->cont->succs) == 2);
6784 BRANCH_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
6785 FALLTHRU_EDGE (region->cont)->flags &= ~EDGE_ABNORMAL;
6786 }
6787 else
6788 /* If there isn't a continue then this is a degenerate case where
6789 the introduction of abnormal edges during lowering will prevent
6790 original loops from being detected. Fix that up. */
6791 loops_state_set (LOOPS_NEED_FIXUP);
6792
6793 if (gimple_omp_for_kind (fd.for_stmt) == GF_OMP_FOR_KIND_SIMD)
6794 expand_omp_simd (region, &fd);
6795 else if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
6796 && !fd.have_ordered)
6797 {
6798 if (fd.chunk_size == NULL)
6799 expand_omp_for_static_nochunk (region, &fd, inner_stmt);
6800 else
6801 expand_omp_for_static_chunk (region, &fd, inner_stmt);
6802 }
6803 else
6804 {
6805 int fn_index, start_ix, next_ix;
6806
6807 gcc_assert (gimple_omp_for_kind (fd.for_stmt)
6808 == GF_OMP_FOR_KIND_FOR);
6809 if (fd.chunk_size == NULL
6810 && fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
6811 fd.chunk_size = integer_zero_node;
6812 gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
6813 fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
6814 ? 3 : fd.sched_kind;
6815 fn_index += fd.have_ordered * 4;
6816 start_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_START) + fn_index;
6817 next_ix = ((int)BUILT_IN_GOMP_LOOP_STATIC_NEXT) + fn_index;
6818 if (fd.iter_type == long_long_unsigned_type_node)
6819 {
6820 start_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_START
6821 - (int)BUILT_IN_GOMP_LOOP_STATIC_START);
6822 next_ix += ((int)BUILT_IN_GOMP_LOOP_ULL_STATIC_NEXT
6823 - (int)BUILT_IN_GOMP_LOOP_STATIC_NEXT);
6824 }
6825 expand_omp_for_generic (region, &fd, (enum built_in_function) start_ix,
6826 (enum built_in_function) next_ix, inner_stmt);
6827 }
6828
6829 if (gimple_in_ssa_p (cfun))
6830 update_ssa (TODO_update_ssa_only_virtuals);
6831}
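/* Illustrative sketch (not part of this pass): the generic-schedule path
   above indexes into the GOMP_loop_*_start/next builtin families; at the
   source level the emitted code behaves like the loop below.  The GOMP
   prototypes are libgomp's long-based entry points; the function itself and
   its parameters are hypothetical.  */
#if 0 /* illustration only */
#include <stdbool.h>

extern bool GOMP_loop_dynamic_start (long, long, long, long, long *, long *);
extern bool GOMP_loop_dynamic_next (long *, long *);
extern void GOMP_loop_end (void);

static void
dynamic_schedule_shape (long n1, long n2, long step, long chunk)
{
  long istart, iend, v;
  /* Each thread repeatedly asks the runtime for a [istart, iend) chunk.  */
  if (GOMP_loop_dynamic_start (n1, n2, step, chunk, &istart, &iend))
    do
      for (v = istart; v < iend; v += step)
	;				/* BODY */
    while (GOMP_loop_dynamic_next (&istart, &iend));
  GOMP_loop_end ();			/* implicit barrier */
}
#endif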
6832
6833
6834/* Expand code for an OpenMP sections directive. In pseudo code, we generate
6835
6836 v = GOMP_sections_start (n);
6837 L0:
6838 switch (v)
6839 {
6840 case 0:
6841 goto L2;
6842 case 1:
6843 section 1;
6844 goto L1;
6845 case 2:
6846 ...
6847 case n:
6848 ...
6849 default:
6850 abort ();
6851 }
6852 L1:
6853 v = GOMP_sections_next ();
6854 goto L0;
6855 L2:
6856 reduction;
6857
6858 If this is a combined parallel sections, replace the call to
6859 GOMP_sections_start with a call to GOMP_sections_next. */
6860
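/* Illustrative sketch (not part of this pass): the dispatch loop the
   pseudocode above describes, written out with libgomp's real section entry
   points and two hypothetical section bodies.  */
#if 0 /* illustration only */
extern unsigned GOMP_sections_start (unsigned);
extern unsigned GOMP_sections_next (void);
extern void GOMP_sections_end (void);

static void
sections_shape (void)
{
  unsigned v;
  /* 0 means "no more work"; nonzero selects a section to run.  */
  for (v = GOMP_sections_start (2); v != 0; v = GOMP_sections_next ())
    switch (v)
      {
      case 1: /* section 1 */ break;
      case 2: /* section 2 */ break;
      default: __builtin_abort ();
      }
  GOMP_sections_end ();
}
#endif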
6861static void
6862expand_omp_sections (struct omp_region *region)
6863{
6864 tree t, u, vin = NULL, vmain, vnext, l2;
6865 vec<tree> label_vec;
6866 unsigned len;
6867 basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
6868 gimple_stmt_iterator si, switch_si;
6869 gimple sections_stmt, stmt, cont;
6870 edge_iterator ei;
6871 edge e;
6872 struct omp_region *inner;
6873 unsigned i, casei;
6874 bool exit_reachable = region->cont != NULL;
6875
6876 gcc_assert (region->exit != NULL);
6877 entry_bb = region->entry;
6878 l0_bb = single_succ (entry_bb);
6879 l1_bb = region->cont;
6880 l2_bb = region->exit;
6881 if (single_pred_p (l2_bb) && single_pred (l2_bb) == l0_bb)
6882 l2 = gimple_block_label (l2_bb);
6883 else
6884 {
6885 /* This can happen if there are reductions. */
6886 len = EDGE_COUNT (l0_bb->succs);
6887 gcc_assert (len > 0);
6888 e = EDGE_SUCC (l0_bb, len - 1);
6889 si = gsi_last_bb (e->dest);
6890 l2 = NULL_TREE;
6891 if (gsi_end_p (si)
6892 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
6893 l2 = gimple_block_label (e->dest);
6894 else
6895 FOR_EACH_EDGE (e, ei, l0_bb->succs)
6896 {
6897 si = gsi_last_bb (e->dest);
6898 if (gsi_end_p (si)
6899 || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
6900 {
6901 l2 = gimple_block_label (e->dest);
6902 break;
6903 }
6904 }
6905 }
6906 if (exit_reachable)
6907 default_bb = create_empty_bb (l1_bb->prev_bb);
6908 else
6909 default_bb = create_empty_bb (l0_bb);
6910
6911 /* We will build a switch() with enough cases for all the
6912 GIMPLE_OMP_SECTION regions, a '0' case to handle the end of more work
6913 and a default case to abort if something goes wrong. */
6914 len = EDGE_COUNT (l0_bb->succs);
6915
6916 /* Use vec::quick_push on label_vec throughout, since we know the size
6917 in advance. */
6918 label_vec.create (len);
6919
6920 /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
6921 GIMPLE_OMP_SECTIONS statement. */
6922 si = gsi_last_bb (entry_bb);
6923 sections_stmt = gsi_stmt (si);
6924 gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
6925 vin = gimple_omp_sections_control (sections_stmt);
6926 if (!is_combined_parallel (region))
6927 {
6928 /* If we are not inside a combined parallel+sections region,
6929 call GOMP_sections_start. */
6930 t = build_int_cst (unsigned_type_node, len - 1);
6931 u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_START);
6932 stmt = gimple_build_call (u, 1, t);
6933 }
6934 else
6935 {
6936 /* Otherwise, call GOMP_sections_next. */
6937 u = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
6938 stmt = gimple_build_call (u, 0);
6939 }
6940 gimple_call_set_lhs (stmt, vin);
6941 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
6942 gsi_remove (&si, true);
6943
6944 /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
6945 L0_BB. */
6946 switch_si = gsi_last_bb (l0_bb);
6947 gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
6948 if (exit_reachable)
6949 {
6950 cont = last_stmt (l1_bb);
6951 gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
6952 vmain = gimple_omp_continue_control_use (cont);
6953 vnext = gimple_omp_continue_control_def (cont);
6954 }
6955 else
6956 {
6957 vmain = vin;
6958 vnext = NULL_TREE;
6959 }
6960
6961 t = build_case_label (build_int_cst (unsigned_type_node, 0), NULL, l2);
6962 label_vec.quick_push (t);
6963 i = 1;
6964
6965 /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR. */
6966 for (inner = region->inner, casei = 1;
6967 inner;
6968 inner = inner->next, i++, casei++)
953ff289 6969 {
6970 basic_block s_entry_bb, s_exit_bb;
6971
6972 /* Skip optional reduction region. */
6973 if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
6974 {
6975 --i;
6976 --casei;
6977 continue;
6978 }
6979
6980 s_entry_bb = inner->entry;
6981 s_exit_bb = inner->exit;
6982
6983 t = gimple_block_label (s_entry_bb);
6984 u = build_int_cst (unsigned_type_node, casei);
6985 u = build_case_label (u, NULL, t);
6986 label_vec.quick_push (u);
6987
6988 si = gsi_last_bb (s_entry_bb);
6989 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
6990 gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
6991 gsi_remove (&si, true);
6992 single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
6993
6994 if (s_exit_bb == NULL)
6995 continue;
6996
6997 si = gsi_last_bb (s_exit_bb);
6998 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
6999 gsi_remove (&si, true);
7000
7001 single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
7002 }
7003
7004 /* Error handling code goes in DEFAULT_BB. */
7005 t = gimple_block_label (default_bb);
7006 u = build_case_label (NULL, NULL, t);
7007 make_edge (l0_bb, default_bb, 0);
7008 if (current_loops)
7009 add_bb_to_loop (default_bb, current_loops->tree_root);
7010
7011 stmt = gimple_build_switch (vmain, u, label_vec);
7012 gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
7013 gsi_remove (&switch_si, true);
7014 label_vec.release ();
7015
7016 si = gsi_start_bb (default_bb);
7017 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
7018 gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
7019
7020 if (exit_reachable)
7021 {
7022 tree bfn_decl;
7023
7024 /* Code to get the next section goes in L1_BB. */
7025 si = gsi_last_bb (l1_bb);
7026 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);
7027
7028 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_NEXT);
7029 stmt = gimple_build_call (bfn_decl, 0);
7030 gimple_call_set_lhs (stmt, vnext);
7031 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
7032 gsi_remove (&si, true);
7033
7034 single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
7035 }
7036
7037 /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB. */
7038 si = gsi_last_bb (l2_bb);
7039 if (gimple_omp_return_nowait_p (gsi_stmt (si)))
7040 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_NOWAIT);
7041 else if (gimple_omp_return_lhs (gsi_stmt (si)))
7042 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END_CANCEL);
7043 else
7044 t = builtin_decl_explicit (BUILT_IN_GOMP_SECTIONS_END);
7045 stmt = gimple_build_call (t, 0);
7046 if (gimple_omp_return_lhs (gsi_stmt (si)))
7047 gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (si)));
7048 gsi_insert_after (&si, stmt, GSI_SAME_STMT);
7049 gsi_remove (&si, true);
7050
7051 set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
7052}
7053
7054
7055/* Expand code for an OpenMP single directive. We've already expanded
7056 much of the code; here we simply place the GOMP_barrier call. */
7057
7058static void
7059expand_omp_single (struct omp_region *region)
7060{
7061 basic_block entry_bb, exit_bb;
7062 gimple_stmt_iterator si;
7063
7064 entry_bb = region->entry;
7065 exit_bb = region->exit;
7066
7067 si = gsi_last_bb (entry_bb);
7068 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
7069 gsi_remove (&si, true);
7070 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7071
7072 si = gsi_last_bb (exit_bb);
7073 if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
7074 {
7075 tree t = gimple_omp_return_lhs (gsi_stmt (si));
7076 gsi_insert_after (&si, build_omp_barrier (t), GSI_SAME_STMT);
7077 }
7078 gsi_remove (&si, true);
7079 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7080}
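/* Illustrative sketch (not part of this pass): what is left of
   "#pragma omp single" at this point -- one thread runs the body, everyone
   meets at the closing barrier unless nowait was given.  GOMP_single_start
   and GOMP_barrier are the real libgomp entry points; the wrapper is
   hypothetical.  */
#if 0 /* illustration only */
#include <stdbool.h>

extern bool GOMP_single_start (void);
extern void GOMP_barrier (void);

static void
single_shape (void)
{
  if (GOMP_single_start ())
    ;				/* body, executed by exactly one thread */
  GOMP_barrier ();		/* omitted when nowait is present */
}
#endif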
7081
7082
7083/* Generic expansion for OpenMP synchronization directives: master,
7084 taskgroup, ordered, critical and teams. All we need to do here is
7085 remove the entry and exit markers for REGION. */
7086
7087static void
7088expand_omp_synch (struct omp_region *region)
7089{
7090 basic_block entry_bb, exit_bb;
7091 gimple_stmt_iterator si;
7092
7093 entry_bb = region->entry;
7094 exit_bb = region->exit;
7095
7096 si = gsi_last_bb (entry_bb);
7097 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
7098 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
7099 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TASKGROUP
7100 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
7101 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL
7102 || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_TEAMS);
7103 gsi_remove (&si, true);
7104 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7105
7106 if (exit_bb)
7107 {
7108 si = gsi_last_bb (exit_bb);
7109 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
7110 gsi_remove (&si, true);
7111 single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
7112 }
7113}
7114
7115/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7116 operation as a normal volatile load. */
7117
7118static bool
7119expand_omp_atomic_load (basic_block load_bb, tree addr,
7120 tree loaded_val, int index)
7121{
7122 enum built_in_function tmpbase;
7123 gimple_stmt_iterator gsi;
7124 basic_block store_bb;
7125 location_t loc;
7126 gimple stmt;
7127 tree decl, call, type, itype;
7128
7129 gsi = gsi_last_bb (load_bb);
7130 stmt = gsi_stmt (gsi);
7131 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7132 loc = gimple_location (stmt);
7133
7134 /* ??? If the target does not implement atomic_load_optab[mode], and mode
7135 is smaller than word size, then expand_atomic_load assumes that the load
7136 is atomic. We could avoid the builtin entirely in this case. */
7137
7138 tmpbase = (enum built_in_function) (BUILT_IN_ATOMIC_LOAD_N + index + 1);
7139 decl = builtin_decl_explicit (tmpbase);
7140 if (decl == NULL_TREE)
7141 return false;
7142
7143 type = TREE_TYPE (loaded_val);
7144 itype = TREE_TYPE (TREE_TYPE (decl));
7145
7146 call = build_call_expr_loc (loc, decl, 2, addr,
7147 build_int_cst (NULL,
7148 gimple_omp_atomic_seq_cst_p (stmt)
7149 ? MEMMODEL_SEQ_CST
7150 : MEMMODEL_RELAXED));
7151 if (!useless_type_conversion_p (type, itype))
7152 call = fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7153 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7154
7155 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7156 gsi_remove (&gsi, true);
7157
7158 store_bb = single_succ (load_bb);
7159 gsi = gsi_last_bb (store_bb);
7160 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7161 gsi_remove (&gsi, true);
7162
7163 if (gimple_in_ssa_p (cfun))
7164 update_ssa (TODO_update_ssa_no_phi);
7165
7166 return true;
7167}
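/* Illustrative sketch (not part of this pass): for a 4-byte type the code
   built above amounts to one __atomic_load_n call, e.g. for
   "#pragma omp atomic read" (seq_cst only when the clause requests it).
   The wrapper function is hypothetical.  */
#if 0 /* illustration only */
static int
atomic_read_shape (int *addr, int seq_cst)
{
  return __atomic_load_n (addr, seq_cst ? __ATOMIC_SEQ_CST
					: __ATOMIC_RELAXED);
}
#endif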
7168
7169/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7170 operation as a normal volatile store. */
7171
7172static bool
7173expand_omp_atomic_store (basic_block load_bb, tree addr,
7174 tree loaded_val, tree stored_val, int index)
7175{
7176 enum built_in_function tmpbase;
7177 gimple_stmt_iterator gsi;
7178 basic_block store_bb = single_succ (load_bb);
7179 location_t loc;
7180 gimple stmt;
7181 tree decl, call, type, itype;
7182 enum machine_mode imode;
7183 bool exchange;
7184
7185 gsi = gsi_last_bb (load_bb);
7186 stmt = gsi_stmt (gsi);
7187 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD);
7188
7189 /* If the load value is needed, then this isn't a store but an exchange. */
7190 exchange = gimple_omp_atomic_need_value_p (stmt);
7191
7192 gsi = gsi_last_bb (store_bb);
7193 stmt = gsi_stmt (gsi);
7194 gcc_assert (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE);
7195 loc = gimple_location (stmt);
7196
7197 /* ??? If the target does not implement atomic_store_optab[mode], and mode
7198 is smaller than word size, then expand_atomic_store assumes that the store
7199 is atomic. We could avoid the builtin entirely in this case. */
7200
7201 tmpbase = (exchange ? BUILT_IN_ATOMIC_EXCHANGE_N : BUILT_IN_ATOMIC_STORE_N);
7202 tmpbase = (enum built_in_function) ((int) tmpbase + index + 1);
7203 decl = builtin_decl_explicit (tmpbase);
7204 if (decl == NULL_TREE)
7205 return false;
7206
7207 type = TREE_TYPE (stored_val);
7208
7209 /* Dig out the type of the function's second argument. */
7210 itype = TREE_TYPE (decl);
7211 itype = TYPE_ARG_TYPES (itype);
7212 itype = TREE_CHAIN (itype);
7213 itype = TREE_VALUE (itype);
7214 imode = TYPE_MODE (itype);
7215
7216 if (exchange && !can_atomic_exchange_p (imode, true))
7217 return false;
7218
7219 if (!useless_type_conversion_p (itype, type))
7220 stored_val = fold_build1_loc (loc, VIEW_CONVERT_EXPR, itype, stored_val);
7221 call = build_call_expr_loc (loc, decl, 3, addr, stored_val,
7222 build_int_cst (NULL,
7223 gimple_omp_atomic_seq_cst_p (stmt)
7224 ? MEMMODEL_SEQ_CST
7225 : MEMMODEL_RELAXED));
7226 if (exchange)
7227 {
7228 if (!useless_type_conversion_p (type, itype))
7229 call = build1_loc (loc, VIEW_CONVERT_EXPR, type, call);
7230 call = build2_loc (loc, MODIFY_EXPR, void_type_node, loaded_val, call);
7231 }
7232
7233 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7234 gsi_remove (&gsi, true);
7235
7236 /* Remove the GIMPLE_OMP_ATOMIC_LOAD that we verified above. */
7237 gsi = gsi_last_bb (load_bb);
7238 gsi_remove (&gsi, true);
7239
7240 if (gimple_in_ssa_p (cfun))
7241 update_ssa (TODO_update_ssa_no_phi);
7242
7243 return true;
7244}
7245
7246/* A subroutine of expand_omp_atomic. Attempt to implement the atomic
7247 operation as a __atomic_fetch_op builtin. INDEX is log2 of the
7248 size of the data type, and thus usable to find the index of the builtin
7249 decl. Returns false if the expression is not of the proper form. */
7250
7251static bool
7252expand_omp_atomic_fetch_op (basic_block load_bb,
7253 tree addr, tree loaded_val,
7254 tree stored_val, int index)
7255{
7256 enum built_in_function oldbase, newbase, tmpbase;
7257 tree decl, itype, call;
7258 tree lhs, rhs;
7259 basic_block store_bb = single_succ (load_bb);
7260 gimple_stmt_iterator gsi;
7261 gimple stmt;
7262 location_t loc;
7263 enum tree_code code;
7264 bool need_old, need_new;
7265 enum machine_mode imode;
7266 bool seq_cst;
7267
7268 /* We expect to find the following sequences:
7269
7270 load_bb:
7271 GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)
7272
7273 store_bb:
7274 val = tmp OP something; (or: something OP tmp)
7275 GIMPLE_OMP_STORE (val)
7276
7277 ???FIXME: Allow a more flexible sequence.
7278 Perhaps use data flow to pick the statements.
7279
7280 */
7281
7282 gsi = gsi_after_labels (store_bb);
7283 stmt = gsi_stmt (gsi);
7284 loc = gimple_location (stmt);
7285 if (!is_gimple_assign (stmt))
7286 return false;
7287 gsi_next (&gsi);
7288 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
7289 return false;
7290 need_new = gimple_omp_atomic_need_value_p (gsi_stmt (gsi));
7291 need_old = gimple_omp_atomic_need_value_p (last_stmt (load_bb));
7292 seq_cst = gimple_omp_atomic_seq_cst_p (last_stmt (load_bb));
7293 gcc_checking_assert (!need_old || !need_new);
7294
7295 if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
7296 return false;
7297
7298 /* Check for one of the supported fetch-op operations. */
7299 code = gimple_assign_rhs_code (stmt);
7300 switch (code)
7301 {
7302 case PLUS_EXPR:
7303 case POINTER_PLUS_EXPR:
7304 oldbase = BUILT_IN_ATOMIC_FETCH_ADD_N;
7305 newbase = BUILT_IN_ATOMIC_ADD_FETCH_N;
7306 break;
7307 case MINUS_EXPR:
7308 oldbase = BUILT_IN_ATOMIC_FETCH_SUB_N;
7309 newbase = BUILT_IN_ATOMIC_SUB_FETCH_N;
7310 break;
7311 case BIT_AND_EXPR:
7312 oldbase = BUILT_IN_ATOMIC_FETCH_AND_N;
7313 newbase = BUILT_IN_ATOMIC_AND_FETCH_N;
7314 break;
7315 case BIT_IOR_EXPR:
7316 oldbase = BUILT_IN_ATOMIC_FETCH_OR_N;
7317 newbase = BUILT_IN_ATOMIC_OR_FETCH_N;
7318 break;
7319 case BIT_XOR_EXPR:
7320 oldbase = BUILT_IN_ATOMIC_FETCH_XOR_N;
7321 newbase = BUILT_IN_ATOMIC_XOR_FETCH_N;
7322 break;
7323 default:
7324 return false;
7325 }
7326
7327 /* Make sure the expression is of the proper form. */
7328 if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
7329 rhs = gimple_assign_rhs2 (stmt);
7330 else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
7331 && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
7332 rhs = gimple_assign_rhs1 (stmt);
7333 else
7334 return false;
7335
7336 tmpbase = ((enum built_in_function)
7337 ((need_new ? newbase : oldbase) + index + 1));
7338 decl = builtin_decl_explicit (tmpbase);
7339 if (decl == NULL_TREE)
7340 return false;
7341 itype = TREE_TYPE (TREE_TYPE (decl));
7342 imode = TYPE_MODE (itype);
7343
7344 /* We could test all of the various optabs involved, but the fact of the
7345 matter is that (with the exception of i486 vs i586 and xadd) all targets
7346 that support any atomic operation optab also implement compare-and-swap.
7347 Let optabs.c take care of expanding any compare-and-swap loop. */
7348 if (!can_compare_and_swap_p (imode, true))
7349 return false;
7350
7351 gsi = gsi_last_bb (load_bb);
7352 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
7353
7354 /* OpenMP does not imply any barrier-like semantics on its atomic ops.
7355 It only requires that the operation happen atomically. Thus we can
7356 use the RELAXED memory model. */
7357 call = build_call_expr_loc (loc, decl, 3, addr,
7358 fold_convert_loc (loc, itype, rhs),
7359 build_int_cst (NULL,
7360 seq_cst ? MEMMODEL_SEQ_CST
7361 : MEMMODEL_RELAXED));
7362
7363 if (need_old || need_new)
7364 {
7365 lhs = need_old ? loaded_val : stored_val;
7366 call = fold_convert_loc (loc, TREE_TYPE (lhs), call);
7367 call = build2_loc (loc, MODIFY_EXPR, void_type_node, lhs, call);
7368 }
7369 else
7370 call = fold_convert_loc (loc, void_type_node, call);
7371 force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
7372 gsi_remove (&gsi, true);
7373
7374 gsi = gsi_last_bb (store_bb);
7375 gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
7376 gsi_remove (&gsi, true);
7377 gsi = gsi_last_bb (store_bb);
7378 gsi_remove (&gsi, true);
7379
7380 if (gimple_in_ssa_p (cfun))
7381 update_ssa (TODO_update_ssa_no_phi);
7382
7383 return true;
7384}
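/* Illustrative sketch (not part of this pass): the fetch-op fast path above
   turns a "#pragma omp atomic" update such as "*addr += x" into a single
   __atomic builtin -- __atomic_fetch_add when the old value is needed,
   __atomic_add_fetch when the new one is.  The wrapper is hypothetical.  */
#if 0 /* illustration only */
static void
atomic_update_shape (int *addr, int x)
{
  __atomic_fetch_add (addr, x, __ATOMIC_RELAXED);
}
#endif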
7385
7386/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
7387
7388 oldval = *addr;
7389 repeat:
7390 newval = rhs; // with oldval replacing *addr in rhs
7391 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
7392 if (oldval != newval)
7393 goto repeat;
7394
7395 INDEX is log2 of the size of the data type, and thus usable to find the
7396 index of the builtin decl. */
7397
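/* Illustrative sketch (not part of this pass): the same compare-and-swap
   loop written with the GCC __atomic builtin instead of the indexed
   __sync_val_compare_and_swap_N decl the pass looks up; "+ rhs" stands in
   for the rewritten right-hand side.  */
#if 0 /* illustration only */
static void
cas_loop_shape (int *addr, int rhs)
{
  int oldval = __atomic_load_n (addr, __ATOMIC_RELAXED);
  int newval;
  /* On failure __atomic_compare_exchange_n refreshes oldval from *addr,
     so the loop simply recomputes newval and retries.  */
  do
    newval = oldval + rhs;		/* rhs with oldval for *addr */
  while (!__atomic_compare_exchange_n (addr, &oldval, newval, 0,
				       __ATOMIC_RELAXED, __ATOMIC_RELAXED));
}
#endif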
7398static bool
7399expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
7400 tree addr, tree loaded_val, tree stored_val,
7401 int index)
7402{
7403 tree loadedi, storedi, initial, new_storedi, old_vali;
7404 tree type, itype, cmpxchg, iaddr;
7405 gimple_stmt_iterator si;
7406 basic_block loop_header = single_succ (load_bb);
7407 gimple phi, stmt;
7408 edge e;
7409 enum built_in_function fncode;
7410
7411 /* ??? We need a non-pointer interface to __atomic_compare_exchange in
7412 order to use the RELAXED memory model effectively. */
7413 fncode = (enum built_in_function)((int)BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_N
7414 + index + 1);
7415 cmpxchg = builtin_decl_explicit (fncode);
7416 if (cmpxchg == NULL_TREE)
7417 return false;
7418 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7419 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
7420
7421 if (!can_compare_and_swap_p (TYPE_MODE (itype), true))
7422 return false;
7423
7424 /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD. */
7425 si = gsi_last_bb (load_bb);
7426 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
7427
7428 /* For floating-point values, we'll need to view-convert them to integers
7429 so that we can perform the atomic compare and swap. Simplify the
7430 following code by always setting up the "i"ntegral variables. */
7431 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
7432 {
7433 tree iaddr_val;
7434
7435 iaddr = create_tmp_reg (build_pointer_type_for_mode (itype, ptr_mode,
7436 true), NULL);
7437 iaddr_val
7438 = force_gimple_operand_gsi (&si,
7439 fold_convert (TREE_TYPE (iaddr), addr),
7440 false, NULL_TREE, true, GSI_SAME_STMT);
7441 stmt = gimple_build_assign (iaddr, iaddr_val);
7442 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7443 loadedi = create_tmp_var (itype, NULL);
7444 if (gimple_in_ssa_p (cfun))
7445 loadedi = make_ssa_name (loadedi, NULL);
7446 }
7447 else
7448 {
7449 iaddr = addr;
7450 loadedi = loaded_val;
7451 }
7452
7453 initial
7454 = force_gimple_operand_gsi (&si,
7455 build2 (MEM_REF, TREE_TYPE (TREE_TYPE (iaddr)),
7456 iaddr,
7457 build_int_cst (TREE_TYPE (iaddr), 0)),
7458 true, NULL_TREE, true, GSI_SAME_STMT);
7459
7460 /* Move the value to the LOADEDI temporary. */
7461 if (gimple_in_ssa_p (cfun))
7462 {
7463 gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
7464 phi = create_phi_node (loadedi, loop_header);
7465 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
7466 initial);
7467 }
7468 else
7469 gsi_insert_before (&si,
7470 gimple_build_assign (loadedi, initial),
7471 GSI_SAME_STMT);
7472 if (loadedi != loaded_val)
7473 {
7474 gimple_stmt_iterator gsi2;
7475 tree x;
7476
7477 x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
7478 gsi2 = gsi_start_bb (loop_header);
7479 if (gimple_in_ssa_p (cfun))
7480 {
7481 gimple stmt;
7482 x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
7483 true, GSI_SAME_STMT);
7484 stmt = gimple_build_assign (loaded_val, x);
7485 gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
7486 }
7487 else
7488 {
726a989a
RB
7489 x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
7490 force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
7491 true, GSI_SAME_STMT);
7492 }
7493 }
7494 gsi_remove (&si, true);
7495
7496 si = gsi_last_bb (store_bb);
7497 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
7498
7499 if (iaddr == addr)
7500 storedi = stored_val;
7501 else
7502 storedi =
7503 force_gimple_operand_gsi (&si,
7504 build1 (VIEW_CONVERT_EXPR, itype,
7505 stored_val), true, NULL_TREE, true,
7506 GSI_SAME_STMT);
7507
7508 /* Build the compare&swap statement. */
7509 new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
7510 new_storedi = force_gimple_operand_gsi (&si,
7511 fold_convert (TREE_TYPE (loadedi),
7512 new_storedi),
7513 true, NULL_TREE,
7514 true, GSI_SAME_STMT);
7515
7516 if (gimple_in_ssa_p (cfun))
7517 old_vali = loadedi;
7518 else
7519 {
7520 old_vali = create_tmp_var (TREE_TYPE (loadedi), NULL);
7521 stmt = gimple_build_assign (old_vali, loadedi);
7522 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7523
7524 stmt = gimple_build_assign (loadedi, new_storedi);
7525 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7526 }
7527
7528 /* Note that we always perform the comparison as an integer, even for
7529 floating point. This allows the atomic operation to properly
7530 succeed even with NaNs and -0.0. */
7531 stmt = gimple_build_cond_empty
7532 (build2 (NE_EXPR, boolean_type_node,
7533 new_storedi, old_vali));
7534 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7535
7536 /* Update cfg. */
7537 e = single_succ_edge (store_bb);
7538 e->flags &= ~EDGE_FALLTHRU;
7539 e->flags |= EDGE_FALSE_VALUE;
7540
7541 e = make_edge (store_bb, loop_header, EDGE_TRUE_VALUE);
7542
7543 /* Copy the new value to loadedi (we already did that before the condition
7544 if we are not in SSA). */
7545 if (gimple_in_ssa_p (cfun))
7546 {
7547 phi = gimple_seq_first_stmt (phi_nodes (loop_header));
7548 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
7549 }
7550
7551 /* Remove GIMPLE_OMP_ATOMIC_STORE. */
7552 gsi_remove (&si, true);
7553
7554 struct loop *loop = alloc_loop ();
7555 loop->header = loop_header;
7556 loop->latch = store_bb;
7557 add_loop (loop, loop_header->loop_father);
7558
7559 if (gimple_in_ssa_p (cfun))
7560 update_ssa (TODO_update_ssa_no_phi);
7561
7562 return true;
7563}
7564
7565/* A subroutine of expand_omp_atomic. Implement the atomic operation as:
7566
7567 GOMP_atomic_start ();
7568 *addr = rhs;
7569 GOMP_atomic_end ();
7570
7571 The result is not globally atomic, but works so long as all parallel
7572 references are within #pragma omp atomic directives. According to
7573 responses received from omp@openmp.org, this appears to be within
7574 spec, which makes sense, since that's how several other compilers
7575 handle this situation as well.
7576 LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
7577 expanding. STORED_VAL is the operand of the matching
7578 GIMPLE_OMP_ATOMIC_STORE.
7579
7580 We replace
7581 GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with
7582 loaded_val = *addr;
7583
7584 and replace
7585 GIMPLE_OMP_ATOMIC_STORE (stored_val) with
7586 *addr = stored_val;
7587*/
7588
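/* Illustrative sketch (not part of this pass): the mutex fallback at the
   source level, using libgomp's real GOMP_atomic_start/end entry points
   around a plain store.  The wrapper and its parameters are
   hypothetical.  */
#if 0 /* illustration only */
extern void GOMP_atomic_start (void);
extern void GOMP_atomic_end (void);

static void
atomic_mutex_shape (double *addr, double rhs)
{
  GOMP_atomic_start ();		/* one global runtime lock */
  *addr = rhs;
  GOMP_atomic_end ();
}
#endif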
7589static bool
7590expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
7591 tree addr, tree loaded_val, tree stored_val)
7592{
7593 gimple_stmt_iterator si;
7594 gimple stmt;
7595 tree t;
7596
7597 si = gsi_last_bb (load_bb);
7598 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
7599
7600 t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7601 t = build_call_expr (t, 0);
7602 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
7603
7604 stmt = gimple_build_assign (loaded_val, build_simple_mem_ref (addr));
7605 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7606 gsi_remove (&si, true);
7607
7608 si = gsi_last_bb (store_bb);
7609 gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
7610
7611 stmt = gimple_build_assign (build_simple_mem_ref (unshare_expr (addr)),
7612 stored_val);
7613 gsi_insert_before (&si, stmt, GSI_SAME_STMT);
7614
7615 t = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7616 t = build_call_expr (t, 0);
7617 force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
7618 gsi_remove (&si, true);
7619
7620 if (gimple_in_ssa_p (cfun))
7621 update_ssa (TODO_update_ssa_no_phi);
7622 return true;
7623}
7624
7625/* Expand a GIMPLE_OMP_ATOMIC statement. We try to expand
7626 using expand_omp_atomic_fetch_op. If it fails, we try to
7627 call expand_omp_atomic_pipeline, and if it fails too, the
7628 ultimate fallback is wrapping the operation in a mutex
7629 (expand_omp_atomic_mutex). REGION is the atomic region built
7630 by build_omp_regions_1(). */
7631
7632static void
7633expand_omp_atomic (struct omp_region *region)
7634{
7635 basic_block load_bb = region->entry, store_bb = region->exit;
7636 gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
7637 tree loaded_val = gimple_omp_atomic_load_lhs (load);
7638 tree addr = gimple_omp_atomic_load_rhs (load);
7639 tree stored_val = gimple_omp_atomic_store_val (store);
7640 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7641 HOST_WIDE_INT index;
7642
7643 /* Make sure the type is one of the supported sizes. */
7644 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
7645 index = exact_log2 (index);
7646 if (index >= 0 && index <= 4)
7647 {
7648 unsigned int align = TYPE_ALIGN_UNIT (type);
7649
7650 /* __sync builtins require strict data alignment. */
7651 if (exact_log2 (align) >= index)
7652 {
7653 /* Atomic load. */
7654 if (loaded_val == stored_val
7655 && (GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7656 || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
7657 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
7658 && expand_omp_atomic_load (load_bb, addr, loaded_val, index))
7659 return;
7660
7661 /* Atomic store. */
7662 if ((GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7663 || GET_MODE_CLASS (TYPE_MODE (type)) == MODE_FLOAT)
7664 && GET_MODE_BITSIZE (TYPE_MODE (type)) <= BITS_PER_WORD
7665 && store_bb == single_succ (load_bb)
7666 && first_stmt (store_bb) == store
7667 && expand_omp_atomic_store (load_bb, addr, loaded_val,
7668 stored_val, index))
7669 return;
7670
7671 /* When possible, use specialized atomic update functions. */
7672 if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
7673 && store_bb == single_succ (load_bb)
7674 && expand_omp_atomic_fetch_op (load_bb, addr,
7675 loaded_val, stored_val, index))
7676 return;
7677
7678 /* If we don't have specialized __sync builtins, try and implement
7679 as a compare and swap loop. */
7680 if (expand_omp_atomic_pipeline (load_bb, store_bb, addr,
7681 loaded_val, stored_val, index))
7682 return;
7683 }
7684 }
7685
7686 /* The ultimate fallback is wrapping the operation in a mutex. */
7687 expand_omp_atomic_mutex (load_bb, store_bb, addr, loaded_val, stored_val);
7688}
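/* Illustrative sketch (not part of this pass): the size-class INDEX used
   above is log2 of the type size in bytes, so 1/2/4/8/16-byte types map to
   0..4; anything else (or an insufficiently aligned type) falls through to
   the mutex path.  The helper below is hypothetical.  */
#if 0 /* illustration only */
static int
atomic_size_index (unsigned long size_unit)
{
  int index;
  for (index = 0; index <= 4; index++)
    if ((1UL << index) == size_unit)
      return index;			/* exact power of two, <= 16 */
  return -1;				/* unsupported size */
}
#endif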
7689
7690
7691/* Expand the OpenMP target{, data, update} directive starting at REGION. */
7692
7693static void
7694expand_omp_target (struct omp_region *region)
7695{
7696 basic_block entry_bb, exit_bb, new_bb;
7697 struct function *child_cfun = NULL;
7698 tree child_fn = NULL_TREE, block, t;
7699 gimple_stmt_iterator gsi;
7700 gimple entry_stmt, stmt;
7701 edge e;
7702
7703 entry_stmt = last_stmt (region->entry);
7704 new_bb = region->entry;
7705 int kind = gimple_omp_target_kind (entry_stmt);
7706 if (kind == GF_OMP_TARGET_KIND_REGION)
7707 {
7708 child_fn = gimple_omp_target_child_fn (entry_stmt);
7709 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7710 }
7711
7712 entry_bb = region->entry;
7713 exit_bb = region->exit;
7714
7715 if (kind == GF_OMP_TARGET_KIND_REGION)
7716 {
7717 unsigned srcidx, dstidx, num;
7718
7719 /* If the target region needs data sent from the parent
7720 function, then the very first statement (except possible
7721 tree profile counter updates) of the parallel body
7722 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O. Since
7723 &.OMP_DATA_O is passed as an argument to the child function,
7724 we need to replace it with the argument as seen by the child
7725 function.
7726
7727 In most cases, this will end up being the identity assignment
7728 .OMP_DATA_I = .OMP_DATA_I. However, if the parallel body had
7729 a function call that has been inlined, the original PARM_DECL
7730 .OMP_DATA_I may have been converted into a different local
7731 variable, in which case we need to keep the assignment. */
7732 if (gimple_omp_target_data_arg (entry_stmt))
7733 {
7734 basic_block entry_succ_bb = single_succ (entry_bb);
7735 gimple_stmt_iterator gsi;
7736 tree arg;
7737 gimple tgtcopy_stmt = NULL;
7738 tree sender
7739 = TREE_VEC_ELT (gimple_omp_target_data_arg (entry_stmt), 0);
7740
7741 for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
7742 {
7743 gcc_assert (!gsi_end_p (gsi));
7744 stmt = gsi_stmt (gsi);
7745 if (gimple_code (stmt) != GIMPLE_ASSIGN)
7746 continue;
7747
7748 if (gimple_num_ops (stmt) == 2)
7749 {
7750 tree arg = gimple_assign_rhs1 (stmt);
7751
7752 /* We're ignoring the subcode because we're
7753 effectively doing a STRIP_NOPS. */
7754
7755 if (TREE_CODE (arg) == ADDR_EXPR
7756 && TREE_OPERAND (arg, 0) == sender)
7757 {
7758 tgtcopy_stmt = stmt;
7759 break;
7760 }
7761 }
7762 }
7763
7764 gcc_assert (tgtcopy_stmt != NULL);
7765 arg = DECL_ARGUMENTS (child_fn);
7766
7767 gcc_assert (gimple_assign_lhs (tgtcopy_stmt) == arg);
7768 gsi_remove (&gsi, true);
7769 }
7770
7771 /* Declare local variables needed in CHILD_CFUN. */
7772 block = DECL_INITIAL (child_fn);
7773 BLOCK_VARS (block) = vec2chain (child_cfun->local_decls);
7774 /* The gimplifier could record temporaries in target block
7775 rather than in containing function's local_decls chain,
7776 which would mean cgraph missed finalizing them. Do it now. */
7777 for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
7778 if (TREE_CODE (t) == VAR_DECL
7779 && TREE_STATIC (t)
7780 && !DECL_EXTERNAL (t))
7781 varpool_finalize_decl (t);
7782 DECL_SAVED_TREE (child_fn) = NULL;
7783 /* We'll create a CFG for child_fn, so no gimple body is needed. */
7784 gimple_set_body (child_fn, NULL);
7785 TREE_USED (block) = 1;
7786
7787 /* Reset DECL_CONTEXT on function arguments. */
7788 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7789 DECL_CONTEXT (t) = child_fn;
7790
7791 /* Split ENTRY_BB at GIMPLE_OMP_TARGET,
7792 so that it can be moved to the child function. */
7793 gsi = gsi_last_bb (entry_bb);
7794 stmt = gsi_stmt (gsi);
7795 gcc_assert (stmt && gimple_code (stmt) == GIMPLE_OMP_TARGET
7796 && gimple_omp_target_kind (stmt)
7797 == GF_OMP_TARGET_KIND_REGION);
7798 gsi_remove (&gsi, true);
7799 e = split_block (entry_bb, stmt);
7800 entry_bb = e->dest;
7801 single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
7802
7803 /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR. */
7804 if (exit_bb)
7805 {
7806 gsi = gsi_last_bb (exit_bb);
7807 gcc_assert (!gsi_end_p (gsi)
7808 && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
7809 stmt = gimple_build_return (NULL);
7810 gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
7811 gsi_remove (&gsi, true);
7812 }
7813
7814 /* Move the target region into CHILD_CFUN. */
7815
7816 block = gimple_block (entry_stmt);
7817
7818 new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
7819 if (exit_bb)
7820 single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
7821 /* When the OMP expansion process cannot guarantee an up-to-date
7822 loop tree arrange for the child function to fixup loops. */
7823 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
7824 child_cfun->x_current_loops->state |= LOOPS_NEED_FIXUP;
7825
7826 /* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
7827 num = vec_safe_length (child_cfun->local_decls);
7828 for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
7829 {
7830 t = (*child_cfun->local_decls)[srcidx];
7831 if (DECL_CONTEXT (t) == cfun->decl)
7832 continue;
7833 if (srcidx != dstidx)
7834 (*child_cfun->local_decls)[dstidx] = t;
7835 dstidx++;
7836 }
7837 if (dstidx != num)
7838 vec_safe_truncate (child_cfun->local_decls, dstidx);
7839
7840 /* Inform the callgraph about the new function. */
7841 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
7842 cgraph_add_new_function (child_fn, true);
7843
7844 /* Fix the callgraph edges for child_cfun. Those for cfun will be
7845 fixed in a following pass. */
7846 push_cfun (child_cfun);
7847 rebuild_cgraph_edges ();
7848
7849 /* Some EH regions might become dead, see PR34608. If
7850 pass_cleanup_cfg isn't the first pass to happen with the
7851 new child, these dead EH edges might cause problems.
7852 Clean them up now. */
7853 if (flag_exceptions)
7854 {
7855 basic_block bb;
7856 bool changed = false;
7857
7858 FOR_EACH_BB (bb)
7859 changed |= gimple_purge_dead_eh_edges (bb);
7860 if (changed)
7861 cleanup_tree_cfg ();
7862 }
7863 pop_cfun ();
7864 }
7865
7866 /* Emit a library call to launch the target region, or do data
7867 transfers. */
7868 tree t1, t2, t3, t4, device, cond, c, clauses;
7869 enum built_in_function start_ix;
7870 location_t clause_loc;
7871
7872 clauses = gimple_omp_target_clauses (entry_stmt);
7873
7874 if (kind == GF_OMP_TARGET_KIND_REGION)
7875 start_ix = BUILT_IN_GOMP_TARGET;
7876 else if (kind == GF_OMP_TARGET_KIND_DATA)
7877 start_ix = BUILT_IN_GOMP_TARGET_DATA;
7878 else
7879 start_ix = BUILT_IN_GOMP_TARGET_UPDATE;
7880
7881 /* By default, the value of DEVICE is -1 (let runtime library choose)
7882 and there is no conditional. */
7883 cond = NULL_TREE;
7884 device = build_int_cst (integer_type_node, -1);
7885
7886 c = find_omp_clause (clauses, OMP_CLAUSE_IF);
7887 if (c)
7888 cond = OMP_CLAUSE_IF_EXPR (c);
7889
7890 c = find_omp_clause (clauses, OMP_CLAUSE_DEVICE);
7891 if (c)
7892 {
7893 device = OMP_CLAUSE_DEVICE_ID (c);
7894 clause_loc = OMP_CLAUSE_LOCATION (c);
7895 }
7896 else
7897 clause_loc = gimple_location (entry_stmt);
7898
7899 /* Ensure 'device' is of the correct type. */
7900 device = fold_convert_loc (clause_loc, integer_type_node, device);
7901
7902 /* If we found the clause 'if (cond)', build
7903 (cond ? device : -2). */
7904 if (cond)
7905 {
7906 cond = gimple_boolify (cond);
7907
7908 basic_block cond_bb, then_bb, else_bb;
7909 edge e;
7910 tree tmp_var;
7911
7912 tmp_var = create_tmp_var (TREE_TYPE (device), NULL);
7913 if (kind != GF_OMP_TARGET_KIND_REGION)
7914 {
7915 gsi = gsi_last_bb (new_bb);
7916 gsi_prev (&gsi);
7917 e = split_block (new_bb, gsi_stmt (gsi));
7918 }
7919 else
7920 e = split_block (new_bb, NULL);
7921 cond_bb = e->src;
7922 new_bb = e->dest;
7923 remove_edge (e);
7924
7925 then_bb = create_empty_bb (cond_bb);
7926 else_bb = create_empty_bb (then_bb);
7927 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
7928 set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
7929
7930 stmt = gimple_build_cond_empty (cond);
7931 gsi = gsi_last_bb (cond_bb);
7932 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7933
7934 gsi = gsi_start_bb (then_bb);
7935 stmt = gimple_build_assign (tmp_var, device);
7936 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7937
7938 gsi = gsi_start_bb (else_bb);
7939 stmt = gimple_build_assign (tmp_var,
7940 build_int_cst (integer_type_node, -2));
7941 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
7942
7943 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
7944 make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
7945 if (current_loops)
7946 {
7947 add_bb_to_loop (then_bb, cond_bb->loop_father);
7948 add_bb_to_loop (else_bb, cond_bb->loop_father);
7949 }
7950 make_edge (then_bb, new_bb, EDGE_FALLTHRU);
7951 make_edge (else_bb, new_bb, EDGE_FALLTHRU);
7952
7953 device = tmp_var;
7954 }
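  /* Illustrative sketch (not part of this pass): the device-id selection
     built above, written as a plain function.  -1 lets the runtime choose a
     device and -2 forces the host fallback when the "if" clause is false;
     the helper itself is hypothetical.  */
#if 0 /* illustration only */
static int
pick_device_shape (int have_if_clause, int cond, int device)
{
  if (!have_if_clause)
    return device;			/* default is -1 */
  return cond ? device : -2;
}
#endif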
7955
7956 gsi = gsi_last_bb (new_bb);
7957 t = gimple_omp_target_data_arg (entry_stmt);
7958 if (t == NULL)
7959 {
7960 t1 = size_zero_node;
7961 t2 = build_zero_cst (ptr_type_node);
7962 t3 = t2;
7963 t4 = t2;
7964 }
7965 else
7966 {
7967 t1 = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (TREE_VEC_ELT (t, 1))));
7968 t1 = size_binop (PLUS_EXPR, t1, size_int (1));
7969 t2 = build_fold_addr_expr (TREE_VEC_ELT (t, 0));
7970 t3 = build_fold_addr_expr (TREE_VEC_ELT (t, 1));
7971 t4 = build_fold_addr_expr (TREE_VEC_ELT (t, 2));
7972 }
7973
7974 gimple g;
7975 /* FIXME: This will be address of
7976 extern char __OPENMP_TARGET__[] __attribute__((visibility ("hidden")))
7977 symbol, as soon as the linker plugin is able to create it for us. */
7978 tree openmp_target = build_zero_cst (ptr_type_node);
7979 if (kind == GF_OMP_TARGET_KIND_REGION)
7980 {
7981 tree fnaddr = build_fold_addr_expr (child_fn);
7982 g = gimple_build_call (builtin_decl_explicit (start_ix), 7,
7983 device, fnaddr, openmp_target, t1, t2, t3, t4);
7984 }
7985 else
7986 g = gimple_build_call (builtin_decl_explicit (start_ix), 6,
7987 device, openmp_target, t1, t2, t3, t4);
7988 gimple_set_location (g, gimple_location (entry_stmt));
7989 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
7990 if (kind != GF_OMP_TARGET_KIND_REGION)
7991 {
7992 g = gsi_stmt (gsi);
7993 gcc_assert (g && gimple_code (g) == GIMPLE_OMP_TARGET);
7994 gsi_remove (&gsi, true);
7995 }
7996 if (kind == GF_OMP_TARGET_KIND_DATA && region->exit)
7997 {
7998 gsi = gsi_last_bb (region->exit);
7999 g = gsi_stmt (gsi);
8000 gcc_assert (g && gimple_code (g) == GIMPLE_OMP_RETURN);
8001 gsi_remove (&gsi, true);
8002 }
8003}
8004
8005
8006/* Expand the parallel region tree rooted at REGION. Expansion
8007 proceeds in depth-first order. Innermost regions are expanded
8008 first. This way, parallel regions that require a new function to
8009 be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
8010 internal dependencies in their body. */
8011
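/* Illustrative sketch (not part of this pass): the traversal order
   implemented below -- children before their own region, then the next
   peer -- shown on a stripped-down, hypothetical region type.  */
#if 0 /* illustration only */
struct region_shape { struct region_shape *inner, *next; };

static void
walk_regions_shape (struct region_shape *r)
{
  while (r)
    {
      if (r->inner)
	walk_regions_shape (r->inner);	/* innermost first */
      /* expand r itself here */
      r = r->next;			/* then its peers */
    }
}
#endif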
8012static void
8013expand_omp (struct omp_region *region)
8014{
8015 while (region)
8016 {
8017 location_t saved_location;
8018 gimple inner_stmt = NULL;
8019
8020 /* First, determine whether this is a combined parallel+workshare
8021 region. */
8022 if (region->type == GIMPLE_OMP_PARALLEL)
8023 determine_parallel_type (region);
8024
8025 if (region->type == GIMPLE_OMP_FOR
8026 && gimple_omp_for_combined_p (last_stmt (region->entry)))
8027 inner_stmt = last_stmt (region->inner->entry);
8028
8029 if (region->inner)
8030 expand_omp (region->inner);
8031
8032 saved_location = input_location;
8033 if (gimple_has_location (last_stmt (region->entry)))
8034 input_location = gimple_location (last_stmt (region->entry));
8035
8036 switch (region->type)
8037 {
8038 case GIMPLE_OMP_PARALLEL:
8039 case GIMPLE_OMP_TASK:
8040 expand_omp_taskreg (region);
8041 break;
8042
8043 case GIMPLE_OMP_FOR:
8044 expand_omp_for (region, inner_stmt);
8045 break;
8046
8047 case GIMPLE_OMP_SECTIONS:
8048 expand_omp_sections (region);
8049 break;
8050
8051 case GIMPLE_OMP_SECTION:
8052 /* Individual omp sections are handled together with their
8053 parent GIMPLE_OMP_SECTIONS region. */
8054 break;
8055
8056 case GIMPLE_OMP_SINGLE:
8057 expand_omp_single (region);
8058 break;
8059
8060 case GIMPLE_OMP_MASTER:
8061 case GIMPLE_OMP_TASKGROUP:
8062 case GIMPLE_OMP_ORDERED:
8063 case GIMPLE_OMP_CRITICAL:
8064 case GIMPLE_OMP_TEAMS:
8065 expand_omp_synch (region);
8066 break;
8067
8068 case GIMPLE_OMP_ATOMIC_LOAD:
8069 expand_omp_atomic (region);
8070 break;
8071
8072 case GIMPLE_OMP_TARGET:
8073 expand_omp_target (region);
8074 break;
8075
8076 default:
8077 gcc_unreachable ();
8078 }
8079
8080 input_location = saved_location;
8081 region = region->next;
8082 }
8083}
8084
8085
8086/* Helper for build_omp_regions. Scan the dominator tree starting at
8087 block BB. PARENT is the region that contains BB. If SINGLE_TREE is
8088 true, the function ends once a single tree is built (otherwise, whole
8089 forest of OMP constructs may be built). */
8090
8091static void
8092build_omp_regions_1 (basic_block bb, struct omp_region *parent,
8093 bool single_tree)
8094{
8095 gimple_stmt_iterator gsi;
8096 gimple stmt;
8097 basic_block son;
8098
8099 gsi = gsi_last_bb (bb);
8100 if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
8101 {
8102 struct omp_region *region;
8103 enum gimple_code code;
8104
8105 stmt = gsi_stmt (gsi);
8106 code = gimple_code (stmt);
8107 if (code == GIMPLE_OMP_RETURN)
8108 {
8109 /* STMT is the return point out of region PARENT. Mark it
8110 as the exit point and make PARENT the immediately
8111 enclosing region. */
8112 gcc_assert (parent);
8113 region = parent;
8114 region->exit = bb;
8115 parent = parent->outer;
8116 }
8117 else if (code == GIMPLE_OMP_ATOMIC_STORE)
8118 {
8119 /* GIMPLE_OMP_ATOMIC_STORE is analogous to
8120 GIMPLE_OMP_RETURN, but matches with
8121 GIMPLE_OMP_ATOMIC_LOAD. */
8122 gcc_assert (parent);
8123 gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
8124 region = parent;
8125 region->exit = bb;
8126 parent = parent->outer;
8127 }
8128
8129 else if (code == GIMPLE_OMP_CONTINUE)
8130 {
8131 gcc_assert (parent);
8132 parent->cont = bb;
8133 }
8134 else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
8135 {
8136 /* GIMPLE_OMP_SECTIONS_SWITCH is part of
8137 GIMPLE_OMP_SECTIONS, and we do nothing for it. */
8138 ;
8139 }
8140 else if (code == GIMPLE_OMP_TARGET
8141 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_UPDATE)
8142 new_omp_region (bb, code, parent);
8143 else
8144 {
8145 /* Otherwise, this directive becomes the parent for a new
8146 region. */
8147 region = new_omp_region (bb, code, parent);
8148 parent = region;
8149 }
8150 }
8151
8152 if (single_tree && !parent)
8153 return;
8154
8155 for (son = first_dom_son (CDI_DOMINATORS, bb);
8156 son;
8157 son = next_dom_son (CDI_DOMINATORS, son))
8158 build_omp_regions_1 (son, parent, single_tree);
8159}
8160
8161/* Builds the tree of OMP regions rooted at ROOT, storing it to
8162 root_omp_region. */
8163
8164static void
8165build_omp_regions_root (basic_block root)
8166{
8167 gcc_assert (root_omp_region == NULL);
8168 build_omp_regions_1 (root, NULL, true);
8169 gcc_assert (root_omp_region != NULL);
50674e96
DN
8170}
8171
5f40b3cb
ZD
8172/* Expands omp construct (and its subconstructs) starting in HEAD. */
8173
8174void
8175omp_expand_local (basic_block head)
8176{
8177 build_omp_regions_root (head);
8178 if (dump_file && (dump_flags & TDF_DETAILS))
8179 {
8180 fprintf (dump_file, "\nOMP region tree\n\n");
8181 dump_omp_region (dump_file, root_omp_region, 0);
8182 fprintf (dump_file, "\n");
8183 }
8184
8185 remove_exit_barriers (root_omp_region);
8186 expand_omp (root_omp_region);
8187
8188 free_omp_regions ();
8189}
50674e96
DN
8190
8191/* Scan the CFG and build a tree of OMP regions, storing the result
8192 in root_omp_region. */
8193
8194static void
8195build_omp_regions (void)
8196{
777f7f9a 8197 gcc_assert (root_omp_region == NULL);
50674e96 8198 calculate_dominance_info (CDI_DOMINATORS);
5f40b3cb 8199 build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
50674e96
DN
8200}
8201
50674e96
DN
8202/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
8203
c2924966 8204static unsigned int
50674e96
DN
8205execute_expand_omp (void)
8206{
8207 build_omp_regions ();
8208
777f7f9a
RH
8209 if (!root_omp_region)
8210 return 0;
50674e96 8211
777f7f9a
RH
8212 if (dump_file)
8213 {
8214 fprintf (dump_file, "\nOMP region tree\n\n");
8215 dump_omp_region (dump_file, root_omp_region, 0);
8216 fprintf (dump_file, "\n");
50674e96 8217 }
777f7f9a
RH
8218
8219 remove_exit_barriers (root_omp_region);
8220
8221 expand_omp (root_omp_region);
8222
777f7f9a
RH
8223 cleanup_tree_cfg ();
8224
8225 free_omp_regions ();
8226
c2924966 8227 return 0;
50674e96
DN
8228}
8229
917948d3
ZD
8230/* OMP expansion -- the default pass, run before creation of SSA form. */
8231
50674e96
DN
8232static bool
8233gate_expand_omp (void)
8234{
6d7f7e0a 8235 return ((flag_openmp != 0 || flag_openmp_simd != 0) && !seen_error ());
50674e96
DN
8236}
8237
27a4cd48
DM
8238namespace {
8239
8240const pass_data pass_data_expand_omp =
8241{
8242 GIMPLE_PASS, /* type */
8243 "ompexp", /* name */
8244 OPTGROUP_NONE, /* optinfo_flags */
8245 true, /* has_gate */
8246 true, /* has_execute */
8247 TV_NONE, /* tv_id */
8248 PROP_gimple_any, /* properties_required */
8249 0, /* properties_provided */
8250 0, /* properties_destroyed */
8251 0, /* todo_flags_start */
8252 0, /* todo_flags_finish */
50674e96 8253};
27a4cd48
DM
8254
8255class pass_expand_omp : public gimple_opt_pass
8256{
8257public:
c3284718
RS
8258 pass_expand_omp (gcc::context *ctxt)
8259 : gimple_opt_pass (pass_data_expand_omp, ctxt)
27a4cd48
DM
8260 {}
8261
8262 /* opt_pass methods: */
8263 bool gate () { return gate_expand_omp (); }
8264 unsigned int execute () { return execute_expand_omp (); }
8265
8266}; // class pass_expand_omp
8267
8268} // anon namespace
8269
8270gimple_opt_pass *
8271make_pass_expand_omp (gcc::context *ctxt)
8272{
8273 return new pass_expand_omp (ctxt);
8274}
50674e96
DN
8275\f
8276/* Routines to lower OpenMP directives into OMP-GIMPLE. */
8277
acf0174b
JJ
8278/* If ctx is a worksharing context inside of a cancellable parallel
8279 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8280 and conditional branch to parallel's cancel_label to handle
8281 cancellation in the implicit barrier. */
8282
8283static void
8284maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
8285{
8286 gimple omp_return = gimple_seq_last_stmt (*body);
8287 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8288 if (gimple_omp_return_nowait_p (omp_return))
8289 return;
8290 if (ctx->outer
8291 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
8292 && ctx->outer->cancellable)
8293 {
8294 tree lhs = create_tmp_var (boolean_type_node, NULL);
8295 gimple_omp_return_set_lhs (omp_return, lhs);
8296 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8297 gimple g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
8298 ctx->outer->cancel_label, fallthru_label);
8299 gimple_seq_add_stmt (body, g);
8300 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8301 }
8302}
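
/* Illustration (schematic GIMPLE, identifiers invented): when the
   enclosing parallel is cancellable, the tail built above looks
   roughly like

	#pragma omp return (set .res)	<-- GIMPLE_OMP_RETURN with lhs
	if (.res != 0) goto cancel_label; else goto fallthru;
	fallthru:

   so a barrier that observes pending cancellation branches to the
   parallel's cancel_label instead of falling through.  */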
8303
726a989a
RB
8304/* Lower the OpenMP sections directive in the current statement in GSI_P.
8305 CTX is the enclosing OMP context for the current statement. */
50674e96
DN
8306
8307static void
726a989a 8308lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 8309{
726a989a
RB
8310 tree block, control;
8311 gimple_stmt_iterator tgsi;
726a989a 8312 gimple stmt, new_stmt, bind, t;
355a7673 8313 gimple_seq ilist, dlist, olist, new_body;
d406b663 8314 struct gimplify_ctx gctx;
50674e96 8315
726a989a 8316 stmt = gsi_stmt (*gsi_p);
50674e96 8317
d406b663 8318 push_gimplify_context (&gctx);
50674e96
DN
8319
8320 dlist = NULL;
8321 ilist = NULL;
726a989a 8322 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
acf0174b 8323 &ilist, &dlist, ctx, NULL);
50674e96 8324
355a7673
MM
8325 new_body = gimple_omp_body (stmt);
8326 gimple_omp_set_body (stmt, NULL);
8327 tgsi = gsi_start (new_body);
8328 for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
50674e96
DN
8329 {
8330 omp_context *sctx;
726a989a 8331 gimple sec_start;
50674e96 8332
726a989a 8333 sec_start = gsi_stmt (tgsi);
50674e96
DN
8334 sctx = maybe_lookup_ctx (sec_start);
8335 gcc_assert (sctx);
8336
355a7673
MM
8337 lower_omp (gimple_omp_body_ptr (sec_start), sctx);
8338 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
8339 GSI_CONTINUE_LINKING);
726a989a 8340 gimple_omp_set_body (sec_start, NULL);
50674e96 8341
355a7673 8342 if (gsi_one_before_end_p (tgsi))
50674e96 8343 {
726a989a
RB
8344 gimple_seq l = NULL;
8345 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
50674e96 8346 &l, ctx);
355a7673 8347 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
726a989a 8348 gimple_omp_section_set_last (sec_start);
50674e96 8349 }
b8698a0f 8350
355a7673
MM
8351 gsi_insert_after (&tgsi, gimple_build_omp_return (false),
8352 GSI_CONTINUE_LINKING);
50674e96 8353 }
953ff289
DN
8354
8355 block = make_node (BLOCK);
355a7673 8356 bind = gimple_build_bind (NULL, new_body, block);
953ff289 8357
726a989a
RB
8358 olist = NULL;
8359 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
50674e96 8360
b357f682 8361 block = make_node (BLOCK);
726a989a 8362 new_stmt = gimple_build_bind (NULL, NULL, block);
355a7673 8363 gsi_replace (gsi_p, new_stmt, true);
50674e96 8364
b357f682 8365 pop_gimplify_context (new_stmt);
726a989a
RB
8366 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8367 BLOCK_VARS (block) = gimple_bind_vars (bind);
b357f682
JJ
8368 if (BLOCK_VARS (block))
8369 TREE_USED (block) = 1;
8370
726a989a
RB
8371 new_body = NULL;
8372 gimple_seq_add_seq (&new_body, ilist);
8373 gimple_seq_add_stmt (&new_body, stmt);
8374 gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
8375 gimple_seq_add_stmt (&new_body, bind);
777f7f9a 8376
e5c95afe 8377 control = create_tmp_var (unsigned_type_node, ".section");
726a989a
RB
8378 t = gimple_build_omp_continue (control, control);
8379 gimple_omp_sections_set_control (stmt, control);
8380 gimple_seq_add_stmt (&new_body, t);
777f7f9a 8381
726a989a 8382 gimple_seq_add_seq (&new_body, olist);
acf0174b
JJ
8383 if (ctx->cancellable)
8384 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
726a989a 8385 gimple_seq_add_seq (&new_body, dlist);
50674e96 8386
726a989a 8387 new_body = maybe_catch_exception (new_body);
4a31b7ee 8388
726a989a
RB
8389 t = gimple_build_omp_return
8390 (!!find_omp_clause (gimple_omp_sections_clauses (stmt),
8391 OMP_CLAUSE_NOWAIT));
8392 gimple_seq_add_stmt (&new_body, t);
acf0174b 8393 maybe_add_implicit_barrier_cancel (ctx, &new_body);
777f7f9a 8394
726a989a 8395 gimple_bind_set_body (new_stmt, new_body);
953ff289
DN
8396}
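
/* Schematically, the replacement bind built above is laid out as

	<ilist>				input clause setup
	GIMPLE_OMP_SECTIONS <clauses>
	GIMPLE_OMP_SECTIONS_SWITCH
	<bind holding the lowered section bodies>
	GIMPLE_OMP_CONTINUE (.section, .section)
	<olist>				reduction epilogue
	cancel_label:			only if ctx->cancellable
	<dlist>				lastprivate/destructor code
	GIMPLE_OMP_RETURN [nowait]
*/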
8397
8398
50674e96 8399/* A subroutine of lower_omp_single. Expand the simple form of
726a989a 8400 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
953ff289
DN
8401
8402 if (GOMP_single_start ())
8403 BODY;
8404 [ GOMP_barrier (); ] -> unless 'nowait' is present.
50674e96
DN
8405
8406 FIXME. It may be better to delay expanding the logic of this until
8407 pass_expand_omp. The expanded logic may make the job more difficult
8408 for a synchronization analysis pass. */
953ff289
DN
8409
8410static void
726a989a 8411lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
953ff289 8412{
c2255bc4
AH
8413 location_t loc = gimple_location (single_stmt);
8414 tree tlabel = create_artificial_label (loc);
8415 tree flabel = create_artificial_label (loc);
726a989a
RB
8416 gimple call, cond;
8417 tree lhs, decl;
8418
e79983f4 8419 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
726a989a
RB
8420 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
8421 call = gimple_build_call (decl, 0);
8422 gimple_call_set_lhs (call, lhs);
8423 gimple_seq_add_stmt (pre_p, call);
8424
8425 cond = gimple_build_cond (EQ_EXPR, lhs,
db3927fb
AH
8426 fold_convert_loc (loc, TREE_TYPE (lhs),
8427 boolean_true_node),
726a989a
RB
8428 tlabel, flabel);
8429 gimple_seq_add_stmt (pre_p, cond);
8430 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8431 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8432 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
953ff289
DN
8433}
8434
50674e96
DN
8435
8436/* A subroutine of lower_omp_single. Expand a GIMPLE_OMP_SINGLE
726a989a 8437 that has a copyprivate clause:
953ff289
DN
8438
8439 #pragma omp single copyprivate (a, b, c)
8440
8441 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8442
8443 {
8444 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8445 {
8446 BODY;
8447 copyout.a = a;
8448 copyout.b = b;
8449 copyout.c = c;
8450 GOMP_single_copy_end (&copyout);
8451 }
8452 else
8453 {
8454 a = copyout_p->a;
8455 b = copyout_p->b;
8456 c = copyout_p->c;
8457 }
8458 GOMP_barrier ();
8459 }
50674e96
DN
8460
8461 FIXME. It may be better to delay expanding the logic of this until
8462 pass_expand_omp. The expanded logic may make the job more difficult
8463 for a synchronization analysis pass. */
953ff289
DN
8464
8465static void
726a989a 8466lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
953ff289 8467{
e79983f4 8468 tree ptr_type, t, l0, l1, l2, bfn_decl;
726a989a 8469 gimple_seq copyin_seq;
c2255bc4 8470 location_t loc = gimple_location (single_stmt);
953ff289
DN
8471
8472 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8473
8474 ptr_type = build_pointer_type (ctx->record_type);
8475 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8476
c2255bc4
AH
8477 l0 = create_artificial_label (loc);
8478 l1 = create_artificial_label (loc);
8479 l2 = create_artificial_label (loc);
953ff289 8480
e79983f4
MM
8481 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8482 t = build_call_expr_loc (loc, bfn_decl, 0);
db3927fb 8483 t = fold_convert_loc (loc, ptr_type, t);
726a989a 8484 gimplify_assign (ctx->receiver_decl, t, pre_p);
953ff289
DN
8485
8486 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8487 build_int_cst (ptr_type, 0));
8488 t = build3 (COND_EXPR, void_type_node, t,
8489 build_and_jump (&l0), build_and_jump (&l1));
8490 gimplify_and_add (t, pre_p);
8491
726a989a 8492 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
953ff289 8493
726a989a 8494 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
953ff289
DN
8495
8496 copyin_seq = NULL;
726a989a 8497 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
953ff289
DN
8498 &copyin_seq, ctx);
8499
db3927fb 8500 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
e79983f4
MM
8501 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8502 t = build_call_expr_loc (loc, bfn_decl, 1, t);
953ff289
DN
8503 gimplify_and_add (t, pre_p);
8504
8505 t = build_and_jump (&l2);
8506 gimplify_and_add (t, pre_p);
8507
726a989a 8508 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
953ff289 8509
726a989a 8510 gimple_seq_add_seq (pre_p, copyin_seq);
953ff289 8511
726a989a 8512 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
953ff289
DN
8513}
8514
50674e96 8515
953ff289
DN
8516/* Lower code for an OpenMP single directive. */
8517
8518static void
726a989a 8519lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8520{
726a989a
RB
8521 tree block;
8522 gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
acf0174b 8523 gimple_seq bind_body, bind_body_tail = NULL, dlist;
d406b663 8524 struct gimplify_ctx gctx;
953ff289 8525
d406b663 8526 push_gimplify_context (&gctx);
953ff289 8527
355a7673
MM
8528 block = make_node (BLOCK);
8529 bind = gimple_build_bind (NULL, NULL, block);
8530 gsi_replace (gsi_p, bind, true);
726a989a 8531 bind_body = NULL;
355a7673 8532 dlist = NULL;
726a989a 8533 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
acf0174b 8534 &bind_body, &dlist, ctx, NULL);
355a7673 8535 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
953ff289 8536
726a989a 8537 gimple_seq_add_stmt (&bind_body, single_stmt);
953ff289
DN
8538
8539 if (ctx->record_type)
726a989a 8540 lower_omp_single_copy (single_stmt, &bind_body, ctx);
953ff289 8541 else
726a989a
RB
8542 lower_omp_single_simple (single_stmt, &bind_body);
8543
8544 gimple_omp_set_body (single_stmt, NULL);
953ff289 8545
726a989a 8546 gimple_seq_add_seq (&bind_body, dlist);
777f7f9a 8547
726a989a 8548 bind_body = maybe_catch_exception (bind_body);
777f7f9a 8549
b8698a0f 8550 t = gimple_build_omp_return
726a989a
RB
8551 (!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
8552 OMP_CLAUSE_NOWAIT));
acf0174b
JJ
8553 gimple_seq_add_stmt (&bind_body_tail, t);
8554 maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
8555 if (ctx->record_type)
8556 {
8557 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8558 tree clobber = build_constructor (ctx->record_type, NULL);
8559 TREE_THIS_VOLATILE (clobber) = 1;
8560 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8561 clobber), GSI_SAME_STMT);
8562 }
8563 gimple_seq_add_seq (&bind_body, bind_body_tail);
355a7673 8564 gimple_bind_set_body (bind, bind_body);
777f7f9a 8565
953ff289 8566 pop_gimplify_context (bind);
50674e96 8567
726a989a
RB
8568 gimple_bind_append_vars (bind, ctx->block_vars);
8569 BLOCK_VARS (block) = ctx->block_vars;
b357f682
JJ
8570 if (BLOCK_VARS (block))
8571 TREE_USED (block) = 1;
953ff289
DN
8572}
8573
50674e96 8574
953ff289
DN
8575/* Lower code for an OpenMP master directive. */
8576
8577static void
726a989a 8578lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8579{
e79983f4 8580 tree block, lab = NULL, x, bfn_decl;
726a989a 8581 gimple stmt = gsi_stmt (*gsi_p), bind;
db3927fb 8582 location_t loc = gimple_location (stmt);
726a989a 8583 gimple_seq tseq;
d406b663 8584 struct gimplify_ctx gctx;
953ff289 8585
d406b663 8586 push_gimplify_context (&gctx);
953ff289
DN
8587
8588 block = make_node (BLOCK);
355a7673
MM
8589 bind = gimple_build_bind (NULL, NULL, block);
8590 gsi_replace (gsi_p, bind, true);
8591 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8592
e79983f4
MM
8593 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8594 x = build_call_expr_loc (loc, bfn_decl, 0);
953ff289
DN
8595 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8596 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
726a989a
RB
8597 tseq = NULL;
8598 gimplify_and_add (x, &tseq);
8599 gimple_bind_add_seq (bind, tseq);
953ff289 8600
355a7673 8601 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8602 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8603 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8604 gimple_omp_set_body (stmt, NULL);
953ff289 8605
726a989a 8606 gimple_bind_add_stmt (bind, gimple_build_label (lab));
777f7f9a 8607
726a989a 8608 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
777f7f9a 8609
953ff289 8610 pop_gimplify_context (bind);
50674e96 8611
726a989a
RB
8612 gimple_bind_append_vars (bind, ctx->block_vars);
8613 BLOCK_VARS (block) = ctx->block_vars;
953ff289
DN
8614}
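
/* Schematically, the master body is guarded as

	if (omp_get_thread_num () == 0)
	  ;				<-- fall through into BODY
	else
	  goto lab;
	BODY;
	lab:
	GIMPLE_OMP_RETURN (nowait)

   Only the master thread runs BODY; the return is built with nowait
   set because master has no implied barrier.  */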
8615
50674e96 8616
acf0174b
JJ
8617/* Lower code for an OpenMP taskgroup directive. */
8618
8619static void
8620lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8621{
8622 gimple stmt = gsi_stmt (*gsi_p), bind, x;
8623 tree block = make_node (BLOCK);
8624
8625 bind = gimple_build_bind (NULL, NULL, block);
8626 gsi_replace (gsi_p, bind, true);
8627 gimple_bind_add_stmt (bind, stmt);
8628
8629 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8630 0);
8631 gimple_bind_add_stmt (bind, x);
8632
8633 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8634 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8635 gimple_omp_set_body (stmt, NULL);
8636
8637 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8638
8639 gimple_bind_append_vars (bind, ctx->block_vars);
8640 BLOCK_VARS (block) = ctx->block_vars;
8641}
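
/* Schematically:

	GOMP_taskgroup_start ();
	BODY;				<-- the matching GOMP_taskgroup_end
	GIMPLE_OMP_RETURN (nowait)	    is already inside BODY; the
					    gimplifier wrapped the body in
					    a try/finally that calls it
*/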
8642
8643
953ff289
DN
8644/* Lower code for an OpenMP ordered directive. */
8645
8646static void
726a989a 8647lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8648{
726a989a
RB
8649 tree block;
8650 gimple stmt = gsi_stmt (*gsi_p), bind, x;
d406b663 8651 struct gimplify_ctx gctx;
953ff289 8652
d406b663 8653 push_gimplify_context (&gctx);
953ff289
DN
8654
8655 block = make_node (BLOCK);
355a7673
MM
8656 bind = gimple_build_bind (NULL, NULL, block);
8657 gsi_replace (gsi_p, bind, true);
8658 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8659
e79983f4
MM
8660 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
8661 0);
726a989a 8662 gimple_bind_add_stmt (bind, x);
953ff289 8663
355a7673 8664 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8665 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8666 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8667 gimple_omp_set_body (stmt, NULL);
953ff289 8668
e79983f4 8669 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 0);
726a989a 8670 gimple_bind_add_stmt (bind, x);
777f7f9a 8671
726a989a 8672 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
777f7f9a 8673
953ff289 8674 pop_gimplify_context (bind);
50674e96 8675
726a989a
RB
8676 gimple_bind_append_vars (bind, ctx->block_vars);
8677 BLOCK_VARS (block) = gimple_bind_vars (bind);
953ff289
DN
8678}
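
/* Schematically:

	GOMP_ordered_start ();
	BODY;
	GOMP_ordered_end ();
	GIMPLE_OMP_RETURN (nowait)
*/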
8679
953ff289 8680
726a989a 8681/* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
953ff289
DN
8682 substitution of a couple of function calls. But the NAMED case
8683 requires that languages coordinate a symbol name. It is therefore
8684 best put here in common code. */
8685
8686static GTY((param1_is (tree), param2_is (tree)))
8687 splay_tree critical_name_mutexes;
8688
8689static void
726a989a 8690lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
953ff289 8691{
726a989a
RB
8692 tree block;
8693 tree name, lock, unlock;
8694 gimple stmt = gsi_stmt (*gsi_p), bind;
db3927fb 8695 location_t loc = gimple_location (stmt);
726a989a 8696 gimple_seq tbody;
d406b663 8697 struct gimplify_ctx gctx;
953ff289 8698
726a989a 8699 name = gimple_omp_critical_name (stmt);
953ff289
DN
8700 if (name)
8701 {
5039610b 8702 tree decl;
953ff289
DN
8703 splay_tree_node n;
8704
8705 if (!critical_name_mutexes)
8706 critical_name_mutexes
a9429e29
LB
8707 = splay_tree_new_ggc (splay_tree_compare_pointers,
8708 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
8709 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);
953ff289
DN
8710
8711 n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
8712 if (n == NULL)
8713 {
8714 char *new_str;
8715
8716 decl = create_tmp_var_raw (ptr_type_node, NULL);
8717
8718 new_str = ACONCAT ((".gomp_critical_user_",
8719 IDENTIFIER_POINTER (name), NULL));
8720 DECL_NAME (decl) = get_identifier (new_str);
8721 TREE_PUBLIC (decl) = 1;
8722 TREE_STATIC (decl) = 1;
8723 DECL_COMMON (decl) = 1;
8724 DECL_ARTIFICIAL (decl) = 1;
8725 DECL_IGNORED_P (decl) = 1;
8a4a83ed 8726 varpool_finalize_decl (decl);
953ff289
DN
8727
8728 splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
8729 (splay_tree_value) decl);
8730 }
8731 else
8732 decl = (tree) n->value;
8733
e79983f4 8734 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
db3927fb 8735 lock = build_call_expr_loc (loc, lock, 1, build_fold_addr_expr_loc (loc, decl));
953ff289 8736
e79983f4 8737 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
db3927fb
AH
8738 unlock = build_call_expr_loc (loc, unlock, 1,
8739 build_fold_addr_expr_loc (loc, decl));
953ff289
DN
8740 }
8741 else
8742 {
e79983f4 8743 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
db3927fb 8744 lock = build_call_expr_loc (loc, lock, 0);
953ff289 8745
e79983f4 8746 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
db3927fb 8747 unlock = build_call_expr_loc (loc, unlock, 0);
953ff289
DN
8748 }
8749
d406b663 8750 push_gimplify_context (&gctx);
953ff289
DN
8751
8752 block = make_node (BLOCK);
355a7673
MM
8753 bind = gimple_build_bind (NULL, NULL, block);
8754 gsi_replace (gsi_p, bind, true);
8755 gimple_bind_add_stmt (bind, stmt);
777f7f9a 8756
726a989a
RB
8757 tbody = gimple_bind_body (bind);
8758 gimplify_and_add (lock, &tbody);
8759 gimple_bind_set_body (bind, tbody);
953ff289 8760
355a7673 8761 lower_omp (gimple_omp_body_ptr (stmt), ctx);
726a989a
RB
8762 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8763 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8764 gimple_omp_set_body (stmt, NULL);
953ff289 8765
726a989a
RB
8766 tbody = gimple_bind_body (bind);
8767 gimplify_and_add (unlock, &tbody);
8768 gimple_bind_set_body (bind, tbody);
777f7f9a 8769
726a989a 8770 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
953ff289
DN
8771
8772 pop_gimplify_context (bind);
726a989a
RB
8773 gimple_bind_append_vars (bind, ctx->block_vars);
8774 BLOCK_VARS (block) = gimple_bind_vars (bind);
50674e96
DN
8775}
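
/* Schematically, for "#pragma omp critical (lck)" the body is wrapped as

	GOMP_critical_name_start (&.gomp_critical_user_lck);
	BODY;
	GOMP_critical_name_end (&.gomp_critical_user_lck);

   where .gomp_critical_user_lck is the TREE_PUBLIC/DECL_COMMON mutex
   created above, so all translation units using the same name share
   one lock.  The unnamed form uses GOMP_critical_start/_end instead.  */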
8776
8777
8778/* A subroutine of lower_omp_for. Generate code to emit the predicate
8779 for a lastprivate clause. Given a loop control predicate of (V
8780 cond N2), we gate the clause on (!(V cond N2)). The lowered form
3d55c64b
JJ
8781 is appended to *DLIST; iterator initialization is appended to
8782 *BODY_P. */
50674e96
DN
8783
8784static void
726a989a
RB
8785lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8786 gimple_seq *dlist, struct omp_context *ctx)
50674e96 8787{
726a989a 8788 tree clauses, cond, vinit;
50674e96 8789 enum tree_code cond_code;
726a989a 8790 gimple_seq stmts;
b8698a0f 8791
a68ab351 8792 cond_code = fd->loop.cond_code;
50674e96
DN
8793 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
8794
8795 /* When possible, use a strict equality expression. This can let VRP
8796 type optimizations deduce the value and remove a copy. */
a68ab351 8797 if (host_integerp (fd->loop.step, 0))
50674e96 8798 {
a68ab351 8799 HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
50674e96
DN
8800 if (step == 1 || step == -1)
8801 cond_code = EQ_EXPR;
8802 }
8803
a68ab351 8804 cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
50674e96 8805
726a989a 8806 clauses = gimple_omp_for_clauses (fd->for_stmt);
3d55c64b
JJ
8807 stmts = NULL;
8808 lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
726a989a 8809 if (!gimple_seq_empty_p (stmts))
3d55c64b 8810 {
726a989a 8811 gimple_seq_add_seq (&stmts, *dlist);
a68ab351 8812 *dlist = stmts;
3d55c64b
JJ
8813
8814 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
a68ab351 8815 vinit = fd->loop.n1;
3d55c64b 8816 if (cond_code == EQ_EXPR
a68ab351
JJ
8817 && host_integerp (fd->loop.n2, 0)
8818 && ! integer_zerop (fd->loop.n2))
8819 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
74bf76ed
JJ
8820 else
8821 vinit = unshare_expr (vinit);
3d55c64b
JJ
8822
8823 /* Initialize the iterator variable, so that threads that don't execute
8824 any iterations don't execute the lastprivate clauses by accident. */
726a989a 8825 gimplify_assign (fd->loop.v, vinit, body_p);
3d55c64b 8826 }
50674e96
DN
8827}
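
/* E.g. for a lastprivate loop with condition (V < N2) and unit step
   (schematic; names invented):

	V = N1;			<-- added to *BODY_P so threads that run
				    no iterations still see a defined V
	... loop ...
	if (V == N2)		<-- *DLIST; EQ_EXPR thanks to the unit
	  <lastprivate copy-out>;   step, GE_EXPR/LE_EXPR otherwise

   (When N2 is a nonzero constant, V is instead initialized to 0, the
   cheaper of the two constants.)  */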
8828
8829
8830/* Lower code for an OpenMP loop directive. */
8831
8832static void
726a989a 8833lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 8834{
726a989a 8835 tree *rhs_p, block;
acf0174b 8836 struct omp_for_data fd, *fdp = NULL;
726a989a 8837 gimple stmt = gsi_stmt (*gsi_p), new_stmt;
0f900dfa 8838 gimple_seq omp_for_body, body, dlist;
726a989a 8839 size_t i;
d406b663 8840 struct gimplify_ctx gctx;
50674e96 8841
d406b663 8842 push_gimplify_context (&gctx);
50674e96 8843
355a7673 8844 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
50674e96 8845
b357f682 8846 block = make_node (BLOCK);
726a989a 8847 new_stmt = gimple_build_bind (NULL, NULL, block);
355a7673
MM
8848 /* Replace at gsi right away, so that 'stmt' is no longer a member
8849 of a sequence, as we're going to add it to a different
8850 one below. */
8851 gsi_replace (gsi_p, new_stmt, true);
b357f682 8852
50674e96
DN
8853 /* Move declaration of temporaries in the loop body before we make
8854 it go away. */
726a989a
RB
8855 omp_for_body = gimple_omp_body (stmt);
8856 if (!gimple_seq_empty_p (omp_for_body)
8857 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8858 {
8859 tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
8860 gimple_bind_append_vars (new_stmt, vars);
8861 }
50674e96 8862
acf0174b
JJ
8863 if (gimple_omp_for_combined_into_p (stmt))
8864 {
8865 extract_omp_for_data (stmt, &fd, NULL);
8866 fdp = &fd;
8867
8868 /* We need two temporaries with fd.loop.v type (istart/iend)
8869 and then (fd.collapse - 1) temporaries with the same
8870 type for count2 ... countN-1 vars if not constant. */
8871 size_t count = 2;
8872 tree type = fd.iter_type;
8873 if (fd.collapse > 1
8874 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8875 count += fd.collapse - 1;
8876 bool parallel_for = gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR;
8877 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8878 tree clauses = *pc;
8879 if (parallel_for)
8880 outerc
8881 = find_omp_clause (gimple_omp_parallel_clauses (ctx->outer->stmt),
8882 OMP_CLAUSE__LOOPTEMP_);
8883 for (i = 0; i < count; i++)
8884 {
8885 tree temp;
8886 if (parallel_for)
8887 {
8888 gcc_assert (outerc);
8889 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8890 outerc = find_omp_clause (OMP_CLAUSE_CHAIN (outerc),
8891 OMP_CLAUSE__LOOPTEMP_);
8892 }
8893 else
8894 temp = create_tmp_var (type, NULL);
8895 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8896 OMP_CLAUSE_DECL (*pc) = temp;
8897 pc = &OMP_CLAUSE_CHAIN (*pc);
8898 }
8899 *pc = clauses;
8900 }
8901
726a989a 8902 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
50674e96 8903 dlist = NULL;
726a989a 8904 body = NULL;
acf0174b
JJ
8905 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8906 fdp);
726a989a 8907 gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
50674e96 8908
74bf76ed
JJ
8909 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8910
50674e96
DN
8911 /* Lower the header expressions. At this point, we can assume that
8912 the header is of the form:
8913
8914 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8915
8916 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8917 using the .omp_data_s mapping, if needed. */
726a989a 8918 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
a68ab351 8919 {
726a989a 8920 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
a68ab351 8921 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8922 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8923
726a989a 8924 rhs_p = gimple_omp_for_final_ptr (stmt, i);
a68ab351 8925 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8926 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8927
726a989a 8928 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
a68ab351 8929 if (!is_gimple_min_invariant (*rhs_p))
726a989a 8930 *rhs_p = get_formal_tmp_var (*rhs_p, &body);
a68ab351 8931 }
50674e96
DN
8932
8933 /* Once lowered, extract the bounds and clauses. */
a68ab351 8934 extract_omp_for_data (stmt, &fd, NULL);
50674e96 8935
726a989a 8936 lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
50674e96 8937
726a989a
RB
8938 gimple_seq_add_stmt (&body, stmt);
8939 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
777f7f9a 8940
726a989a
RB
8941 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8942 fd.loop.v));
777f7f9a 8943
50674e96 8944 /* After the loop, add exit clauses. */
726a989a 8945 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
acf0174b
JJ
8946
8947 if (ctx->cancellable)
8948 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8949
726a989a 8950 gimple_seq_add_seq (&body, dlist);
50674e96 8951
726a989a 8952 body = maybe_catch_exception (body);
4a31b7ee 8953
777f7f9a 8954 /* Region exit marker goes at the end of the loop body. */
726a989a 8955 gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
acf0174b 8956 maybe_add_implicit_barrier_cancel (ctx, &body);
b357f682 8957 pop_gimplify_context (new_stmt);
726a989a
RB
8958
8959 gimple_bind_append_vars (new_stmt, ctx->block_vars);
8960 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
b357f682
JJ
8961 if (BLOCK_VARS (block))
8962 TREE_USED (block) = 1;
50674e96 8963
726a989a
RB
8964 gimple_bind_set_body (new_stmt, body);
8965 gimple_omp_set_body (stmt, NULL);
8966 gimple_omp_for_set_pre_body (stmt, NULL);
953ff289
DN
8967}
8968
b8698a0f 8969/* Callback for walk_gimple_seq. Check if the current statement only contains
726a989a 8970 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
69f1837b
JJ
8971
8972static tree
726a989a
RB
8973check_combined_parallel (gimple_stmt_iterator *gsi_p,
8974 bool *handled_ops_p,
8975 struct walk_stmt_info *wi)
69f1837b 8976{
d3bfe4de 8977 int *info = (int *) wi->info;
726a989a 8978 gimple stmt = gsi_stmt (*gsi_p);
69f1837b 8979
726a989a
RB
8980 *handled_ops_p = true;
8981 switch (gimple_code (stmt))
69f1837b 8982 {
726a989a
RB
8983 WALK_SUBSTMTS;
8984
8985 case GIMPLE_OMP_FOR:
8986 case GIMPLE_OMP_SECTIONS:
69f1837b
JJ
8987 *info = *info == 0 ? 1 : -1;
8988 break;
8989 default:
8990 *info = -1;
8991 break;
8992 }
8993 return NULL;
8994}
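
/* E.g. the following marks the parallel as combined (ws_num ends up 1),
   letting expansion use a combined runtime entry point such as
   GOMP_parallel_loop_static instead of a plain GOMP_parallel call:

	#pragma omp parallel
	#pragma omp for
	for (i = 0; i < n; i++)
	  body (i);

   Any other statement next to the worksharing construct drives ws_num
   to -1 and disables the optimization.  */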
50674e96 8995
a68ab351
JJ
8996struct omp_taskcopy_context
8997{
8998 /* This field must be at the beginning, as we do "inheritance": Some
8999 callback functions for tree-inline.c (e.g., task_copyfn_copy_decl)
9000 receive a copy_body_data pointer that is up-casted to an
9001 omp_taskcopy_context pointer. */
9002 copy_body_data cb;
9003 omp_context *ctx;
9004};
9005
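/* copy_decl callback for the task copy function: variables that have a
   matching sender field (present in sfield_map) are replaced with fresh
   temporaries; everything else is returned unchanged.  */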
9006static tree
9007task_copyfn_copy_decl (tree var, copy_body_data *cb)
9008{
9009 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
9010
9011 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
9012 return create_tmp_var (TREE_TYPE (var), NULL);
9013
9014 return var;
9015}
9016
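/* Build a variant of record type ORIG_TYPE for the task copy function,
   remapping the type, size and offset of every field through TCCTX's
   copy_body_data.  */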
9017static tree
9018task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
9019{
9020 tree name, new_fields = NULL, type, f;
9021
9022 type = lang_hooks.types.make_type (RECORD_TYPE);
9023 name = DECL_NAME (TYPE_NAME (orig_type));
c2255bc4
AH
9024 name = build_decl (gimple_location (tcctx->ctx->stmt),
9025 TYPE_DECL, name, type);
a68ab351
JJ
9026 TYPE_NAME (type) = name;
9027
9028 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
9029 {
9030 tree new_f = copy_node (f);
9031 DECL_CONTEXT (new_f) = type;
9032 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
9033 TREE_CHAIN (new_f) = new_fields;
726a989a
RB
9034 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9035 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
9036 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
9037 &tcctx->cb, NULL);
a68ab351
JJ
9038 new_fields = new_f;
9039 *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
9040 }
9041 TYPE_FIELDS (type) = nreverse (new_fields);
9042 layout_type (type);
9043 return type;
9044}
9045
9046/* Create task copyfn. */
9047
9048static void
726a989a 9049create_task_copyfn (gimple task_stmt, omp_context *ctx)
a68ab351
JJ
9050{
9051 struct function *child_cfun;
9052 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
9053 tree record_type, srecord_type, bind, list;
9054 bool record_needs_remap = false, srecord_needs_remap = false;
9055 splay_tree_node n;
9056 struct omp_taskcopy_context tcctx;
d406b663 9057 struct gimplify_ctx gctx;
db3927fb 9058 location_t loc = gimple_location (task_stmt);
a68ab351 9059
726a989a 9060 child_fn = gimple_omp_task_copy_fn (task_stmt);
a68ab351
JJ
9061 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
9062 gcc_assert (child_cfun->cfg == NULL);
a68ab351
JJ
9063 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
9064
9065 /* Reset DECL_CONTEXT on function arguments. */
910ad8de 9066 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
a68ab351
JJ
9067 DECL_CONTEXT (t) = child_fn;
9068
9069 /* Populate the function. */
d406b663 9070 push_gimplify_context (&gctx);
af16bc76 9071 push_cfun (child_cfun);
a68ab351
JJ
9072
9073 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
9074 TREE_SIDE_EFFECTS (bind) = 1;
9075 list = NULL;
9076 DECL_SAVED_TREE (child_fn) = bind;
726a989a 9077 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
a68ab351
JJ
9078
9079 /* Remap src and dst argument types if needed. */
9080 record_type = ctx->record_type;
9081 srecord_type = ctx->srecord_type;
910ad8de 9082 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
9083 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9084 {
9085 record_needs_remap = true;
9086 break;
9087 }
910ad8de 9088 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
a68ab351
JJ
9089 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
9090 {
9091 srecord_needs_remap = true;
9092 break;
9093 }
9094
9095 if (record_needs_remap || srecord_needs_remap)
9096 {
9097 memset (&tcctx, '\0', sizeof (tcctx));
9098 tcctx.cb.src_fn = ctx->cb.src_fn;
9099 tcctx.cb.dst_fn = child_fn;
fe660d7b
MJ
9100 tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
9101 gcc_checking_assert (tcctx.cb.src_node);
a68ab351
JJ
9102 tcctx.cb.dst_node = tcctx.cb.src_node;
9103 tcctx.cb.src_cfun = ctx->cb.src_cfun;
9104 tcctx.cb.copy_decl = task_copyfn_copy_decl;
1d65f45c 9105 tcctx.cb.eh_lp_nr = 0;
a68ab351
JJ
9106 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
9107 tcctx.cb.decl_map = pointer_map_create ();
9108 tcctx.ctx = ctx;
9109
9110 if (record_needs_remap)
9111 record_type = task_copyfn_remap_type (&tcctx, record_type);
9112 if (srecord_needs_remap)
9113 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
9114 }
9115 else
9116 tcctx.cb.decl_map = NULL;
9117
a68ab351
JJ
9118 arg = DECL_ARGUMENTS (child_fn);
9119 TREE_TYPE (arg) = build_pointer_type (record_type);
910ad8de 9120 sarg = DECL_CHAIN (arg);
a68ab351
JJ
9121 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
9122
9123 /* First pass: initialize temporaries used in record_type and srecord_type
9124 sizes and field offsets. */
9125 if (tcctx.cb.decl_map)
726a989a 9126 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9127 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9128 {
9129 tree *p;
9130
9131 decl = OMP_CLAUSE_DECL (c);
9132 p = (tree *) pointer_map_contains (tcctx.cb.decl_map, decl);
9133 if (p == NULL)
9134 continue;
9135 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9136 sf = (tree) n->value;
9137 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9138 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9139 src = omp_build_component_ref (src, sf);
726a989a 9140 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
a68ab351
JJ
9141 append_to_statement_list (t, &list);
9142 }
9143
9144 /* Second pass: copy shared var pointers and copy construct non-VLA
9145 firstprivate vars. */
726a989a 9146 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9147 switch (OMP_CLAUSE_CODE (c))
9148 {
9149 case OMP_CLAUSE_SHARED:
9150 decl = OMP_CLAUSE_DECL (c);
9151 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9152 if (n == NULL)
9153 break;
9154 f = (tree) n->value;
9155 if (tcctx.cb.decl_map)
9156 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9157 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9158 sf = (tree) n->value;
9159 if (tcctx.cb.decl_map)
9160 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9161 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9162 src = omp_build_component_ref (src, sf);
70f34814 9163 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9164 dst = omp_build_component_ref (dst, f);
726a989a 9165 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
9166 append_to_statement_list (t, &list);
9167 break;
9168 case OMP_CLAUSE_FIRSTPRIVATE:
9169 decl = OMP_CLAUSE_DECL (c);
9170 if (is_variable_sized (decl))
9171 break;
9172 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9173 if (n == NULL)
9174 break;
9175 f = (tree) n->value;
9176 if (tcctx.cb.decl_map)
9177 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9178 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9179 if (n != NULL)
9180 {
9181 sf = (tree) n->value;
9182 if (tcctx.cb.decl_map)
9183 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9184 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9185 src = omp_build_component_ref (src, sf);
a68ab351 9186 if (use_pointer_for_field (decl, NULL) || is_reference (decl))
70f34814 9187 src = build_simple_mem_ref_loc (loc, src);
a68ab351
JJ
9188 }
9189 else
9190 src = decl;
70f34814 9191 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9192 dst = omp_build_component_ref (dst, f);
a68ab351
JJ
9193 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9194 append_to_statement_list (t, &list);
9195 break;
9196 case OMP_CLAUSE_PRIVATE:
9197 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
9198 break;
9199 decl = OMP_CLAUSE_DECL (c);
9200 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9201 f = (tree) n->value;
9202 if (tcctx.cb.decl_map)
9203 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9204 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
9205 if (n != NULL)
9206 {
9207 sf = (tree) n->value;
9208 if (tcctx.cb.decl_map)
9209 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9210 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9211 src = omp_build_component_ref (src, sf);
a68ab351 9212 if (use_pointer_for_field (decl, NULL))
70f34814 9213 src = build_simple_mem_ref_loc (loc, src);
a68ab351
JJ
9214 }
9215 else
9216 src = decl;
70f34814 9217 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9218 dst = omp_build_component_ref (dst, f);
726a989a 9219 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
a68ab351
JJ
9220 append_to_statement_list (t, &list);
9221 break;
9222 default:
9223 break;
9224 }
9225
9226 /* Last pass: handle VLA firstprivates. */
9227 if (tcctx.cb.decl_map)
726a989a 9228 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
a68ab351
JJ
9229 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9230 {
9231 tree ind, ptr, df;
9232
9233 decl = OMP_CLAUSE_DECL (c);
9234 if (!is_variable_sized (decl))
9235 continue;
9236 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
9237 if (n == NULL)
9238 continue;
9239 f = (tree) n->value;
9240 f = *(tree *) pointer_map_contains (tcctx.cb.decl_map, f);
9241 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
9242 ind = DECL_VALUE_EXPR (decl);
9243 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
9244 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
9245 n = splay_tree_lookup (ctx->sfield_map,
9246 (splay_tree_key) TREE_OPERAND (ind, 0));
9247 sf = (tree) n->value;
9248 sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
70f34814 9249 src = build_simple_mem_ref_loc (loc, sarg);
a9a58711 9250 src = omp_build_component_ref (src, sf);
70f34814
RG
9251 src = build_simple_mem_ref_loc (loc, src);
9252 dst = build_simple_mem_ref_loc (loc, arg);
a9a58711 9253 dst = omp_build_component_ref (dst, f);
a68ab351
JJ
9254 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
9255 append_to_statement_list (t, &list);
9256 n = splay_tree_lookup (ctx->field_map,
9257 (splay_tree_key) TREE_OPERAND (ind, 0));
9258 df = (tree) n->value;
9259 df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
70f34814 9260 ptr = build_simple_mem_ref_loc (loc, arg);
a9a58711 9261 ptr = omp_build_component_ref (ptr, df);
726a989a 9262 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
db3927fb 9263 build_fold_addr_expr_loc (loc, dst));
a68ab351
JJ
9264 append_to_statement_list (t, &list);
9265 }
9266
9267 t = build1 (RETURN_EXPR, void_type_node, NULL);
9268 append_to_statement_list (t, &list);
9269
9270 if (tcctx.cb.decl_map)
9271 pointer_map_destroy (tcctx.cb.decl_map);
9272 pop_gimplify_context (NULL);
9273 BIND_EXPR_BODY (bind) = list;
9274 pop_cfun ();
a68ab351
JJ
9275}
9276
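/* Lower the depend clauses of task STMT into an array of dependence
   addresses: element 0 holds the total number of depend objects,
   element 1 the number of out/inout entries (which are stored first),
   followed by the addresses themselves.  The setup code is added to
   *ISEQ; a clobber marking the array dead again goes to *OSEQ.  */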
acf0174b
JJ
9277static void
9278lower_depend_clauses (gimple stmt, gimple_seq *iseq, gimple_seq *oseq)
9279{
9280 tree c, clauses;
9281 gimple g;
9282 size_t n_in = 0, n_out = 0, idx = 2, i;
9283
9284 clauses = find_omp_clause (gimple_omp_task_clauses (stmt),
9285 OMP_CLAUSE_DEPEND);
9286 gcc_assert (clauses);
9287 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9288 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
9289 switch (OMP_CLAUSE_DEPEND_KIND (c))
9290 {
9291 case OMP_CLAUSE_DEPEND_IN:
9292 n_in++;
9293 break;
9294 case OMP_CLAUSE_DEPEND_OUT:
9295 case OMP_CLAUSE_DEPEND_INOUT:
9296 n_out++;
9297 break;
9298 default:
9299 gcc_unreachable ();
9300 }
9301 tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
9302 tree array = create_tmp_var (type, NULL);
9303 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
9304 NULL_TREE);
9305 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
9306 gimple_seq_add_stmt (iseq, g);
9307 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
9308 NULL_TREE);
9309 g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
9310 gimple_seq_add_stmt (iseq, g);
9311 for (i = 0; i < 2; i++)
9312 {
9313 if ((i ? n_in : n_out) == 0)
9314 continue;
9315 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9316 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9317 && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
9318 {
9319 tree t = OMP_CLAUSE_DECL (c);
9320 t = fold_convert (ptr_type_node, t);
9321 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
9322 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
9323 NULL_TREE, NULL_TREE);
9324 g = gimple_build_assign (r, t);
9325 gimple_seq_add_stmt (iseq, g);
9326 }
9327 }
9328 tree *p = gimple_omp_task_clauses_ptr (stmt);
9329 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
9330 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
9331 OMP_CLAUSE_CHAIN (c) = *p;
9332 *p = c;
9333 tree clobber = build_constructor (type, NULL);
9334 TREE_THIS_VOLATILE (clobber) = 1;
9335 g = gimple_build_assign (array, clobber);
9336 gimple_seq_add_stmt (oseq, g);
9337}
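
/* E.g. for "#pragma omp task depend(out: a) depend(in: b, c)" the array
   built above is, schematically,

	void *.dep[5];
	.dep[0] = (void *) 3;	<-- total number of depend objects
	.dep[1] = (void *) 1;	<-- out/inout entries, stored first
	.dep[2] = &a;
	.dep[3] = &b;
	.dep[4] = &c;

   and a fresh OMP_CLAUSE_DEPEND holding its address is prepended to the
   task's clause list for the runtime to consume.  */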
9338
726a989a
RB
9339/* Lower the OpenMP parallel or task directive in the current statement
9340 in GSI_P. CTX holds context information for the directive. */
50674e96
DN
9341
9342static void
726a989a 9343lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
50674e96 9344{
726a989a
RB
9345 tree clauses;
9346 tree child_fn, t;
9347 gimple stmt = gsi_stmt (*gsi_p);
acf0174b
JJ
9348 gimple par_bind, bind, dep_bind = NULL;
9349 gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
9350 struct gimplify_ctx gctx, dep_gctx;
db3927fb 9351 location_t loc = gimple_location (stmt);
50674e96 9352
726a989a
RB
9353 clauses = gimple_omp_taskreg_clauses (stmt);
9354 par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
9355 par_body = gimple_bind_body (par_bind);
50674e96 9356 child_fn = ctx->cb.dst_fn;
726a989a
RB
9357 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9358 && !gimple_omp_parallel_combined_p (stmt))
69f1837b
JJ
9359 {
9360 struct walk_stmt_info wi;
9361 int ws_num = 0;
9362
9363 memset (&wi, 0, sizeof (wi));
69f1837b
JJ
9364 wi.info = &ws_num;
9365 wi.val_only = true;
726a989a 9366 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
69f1837b 9367 if (ws_num == 1)
726a989a 9368 gimple_omp_parallel_set_combined_p (stmt, true);
69f1837b 9369 }
acf0174b
JJ
9370 gimple_seq dep_ilist = NULL;
9371 gimple_seq dep_olist = NULL;
9372 if (gimple_code (stmt) == GIMPLE_OMP_TASK
9373 && find_omp_clause (clauses, OMP_CLAUSE_DEPEND))
9374 {
9375 push_gimplify_context (&dep_gctx);
9376 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9377 lower_depend_clauses (stmt, &dep_ilist, &dep_olist);
9378 }
9379
a68ab351
JJ
9380 if (ctx->srecord_type)
9381 create_task_copyfn (stmt, ctx);
50674e96 9382
d406b663 9383 push_gimplify_context (&gctx);
50674e96 9384
726a989a
RB
9385 par_olist = NULL;
9386 par_ilist = NULL;
acf0174b
JJ
9387 par_rlist = NULL;
9388 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
355a7673 9389 lower_omp (&par_body, ctx);
726a989a 9390 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
acf0174b 9391 lower_reduction_clauses (clauses, &par_rlist, ctx);
50674e96
DN
9392
9393 /* Declare all the variables created by mapping and the variables
9394 declared in the scope of the parallel body. */
9395 record_vars_into (ctx->block_vars, child_fn);
726a989a 9396 record_vars_into (gimple_bind_vars (par_bind), child_fn);
50674e96
DN
9397
9398 if (ctx->record_type)
9399 {
a68ab351
JJ
9400 ctx->sender_decl
9401 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
9402 : ctx->record_type, ".omp_data_o");
cd3f04c8 9403 DECL_NAMELESS (ctx->sender_decl) = 1;
628c189e 9404 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
726a989a 9405 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
50674e96
DN
9406 }
9407
726a989a
RB
9408 olist = NULL;
9409 ilist = NULL;
50674e96
DN
9410 lower_send_clauses (clauses, &ilist, &olist, ctx);
9411 lower_send_shared_vars (&ilist, &olist, ctx);
9412
acf0174b
JJ
9413 if (ctx->record_type)
9414 {
9415 tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9416 TREE_THIS_VOLATILE (clobber) = 1;
9417 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9418 clobber));
9419 }
9420
50674e96 9421 /* Once all the expansions are done, sequence all the different
726a989a 9422 fragments inside gimple_omp_body. */
50674e96 9423
726a989a 9424 new_body = NULL;
50674e96
DN
9425
9426 if (ctx->record_type)
9427 {
db3927fb 9428 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
018b899b 9429 /* fixup_child_record_type might have changed receiver_decl's type. */
db3927fb 9430 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
726a989a
RB
9431 gimple_seq_add_stmt (&new_body,
9432 gimple_build_assign (ctx->receiver_decl, t));
50674e96
DN
9433 }
9434
726a989a
RB
9435 gimple_seq_add_seq (&new_body, par_ilist);
9436 gimple_seq_add_seq (&new_body, par_body);
acf0174b
JJ
9437 gimple_seq_add_seq (&new_body, par_rlist);
9438 if (ctx->cancellable)
9439 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
726a989a
RB
9440 gimple_seq_add_seq (&new_body, par_olist);
9441 new_body = maybe_catch_exception (new_body);
9442 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9443 gimple_omp_set_body (stmt, new_body);
50674e96 9444
726a989a 9445 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
acf0174b
JJ
9446 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9447 gimple_bind_add_seq (bind, ilist);
9448 gimple_bind_add_stmt (bind, stmt);
9449 gimple_bind_add_seq (bind, olist);
9450
9451 pop_gimplify_context (NULL);
9452
9453 if (dep_bind)
9454 {
9455 gimple_bind_add_seq (dep_bind, dep_ilist);
9456 gimple_bind_add_stmt (dep_bind, bind);
9457 gimple_bind_add_seq (dep_bind, dep_olist);
9458 pop_gimplify_context (dep_bind);
9459 }
9460}
9461
9462/* Lower the OpenMP target directive in the current statement
9463 in GSI_P. CTX holds context information for the directive. */
9464
9465static void
9466lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9467{
9468 tree clauses;
9469 tree child_fn, t, c;
9470 gimple stmt = gsi_stmt (*gsi_p);
9471 gimple tgt_bind = NULL, bind;
9472 gimple_seq tgt_body = NULL, olist, ilist, new_body;
9473 struct gimplify_ctx gctx;
9474 location_t loc = gimple_location (stmt);
9475 int kind = gimple_omp_target_kind (stmt);
9476 unsigned int map_cnt = 0;
9477
9478 clauses = gimple_omp_target_clauses (stmt);
9479 if (kind == GF_OMP_TARGET_KIND_REGION)
9480 {
9481 tgt_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
9482 tgt_body = gimple_bind_body (tgt_bind);
9483 }
9484 else if (kind == GF_OMP_TARGET_KIND_DATA)
9485 tgt_body = gimple_omp_body (stmt);
9486 child_fn = ctx->cb.dst_fn;
9487
9488 push_gimplify_context (&gctx);
9489
9490 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9491 switch (OMP_CLAUSE_CODE (c))
9492 {
9493 tree var, x;
9494
9495 default:
9496 break;
9497 case OMP_CLAUSE_MAP:
9498 case OMP_CLAUSE_TO:
9499 case OMP_CLAUSE_FROM:
9500 var = OMP_CLAUSE_DECL (c);
9501 if (!DECL_P (var))
9502 {
9503 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9504 || !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9505 map_cnt++;
9506 continue;
9507 }
9508
9509 if (DECL_SIZE (var)
9510 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9511 {
9512 tree var2 = DECL_VALUE_EXPR (var);
9513 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9514 var2 = TREE_OPERAND (var2, 0);
9515 gcc_assert (DECL_P (var2));
9516 var = var2;
9517 }
9518
9519 if (!maybe_lookup_field (var, ctx))
9520 continue;
9521
9522 if (kind == GF_OMP_TARGET_KIND_REGION)
9523 {
9524 x = build_receiver_ref (var, true, ctx);
9525 tree new_var = lookup_decl (var, ctx);
9526 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9527 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
9528 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9529 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9530 x = build_simple_mem_ref (x);
9531 SET_DECL_VALUE_EXPR (new_var, x);
9532 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9533 }
9534 map_cnt++;
9535 }
9536
9537 if (kind == GF_OMP_TARGET_KIND_REGION)
9538 {
9539 target_nesting_level++;
9540 lower_omp (&tgt_body, ctx);
9541 target_nesting_level--;
9542 }
9543 else if (kind == GF_OMP_TARGET_KIND_DATA)
9544 lower_omp (&tgt_body, ctx);
9545
9546 if (kind == GF_OMP_TARGET_KIND_REGION)
9547 {
9548 /* Declare all the variables created by mapping and the variables
9549 declared in the scope of the target body. */
9550 record_vars_into (ctx->block_vars, child_fn);
9551 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9552 }
9553
9554 olist = NULL;
9555 ilist = NULL;
9556 if (ctx->record_type)
9557 {
9558 ctx->sender_decl
9559 = create_tmp_var (ctx->record_type, ".omp_data_arr");
9560 DECL_NAMELESS (ctx->sender_decl) = 1;
9561 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9562 t = make_tree_vec (3);
9563 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9564 TREE_VEC_ELT (t, 1)
9565 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9566 ".omp_data_sizes");
9567 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9568 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9569 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9570 TREE_VEC_ELT (t, 2)
9571 = create_tmp_var (build_array_type_nelts (unsigned_char_type_node,
9572 map_cnt),
9573 ".omp_data_kinds");
9574 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9575 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9576 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9577 gimple_omp_target_set_data_arg (stmt, t);
9578
9579 vec<constructor_elt, va_gc> *vsize;
9580 vec<constructor_elt, va_gc> *vkind;
9581 vec_alloc (vsize, map_cnt);
9582 vec_alloc (vkind, map_cnt);
9583 unsigned int map_idx = 0;
9584
9585 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9586 switch (OMP_CLAUSE_CODE (c))
9587 {
9588 tree ovar, nc;
9589
9590 default:
9591 break;
9592 case OMP_CLAUSE_MAP:
9593 case OMP_CLAUSE_TO:
9594 case OMP_CLAUSE_FROM:
9595 nc = c;
9596 ovar = OMP_CLAUSE_DECL (c);
9597 if (!DECL_P (ovar))
9598 {
9599 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9600 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9601 {
9602 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9603 == get_base_address (ovar));
9604 nc = OMP_CLAUSE_CHAIN (c);
9605 ovar = OMP_CLAUSE_DECL (nc);
9606 }
9607 else
9608 {
9609 tree x = build_sender_ref (ovar, ctx);
9610 tree v
9611 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9612 gimplify_assign (x, v, &ilist);
9613 nc = NULL_TREE;
9614 }
9615 }
9616 else
9617 {
9618 if (DECL_SIZE (ovar)
9619 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9620 {
9621 tree ovar2 = DECL_VALUE_EXPR (ovar);
9622 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9623 ovar2 = TREE_OPERAND (ovar2, 0);
9624 gcc_assert (DECL_P (ovar2));
9625 ovar = ovar2;
9626 }
9627 if (!maybe_lookup_field (ovar, ctx))
9628 continue;
9629 }
9630
9631 if (nc)
9632 {
9633 tree var = lookup_decl_in_outer_ctx (ovar, ctx);
9634 tree x = build_sender_ref (ovar, ctx);
9635 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9636 && OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_POINTER
9637 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9638 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9639 {
9640 gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
9641 tree avar
9642 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)), NULL);
9643 mark_addressable (avar);
9644 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9645 avar = build_fold_addr_expr (avar);
9646 gimplify_assign (x, avar, &ilist);
9647 }
9648 else if (is_gimple_reg (var))
9649 {
9650 gcc_assert (kind == GF_OMP_TARGET_KIND_REGION);
9651 tree avar = create_tmp_var (TREE_TYPE (var), NULL);
9652 mark_addressable (avar);
9653 if (OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_ALLOC
9654 && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_FROM)
9655 gimplify_assign (avar, var, &ilist);
9656 avar = build_fold_addr_expr (avar);
9657 gimplify_assign (x, avar, &ilist);
9658 if ((OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_FROM
9659 || OMP_CLAUSE_MAP_KIND (c) == OMP_CLAUSE_MAP_TOFROM)
9660 && !TYPE_READONLY (TREE_TYPE (var)))
9661 {
9662 x = build_sender_ref (ovar, ctx);
9663 x = build_simple_mem_ref (x);
9664 gimplify_assign (var, x, &olist);
9665 }
9666 }
9667 else
9668 {
9669 var = build_fold_addr_expr (var);
9670 gimplify_assign (x, var, &ilist);
9671 }
9672 }
9673 tree s = OMP_CLAUSE_SIZE (c);
9674 if (s == NULL_TREE)
9675 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9676 s = fold_convert (size_type_node, s);
9677 tree purpose = size_int (map_idx++);
9678 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9679 if (TREE_CODE (s) != INTEGER_CST)
9680 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9681
9682 unsigned char tkind = 0;
9683 switch (OMP_CLAUSE_CODE (c))
9684 {
9685 case OMP_CLAUSE_MAP:
9686 tkind = OMP_CLAUSE_MAP_KIND (c);
9687 break;
9688 case OMP_CLAUSE_TO:
9689 tkind = OMP_CLAUSE_MAP_TO;
9690 break;
9691 case OMP_CLAUSE_FROM:
9692 tkind = OMP_CLAUSE_MAP_FROM;
9693 break;
9694 default:
9695 gcc_unreachable ();
9696 }
9697 unsigned int talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9698 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9699 talign = DECL_ALIGN_UNIT (ovar);
9700 talign = ceil_log2 (talign);
9701 tkind |= talign << 3;
9702 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9703 build_int_cst (unsigned_char_type_node,
9704 tkind));
9705 if (nc && nc != c)
9706 c = nc;
9707 }
9708
9709 gcc_assert (map_idx == map_cnt);
9710
9711 DECL_INITIAL (TREE_VEC_ELT (t, 1))
9712 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9713 DECL_INITIAL (TREE_VEC_ELT (t, 2))
9714 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9715 if (!TREE_STATIC (TREE_VEC_ELT (t, 1)))
9716 {
9717 gimple_seq initlist = NULL;
9718 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9719 TREE_VEC_ELT (t, 1)),
9720 &initlist, true, NULL_TREE);
9721 gimple_seq_add_seq (&ilist, initlist);
9722 }
9723
9724 tree clobber = build_constructor (ctx->record_type, NULL);
9725 TREE_THIS_VOLATILE (clobber) = 1;
9726 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9727 clobber));
9728 }
9729
9730 /* Once all the expansions are done, sequence all the different
9731 fragments inside gimple_omp_body. */
9732
9733 new_body = NULL;
9734
9735 if (ctx->record_type && kind == GF_OMP_TARGET_KIND_REGION)
9736 {
9737 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9738 /* fixup_child_record_type might have changed receiver_decl's type. */
9739 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9740 gimple_seq_add_stmt (&new_body,
9741 gimple_build_assign (ctx->receiver_decl, t));
9742 }
9743
9744 if (kind == GF_OMP_TARGET_KIND_REGION)
9745 {
9746 gimple_seq_add_seq (&new_body, tgt_body);
9747 new_body = maybe_catch_exception (new_body);
9748 }
9749 else if (kind == GF_OMP_TARGET_KIND_DATA)
9750 new_body = tgt_body;
9751 if (kind != GF_OMP_TARGET_KIND_UPDATE)
9752 {
9753 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9754 gimple_omp_set_body (stmt, new_body);
9755 }
9756
9757 bind = gimple_build_bind (NULL, NULL,
9758 tgt_bind ? gimple_bind_block (tgt_bind)
9759 : NULL_TREE);
9760 gsi_replace (gsi_p, bind, true);
9761 gimple_bind_add_seq (bind, ilist);
9762 gimple_bind_add_stmt (bind, stmt);
9763 gimple_bind_add_seq (bind, olist);
9764
9765 pop_gimplify_context (NULL);
9766}
9767
9768/* Expand code for an OpenMP teams directive. */
9769
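/* As an illustration (a sketch only; the generated GIMPLE differs in
   detail),

     #pragma omp teams num_teams (N) thread_limit (M)
       body;

   is lowered to approximately:

     num_teams.0 = N;		   (0 if the clause is absent)
     thread_limit.1 = M;	   (likewise)
     #pragma omp teams
     __builtin_GOMP_teams (num_teams.0, thread_limit.1);
     body;
     OMP_RETURN  */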
9770static void
9771lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9772{
9773 gimple teams_stmt = gsi_stmt (*gsi_p);
9774 struct gimplify_ctx gctx;
9775 push_gimplify_context (&gctx);
9776
9777 tree block = make_node (BLOCK);
9778 gimple bind = gimple_build_bind (NULL, NULL, block);
9779 gsi_replace (gsi_p, bind, true);
9780 gimple_seq bind_body = NULL;
9781 gimple_seq dlist = NULL;
9782 gimple_seq olist = NULL;
9783
9784 tree num_teams = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
9785 OMP_CLAUSE_NUM_TEAMS);
9786 if (num_teams == NULL_TREE)
9787 num_teams = build_int_cst (unsigned_type_node, 0);
9788 else
9789 {
9790 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
9791 num_teams = fold_convert (unsigned_type_node, num_teams);
9792 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
9793 }
9794 tree thread_limit = find_omp_clause (gimple_omp_teams_clauses (teams_stmt),
9795 OMP_CLAUSE_THREAD_LIMIT);
9796 if (thread_limit == NULL_TREE)
9797 thread_limit = build_int_cst (unsigned_type_node, 0);
9798 else
9799 {
9800 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
9801 thread_limit = fold_convert (unsigned_type_node, thread_limit);
9802 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
9803 fb_rvalue);
9804 }
9805
9806 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
9807 &bind_body, &dlist, ctx, NULL);
9808 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
9809 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
9810 gimple_seq_add_stmt (&bind_body, teams_stmt);
9811
9812 location_t loc = gimple_location (teams_stmt);
9813 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
9814 gimple call = gimple_build_call (decl, 2, num_teams, thread_limit);
9815 gimple_set_location (call, loc);
9816 gimple_seq_add_stmt (&bind_body, call);
9817
9818 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
9819 gimple_omp_set_body (teams_stmt, NULL);
9820 gimple_seq_add_seq (&bind_body, olist);
9821 gimple_seq_add_seq (&bind_body, dlist);
9822 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
9823 gimple_bind_set_body (bind, bind_body);
9824
9825 pop_gimplify_context (bind);
9826
9827 gimple_bind_append_vars (bind, ctx->block_vars);
9828 BLOCK_VARS (block) = ctx->block_vars;
9829 if (BLOCK_VARS (block))
9830 TREE_USED (block) = 1;
9831}
9832
9833
9834/* Callback for lower_omp_1. Return non-NULL if *tp needs to be
9835 regimplified. If DATA is non-NULL, lower_omp_1 is outside
9836 of OpenMP context, but with task_shared_vars set. */
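/* E.g. a variable that OMP lowering gave a DECL_VALUE_EXPR, such as a
   privatized copy now accessed as .omp_data_i->x (an illustrative name),
   must be regimplified wherever it still appears.  */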
9837
9838static tree
9839lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
9840 void *data)
9841{
9842 tree t = *tp;
9843
9844 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
9845 if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
9846 return t;
9847
9848 if (task_shared_vars
9849 && DECL_P (t)
9850 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
9851 return t;
9852
9853 /* If a global variable has been privatized, TREE_CONSTANT on
9854 ADDR_EXPR might be wrong. */
9855 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
9856 recompute_tree_invariant_for_addr_expr (t);
9857
9858 *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
9859 return NULL_TREE;
9860}
9861
9862static void
9863lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9864{
9865 gimple stmt = gsi_stmt (*gsi_p);
9866 struct walk_stmt_info wi;
9867
9868 if (gimple_has_location (stmt))
9869 input_location = gimple_location (stmt);
9870
9871 if (task_shared_vars)
9872 memset (&wi, '\0', sizeof (wi));
9873
9874 /* If we have issued syntax errors, avoid doing any heavy lifting.
9875 Just replace the OpenMP directives with a NOP to avoid
9876 confusing RTL expansion. */
9877 if (seen_error () && is_gimple_omp (stmt))
9878 {
9879 gsi_replace (gsi_p, gimple_build_nop (), true);
9880 return;
9881 }
9882
9883 switch (gimple_code (stmt))
9884 {
9885 case GIMPLE_COND:
9886 if ((ctx || task_shared_vars)
9887 && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
9888 ctx ? NULL : &wi, NULL)
9889 || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
9890 ctx ? NULL : &wi, NULL)))
9891 gimple_regimplify_operands (stmt, gsi_p);
9892 break;
9893 case GIMPLE_CATCH:
9894 lower_omp (gimple_catch_handler_ptr (stmt), ctx);
9895 break;
9896 case GIMPLE_EH_FILTER:
9897 lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
9898 break;
9899 case GIMPLE_TRY:
9900 lower_omp (gimple_try_eval_ptr (stmt), ctx);
9901 lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
9902 break;
9903 case GIMPLE_TRANSACTION:
9904 lower_omp (gimple_transaction_body_ptr (stmt), ctx);
9905 break;
9906 case GIMPLE_BIND:
9907 lower_omp (gimple_bind_body_ptr (stmt), ctx);
9908 break;
9909 case GIMPLE_OMP_PARALLEL:
9910 case GIMPLE_OMP_TASK:
9911 ctx = maybe_lookup_ctx (stmt);
9912 gcc_assert (ctx);
9913 if (ctx->cancellable)
9914 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
9915 lower_omp_taskreg (gsi_p, ctx);
9916 break;
9917 case GIMPLE_OMP_FOR:
9918 ctx = maybe_lookup_ctx (stmt);
9919 gcc_assert (ctx);
9920 if (ctx->cancellable)
9921 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
9922 lower_omp_for (gsi_p, ctx);
9923 break;
9924 case GIMPLE_OMP_SECTIONS:
9925 ctx = maybe_lookup_ctx (stmt);
9926 gcc_assert (ctx);
9927 if (ctx->cancellable)
9928 ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
9929 lower_omp_sections (gsi_p, ctx);
9930 break;
9931 case GIMPLE_OMP_SINGLE:
9932 ctx = maybe_lookup_ctx (stmt);
9933 gcc_assert (ctx);
9934 lower_omp_single (gsi_p, ctx);
9935 break;
9936 case GIMPLE_OMP_MASTER:
9937 ctx = maybe_lookup_ctx (stmt);
9938 gcc_assert (ctx);
9939 lower_omp_master (gsi_p, ctx);
9940 break;
9941 case GIMPLE_OMP_TASKGROUP:
9942 ctx = maybe_lookup_ctx (stmt);
9943 gcc_assert (ctx);
9944 lower_omp_taskgroup (gsi_p, ctx);
9945 break;
9946 case GIMPLE_OMP_ORDERED:
9947 ctx = maybe_lookup_ctx (stmt);
9948 gcc_assert (ctx);
9949 lower_omp_ordered (gsi_p, ctx);
9950 break;
9951 case GIMPLE_OMP_CRITICAL:
9952 ctx = maybe_lookup_ctx (stmt);
9953 gcc_assert (ctx);
9954 lower_omp_critical (gsi_p, ctx);
9955 break;
9956 case GIMPLE_OMP_ATOMIC_LOAD:
9957 if ((ctx || task_shared_vars)
9958 && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
9959 lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
9960 gimple_regimplify_operands (stmt, gsi_p);
9961 break;
9962 case GIMPLE_OMP_TARGET:
9963 ctx = maybe_lookup_ctx (stmt);
9964 gcc_assert (ctx);
9965 lower_omp_target (gsi_p, ctx);
9966 break;
9967 case GIMPLE_OMP_TEAMS:
9968 ctx = maybe_lookup_ctx (stmt);
9969 gcc_assert (ctx);
9970 lower_omp_teams (gsi_p, ctx);
9971 break;
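    /* For the cancellation builtins handled below, a sketch of the
       rewrite performed inside a cancellable construct (temporary and
       label names are illustrative):

	   GOMP_barrier ();

       becomes

	   D.1 = GOMP_barrier_cancel ();
	   if (D.1 != 0) goto cancel_label; else goto fallthru;
	   fallthru:  */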
9972 case GIMPLE_CALL:
9973 tree fndecl;
9974 fndecl = gimple_call_fndecl (stmt);
9975 if (fndecl
9976 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
9977 switch (DECL_FUNCTION_CODE (fndecl))
9978 {
9979 case BUILT_IN_GOMP_BARRIER:
9980 if (ctx == NULL)
9981 break;
9982 /* FALLTHRU */
9983 case BUILT_IN_GOMP_CANCEL:
9984 case BUILT_IN_GOMP_CANCELLATION_POINT:
9985 omp_context *cctx;
9986 cctx = ctx;
9987 if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
9988 cctx = cctx->outer;
9989 gcc_assert (gimple_call_lhs (stmt) == NULL_TREE);
9990 if (!cctx->cancellable)
9991 {
9992 if (DECL_FUNCTION_CODE (fndecl)
9993 == BUILT_IN_GOMP_CANCELLATION_POINT)
9994 {
9995 stmt = gimple_build_nop ();
9996 gsi_replace (gsi_p, stmt, false);
9997 }
9998 break;
9999 }
10000 tree lhs;
10001 lhs = create_tmp_var (boolean_type_node, NULL);
10002 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
10003 {
10004 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
10005 gimple_call_set_fndecl (stmt, fndecl);
10006 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
10007 }
10008 gimple_call_set_lhs (stmt, lhs);
10009 tree fallthru_label;
10010 fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
10011 gimple g;
10012 g = gimple_build_label (fallthru_label);
10013 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10014 g = gimple_build_cond (NE_EXPR, lhs, boolean_false_node,
10015 cctx->cancel_label, fallthru_label);
10016 gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10017 break;
10018 default:
10019 break;
10020 }
10021 /* FALLTHRU */
10022 default:
10023 if ((ctx || task_shared_vars)
10024 && walk_gimple_op (stmt, lower_omp_regimplify_p,
10025 ctx ? NULL : &wi))
10026 gimple_regimplify_operands (stmt, gsi_p);
10027 break;
10028 }
10029}
10030
10031static void
10032lower_omp (gimple_seq *body, omp_context *ctx)
10033{
10034 location_t saved_location = input_location;
10035 gimple_stmt_iterator gsi;
10036 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10037 lower_omp_1 (&gsi, ctx);
10038 /* Inside a target region we haven't called fold_stmt during
10039 gimplification, because it can break code by adding decl references
10040 that weren't in the source. Call fold_stmt now. */
10041 if (target_nesting_level)
10042 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10043 fold_stmt (&gsi);
10044 input_location = saved_location;
10045}
10046\f
10047/* Main entry point. */
10048
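/* In outline: scan_omp below records an omp_context for every OMP
   construct in the function body, and lower_omp then rewrites each
   construct using those contexts.  */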
10049static unsigned int
10050execute_lower_omp (void)
10051{
10052 gimple_seq body;
10053
10054 /* This pass always runs, to provide PROP_gimple_lomp. But there
10055 is nothing to do unless -fopenmp or -fopenmp-simd is given. */
10056 if (flag_openmp == 0 && flag_openmp_simd == 0)
10057 return 0;
10058
10059 all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
10060 delete_omp_context);
10061
10062 body = gimple_body (current_function_decl);
10063 scan_omp (&body, NULL);
10064 gcc_assert (taskreg_nesting_level == 0);
10065
10066 if (all_contexts->root)
10067 {
10068 struct gimplify_ctx gctx;
10069
10070 if (task_shared_vars)
10071 push_gimplify_context (&gctx);
10072 lower_omp (&body, NULL);
10073 if (task_shared_vars)
10074 pop_gimplify_context (NULL);
10075 }
10076
10077 if (all_contexts)
10078 {
10079 splay_tree_delete (all_contexts);
10080 all_contexts = NULL;
10081 }
10082 BITMAP_FREE (task_shared_vars);
10083 return 0;
10084}
10085
10086namespace {
10087
10088const pass_data pass_data_lower_omp =
10089{
10090 GIMPLE_PASS, /* type */
10091 "omplower", /* name */
10092 OPTGROUP_NONE, /* optinfo_flags */
10093 false, /* has_gate */
10094 true, /* has_execute */
10095 TV_NONE, /* tv_id */
10096 PROP_gimple_any, /* properties_required */
10097 PROP_gimple_lomp, /* properties_provided */
10098 0, /* properties_destroyed */
10099 0, /* todo_flags_start */
10100 0, /* todo_flags_finish */
10101};
10102
10103class pass_lower_omp : public gimple_opt_pass
10104{
10105public:
10106 pass_lower_omp (gcc::context *ctxt)
10107 : gimple_opt_pass (pass_data_lower_omp, ctxt)
10108 {}
10109
10110 /* opt_pass methods: */
10111 unsigned int execute () { return execute_lower_omp (); }
10112
10113}; // class pass_lower_omp
10114
10115} // anon namespace
10116
10117gimple_opt_pass *
10118make_pass_lower_omp (gcc::context *ctxt)
10119{
10120 return new pass_lower_omp (ctxt);
10121}
10122\f
10123/* The following is a utility to diagnose OpenMP structured block violations.
10124 It is not part of the "omplower" pass, as that's invoked too late. It
10125 should be invoked by the respective front ends after gimplification. */
10126
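/* As an illustration (hypothetical user code), both branches below are
   diagnosed, since each crosses the boundary of the structured block:

     goto l1;				// invalid entry
     #pragma omp parallel
     {
       l1:;
       goto l2;				// invalid branch out
     }
     l2:;  */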
10127static splay_tree all_labels;
10128
10129/* Check for mismatched contexts and generate an error if needed. Return
10130 true if an error is detected. */
10131
10132static bool
10133diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10134 gimple branch_ctx, gimple label_ctx)
10135{
10136 if (label_ctx == branch_ctx)
10137 return false;
10138
10139
10140 /*
10141 Previously we kept track of the label's entire context in diagnose_sb_[12]
10142 so we could traverse it and issue a correct "exit" or "enter" error
10143 message upon a structured block violation.
10144
10145 We built the context by building a list with tree_cons'ing, but there is
10146 no easy counterpart in gimple tuples. It seems like far too much work
10147 for issuing exit/enter error messages. If someone really misses the
10148 distinct error message... patches welcome.
10149 */
10150
10151#if 0
10152 /* Try to avoid confusing the user by producing an error message
10153 with correct "exit" or "enter" verbiage. We prefer "exit"
10154 unless we can show that LABEL_CTX is nested within BRANCH_CTX. */
10155 if (branch_ctx == NULL)
10156 exit_p = false;
10157 else
10158 {
10159 while (label_ctx)
10160 {
10161 if (TREE_VALUE (label_ctx) == branch_ctx)
10162 {
10163 exit_p = false;
10164 break;
10165 }
10166 label_ctx = TREE_CHAIN (label_ctx);
10167 }
10168 }
10169
10170 if (exit_p)
10171 error ("invalid exit from OpenMP structured block");
10172 else
10173 error ("invalid entry to OpenMP structured block");
10174#endif
10175
10176 /* If it's obvious we have an invalid entry, be specific about the error. */
10177 if (branch_ctx == NULL)
10178 error ("invalid entry to OpenMP structured block");
10179 else
10180 /* Otherwise, be vague and lazy, but efficient. */
10181 error ("invalid branch to/from an OpenMP structured block");
10182
10183 gsi_replace (gsi_p, gimple_build_nop (), false);
953ff289
DN
10184 return true;
10185}
10186
10187/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
726a989a 10188 where each label is found. */
953ff289
DN
10189
10190static tree
726a989a
RB
10191diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10192 struct walk_stmt_info *wi)
953ff289 10193{
726a989a
RB
10194 gimple context = (gimple) wi->info;
10195 gimple inner_context;
10196 gimple stmt = gsi_stmt (*gsi_p);
10197
10198 *handled_ops_p = true;
10199
10200 switch (gimple_code (stmt))
10201 {
10202 WALK_SUBSTMTS;
10203
10204 case GIMPLE_OMP_PARALLEL:
10205 case GIMPLE_OMP_TASK:
10206 case GIMPLE_OMP_SECTIONS:
10207 case GIMPLE_OMP_SINGLE:
10208 case GIMPLE_OMP_SECTION:
10209 case GIMPLE_OMP_MASTER:
10210 case GIMPLE_OMP_ORDERED:
10211 case GIMPLE_OMP_CRITICAL:
10212 case GIMPLE_OMP_TARGET:
10213 case GIMPLE_OMP_TEAMS:
10214 case GIMPLE_OMP_TASKGROUP:
10215 /* The minimal context here is just the current OMP construct. */
10216 inner_context = stmt;
10217 wi->info = inner_context;
10218 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10219 wi->info = context;
10220 break;
10221
10222 case GIMPLE_OMP_FOR:
10223 inner_context = stmt;
10224 wi->info = inner_context;
10225 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10226 walk them. */
10227 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10228 diagnose_sb_1, NULL, wi);
10229 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10230 wi->info = context;
10231 break;
10232
726a989a
RB
10233 case GIMPLE_LABEL:
10234 splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
953ff289
DN
10235 (splay_tree_value) context);
10236 break;
10237
10238 default:
10239 break;
10240 }
10241
10242 return NULL_TREE;
10243}
10244
10245/* Pass 2: Check each branch and see if its context differs from that of
10246 the destination label's context. */
10247
10248static tree
10249diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10250 struct walk_stmt_info *wi)
10251{
10252 gimple context = (gimple) wi->info;
10253 splay_tree_node n;
10254 gimple stmt = gsi_stmt (*gsi_p);
10255
10256 *handled_ops_p = true;
10257
10258 switch (gimple_code (stmt))
10259 {
10260 WALK_SUBSTMTS;
10261
10262 case GIMPLE_OMP_PARALLEL:
10263 case GIMPLE_OMP_TASK:
10264 case GIMPLE_OMP_SECTIONS:
10265 case GIMPLE_OMP_SINGLE:
10266 case GIMPLE_OMP_SECTION:
10267 case GIMPLE_OMP_MASTER:
10268 case GIMPLE_OMP_ORDERED:
10269 case GIMPLE_OMP_CRITICAL:
10270 case GIMPLE_OMP_TARGET:
10271 case GIMPLE_OMP_TEAMS:
10272 case GIMPLE_OMP_TASKGROUP:
10273 wi->info = stmt;
10274 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10275 wi->info = context;
10276 break;
10277
10278 case GIMPLE_OMP_FOR:
10279 wi->info = stmt;
10280 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10281 walk them. */
10282 walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
10283 diagnose_sb_2, NULL, wi);
10284 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10285 wi->info = context;
10286 break;
10287
10288 case GIMPLE_COND:
10289 {
10290 tree lab = gimple_cond_true_label (stmt);
10291 if (lab)
10292 {
10293 n = splay_tree_lookup (all_labels,
10294 (splay_tree_key) lab);
10295 diagnose_sb_0 (gsi_p, context,
10296 n ? (gimple) n->value : NULL);
10297 }
10298 lab = gimple_cond_false_label (stmt);
10299 if (lab)
10300 {
10301 n = splay_tree_lookup (all_labels,
10302 (splay_tree_key) lab);
10303 diagnose_sb_0 (gsi_p, context,
10304 n ? (gimple) n->value : NULL);
10305 }
10306 }
10307 break;
10308
10309 case GIMPLE_GOTO:
10310 {
10311 tree lab = gimple_goto_dest (stmt);
10312 if (TREE_CODE (lab) != LABEL_DECL)
10313 break;
10314
10315 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10316 diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
10317 }
10318 break;
10319
10320 case GIMPLE_SWITCH:
10321 {
10322 unsigned int i;
10323 for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
10324 {
10325 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
10326 n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10327 if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
10328 break;
10329 }
10330 }
10331 break;
10332
10333 case GIMPLE_RETURN:
10334 diagnose_sb_0 (gsi_p, context, NULL);
10335 break;
10336
10337 default:
10338 break;
10339 }
10340
10341 return NULL_TREE;
10342}
10343
10344/* Called from tree-cfg.c::make_edges to create cfg edges for all GIMPLE_OMP
10345 codes. */
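/* For instance, for a GIMPLE_OMP_FOR region the GIMPLE_OMP_CONTINUE
   case below creates roughly these edges (a simplified sketch):

     entry (OMP_FOR) -> body		abnormal
     entry (OMP_FOR) -> after cont	abnormal, zero-iteration case
     cont -> body			abnormal loopback
     cont -> after cont			fallthru  */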
10346bool
10347make_gimple_omp_edges (basic_block bb, struct omp_region **region)
10348{
10349 gimple last = last_stmt (bb);
10350 enum gimple_code code = gimple_code (last);
10351 struct omp_region *cur_region = *region;
10352 bool fallthru = false;
10353
10354 switch (code)
10355 {
10356 case GIMPLE_OMP_PARALLEL:
10357 case GIMPLE_OMP_TASK:
10358 case GIMPLE_OMP_FOR:
10359 case GIMPLE_OMP_SINGLE:
10360 case GIMPLE_OMP_TEAMS:
10361 case GIMPLE_OMP_MASTER:
10362 case GIMPLE_OMP_TASKGROUP:
10363 case GIMPLE_OMP_ORDERED:
10364 case GIMPLE_OMP_CRITICAL:
10365 case GIMPLE_OMP_SECTION:
10366 cur_region = new_omp_region (bb, code, cur_region);
10367 fallthru = true;
10368 break;
10369
10370 case GIMPLE_OMP_TARGET:
10371 cur_region = new_omp_region (bb, code, cur_region);
10372 fallthru = true;
10373 if (gimple_omp_target_kind (last) == GF_OMP_TARGET_KIND_UPDATE)
10374 cur_region = cur_region->outer;
10375 break;
10376
10377 case GIMPLE_OMP_SECTIONS:
10378 cur_region = new_omp_region (bb, code, cur_region);
10379 fallthru = true;
10380 break;
10381
10382 case GIMPLE_OMP_SECTIONS_SWITCH:
10383 fallthru = false;
10384 break;
10385
10386 case GIMPLE_OMP_ATOMIC_LOAD:
10387 case GIMPLE_OMP_ATOMIC_STORE:
10388 fallthru = true;
10389 break;
10390
10391 case GIMPLE_OMP_RETURN:
10392 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
10393 somewhere other than the next block. This will be
10394 created later. */
10395 cur_region->exit = bb;
10396 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
10397 cur_region = cur_region->outer;
10398 break;
10399
10400 case GIMPLE_OMP_CONTINUE:
10401 cur_region->cont = bb;
10402 switch (cur_region->type)
10403 {
10404 case GIMPLE_OMP_FOR:
10405 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
10406 succs edges as abnormal to prevent splitting
10407 them. */
10408 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
10409 /* Make the loopback edge. */
10410 make_edge (bb, single_succ (cur_region->entry),
10411 EDGE_ABNORMAL);
10412
10413 /* Create an edge from GIMPLE_OMP_FOR to exit, which
10414 corresponds to the case that the body of the loop
10415 is not executed at all. */
10416 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
10417 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
10418 fallthru = false;
10419 break;
10420
10421 case GIMPLE_OMP_SECTIONS:
10422 /* Wire up the edges into and out of the nested sections. */
10423 {
10424 basic_block switch_bb = single_succ (cur_region->entry);
10425
10426 struct omp_region *i;
10427 for (i = cur_region->inner; i ; i = i->next)
10428 {
10429 gcc_assert (i->type == GIMPLE_OMP_SECTION);
10430 make_edge (switch_bb, i->entry, 0);
10431 make_edge (i->exit, bb, EDGE_FALLTHRU);
10432 }
10433
10434 /* Make the loopback edge to the block with
10435 GIMPLE_OMP_SECTIONS_SWITCH. */
10436 make_edge (bb, switch_bb, 0);
10437
10438 /* Make the edge from the switch to exit. */
10439 make_edge (switch_bb, bb->next_bb, 0);
10440 fallthru = false;
10441 }
10442 break;
10443
10444 default:
10445 gcc_unreachable ();
10446 }
10447 break;
10448
10449 default:
10450 gcc_unreachable ();
10451 }
10452
10453 if (*region != cur_region)
10454 *region = cur_region;
10455
10456 return fallthru;
10457}
10458
10459static unsigned int
10460diagnose_omp_structured_block_errors (void)
10461{
10462 struct walk_stmt_info wi;
10463 gimple_seq body = gimple_body (current_function_decl);
10464
10465 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10466
10467 memset (&wi, 0, sizeof (wi));
10468 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
10469
10470 memset (&wi, 0, sizeof (wi));
10471 wi.want_locations = true;
10472 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10473
10474 gimple_set_body (current_function_decl, body);
10475
10476 splay_tree_delete (all_labels);
10477 all_labels = NULL;
10478
10479 return 0;
10480}
10481
10482static bool
10483gate_diagnose_omp_blocks (void)
10484{
10485 return flag_openmp != 0;
10486}
10487
10488namespace {
10489
10490const pass_data pass_data_diagnose_omp_blocks =
10491{
10492 GIMPLE_PASS, /* type */
10493 "*diagnose_omp_blocks", /* name */
10494 OPTGROUP_NONE, /* optinfo_flags */
10495 true, /* has_gate */
10496 true, /* has_execute */
10497 TV_NONE, /* tv_id */
10498 PROP_gimple_any, /* properties_required */
10499 0, /* properties_provided */
10500 0, /* properties_destroyed */
10501 0, /* todo_flags_start */
10502 0, /* todo_flags_finish */
10503};
10504
10505class pass_diagnose_omp_blocks : public gimple_opt_pass
10506{
10507public:
10508 pass_diagnose_omp_blocks (gcc::context *ctxt)
10509 : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
10510 {}
10511
10512 /* opt_pass methods: */
10513 bool gate () { return gate_diagnose_omp_blocks (); }
10514 unsigned int execute () {
10515 return diagnose_omp_structured_block_errors ();
10516 }
10517
10518}; // class pass_diagnose_omp_blocks
10519
10520} // anon namespace
10521
10522gimple_opt_pass *
10523make_pass_diagnose_omp_blocks (gcc::context *ctxt)
10524{
10525 return new pass_diagnose_omp_blocks (ctxt);
10526}
10527
10528#include "gt-omp-low.h"