/* Lowering pass for OpenMP directives.  Converts OpenMP directives
   into explicit calls to the runtime library (libgomp) and data
   marshalling to implement data sharing and copying clauses.
   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tree-gimple.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "tree-pass.h"
#include "ggc.h"
#include "except.h"

/* Lowering of OpenMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for parallel regions which are then moved to a new
   function, to be invoked by the thread library.  */
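
/* For illustration, and as a sketch only (the exact names and call
   forms depend on the clauses involved), a construct such as

	#pragma omp parallel shared (n)
	  body;

   in a function foo is outlined into a child function

	void foo.omp_fn.0 (struct .omp_data_s *.omp_data_i)
	{
	  body;
	}

   which the runtime invokes on each thread through the libgomp entry
   points GOMP_parallel_start and GOMP_parallel_end.  */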

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

typedef struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  tree stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;
} omp_context;


/* A structure describing the main elements of a parallel loop.  */

struct omp_for_data
{
  tree v, n1, n2, step, chunk_size, for_stmt;
  enum tree_code cond_code;
  tree pre;
  bool have_nowait, have_ordered;
  enum omp_clause_schedule_kind sched_kind;
};


static splay_tree all_contexts;
static int parallel_nesting_level;
struct omp_region *root_omp_region;

static void scan_omp (tree *, omp_context *);
static void lower_omp (tree *, omp_context *);

/* Find an OpenMP clause of type KIND within CLAUSES.  */

static tree
find_omp_clause (tree clauses, enum tree_code kind)
{
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == kind)
      return clauses;

  return NULL_TREE;
}

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return TREE_CODE (ctx->stmt) == OMP_PARALLEL;
}


/* Return true if REGION is a combined parallel+workshare region.  */

static inline bool
is_combined_parallel (struct omp_region *region)
{
  return region->is_combined_parallel;
}


/* Extract the header elements of parallel loop FOR_STMT and store
   them into *FD.  */

static void
extract_omp_for_data (tree for_stmt, struct omp_for_data *fd)
{
  tree t;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;

  t = OMP_FOR_INIT (for_stmt);
  gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
  fd->v = TREE_OPERAND (t, 0);
  gcc_assert (DECL_P (fd->v));
  gcc_assert (TREE_CODE (TREE_TYPE (fd->v)) == INTEGER_TYPE);
  fd->n1 = TREE_OPERAND (t, 1);

  t = OMP_FOR_COND (for_stmt);
  fd->cond_code = TREE_CODE (t);
  gcc_assert (TREE_OPERAND (t, 0) == fd->v);
  fd->n2 = TREE_OPERAND (t, 1);
  switch (fd->cond_code)
    {
    case LT_EXPR:
    case GT_EXPR:
      break;
    case LE_EXPR:
      fd->n2 = fold_build2 (PLUS_EXPR, TREE_TYPE (fd->n2), fd->n2,
			    build_int_cst (TREE_TYPE (fd->n2), 1));
      fd->cond_code = LT_EXPR;
      break;
    case GE_EXPR:
      fd->n2 = fold_build2 (MINUS_EXPR, TREE_TYPE (fd->n2), fd->n2,
			    build_int_cst (TREE_TYPE (fd->n2), 1));
      fd->cond_code = GT_EXPR;
      break;
    default:
      gcc_unreachable ();
    }

  t = OMP_FOR_INCR (fd->for_stmt);
  gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
  gcc_assert (TREE_OPERAND (t, 0) == fd->v);
  t = TREE_OPERAND (t, 1);
  gcc_assert (TREE_OPERAND (t, 0) == fd->v);
  switch (TREE_CODE (t))
    {
    case PLUS_EXPR:
      fd->step = TREE_OPERAND (t, 1);
      break;
    case MINUS_EXPR:
      fd->step = TREE_OPERAND (t, 1);
      fd->step = fold_build1 (NEGATE_EXPR, TREE_TYPE (fd->step), fd->step);
      break;
    default:
      gcc_unreachable ();
    }

  fd->have_nowait = fd->have_ordered = false;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->chunk_size = NULL_TREE;

  for (t = OMP_FOR_CLAUSES (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
	fd->have_nowait = true;
	break;
      case OMP_CLAUSE_ORDERED:
	fd->have_ordered = true;
	break;
      case OMP_CLAUSE_SCHEDULE:
	fd->sched_kind = OMP_CLAUSE_SCHEDULE_KIND (t);
	fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
	break;
      default:
	break;
      }

  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
	 static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC || fd->have_ordered)
	fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
			 ? integer_zero_node : integer_one_node;
    }
}
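
/* For example (a sketch, not a verbatim dump), a loop written as

	#pragma omp for
	for (i = 0; i <= N; i++)

   is normalized by extract_omp_for_data into FD->COND_CODE == LT_EXPR
   with FD->N2 == N + 1, i.e. it is treated as "i < N + 1"; likewise
   "i >= N" becomes "i > N - 1", and a MINUS_EXPR increment is folded
   into a negated step.  */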

/* Given two blocks PAR_ENTRY_BB and WS_ENTRY_BB such that WS_ENTRY_BB
   is the immediate dominator of PAR_ENTRY_BB, return true if there
   are no data dependencies that would prevent expanding the parallel
   directive at PAR_ENTRY_BB as a combined parallel+workshare region.

   When expanding a combined parallel+workshare region, the call to
   the child function may need additional arguments in the case of
   OMP_FOR regions.  In some cases, these arguments are computed out
   of variables passed in from the parent to the child via 'struct
   .omp_data_s'.  For instance:

	#pragma omp parallel for schedule (guided, i * 4)
	for (j ...)

   is lowered into:

	# BLOCK 2 (PAR_ENTRY_BB)
	.omp_data_o.i = i;
	#pragma omp parallel [child fn: bar.omp_fn.0 ( ..., D.1598)]

	# BLOCK 3 (WS_ENTRY_BB)
	.omp_data_i = &.omp_data_o;
	D.1667 = .omp_data_i->i;
	D.1598 = D.1667 * 4;
	#pragma omp for schedule (guided, D.1598)

   When we outline the parallel region, the call to the child function
   'bar.omp_fn.0' will need the value D.1598 in its argument list, but
   that value is computed *after* the call site.  So, in principle we
   cannot do the transformation.

   To see whether the code in WS_ENTRY_BB blocks the combined
   parallel+workshare call, we collect all the variables used in the
   OMP_FOR header and check whether they appear on the LHS of any
   statement in WS_ENTRY_BB.  If so, then we cannot emit the combined
   call.

   FIXME.  If we had the SSA form built at this point, we could merely
   hoist the code in block 3 into block 2 and be done with it.  But at
   this point we don't have dataflow information and though we could
   hack something up here, it is really not worth the aggravation.  */

static bool
workshare_safe_to_combine_p (basic_block par_entry_bb, basic_block ws_entry_bb)
{
  struct omp_for_data fd;
  tree par_stmt, ws_stmt;

  par_stmt = last_stmt (par_entry_bb);
  ws_stmt = last_stmt (ws_entry_bb);

  if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
    return true;

  gcc_assert (TREE_CODE (ws_stmt) == OMP_FOR);

  extract_omp_for_data (ws_stmt, &fd);

  /* FIXME.  We give up too easily here.  If any of these arguments
     are not constants, they will likely involve variables that have
     been mapped into fields of .omp_data_s for sharing with the child
     function.  With appropriate data flow, it would be possible to
     see through this.  */
  if (!is_gimple_min_invariant (fd.n1)
      || !is_gimple_min_invariant (fd.n2)
      || !is_gimple_min_invariant (fd.step)
      || (fd.chunk_size && !is_gimple_min_invariant (fd.chunk_size)))
    return false;

  return true;
}


/* Collect additional arguments needed to emit a combined
   parallel+workshare call.  WS_STMT is the workshare directive being
   expanded.  */

static tree
get_ws_args_for (tree ws_stmt)
{
  tree t;

  if (TREE_CODE (ws_stmt) == OMP_FOR)
    {
      struct omp_for_data fd;
      tree ws_args;

      extract_omp_for_data (ws_stmt, &fd);

      ws_args = NULL_TREE;
      if (fd.chunk_size)
	{
	  t = fold_convert (long_integer_type_node, fd.chunk_size);
	  ws_args = tree_cons (NULL, t, ws_args);
	}

      t = fold_convert (long_integer_type_node, fd.step);
      ws_args = tree_cons (NULL, t, ws_args);

      t = fold_convert (long_integer_type_node, fd.n2);
      ws_args = tree_cons (NULL, t, ws_args);

      t = fold_convert (long_integer_type_node, fd.n1);
      ws_args = tree_cons (NULL, t, ws_args);

      return ws_args;
    }
  else if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
    {
      basic_block bb = bb_for_stmt (ws_stmt);
      t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs));
      t = tree_cons (NULL, t, NULL);
      return t;
    }

  gcc_unreachable ();
}
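
/* As a sketch, for "#pragma omp parallel for schedule (dynamic, 4)"
   iterating "for (i = 1; i < n; i++)", the list built above is
   (n1, n2, step, chunk) == (1, n, 1, 4).  The expander appends these
   to the child function's data argument when emitting the combined
   GOMP_parallel_loop_* runtime call (the exact libgomp entry point
   depends on the schedule kind).  */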

/* Discover whether REGION is a combined parallel+workshare region.  */

static void
determine_parallel_type (struct omp_region *region)
{
  basic_block par_entry_bb, par_exit_bb;
  basic_block ws_entry_bb, ws_exit_bb;

  if (region == NULL || region->inner == NULL)
    return;

  /* We only support parallel+for and parallel+sections.  */
  if (region->type != OMP_PARALLEL
      || (region->inner->type != OMP_FOR
	  && region->inner->type != OMP_SECTIONS))
    return;

  /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
     WS_EXIT_BB -> PAR_EXIT_BB.  */
  par_entry_bb = region->entry;
  par_exit_bb = region->exit;
  ws_entry_bb = region->inner->entry;
  ws_exit_bb = region->inner->exit;

  if (single_succ (par_entry_bb) == ws_entry_bb
      && single_succ (ws_exit_bb) == par_exit_bb
      && workshare_safe_to_combine_p (par_entry_bb, ws_entry_bb))
    {
      tree ws_stmt = last_stmt (region->inner->entry);

      if (region->inner->type == OMP_FOR)
	{
	  /* If this is a combined parallel loop, we need to determine
	     whether or not to use the combined library calls.  There
	     are two cases where we do not apply the transformation:
	     static loops and any kind of ordered loop.  In the first
	     case, we already open code the loop so there is no need
	     to do anything else.  In the latter case, the combined
	     parallel loop call would still need extra synchronization
	     to implement ordered semantics, so there would not be any
	     gain in using the combined call.  */
	  tree clauses = OMP_FOR_CLAUSES (ws_stmt);
	  tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
	  if (c == NULL
	      || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
	      || find_omp_clause (clauses, OMP_CLAUSE_ORDERED))
	    {
	      region->is_combined_parallel = false;
	      region->inner->is_combined_parallel = false;
	      return;
	    }
	}

      region->is_combined_parallel = true;
      region->inner->is_combined_parallel = true;
      region->ws_args = get_ws_args_for (ws_stmt);
    }
}
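
/* Illustratively, given the checks above,

	#pragma omp parallel
	#pragma omp for schedule (dynamic)

   is marked as a combined region, while a static schedule or an
   ordered clause keeps the two directives separate (a sketch of the
   decision only; the actual code generation happens later, in
   pass_expand_omp).  */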

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Return true if DECL is a reference type.  */

static inline bool
is_reference (tree decl)
{
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}

/* Lookup variables in the decl or field splay trees.  The "maybe" form
   allows the variable not to have been entered; otherwise we assert
   that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->cb.decl_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
maybe_lookup_decl (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->cb.decl_map, (splay_tree_key) var);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return n ? (tree) n->value : NULL_TREE;
}

/* Return true if DECL should be copied by pointer.  SHARED_P is true
   if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, bool shared_p)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_p)
    {
      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;
    }

  return false;
}
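
/* A sketch of the effect: for "#pragma omp parallel shared (x)"
   where x is a local scalar whose address is never taken, the field
   for x in .omp_data_s holds a copy of the value (copy-in/copy-out);
   if x is instead addressable, static, or an aggregate, the field
   holds &x and the child accesses x through that pointer.  */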

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = build_decl (VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  TREE_USED (copy) = 1;
  DECL_CONTEXT (copy) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;

  TREE_CHAIN (copy) = ctx->block_vars;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_fold_indirect_ref (ctx->receiver_decl);
  x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL);
  if (by_ref)
    x = build_fold_indirect_ref (x);

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx)
{
  tree x;

  if (is_global_var (var))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx);
      x = build_fold_indirect_ref (x);
    }
  else if (is_parallel_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, false);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if (ctx->outer)
    x = lookup_decl (var, ctx->outer);
  else
    gcc_unreachable ();

  if (is_reference (var))
    x = build_fold_indirect_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  tree field = lookup_field (var, ctx);
  return build3 (COMPONENT_REF, TREE_TYPE (field),
		 ctx->sender_decl, field, NULL);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */

static void
install_var_field (tree var, bool by_ref, omp_context *ctx)
{
  tree field, type;

  gcc_assert (!splay_tree_lookup (ctx->field_map, (splay_tree_key) var));

  type = TREE_TYPE (var);
  if (by_ref)
    type = build_pointer_type (type);

  field = build_decl (FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;

  insert_field_into_struct (ctx->record_type, field);

  splay_tree_insert (ctx->field_map, (splay_tree_key) var,
		     (splay_tree_value) field);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      new_var = create_artificial_label ();
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_parallel_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  return error_mark_node;
}

/* Debugging dumps for parallel regions.  */
void dump_omp_region (FILE *, struct omp_region *, int);
void debug_omp_region (struct omp_region *);
void debug_all_omp_regions (void);

/* Dump the parallel region tree rooted at REGION.  */

void
dump_omp_region (FILE *file, struct omp_region *region, int indent)
{
  fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
	   tree_code_name[region->type]);

  if (region->inner)
    dump_omp_region (file, region->inner, indent + 4);

  if (region->cont)
    {
      fprintf (file, "%*sbb %d: OMP_CONTINUE\n", indent, "",
	       region->cont->index);
    }

  if (region->exit)
    fprintf (file, "%*sbb %d: OMP_RETURN\n", indent, "",
	     region->exit->index);
  else
    fprintf (file, "%*s[no exit marker]\n", indent, "");

  if (region->next)
    dump_omp_region (file, region->next, indent);
}

void
debug_omp_region (struct omp_region *region)
{
  dump_omp_region (stderr, region, 0);
}

void
debug_all_omp_regions (void)
{
  dump_omp_region (stderr, root_omp_region, 0);
}


/* Create a new parallel region starting at basic block BB inside
   region PARENT.  */

struct omp_region *
new_omp_region (basic_block bb, enum tree_code type, struct omp_region *parent)
{
  struct omp_region *region = xcalloc (1, sizeof (*region));

  region->outer = parent;
  region->entry = bb;
  region->type = type;

  if (parent)
    {
      /* This is a nested region.  Add it to the list of inner
	 regions in PARENT.  */
      region->next = parent->inner;
      parent->inner = region;
    }
  else
    {
      /* This is a toplevel region.  Add it to the list of toplevel
	 regions in ROOT_OMP_REGION.  */
      region->next = root_omp_region;
      root_omp_region = region;
    }

  return region;
}

/* Release the memory associated with the region tree rooted at REGION.  */

static void
free_omp_region_1 (struct omp_region *region)
{
  struct omp_region *i, *n;

  for (i = region->inner; i ; i = n)
    {
      n = i->next;
      free_omp_region_1 (i);
    }

  free (region);
}

/* Release the memory for the entire omp region tree.  */

void
free_omp_regions (void)
{
  struct omp_region *r, *n;
  for (r = root_omp_region; r ; r = n)
    {
      n = r->next;
      free_omp_region_1 (r);
    }
  root_omp_region = NULL;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (tree stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node (current_function_decl);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_region = -1;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  return ctx;
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  splay_tree_delete (ctx->cb.decl_map);

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = TREE_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  TREE_CHAIN (new_f) = new_fields;
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  TREE_TYPE (ctx->receiver_decl) = build_pointer_type (type);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  gcc_assert (is_parallel_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  gcc_assert (!is_variable_sized (decl));
	  by_ref = use_pointer_for_field (decl, true);
	  if (! TREE_READONLY (decl)
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || is_reference (decl))
	    {
	      install_var_field (decl, by_ref, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if (is_variable_sized (decl))
	    break;
	  else if (is_parallel_ctx (ctx))
	    {
	      by_ref = use_pointer_for_field (decl, false);
	      install_var_field (decl, by_ref, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	  if (ctx->outer)
	    scan_omp (&OMP_CLAUSE_DECL (c), ctx->outer);
	  /* FALLTHRU */

	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, false);
	  install_var_field (decl, by_ref, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	  if (ctx->outer)
	    scan_omp (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (scan_array_reductions)
    for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  scan_omp (&OMP_CLAUSE_REDUCTION_INIT (c), ctx);
	  scan_omp (&OMP_CLAUSE_REDUCTION_MERGE (c), ctx);
	}
}

/* Create a new name for the omp child function.  Returns an identifier.  */

static GTY(()) unsigned int tmp_ompfn_id_num;

static tree
create_omp_child_function_name (void)
{
  tree name = DECL_ASSEMBLER_NAME (current_function_decl);
  size_t len = IDENTIFIER_LENGTH (name);
  char *tmp_name, *prefix;

  prefix = alloca (len + sizeof ("_omp_fn"));
  memcpy (prefix, IDENTIFIER_POINTER (name), len);
  strcpy (prefix + len, "_omp_fn");
#ifndef NO_DOT_IN_LABEL
  prefix[len] = '.';
#elif !defined NO_DOLLAR_IN_LABEL
  prefix[len] = '$';
#endif
  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, tmp_ompfn_id_num++);
  return get_identifier (tmp_name);
}
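
/* For example, the first parallel region outlined from a function
   "foo" is typically named "foo.omp_fn.0" ("foo$omp_fn$0" or similar
   on targets without '.' in labels); the exact spelling is decided by
   ASM_FORMAT_PRIVATE_NAME and the target macros above.  */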

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name ();
  type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (FUNCTION_DECL, name, type);
  decl = lang_hooks.decls.pushdecl (decl);

  ctx->cb.dst_fn = decl;

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);

  t = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_RESULT (decl) = t;

  t = build_decl (PARM_DECL, get_identifier (".omp_data_i"), ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  ctx->receiver_decl = t;

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  allocate_struct_function (decl);
  DECL_SOURCE_LOCATION (decl) = EXPR_LOCATION (ctx->stmt);
  cfun->function_end_locus = EXPR_LOCATION (ctx->stmt);
  cfun = ctx->cb.src_cfun;
}


/* Scan an OpenMP parallel directive.  */

static void
scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (OMP_PARALLEL_BODY (*stmt_p))
      && find_omp_clause (OMP_CLAUSES (*stmt_p), OMP_CLAUSE_COPYIN) == NULL)
    {
      *stmt_p = build_empty_stmt ();
      return;
    }

  ctx = new_omp_context (*stmt_p, outer_ctx);
  if (parallel_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;
  create_omp_child_function (ctx);
  OMP_PARALLEL_FN (*stmt_p) = ctx->cb.dst_fn;

  scan_sharing_clauses (OMP_PARALLEL_CLAUSES (*stmt_p), ctx);
  scan_omp (&OMP_PARALLEL_BODY (*stmt_p), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
}


/* Scan an OpenMP loop directive.  */

static void
scan_omp_for (tree *stmt_p, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree stmt;

  stmt = *stmt_p;
  ctx = new_omp_context (stmt, outer_ctx);

  scan_sharing_clauses (OMP_FOR_CLAUSES (stmt), ctx);

  scan_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
  scan_omp (&OMP_FOR_INIT (stmt), ctx);
  scan_omp (&OMP_FOR_COND (stmt), ctx);
  scan_omp (&OMP_FOR_INCR (stmt), ctx);
  scan_omp (&OMP_FOR_BODY (stmt), ctx);
}

/* Scan an OpenMP sections directive.  */

static void
scan_omp_sections (tree *stmt_p, omp_context *outer_ctx)
{
  tree stmt;
  omp_context *ctx;

  stmt = *stmt_p;
  ctx = new_omp_context (stmt, outer_ctx);
  scan_sharing_clauses (OMP_SECTIONS_CLAUSES (stmt), ctx);
  scan_omp (&OMP_SECTIONS_BODY (stmt), ctx);
}

/* Scan an OpenMP single directive.  */

static void
scan_omp_single (tree *stmt_p, omp_context *outer_ctx)
{
  tree stmt = *stmt_p;
  omp_context *ctx;
  tree name;

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_copy_s");
  name = build_decl (TYPE_DECL, name, ctx->record_type);
  TYPE_NAME (ctx->record_type) = name;

  scan_sharing_clauses (OMP_SINGLE_CLAUSES (stmt), ctx);
  scan_omp (&OMP_SINGLE_BODY (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = NULL;
  else
    layout_type (ctx->record_type);
}


/* Callback for walk_stmts used to scan for OpenMP directives at TP.  */

static tree
scan_omp_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  omp_context *ctx = wi->info;
  tree t = *tp;

  if (EXPR_HAS_LOCATION (t))
    input_location = EXPR_LOCATION (t);

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case OMP_PARALLEL:
      parallel_nesting_level++;
      scan_omp_parallel (tp, ctx);
      parallel_nesting_level--;
      break;

    case OMP_FOR:
      scan_omp_for (tp, ctx);
      break;

    case OMP_SECTIONS:
      scan_omp_sections (tp, ctx);
      break;

    case OMP_SINGLE:
      scan_omp_single (tp, ctx);
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      ctx = new_omp_context (*tp, ctx);
      scan_omp (&OMP_BODY (*tp), ctx);
      break;

    case BIND_EXPR:
      {
	tree var;
	*walk_subtrees = 1;

	for (var = BIND_EXPR_VARS (t); var ; var = TREE_CHAIN (var))
	  insert_decl_map (&ctx->cb, var, var);
      }
      break;

    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
      if (ctx)
	*tp = remap_decl (t, &ctx->cb);
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}


/* Scan all the statements starting at STMT_P.  CTX contains context
   information about the OpenMP directives and clauses found during
   the scan.  */

static void
scan_omp (tree *stmt_p, omp_context *ctx)
{
  location_t saved_location;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.callback = scan_omp_1;
  wi.info = ctx;
  wi.want_bind_expr = (ctx != NULL);
  wi.want_locations = true;

  saved_location = input_location;
  walk_stmts (&wi, stmt_p);
  input_location = saved_location;
}

/* Re-gimplification and code generation routines.  */

/* Build a call to GOMP_barrier.  */

static void
build_omp_barrier (tree *stmt_list)
{
  tree t;

  t = built_in_decls[BUILT_IN_GOMP_BARRIER];
  t = build_function_call_expr (t, NULL);
  gimplify_and_add (t, stmt_list);
}

/* If a context was created for STMT when it was scanned, return it.  */

static omp_context *
maybe_lookup_ctx (tree stmt)
{
  splay_tree_node n;
  n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
  return n ? (omp_context *) n->value : NULL;
}


/* Find the mapping for DECL in CTX or the immediately enclosing
   context that has a mapping for DECL.

   If CTX is a nested parallel directive, we may have to use the decl
   mappings created in CTX's parent context.  Suppose that we have the
   following parallel nesting (variable UIDs shown for clarity):

	iD.1562 = 0;
	#omp parallel shared(iD.1562)		-> outer parallel
	  iD.1562 = iD.1562 + 1;

	  #omp parallel shared (iD.1562)	-> inner parallel
	    iD.1562 = iD.1562 - 1;

   Each parallel structure will create a distinct .omp_data_s structure
   for copying iD.1562 in/out of the directive:

	outer parallel		.omp_data_s.1.i -> iD.1562
	inner parallel		.omp_data_s.2.i -> iD.1562

   A shared variable mapping will produce a copy-out operation before
   the parallel directive and a copy-in operation after it.  So, in
   this case we would have:

	iD.1562 = 0;
	.omp_data_o.1.i = iD.1562;
	#omp parallel shared(iD.1562)		-> outer parallel
	  .omp_data_i.1 = &.omp_data_o.1
	  .omp_data_i.1->i = .omp_data_i.1->i + 1;

	  .omp_data_o.2.i = iD.1562;		-> **
	  #omp parallel shared(iD.1562)		-> inner parallel
	    .omp_data_i.2 = &.omp_data_o.2
	    .omp_data_i.2->i = .omp_data_i.2->i - 1;


   ** This is a problem.  The symbol iD.1562 cannot be referenced
   inside the body of the outer parallel region.  But since we are
   emitting this copy operation while expanding the inner parallel
   directive, we need to access the CTX structure of the outer
   parallel directive to get the correct mapping:

	.omp_data_o.2.i = .omp_data_i.1->i

   Since there may be other workshare or parallel directives enclosing
   the parallel directive, it may be necessary to walk up the context
   parent chain.  This is not a problem in general because nested
   parallelism happens only rarely.  */

static tree
lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
{
  tree t;
  omp_context *up;

  gcc_assert (ctx->is_nested);

  for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
    t = maybe_lookup_decl (decl, up);

  gcc_assert (t);

  return t;
}


/* Construct the initialization value for reduction CLAUSE.  */

tree
omp_reduction_init (tree clause, tree type)
{
  switch (OMP_CLAUSE_REDUCTION_CODE (clause))
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return fold_convert (type, integer_zero_node);

    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert (type, integer_one_node);

    case BIT_AND_EXPR:
      return fold_convert (type, integer_minus_one_node);

    case MAX_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (TYPE_MODE (type)))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (TYPE_MODE (type)))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
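
/* Thus the private copy for "reduction (+:sum)" starts at 0, for
   "reduction (*:p)" at 1, for "reduction (&:m)" at ~0, and for
   "reduction (max:v)" at the smallest value of the type (or -Inf
   when the float mode honors infinities), so combining the initial
   value with any partial result leaves the result unchanged.  */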

/* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
   from the receiver (aka child) side and initializers for REFERENCE_TYPE
   private variables.  Initialization statements go in ILIST, while calls
   to destructors go in DLIST.  */

static void
lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
			 omp_context *ctx)
{
  tree_stmt_iterator diter;
  tree c, dtor, copyin_seq, x, args, ptr;
  bool copyin_by_ref = false;
  int pass;

  *dlist = alloc_stmt_list ();
  diter = tsi_start (*dlist);
  copyin_seq = NULL;

  /* Do all the fixed sized types in the first pass, and the variable sized
     types in the second pass.  This makes sure that the scalar arguments to
     the variable sized types are processed before we use them in the
     variable sized operations.  */
  for (pass = 0; pass < 2; ++pass)
    {
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	{
	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
	  tree var, new_var;
	  bool by_ref;

	  switch (c_kind)
	    {
	    case OMP_CLAUSE_PRIVATE:
	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
		continue;
	      break;
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	    case OMP_CLAUSE_COPYIN:
	    case OMP_CLAUSE_REDUCTION:
	      break;
	    default:
	      continue;
	    }

	  new_var = var = OMP_CLAUSE_DECL (c);
	  if (c_kind != OMP_CLAUSE_COPYIN)
	    new_var = lookup_decl (var, ctx);

	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
	    {
	      if (pass != 0)
		continue;
	    }
	  else if (is_variable_sized (var))
	    {
	      /* For variable sized types, we need to allocate the
		 actual storage here.  Call alloca and store the
		 result in the pointer decl that we created elsewhere.  */
	      if (pass == 0)
		continue;

	      ptr = DECL_VALUE_EXPR (new_var);
	      gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
	      ptr = TREE_OPERAND (ptr, 0);
	      gcc_assert (DECL_P (ptr));

	      x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
	      args = tree_cons (NULL, x, NULL);
	      x = built_in_decls[BUILT_IN_ALLOCA];
	      x = build_function_call_expr (x, args);
	      x = fold_convert (TREE_TYPE (ptr), x);
	      x = build2 (MODIFY_EXPR, void_type_node, ptr, x);
	      gimplify_and_add (x, ilist);
	    }
	  else if (is_reference (var))
	    {
	      /* For references that are being privatized for Fortran,
		 allocate new backing storage for the new pointer
		 variable.  This allows us to avoid changing all the
		 code that expects a pointer to something that expects
		 a direct variable.  Note that this doesn't apply to
		 C++, since reference types are disallowed in data
		 sharing clauses there.  */
	      if (pass == 0)
		continue;

	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
	      if (TREE_CONSTANT (x))
		{
		  const char *name = NULL;
		  if (DECL_NAME (var))
		    name = IDENTIFIER_POINTER (DECL_NAME (new_var));

		  x = create_tmp_var (TREE_TYPE (TREE_TYPE (new_var)), name);
		  x = build_fold_addr_expr_with_type (x, TREE_TYPE (new_var));
		}
	      else
		{
		  args = tree_cons (NULL, x, NULL);
		  x = built_in_decls[BUILT_IN_ALLOCA];
		  x = build_function_call_expr (x, args);
		  x = fold_convert (TREE_TYPE (new_var), x);
		}

	      x = build2 (MODIFY_EXPR, void_type_node, new_var, x);
	      gimplify_and_add (x, ilist);

	      new_var = build_fold_indirect_ref (new_var);
	    }
	  else if (c_kind == OMP_CLAUSE_REDUCTION
		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      if (pass == 0)
		continue;
	    }
	  else if (pass != 0)
	    continue;

	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
		 needs to be delayed until after fixup_child_record_type so
		 that we get the correct type during the dereference.  */
	      by_ref = use_pointer_for_field (var, true);
	      x = build_receiver_ref (var, by_ref, ctx);
	      SET_DECL_VALUE_EXPR (new_var, x);
	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;

	      /* ??? If VAR is not passed by reference, and the variable
		 hasn't been initialized yet, then we'll get a warning for
		 the store into the omp_data_s structure.  Ideally, we'd be
		 able to notice this and not store anything at all, but
		 we're generating code too early.  Suppress the warning.  */
	      if (!by_ref)
		TREE_NO_WARNING (var) = 1;
	      break;

	    case OMP_CLAUSE_LASTPRIVATE:
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		break;
	      /* FALLTHRU */

	    case OMP_CLAUSE_PRIVATE:
	      x = lang_hooks.decls.omp_clause_default_ctor (c, new_var);
	      if (x)
		gimplify_and_add (x, ilist);
	      /* FALLTHRU */

	    do_dtor:
	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  dtor = x;
		  gimplify_stmt (&dtor);
		  tsi_link_before (&diter, dtor, TSI_SAME_STMT);
		}
	      break;

	    case OMP_CLAUSE_FIRSTPRIVATE:
	      x = build_outer_var_ref (var, ctx);
	      x = lang_hooks.decls.omp_clause_copy_ctor (c, new_var, x);
	      gimplify_and_add (x, ilist);
	      goto do_dtor;
	      break;

	    case OMP_CLAUSE_COPYIN:
	      by_ref = use_pointer_for_field (var, false);
	      x = build_receiver_ref (var, by_ref, ctx);
	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
	      append_to_statement_list (x, &copyin_seq);
	      copyin_by_ref |= by_ref;
	      break;

	    case OMP_CLAUSE_REDUCTION:
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), ilist);
		  OMP_CLAUSE_REDUCTION_INIT (c) = NULL;
		}
	      else
		{
		  x = omp_reduction_init (c, TREE_TYPE (new_var));
		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
		  x = build2 (MODIFY_EXPR, void_type_node, new_var, x);
		  gimplify_and_add (x, ilist);
		}
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}
    }

  /* The copyin sequence is not to be executed by the main thread, since
     that would result in self-copies.  Perhaps not visible to scalars,
     but it certainly is to C++ operator=.  */
  if (copyin_seq)
    {
      x = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
      x = build_function_call_expr (x, NULL);
      x = build2 (NE_EXPR, boolean_type_node, x,
		  build_int_cst (TREE_TYPE (x), 0));
      x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
      gimplify_and_add (x, ilist);
    }

  /* If any copyin variable is passed by reference, we must ensure the
     master thread doesn't modify it before it is copied over in all
     threads.  */
  if (copyin_by_ref)
    build_omp_barrier (ilist);
}
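
/* A sketch of the receiver-side code this produces: for
   "firstprivate (a) copyin (t)" with scalar a and threadprivate t,
   ILIST receives roughly

	a.1 = .omp_data_i->a;
	if (omp_get_thread_num () != 0)
	  t = .omp_data_i->t;

   (illustrative names; the actual assignments come from the
   langhook-provided copy constructors and assignment operators, and
   the guard keeps the master thread from copying onto itself).  */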

/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
			   omp_context *ctx)
{
  tree sub_list, x, c;

  /* Early exit if there are no lastprivate clauses.  */
  clauses = find_omp_clause (clauses, OMP_CLAUSE_LASTPRIVATE);
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
    }

  sub_list = alloc_stmt_list ();

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var;

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      new_var = lookup_decl (var, ctx);

      x = build_outer_var_ref (var, ctx);
      if (is_reference (var))
	new_var = build_fold_indirect_ref (new_var);
      x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
      append_to_statement_list (x, &sub_list);
    }

  if (predicate)
    x = build3 (COND_EXPR, void_type_node, predicate, sub_list, NULL);
  else
    x = sub_list;

  gimplify_and_add (x, stmt_list);
}

/* Generate code to implement the REDUCTION clauses.  */

static void
lower_reduction_clauses (tree clauses, tree *stmt_list, omp_context *ctx)
{
  tree sub_list = NULL, x, c;
  int count = 0;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* Never use OMP_ATOMIC for array reductions.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var;
      enum tree_code code;

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
	continue;

      var = OMP_CLAUSE_DECL (c);
      new_var = lookup_decl (var, ctx);
      if (is_reference (var))
	new_var = build_fold_indirect_ref (new_var);
      ref = build_outer_var_ref (var, ctx);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  tree addr = build_fold_addr_expr (ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2 (code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  gimplify_and_add (x, stmt_list);
	  return;
	}

      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (is_reference (var))
	    ref = build_fold_addr_expr (ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), &sub_list);
	  OMP_CLAUSE_REDUCTION_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  x = build2 (MODIFY_EXPR, void_type_node, ref, x);
	  append_to_statement_list (x, &sub_list);
	}
    }

  x = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
  x = build_function_call_expr (x, NULL);
  gimplify_and_add (x, stmt_list);

  gimplify_and_add (sub_list, stmt_list);

  x = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
  x = build_function_call_expr (x, NULL);
  gimplify_and_add (x, stmt_list);
}
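
/* A sketch of the two strategies: a single "reduction (+:s)" merges
   each thread's copy with one atomic update, roughly

	#pragma omp atomic
	s = s + s.1;

   whereas multiple reductions (or an array reduction) are emitted
   between GOMP_atomic_start ()/GOMP_atomic_end () calls, with one
   plain "s = s + s.1;" style store per clause in between.  */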

/* Generate code to implement the COPYPRIVATE clauses.  */

static void
lower_copyprivate_clauses (tree clauses, tree *slist, tree *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, x;
      bool by_ref;

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      by_ref = use_pointer_for_field (var, false);

      ref = build_sender_ref (var, ctx);
      x = (ctx->is_nested) ? lookup_decl_in_outer_ctx (var, ctx) : var;
      x = by_ref ? build_fold_addr_expr (x) : x;
      x = build2 (MODIFY_EXPR, void_type_node, ref, x);
      gimplify_and_add (x, slist);

      ref = build_receiver_ref (var, by_ref, ctx);
      if (is_reference (var))
	{
	  ref = build_fold_indirect_ref (ref);
	  var = build_fold_indirect_ref (var);
	}
      x = lang_hooks.decls.omp_clause_assign_op (c, var, ref);
      gimplify_and_add (x, rlist);
    }
}
50674e96 1885
953ff289
DN
1886/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
1887 and REDUCTION from the sender (aka parent) side. */
1888
1889static void
50674e96 1890lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
953ff289
DN
1891{
1892 tree c;
1893
1894 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1895 {
50674e96 1896 tree val, ref, x, var;
953ff289
DN
1897 bool by_ref, do_in = false, do_out = false;
1898
aaf46ef9 1899 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1900 {
1901 case OMP_CLAUSE_FIRSTPRIVATE:
1902 case OMP_CLAUSE_COPYIN:
1903 case OMP_CLAUSE_LASTPRIVATE:
1904 case OMP_CLAUSE_REDUCTION:
1905 break;
1906 default:
1907 continue;
1908 }
1909
50674e96
DN
1910 var = val = OMP_CLAUSE_DECL (c);
1911 if (ctx->is_nested)
1912 var = lookup_decl_in_outer_ctx (val, ctx);
1913
953ff289
DN
1914 if (is_variable_sized (val))
1915 continue;
1916 by_ref = use_pointer_for_field (val, false);
1917
aaf46ef9 1918 switch (OMP_CLAUSE_CODE (c))
953ff289
DN
1919 {
1920 case OMP_CLAUSE_FIRSTPRIVATE:
1921 case OMP_CLAUSE_COPYIN:
1922 do_in = true;
1923 break;
1924
1925 case OMP_CLAUSE_LASTPRIVATE:
1926 if (by_ref || is_reference (val))
1927 {
1928 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1929 continue;
1930 do_in = true;
1931 }
1932 else
1933 do_out = true;
1934 break;
1935
1936 case OMP_CLAUSE_REDUCTION:
1937 do_in = true;
1938 do_out = !(by_ref || is_reference (val));
1939 break;
1940
1941 default:
1942 gcc_unreachable ();
1943 }
1944
1945 if (do_in)
1946 {
1947 ref = build_sender_ref (val, ctx);
50674e96 1948 x = by_ref ? build_fold_addr_expr (var) : var;
953ff289
DN
1949 x = build2 (MODIFY_EXPR, void_type_node, ref, x);
1950 gimplify_and_add (x, ilist);
1951 }
50674e96 1952
953ff289
DN
1953 if (do_out)
1954 {
1955 ref = build_sender_ref (val, ctx);
50674e96 1956 x = build2 (MODIFY_EXPR, void_type_node, var, ref);
953ff289
DN
1957 gimplify_and_add (x, olist);
1958 }
1959 }
1960}
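
/* An illustrative sketch (not from the original sources): for
   'firstprivate (x)' the parent stores X into the marshalling record
   before launching the children (ILIST), while for a scalar
   'lastprivate (y)' it reads the final value back out afterwards
   (OLIST):

	.OMP_DATA_O.x = x;	<- ilist (do_in)
	y = .OMP_DATA_O.y;	<- olist (do_out)
*/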

/* Generate code to implement SHARED from the sender (aka parent) side.
   This is trickier, since OMP_PARALLEL_CLAUSES doesn't list things that
   got automatically shared.  */

static void
lower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
{
  tree var, ovar, nvar, f, x;

  if (ctx->record_type == NULL)
    return;

  for (f = TYPE_FIELDS (ctx->record_type); f ; f = TREE_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      var = ovar;

      /* If CTX is a nested parallel directive, find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      if (ctx->is_nested)
	var = lookup_decl_in_outer_ctx (ovar, ctx);

      if (use_pointer_for_field (ovar, true))
	{
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  x = build2 (MODIFY_EXPR, void_type_node, x, var);
	  gimplify_and_add (x, ilist);
	}
      else
	{
	  x = build_sender_ref (ovar, ctx);
	  x = build2 (MODIFY_EXPR, void_type_node, x, var);
	  gimplify_and_add (x, ilist);

	  x = build_sender_ref (ovar, ctx);
	  x = build2 (MODIFY_EXPR, void_type_node, var, x);
	  gimplify_and_add (x, olist);
	}
    }
}
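
/* For example (an illustrative sketch): a shared variable A that must
   live in the parent's frame is sent by address and needs no copy-out,
   whereas a scalar B that is copied by value goes in both directions:

	.OMP_DATA_O.a = &a;	<- ilist
	.OMP_DATA_O.b = b;	<- ilist
	b = .OMP_DATA_O.b;	<- olist
*/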

/* Build the function calls to GOMP_parallel_start etc to actually
   generate the parallel operation.  REGION is the parallel region
   being expanded.  BB is the block into which to insert the code.
   WS_ARGS will be set if this is a call to a combined
   parallel+workshare construct; it contains the list of additional
   arguments needed by the workshare construct.  */

static void
expand_parallel_call (struct omp_region *region, basic_block bb,
		      tree entry_stmt, tree ws_args)
{
  tree t, args, val, cond, c, list, clauses;
  block_stmt_iterator si;
  int start_ix;

  clauses = OMP_PARALLEL_CLAUSES (entry_stmt);
  push_gimplify_context ();

  /* Determine what flavor of GOMP_parallel_start we will be
     emitting.  */
  start_ix = BUILT_IN_GOMP_PARALLEL_START;
  if (is_combined_parallel (region))
    {
      switch (region->inner->type)
	{
	case OMP_FOR:
	  start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
		     + region->inner->sched_kind;
	  break;
	case OMP_SECTIONS:
	  start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
	  break;
	default:
	  gcc_unreachable ();
	}
    }

  /* By default, the value of NUM_THREADS is zero (selected at run time)
     and there is no conditional.  */
  cond = NULL_TREE;
  val = build_int_cst (unsigned_type_node, 0);

  c = find_omp_clause (clauses, OMP_CLAUSE_IF);
  if (c)
    cond = OMP_CLAUSE_IF_EXPR (c);

  c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
  if (c)
    val = OMP_CLAUSE_NUM_THREADS_EXPR (c);

  /* Ensure 'val' is of the correct type.  */
  val = fold_convert (unsigned_type_node, val);

  /* If we found the clause 'if (cond)', build either
     (cond != 0) or (cond ? val : 1u).  */
  if (cond)
    {
      block_stmt_iterator si;

      cond = gimple_boolify (cond);

      if (integer_zerop (val))
	val = build2 (EQ_EXPR, unsigned_type_node, cond,
		      build_int_cst (TREE_TYPE (cond), 0));
      else
	{
	  basic_block cond_bb, then_bb, else_bb;
	  edge e;
	  tree t, then_lab, else_lab, tmp;

	  tmp = create_tmp_var (TREE_TYPE (val), NULL);
	  e = split_block (bb, NULL);
	  cond_bb = e->src;
	  bb = e->dest;
	  remove_edge (e);

	  then_bb = create_empty_bb (cond_bb);
	  else_bb = create_empty_bb (then_bb);
	  then_lab = create_artificial_label ();
	  else_lab = create_artificial_label ();

	  t = build3 (COND_EXPR, void_type_node,
		      cond,
		      build_and_jump (&then_lab),
		      build_and_jump (&else_lab));

	  si = bsi_start (cond_bb);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  si = bsi_start (then_bb);
	  t = build1 (LABEL_EXPR, void_type_node, then_lab);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
	  t = build2 (MODIFY_EXPR, void_type_node, tmp, val);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  si = bsi_start (else_bb);
	  t = build1 (LABEL_EXPR, void_type_node, else_lab);
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
	  t = build2 (MODIFY_EXPR, void_type_node, tmp,
		      build_int_cst (unsigned_type_node, 1));
	  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

	  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
	  make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
	  make_edge (then_bb, bb, EDGE_FALLTHRU);
	  make_edge (else_bb, bb, EDGE_FALLTHRU);

	  val = tmp;
	}

      list = NULL_TREE;
      val = get_formal_tmp_var (val, &list);
      si = bsi_start (bb);
      bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
    }

  list = NULL_TREE;
  args = tree_cons (NULL, val, NULL);
  t = OMP_PARALLEL_DATA_ARG (entry_stmt);
  if (t == NULL)
    t = null_pointer_node;
  else
    t = build_fold_addr_expr (t);
  args = tree_cons (NULL, t, args);
  t = build_fold_addr_expr (OMP_PARALLEL_FN (entry_stmt));
  args = tree_cons (NULL, t, args);

  if (ws_args)
    args = chainon (args, ws_args);

  t = built_in_decls[start_ix];
  t = build_function_call_expr (t, args);
  gimplify_and_add (t, &list);

  t = OMP_PARALLEL_DATA_ARG (entry_stmt);
  if (t == NULL)
    t = null_pointer_node;
  else
    t = build_fold_addr_expr (t);
  args = tree_cons (NULL, t, NULL);
  t = build_function_call_expr (OMP_PARALLEL_FN (entry_stmt), args);
  gimplify_and_add (t, &list);

  t = built_in_decls[BUILT_IN_GOMP_PARALLEL_END];
  t = build_function_call_expr (t, NULL);
  gimplify_and_add (t, &list);

  si = bsi_last (bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  pop_gimplify_context (NULL_TREE);
}
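
/* As an illustration of the emitted sequence (names are illustrative,
   not from the original sources): for a plain '#pragma omp parallel'
   with no IF or NUM_THREADS clause, the statements inserted in BB
   amount to

	GOMP_parallel_start (__omp_fn.0, &.OMP_DATA_O, 0);
	__omp_fn.0 (&.OMP_DATA_O);
	GOMP_parallel_end ();

   where __omp_fn.0 stands for the outlined child function and the 0
   asks the runtime to pick the number of threads itself.  */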


/* If exceptions are enabled, wrap *STMT_P in a MUST_NOT_THROW catch
   handler.  This prevents programs from violating the structured
   block semantics with throws.  */

static void
maybe_catch_exception (tree *stmt_p)
{
  tree f, t;

  if (!flag_exceptions)
    return;

  if (lang_protect_cleanup_actions)
    t = lang_protect_cleanup_actions ();
  else
    {
      t = built_in_decls[BUILT_IN_TRAP];
      t = build_function_call_expr (t, NULL);
    }
  f = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
  EH_FILTER_MUST_NOT_THROW (f) = 1;
  gimplify_and_add (t, &EH_FILTER_FAILURE (f));

  t = build2 (TRY_CATCH_EXPR, void_type_node, *stmt_p, NULL);
  append_to_statement_list (f, &TREE_OPERAND (t, 1));

  *stmt_p = NULL;
  append_to_statement_list (t, stmt_p);
}
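
/* Schematically (an illustrative sketch), the wrapping above turns
   BODY into

	try { BODY; }
	filter (MUST_NOT_THROW) { trap, or the language's cleanup
				  action such as terminating; }

   i.e. a TRY_CATCH_EXPR whose handler is an EH_FILTER_EXPR with
   EH_FILTER_MUST_NOT_THROW set, so an exception escaping the
   structured block aborts the program instead of violating the
   block semantics.  */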

/* Chain all the DECLs in LIST by their TREE_CHAIN fields.  */

static tree
list2chain (tree list)
{
  tree t;

  for (t = list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      if (TREE_CHAIN (t))
	TREE_CHAIN (var) = TREE_VALUE (TREE_CHAIN (t));
      else
	TREE_CHAIN (var) = NULL_TREE;
    }

  return list ? TREE_VALUE (list) : NULL_TREE;
}
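
/* For instance, given the TREE_LIST (a, b, c) this returns A with
   TREE_CHAIN (a) == b, TREE_CHAIN (b) == c and TREE_CHAIN (c) ==
   NULL_TREE, which is the chained form that BLOCK_VARS expects in
   expand_omp_parallel below.  */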

/* Remove barriers in REGION->EXIT's block.  Note that this is only
   valid for OMP_PARALLEL regions.  Since the end of a parallel region
   is an implicit barrier, any workshare inside the OMP_PARALLEL that
   left a barrier at the end of the OMP_PARALLEL region can now be
   removed.  */

static void
remove_exit_barrier (struct omp_region *region)
{
  block_stmt_iterator si;
  basic_block exit_bb;
  edge_iterator ei;
  edge e;
  tree t;

  exit_bb = region->exit;

  /* If the parallel region doesn't return, we don't have REGION->EXIT
     block at all.  */
  if (! exit_bb)
    return;

  /* The last insn in the block will be the parallel's OMP_RETURN.  The
     workshare's OMP_RETURN will be in a preceding block.  The kinds of
     statements that can appear in between are extremely limited -- no
     memory operations at all.  Here, we allow nothing at all, so the
     only thing we allow to precede this OMP_RETURN is a label.  */
  si = bsi_last (exit_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
  bsi_prev (&si);
  if (!bsi_end_p (si) && TREE_CODE (bsi_stmt (si)) != LABEL_EXPR)
    return;

  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      si = bsi_last (e->src);
      if (bsi_end_p (si))
	continue;
      t = bsi_stmt (si);
      if (TREE_CODE (t) == OMP_RETURN)
	OMP_RETURN_NOWAIT (t) = 1;
    }
}

static void
remove_exit_barriers (struct omp_region *region)
{
  if (region->type == OMP_PARALLEL)
    remove_exit_barrier (region);

  if (region->inner)
    {
      region = region->inner;
      remove_exit_barriers (region);
      while (region->next)
	{
	  region = region->next;
	  remove_exit_barriers (region);
	}
    }
}

/* Expand the OpenMP parallel directive starting at REGION.  */

static void
expand_omp_parallel (struct omp_region *region)
{
  basic_block entry_bb, exit_bb, new_bb;
  struct function *child_cfun, *saved_cfun;
  tree child_fn, block, t, ws_args;
  block_stmt_iterator si;
  tree entry_stmt;
  edge e;

  entry_stmt = last_stmt (region->entry);
  child_fn = OMP_PARALLEL_FN (entry_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  saved_cfun = cfun;

  entry_bb = region->entry;
  exit_bb = region->exit;

  if (is_combined_parallel (region))
    ws_args = region->ws_args;
  else
    ws_args = NULL_TREE;

  if (child_cfun->cfg)
    {
      /* Due to inlining, it may happen that we have already outlined
	 the region, in which case all we need to do is make the
	 sub-graph unreachable and emit the parallel call.  */
      edge entry_succ_e, exit_succ_e;
      block_stmt_iterator si;

      entry_succ_e = single_succ_edge (entry_bb);
      exit_succ_e = single_succ_edge (exit_bb);

      si = bsi_last (entry_bb);
      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_PARALLEL);
      bsi_remove (&si, true);

      new_bb = entry_bb;
      remove_edge (entry_succ_e);
      make_edge (new_bb, exit_succ_e->dest, EDGE_FALLTHRU);
    }
  else
    {
      /* If the parallel region needs data sent from the parent
	 function, then the very first statement of the parallel body
	 is a copy assignment .OMP_DATA_I = &.OMP_DATA_O.  Since
	 &.OMP_DATA_O is passed as an argument to the child function,
	 we need to replace it with the argument as seen by the child
	 function.

	 In most cases, this will end up being the identity assignment
	 .OMP_DATA_I = .OMP_DATA_I.  However, if the parallel body had
	 a function call that has been inlined, the original PARM_DECL
	 .OMP_DATA_I may have been converted into a different local
	 variable, in which case we need to keep the assignment.  */
      if (OMP_PARALLEL_DATA_ARG (entry_stmt))
	{
	  basic_block entry_succ_bb = single_succ (entry_bb);
	  block_stmt_iterator si = bsi_start (entry_succ_bb);
	  tree stmt;

	  gcc_assert (!bsi_end_p (si));

	  stmt = bsi_stmt (si);
	  gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR
		      && TREE_CODE (TREE_OPERAND (stmt, 1)) == ADDR_EXPR
		      && TREE_OPERAND (TREE_OPERAND (stmt, 1), 0)
			 == OMP_PARALLEL_DATA_ARG (entry_stmt));

	  if (TREE_OPERAND (stmt, 0) == DECL_ARGUMENTS (child_fn))
	    bsi_remove (&si, true);
	  else
	    TREE_OPERAND (stmt, 1) = DECL_ARGUMENTS (child_fn);
	}

      /* Declare local variables needed in CHILD_CFUN.  */
      block = DECL_INITIAL (child_fn);
      BLOCK_VARS (block) = list2chain (child_cfun->unexpanded_var_list);
      DECL_SAVED_TREE (child_fn) = single_succ (entry_bb)->stmt_list;

      /* Reset DECL_CONTEXT on locals and function arguments.  */
      for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
	DECL_CONTEXT (t) = child_fn;

      /* Split ENTRY_BB at OMP_PARALLEL so that it can be moved to the
	 child function.  */
      si = bsi_last (entry_bb);
      t = bsi_stmt (si);
      gcc_assert (t && TREE_CODE (t) == OMP_PARALLEL);
      bsi_remove (&si, true);
      e = split_block (entry_bb, t);
      entry_bb = e->dest;
      single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

      /* Move the parallel region into CHILD_CFUN.  We need to reset
	 dominance information because the expansion of the inner
	 regions has invalidated it.  */
      free_dominance_info (CDI_DOMINATORS);
      new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
      if (exit_bb)
	single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
      cgraph_add_new_function (child_fn);

      /* Convert OMP_RETURN into a RETURN_EXPR.  */
      if (exit_bb)
	{
	  si = bsi_last (exit_bb);
	  gcc_assert (!bsi_end_p (si)
		      && TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
	  t = build1 (RETURN_EXPR, void_type_node, NULL);
	  bsi_insert_after (&si, t, TSI_SAME_STMT);
	  bsi_remove (&si, true);
	}
    }

  /* Emit a library call to launch the children threads.  */
  expand_parallel_call (region, new_bb, entry_stmt, ws_args);
}
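
/* As an overall sketch (illustrative, not part of the original
   sources): after this function runs, a region such as

	#pragma omp parallel shared (a)
	  body;

   has BODY moved into a separate child function, roughly

	void __omp_fn.0 (struct .omp_data_s *.OMP_DATA_I)
	{
	  body;   with references to A rewritten through .OMP_DATA_I
	}

   while the original block is left with only the library calls
   emitted by expand_parallel_call.  The __omp_fn.0 and .omp_data_s
   names here are illustrative.  */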


/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with any schedule.  Given parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	more = GOMP_loop_foo_start (N1, N2, STEP, CHUNK, &istart0, &iend0);
	if (more) goto L0; else goto L3;
    L0:
	V = istart0;
	iend = iend0;
    L1:
	BODY;
	V += STEP;
	if (V cond iend) goto L1; else goto L2;
    L2:
	if (GOMP_loop_foo_next (&istart0, &iend0)) goto L0; else goto L3;
    L3:

    If this is a combined omp parallel loop, instead of the call to
    GOMP_loop_foo_start, we emit 'goto L3'.  */

static void
expand_omp_for_generic (struct omp_region *region,
			struct omp_for_data *fd,
			enum built_in_function start_fn,
			enum built_in_function next_fn)
{
  tree l0, l1, l2, l3;
  tree type, istart0, iend0, iend;
  tree t, args, list;
  basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l3_bb;
  block_stmt_iterator si;
  bool in_combined_parallel = is_combined_parallel (region);

  type = TREE_TYPE (fd->v);

  istart0 = create_tmp_var (long_integer_type_node, ".istart0");
  iend0 = create_tmp_var (long_integer_type_node, ".iend0");
  iend = create_tmp_var (type, NULL);
  TREE_ADDRESSABLE (istart0) = 1;
  TREE_ADDRESSABLE (iend0) = 1;

  entry_bb = region->entry;
  l0_bb = create_empty_bb (entry_bb);
  l1_bb = single_succ (entry_bb);
  cont_bb = region->cont;
  l2_bb = create_empty_bb (cont_bb);
  l3_bb = single_succ (cont_bb);
  exit_bb = region->exit;

  l0 = tree_block_label (l0_bb);
  l1 = tree_block_label (l1_bb);
  l2 = tree_block_label (l2_bb);
  l3 = tree_block_label (l3_bb);

  si = bsi_last (entry_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
  if (!in_combined_parallel)
    {
      /* If this is not a combined parallel loop, emit a call to
	 GOMP_loop_foo_start in ENTRY_BB.  */
      list = alloc_stmt_list ();
      t = build_fold_addr_expr (iend0);
      args = tree_cons (NULL, t, NULL);
      t = build_fold_addr_expr (istart0);
      args = tree_cons (NULL, t, args);
      if (fd->chunk_size)
	{
	  t = fold_convert (long_integer_type_node, fd->chunk_size);
	  args = tree_cons (NULL, t, args);
	}
      t = fold_convert (long_integer_type_node, fd->step);
      args = tree_cons (NULL, t, args);
      t = fold_convert (long_integer_type_node, fd->n2);
      args = tree_cons (NULL, t, args);
      t = fold_convert (long_integer_type_node, fd->n1);
      args = tree_cons (NULL, t, args);
      t = build_function_call_expr (built_in_decls[start_fn], args);
      t = get_formal_tmp_var (t, &list);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l0),
		  build_and_jump (&l3));
      append_to_statement_list (t, &list);
      bsi_insert_after (&si, list, BSI_SAME_STMT);
    }
  bsi_remove (&si, true);

  /* Iteration setup for sequential loop goes in L0_BB.  */
  list = alloc_stmt_list ();
  t = fold_convert (type, istart0);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = fold_convert (type, iend0);
  t = build2 (MODIFY_EXPR, void_type_node, iend, t);
  gimplify_and_add (t, &list);

  si = bsi_start (l0_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* Code to control the increment and predicate for the sequential
     loop goes in the first half of EXIT_BB (we split EXIT_BB so
     that we can inherit all the edges going out of the loop
     body).  */
  list = alloc_stmt_list ();

  t = build2 (PLUS_EXPR, type, fd->v, fd->step);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = build2 (fd->cond_code, boolean_type_node, fd->v, iend);
  t = get_formal_tmp_var (t, &list);
  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l1),
	      build_and_jump (&l2));
  append_to_statement_list (t, &list);

  si = bsi_last (cont_bb);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
  bsi_remove (&si, true);

  /* Emit code to get the next parallel iteration in L2_BB.  */
  list = alloc_stmt_list ();

  t = build_fold_addr_expr (iend0);
  args = tree_cons (NULL, t, NULL);
  t = build_fold_addr_expr (istart0);
  args = tree_cons (NULL, t, args);
  t = build_function_call_expr (built_in_decls[next_fn], args);
  t = get_formal_tmp_var (t, &list);
  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l0),
	      build_and_jump (&l3));
  append_to_statement_list (t, &list);

  si = bsi_start (l2_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* Add the loop cleanup function.  */
  si = bsi_last (exit_bb);
  if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
    t = built_in_decls[BUILT_IN_GOMP_LOOP_END_NOWAIT];
  else
    t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
  t = build_function_call_expr (t, NULL);
  bsi_insert_after (&si, t, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Connect the new blocks.  */
  remove_edge (single_succ_edge (entry_bb));
  if (in_combined_parallel)
    make_edge (entry_bb, l2_bb, EDGE_FALLTHRU);
  else
    {
      make_edge (entry_bb, l0_bb, EDGE_TRUE_VALUE);
      make_edge (entry_bb, l3_bb, EDGE_FALSE_VALUE);
    }

  make_edge (l0_bb, l1_bb, EDGE_FALLTHRU);

  remove_edge (single_succ_edge (cont_bb));
  make_edge (cont_bb, l1_bb, EDGE_TRUE_VALUE);
  make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);

  make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
  make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
}


/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and no specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	n = (adj + N2 - N1) / STEP;
	q = n / nthreads;
	q += (q * nthreads != n);
	s0 = q * threadid;
	e0 = min(s0 + q, n);
	if (s0 >= e0) goto L2; else goto L0;
    L0:
	V = s0 * STEP + N1;
	e = e0 * STEP + N1;
    L1:
	BODY;
	V += STEP;
	if (V cond e) goto L1;
    L2:
*/

static void
expand_omp_for_static_nochunk (struct omp_region *region,
			       struct omp_for_data *fd)
{
  tree l0, l1, l2, n, q, s0, e0, e, t, nthreads, threadid;
  tree type, utype, list;
  basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
  basic_block fin_bb;
  block_stmt_iterator si;

  type = TREE_TYPE (fd->v);
  utype = lang_hooks.types.unsigned_type (type);

  entry_bb = region->entry;
  seq_start_bb = create_empty_bb (entry_bb);
  body_bb = single_succ (entry_bb);
  cont_bb = region->cont;
  fin_bb = single_succ (cont_bb);
  exit_bb = region->exit;

  l0 = tree_block_label (seq_start_bb);
  l1 = tree_block_label (body_bb);
  l2 = tree_block_label (fin_bb);

  /* Iteration space partitioning goes in ENTRY_BB.  */
  list = alloc_stmt_list ();

  t = built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (utype, t);
  nthreads = get_formal_tmp_var (t, &list);

  t = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (utype, t);
  threadid = get_formal_tmp_var (t, &list);

  fd->n1 = fold_convert (type, fd->n1);
  if (!is_gimple_val (fd->n1))
    fd->n1 = get_formal_tmp_var (fd->n1, &list);

  fd->n2 = fold_convert (type, fd->n2);
  if (!is_gimple_val (fd->n2))
    fd->n2 = get_formal_tmp_var (fd->n2, &list);

  fd->step = fold_convert (type, fd->step);
  if (!is_gimple_val (fd->step))
    fd->step = get_formal_tmp_var (fd->step, &list);

  t = build_int_cst (type, (fd->cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, type, fd->step, t);
  t = fold_build2 (PLUS_EXPR, type, t, fd->n2);
  t = fold_build2 (MINUS_EXPR, type, t, fd->n1);
  t = fold_build2 (TRUNC_DIV_EXPR, type, t, fd->step);
  t = fold_convert (utype, t);
  if (is_gimple_val (t))
    n = t;
  else
    n = get_formal_tmp_var (t, &list);

  t = build2 (TRUNC_DIV_EXPR, utype, n, nthreads);
  q = get_formal_tmp_var (t, &list);

  t = build2 (MULT_EXPR, utype, q, nthreads);
  t = build2 (NE_EXPR, utype, t, n);
  t = build2 (PLUS_EXPR, utype, q, t);
  q = get_formal_tmp_var (t, &list);

  t = build2 (MULT_EXPR, utype, q, threadid);
  s0 = get_formal_tmp_var (t, &list);

  t = build2 (PLUS_EXPR, utype, s0, q);
  t = build2 (MIN_EXPR, utype, t, n);
  e0 = get_formal_tmp_var (t, &list);

  t = build2 (GE_EXPR, boolean_type_node, s0, e0);
  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l2),
	      build_and_jump (&l0));
  append_to_statement_list (t, &list);

  si = bsi_last (entry_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  list = alloc_stmt_list ();

  t = fold_convert (type, s0);
  t = build2 (MULT_EXPR, type, t, fd->step);
  t = build2 (PLUS_EXPR, type, t, fd->n1);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = fold_convert (type, e0);
  t = build2 (MULT_EXPR, type, t, fd->step);
  t = build2 (PLUS_EXPR, type, t, fd->n1);
  e = get_formal_tmp_var (t, &list);

  si = bsi_start (seq_start_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* The code controlling the sequential loop replaces the OMP_CONTINUE.  */
  list = alloc_stmt_list ();

  t = build2 (PLUS_EXPR, type, fd->v, fd->step);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = build2 (fd->cond_code, boolean_type_node, fd->v, e);
  t = get_formal_tmp_var (t, &list);
  t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l1),
	      build_and_jump (&l2));
  append_to_statement_list (t, &list);

  si = bsi_last (cont_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Replace the OMP_RETURN with a barrier, or nothing.  */
  si = bsi_last (exit_bb);
  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
    {
      list = alloc_stmt_list ();
      build_omp_barrier (&list);
      bsi_insert_after (&si, list, BSI_SAME_STMT);
    }
  bsi_remove (&si, true);

  /* Connect all the blocks.  */
  make_edge (seq_start_bb, body_bb, EDGE_FALLTHRU);

  remove_edge (single_succ_edge (entry_bb));
  make_edge (entry_bb, fin_bb, EDGE_TRUE_VALUE);
  make_edge (entry_bb, seq_start_bb, EDGE_FALSE_VALUE);

  make_edge (cont_bb, body_bb, EDGE_TRUE_VALUE);
  find_edge (cont_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
}


/* A subroutine of expand_omp_for.  Generate code for a parallel
   loop with static schedule and a specified chunk size.  Given
   parameters:

	for (V = N1; V cond N2; V += STEP) BODY;

   where COND is "<" or ">", we generate pseudocode

	if (cond is <)
	  adj = STEP - 1;
	else
	  adj = STEP + 1;
	n = (adj + N2 - N1) / STEP;
	trip = 0;
    L0:
	s0 = (trip * nthreads + threadid) * CHUNK;
	e0 = min(s0 + CHUNK, n);
	if (s0 < n) goto L1; else goto L4;
    L1:
	V = s0 * STEP + N1;
	e = e0 * STEP + N1;
    L2:
	BODY;
	V += STEP;
	if (V cond e) goto L2; else goto L3;
    L3:
	trip += 1;
	goto L0;
    L4:
*/

static void
expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
{
  tree l0, l1, l2, l3, l4, n, s0, e0, e, t;
  tree trip, nthreads, threadid;
  tree type, utype;
  basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
  basic_block trip_update_bb, cont_bb, fin_bb;
  tree list;
  block_stmt_iterator si;

  type = TREE_TYPE (fd->v);
  utype = lang_hooks.types.unsigned_type (type);

  entry_bb = region->entry;
  iter_part_bb = create_empty_bb (entry_bb);
  seq_start_bb = create_empty_bb (iter_part_bb);
  body_bb = single_succ (entry_bb);
  cont_bb = region->cont;
  trip_update_bb = create_empty_bb (cont_bb);
  fin_bb = single_succ (cont_bb);
  exit_bb = region->exit;

  l0 = tree_block_label (iter_part_bb);
  l1 = tree_block_label (seq_start_bb);
  l2 = tree_block_label (body_bb);
  l3 = tree_block_label (trip_update_bb);
  l4 = tree_block_label (fin_bb);

  /* Trip and adjustment setup goes in ENTRY_BB.  */
  list = alloc_stmt_list ();

  t = built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (utype, t);
  nthreads = get_formal_tmp_var (t, &list);

  t = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
  t = build_function_call_expr (t, NULL);
  t = fold_convert (utype, t);
  threadid = get_formal_tmp_var (t, &list);

  fd->n1 = fold_convert (type, fd->n1);
  if (!is_gimple_val (fd->n1))
    fd->n1 = get_formal_tmp_var (fd->n1, &list);

  fd->n2 = fold_convert (type, fd->n2);
  if (!is_gimple_val (fd->n2))
    fd->n2 = get_formal_tmp_var (fd->n2, &list);

  fd->step = fold_convert (type, fd->step);
  if (!is_gimple_val (fd->step))
    fd->step = get_formal_tmp_var (fd->step, &list);

  fd->chunk_size = fold_convert (utype, fd->chunk_size);
  if (!is_gimple_val (fd->chunk_size))
    fd->chunk_size = get_formal_tmp_var (fd->chunk_size, &list);

  t = build_int_cst (type, (fd->cond_code == LT_EXPR ? -1 : 1));
  t = fold_build2 (PLUS_EXPR, type, fd->step, t);
  t = fold_build2 (PLUS_EXPR, type, t, fd->n2);
  t = fold_build2 (MINUS_EXPR, type, t, fd->n1);
  t = fold_build2 (TRUNC_DIV_EXPR, type, t, fd->step);
  t = fold_convert (utype, t);
  if (is_gimple_val (t))
    n = t;
  else
    n = get_formal_tmp_var (t, &list);

  t = build_int_cst (utype, 0);
  trip = get_initialized_tmp_var (t, &list, NULL);

  si = bsi_last (entry_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Iteration space partitioning goes in ITER_PART_BB.  */
  list = alloc_stmt_list ();

  t = build2 (MULT_EXPR, utype, trip, nthreads);
  t = build2 (PLUS_EXPR, utype, t, threadid);
  t = build2 (MULT_EXPR, utype, t, fd->chunk_size);
  s0 = get_formal_tmp_var (t, &list);

  t = build2 (PLUS_EXPR, utype, s0, fd->chunk_size);
  t = build2 (MIN_EXPR, utype, t, n);
  e0 = get_formal_tmp_var (t, &list);

  t = build2 (LT_EXPR, boolean_type_node, s0, n);
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l1), build_and_jump (&l4));
  append_to_statement_list (t, &list);

  si = bsi_start (iter_part_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* Setup code for sequential iteration goes in SEQ_START_BB.  */
  list = alloc_stmt_list ();

  t = fold_convert (type, s0);
  t = build2 (MULT_EXPR, type, t, fd->step);
  t = build2 (PLUS_EXPR, type, t, fd->n1);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = fold_convert (type, e0);
  t = build2 (MULT_EXPR, type, t, fd->step);
  t = build2 (PLUS_EXPR, type, t, fd->n1);
  e = get_formal_tmp_var (t, &list);

  si = bsi_start (seq_start_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* The code controlling the sequential loop goes in CONT_BB,
     replacing the OMP_CONTINUE.  */
  list = alloc_stmt_list ();

  t = build2 (PLUS_EXPR, type, fd->v, fd->step);
  t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
  gimplify_and_add (t, &list);

  t = build2 (fd->cond_code, boolean_type_node, fd->v, e);
  t = get_formal_tmp_var (t, &list);
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l2), build_and_jump (&l3));
  append_to_statement_list (t, &list);

  si = bsi_last (cont_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
  bsi_insert_after (&si, list, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Trip update code goes into TRIP_UPDATE_BB.  */
  list = alloc_stmt_list ();

  t = build_int_cst (utype, 1);
  t = build2 (PLUS_EXPR, utype, trip, t);
  t = build2 (MODIFY_EXPR, void_type_node, trip, t);
  gimplify_and_add (t, &list);

  si = bsi_start (trip_update_bb);
  bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);

  /* Replace the OMP_RETURN with a barrier, or nothing.  */
  si = bsi_last (exit_bb);
  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
    {
      list = alloc_stmt_list ();
      build_omp_barrier (&list);
      bsi_insert_after (&si, list, BSI_SAME_STMT);
    }
  bsi_remove (&si, true);

  /* Connect the new blocks.  */
  remove_edge (single_succ_edge (entry_bb));
  make_edge (entry_bb, iter_part_bb, EDGE_FALLTHRU);

  make_edge (iter_part_bb, seq_start_bb, EDGE_TRUE_VALUE);
  make_edge (iter_part_bb, fin_bb, EDGE_FALSE_VALUE);

  make_edge (seq_start_bb, body_bb, EDGE_FALLTHRU);

  remove_edge (single_succ_edge (cont_bb));
  make_edge (cont_bb, body_bb, EDGE_TRUE_VALUE);
  make_edge (cont_bb, trip_update_bb, EDGE_FALSE_VALUE);

  make_edge (trip_update_bb, iter_part_bb, EDGE_FALLTHRU);
}


/* Expand the OpenMP loop defined by REGION.  */

static void
expand_omp_for (struct omp_region *region)
{
  struct omp_for_data fd;

  push_gimplify_context ();

  extract_omp_for_data (last_stmt (region->entry), &fd);
  region->sched_kind = fd.sched_kind;

  if (fd.sched_kind == OMP_CLAUSE_SCHEDULE_STATIC && !fd.have_ordered)
    {
      if (fd.chunk_size == NULL)
	expand_omp_for_static_nochunk (region, &fd);
      else
	expand_omp_for_static_chunk (region, &fd);
    }
  else
    {
      int fn_index = fd.sched_kind + fd.have_ordered * 4;
      int start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
      int next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
      expand_omp_for_generic (region, &fd, start_ix, next_ix);
    }

  pop_gimplify_context (NULL);
}
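
/* For reference, the FN_INDEX arithmetic above depends on the
   BUILT_IN_GOMP_LOOP_*_START/NEXT enumerators being laid out as four
   consecutive schedules (static, dynamic, guided, runtime), with the
   ordered variants following as a second group of four.  E.g.
   'schedule (dynamic)' without 'ordered' selects
   GOMP_loop_dynamic_start/GOMP_loop_dynamic_next, while adding
   'ordered' shifts the index by four to the
   GOMP_loop_ordered_dynamic_* entry points.  */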


/* Expand code for an OpenMP sections directive.  In pseudocode, we generate

	v = GOMP_sections_start (n);
    L0:
	switch (v)
	  {
	  case 0:
	    goto L2;
	  case 1:
	    section 1;
	    goto L1;
	  case 2:
	    ...
	  case n:
	    ...
	  default:
	    abort ();
	  }
    L1:
	v = GOMP_sections_next ();
	goto L0;
    L2:
	reduction;

    If this is a combined parallel sections, replace the call to
    GOMP_sections_start with 'goto L1'.  */

static void
expand_omp_sections (struct omp_region *region)
{
  tree label_vec, l0, l1, l2, t, u, v, sections_stmt;
  unsigned i, len;
  basic_block entry_bb, exit_bb, l0_bb, l1_bb, l2_bb, default_bb;
  block_stmt_iterator si;
  struct omp_region *inner;
  edge e;

  entry_bb = region->entry;
  l0_bb = create_empty_bb (entry_bb);
  l1_bb = region->cont;
  l2_bb = single_succ (l1_bb);
  default_bb = create_empty_bb (l1_bb->prev_bb);
  exit_bb = region->exit;

  l0 = tree_block_label (l0_bb);
  l1 = tree_block_label (l1_bb);
  l2 = tree_block_label (l2_bb);

  v = create_tmp_var (unsigned_type_node, ".section");

  /* We will build a switch() with enough cases for all the
     OMP_SECTION regions, a '0' case to handle the end of more work
     and a default case to abort if something goes wrong.  */
  len = EDGE_COUNT (entry_bb->succs);
  label_vec = make_tree_vec (len + 2);

  /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
     OMP_SECTIONS statement.  */
  si = bsi_last (entry_bb);
  sections_stmt = bsi_stmt (si);
  gcc_assert (TREE_CODE (sections_stmt) == OMP_SECTIONS);
  if (!is_combined_parallel (region))
    {
      /* If we are not inside a combined parallel+sections region,
	 call GOMP_sections_start.  */
      t = build_int_cst (unsigned_type_node, len);
      t = tree_cons (NULL, t, NULL);
      u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
      t = build_function_call_expr (u, t);
      t = build2 (MODIFY_EXPR, void_type_node, v, t);
      bsi_insert_after (&si, t, BSI_SAME_STMT);
    }
  bsi_remove (&si, true);

  /* The switch() statement replacing OMP_SECTIONS goes in L0_BB.  */
  si = bsi_start (l0_bb);

  t = build3 (SWITCH_EXPR, void_type_node, v, NULL, label_vec);
  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

  t = build3 (CASE_LABEL_EXPR, void_type_node,
	      build_int_cst (unsigned_type_node, 0), NULL, l2);
  TREE_VEC_ELT (label_vec, 0) = t;
  make_edge (l0_bb, l2_bb, 0);

  /* Convert each OMP_SECTION into a CASE_LABEL_EXPR.  */
  for (inner = region->inner, i = 1; inner; inner = inner->next, ++i)
    {
      basic_block s_entry_bb, s_exit_bb;

      s_entry_bb = inner->entry;
      s_exit_bb = inner->exit;

      t = tree_block_label (s_entry_bb);
      u = build_int_cst (unsigned_type_node, i);
      u = build3 (CASE_LABEL_EXPR, void_type_node, u, NULL, t);
      TREE_VEC_ELT (label_vec, i) = u;

      si = bsi_last (s_entry_bb);
      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTION);
      gcc_assert (i < len || OMP_SECTION_LAST (bsi_stmt (si)));
      bsi_remove (&si, true);

      si = bsi_last (s_exit_bb);
      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
      bsi_remove (&si, true);

      e = single_pred_edge (s_entry_bb);
      e->flags = 0;
      redirect_edge_pred (e, l0_bb);

      single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
      single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
    }

  /* Error handling code goes in DEFAULT_BB.  */
  t = tree_block_label (default_bb);
  u = build3 (CASE_LABEL_EXPR, void_type_node, NULL, NULL, t);
  TREE_VEC_ELT (label_vec, len + 1) = u;
  make_edge (l0_bb, default_bb, 0);

  si = bsi_start (default_bb);
  t = built_in_decls[BUILT_IN_TRAP];
  t = build_function_call_expr (t, NULL);
  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);

  /* Code to get the next section goes in L1_BB.  */
  si = bsi_last (l1_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);

  t = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
  t = build_function_call_expr (t, NULL);
  t = build2 (MODIFY_EXPR, void_type_node, v, t);
  bsi_insert_after (&si, t, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Cleanup function replaces OMP_RETURN in EXIT_BB.  */
  si = bsi_last (exit_bb);
  if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
    t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
  else
    t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
  t = build_function_call_expr (t, NULL);
  bsi_insert_after (&si, t, BSI_SAME_STMT);
  bsi_remove (&si, true);

  /* Connect the new blocks.  */
  if (is_combined_parallel (region))
    {
      /* If this was a combined parallel+sections region, we did not
	 emit a GOMP_sections_start in the entry block, so we just
	 need to jump to L1_BB to get the next section.  */
      make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);
    }
  else
    make_edge (entry_bb, l0_bb, EDGE_FALLTHRU);

  e = single_succ_edge (l1_bb);
  redirect_edge_succ (e, l0_bb);
  e->flags = EDGE_FALLTHRU;
}
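
/* A concrete illustration (not part of the original sources): for

	#pragma omp sections
	{
	  #pragma omp section
	    stmt1;
	  #pragma omp section
	    stmt2;
	}

   the switch built above dispatches on v = GOMP_sections_start (2),
   with cases 1 and 2 jumping to the two section bodies, case 0
   jumping to the exit block, a trapping default case, and
   GOMP_sections_next () fetching the next case value after each
   section completes.  */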


/* Expand code for an OpenMP single directive.  We've already expanded
   much of the code; here we simply place the GOMP_barrier call.  */

static void
expand_omp_single (struct omp_region *region)
{
  basic_block entry_bb, exit_bb;
  block_stmt_iterator si;
  bool need_barrier = false;

  entry_bb = region->entry;
  exit_bb = region->exit;

  si = bsi_last (entry_bb);
  /* The terminal barrier at the end of a GOMP_single_copy sequence cannot
     be removed.  We need to ensure that the thread that entered the single
     does not exit before the data is copied out by the other threads.  */
  if (find_omp_clause (OMP_SINGLE_CLAUSES (bsi_stmt (si)),
		       OMP_CLAUSE_COPYPRIVATE))
    need_barrier = true;
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE);
  bsi_remove (&si, true);
  single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

  si = bsi_last (exit_bb);
  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)) || need_barrier)
    {
      tree t = alloc_stmt_list ();
      build_omp_barrier (&t);
      bsi_insert_after (&si, t, BSI_SAME_STMT);
    }
  bsi_remove (&si, true);
  single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
}


/* Generic expansion for OpenMP synchronization directives: master,
   ordered and critical.  All we need to do here is remove the entry
   and exit markers for REGION.  */

static void
expand_omp_synch (struct omp_region *region)
{
  basic_block entry_bb, exit_bb;
  block_stmt_iterator si;

  entry_bb = region->entry;
  exit_bb = region->exit;

  si = bsi_last (entry_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE
	      || TREE_CODE (bsi_stmt (si)) == OMP_MASTER
	      || TREE_CODE (bsi_stmt (si)) == OMP_ORDERED
	      || TREE_CODE (bsi_stmt (si)) == OMP_CRITICAL);
  bsi_remove (&si, true);
  single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;

  si = bsi_last (exit_bb);
  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
  bsi_remove (&si, true);
  single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
}


/* Expand the parallel region tree rooted at REGION.  Expansion
   proceeds in depth-first order.  Innermost regions are expanded
   first.  This way, parallel regions that require a new function to
   be created (e.g., OMP_PARALLEL) can be expanded without having any
   internal dependencies in their body.  */

static void
expand_omp (struct omp_region *region)
{
  while (region)
    {
      if (region->inner)
	expand_omp (region->inner);

      switch (region->type)
	{
	case OMP_PARALLEL:
	  expand_omp_parallel (region);
	  break;

	case OMP_FOR:
	  expand_omp_for (region);
	  break;

	case OMP_SECTIONS:
	  expand_omp_sections (region);
	  break;

	case OMP_SECTION:
	  /* Individual omp sections are handled together with their
	     parent OMP_SECTIONS region.  */
	  break;

	case OMP_SINGLE:
	  expand_omp_single (region);
	  break;

	case OMP_MASTER:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	  expand_omp_synch (region);
	  break;

	default:
	  gcc_unreachable ();
	}

      region = region->next;
    }
}


/* Helper for build_omp_regions.  Scan the dominator tree starting at
   block BB.  PARENT is the region that contains BB.  */

static void
build_omp_regions_1 (basic_block bb, struct omp_region *parent)
{
  block_stmt_iterator si;
  tree stmt;
  basic_block son;

  si = bsi_last (bb);
  if (!bsi_end_p (si) && OMP_DIRECTIVE_P (bsi_stmt (si)))
    {
      struct omp_region *region;
      enum tree_code code;

      stmt = bsi_stmt (si);
      code = TREE_CODE (stmt);

      if (code == OMP_RETURN)
	{
	  /* STMT is the return point out of region PARENT.  Mark it
	     as the exit point and make PARENT the immediately
	     enclosing region.  */
	  gcc_assert (parent);
	  region = parent;
	  region->exit = bb;
	  parent = parent->outer;

	  /* If REGION is a parallel region, determine whether it is
	     a combined parallel+workshare region.  */
	  if (region->type == OMP_PARALLEL)
	    determine_parallel_type (region);
	}
      else if (code == OMP_CONTINUE)
	{
	  gcc_assert (parent);
	  parent->cont = bb;
	}
      else
	{
	  /* Otherwise, this directive becomes the parent for a new
	     region.  */
	  region = new_omp_region (bb, code, parent);
	  parent = region;
	}
    }

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    build_omp_regions_1 (son, parent);
}


/* Scan the CFG and build a tree of OMP regions.  The resulting tree
   is rooted at root_omp_region.  */

static void
build_omp_regions (void)
{
  gcc_assert (root_omp_region == NULL);
  calculate_dominance_info (CDI_DOMINATORS);
  build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL);
}


/* Main entry point for expanding OMP-GIMPLE into runtime calls.  */

static unsigned int
execute_expand_omp (void)
{
  build_omp_regions ();

  if (!root_omp_region)
    return 0;

  if (dump_file)
    {
      fprintf (dump_file, "\nOMP region tree\n\n");
      dump_omp_region (dump_file, root_omp_region, 0);
      fprintf (dump_file, "\n");
    }

  remove_exit_barriers (root_omp_region);

  expand_omp (root_omp_region);

  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  cleanup_tree_cfg ();

  free_omp_regions ();

  return 0;
}

static bool
gate_expand_omp (void)
{
  return flag_openmp != 0 && errorcount == 0;
}

struct tree_opt_pass pass_expand_omp =
{
  "ompexp",				/* name */
  gate_expand_omp,			/* gate */
  execute_expand_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lomp,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};

/* Routines to lower OpenMP directives into OMP-GIMPLE.  */

/* Lower the OpenMP sections directive in *STMT_P.  */

static void
lower_omp_sections (tree *stmt_p, omp_context *ctx)
{
  tree new_stmt, stmt, body, bind, block, ilist, olist, new_body;
  tree t, dlist;
  tree_stmt_iterator tsi;
  unsigned i, len;

  stmt = *stmt_p;

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;
  lower_rec_input_clauses (OMP_SECTIONS_CLAUSES (stmt), &ilist, &dlist, ctx);

  tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
  for (len = 0; !tsi_end_p (tsi); len++, tsi_next (&tsi))
    continue;

  tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
  body = alloc_stmt_list ();
  for (i = 0; i < len; i++, tsi_next (&tsi))
    {
      omp_context *sctx;
      tree sec_start, sec_end;

      sec_start = tsi_stmt (tsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      append_to_statement_list (sec_start, &body);

      lower_omp (&OMP_SECTION_BODY (sec_start), sctx);
      append_to_statement_list (OMP_SECTION_BODY (sec_start), &body);
      OMP_SECTION_BODY (sec_start) = NULL;

      if (i == len - 1)
	{
	  tree l = alloc_stmt_list ();
	  lower_lastprivate_clauses (OMP_SECTIONS_CLAUSES (stmt), NULL,
				     &l, ctx);
	  append_to_statement_list (l, &body);
	  OMP_SECTION_LAST (sec_start) = 1;
	}

      sec_end = make_node (OMP_RETURN);
      append_to_statement_list (sec_end, &body);
    }

  block = make_node (BLOCK);
  bind = build3 (BIND_EXPR, void_type_node, NULL, body, block);

  olist = NULL_TREE;
  lower_reduction_clauses (OMP_SECTIONS_CLAUSES (stmt), &olist, ctx);

  pop_gimplify_context (NULL_TREE);
  record_vars_into (ctx->block_vars, ctx->cb.dst_fn);

  new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (new_stmt) = 1;

  new_body = alloc_stmt_list ();
  append_to_statement_list (ilist, &new_body);
  append_to_statement_list (stmt, &new_body);
  append_to_statement_list (bind, &new_body);

  t = make_node (OMP_CONTINUE);
  append_to_statement_list (t, &new_body);

  append_to_statement_list (olist, &new_body);
  append_to_statement_list (dlist, &new_body);

  maybe_catch_exception (&new_body);

  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SECTIONS_CLAUSES (stmt),
					     OMP_CLAUSE_NOWAIT);
  append_to_statement_list (t, &new_body);

  BIND_EXPR_BODY (new_stmt) = new_body;
  OMP_SECTIONS_BODY (stmt) = NULL;

  *stmt_p = new_stmt;
}
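
/* In outline, the BIND_EXPR constructed above arranges the lowered
   directive as (an illustrative sketch):

	ilist			<- private/firstprivate setup
	OMP_SECTIONS
	bind: section bodies, each bracketed by OMP_SECTION/OMP_RETURN
	OMP_CONTINUE
	olist			<- reduction merges
	dlist			<- cleanup from the input clauses
	OMP_RETURN		<- nowait-aware

   which pass_expand_omp later turns into the switch dispatch shown
   in expand_omp_sections.  */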


/* A subroutine of lower_omp_single.  Expand the simple form of
   an OMP_SINGLE, without a copyprivate clause:

	if (GOMP_single_start ())
	  BODY;
	[ GOMP_barrier (); ]	-> unless 'nowait' is present.

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  for a synchronization analysis pass.  */

static void
lower_omp_single_simple (tree single_stmt, tree *pre_p)
{
  tree t;

  t = built_in_decls[BUILT_IN_GOMP_SINGLE_START];
  t = build_function_call_expr (t, NULL);
  t = build3 (COND_EXPR, void_type_node, t,
	      OMP_SINGLE_BODY (single_stmt), NULL);
  gimplify_and_add (t, pre_p);
}
50674e96
DN
3486
3487/* A subroutine of lower_omp_single. Expand the simple form of
953ff289
DN
3488 an OMP_SINGLE, with a copyprivate clause:
3489
3490 #pragma omp single copyprivate (a, b, c)
3491
3492 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
3493
3494 {
3495 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
3496 {
3497 BODY;
3498 copyout.a = a;
3499 copyout.b = b;
3500 copyout.c = c;
3501 GOMP_single_copy_end (&copyout);
3502 }
3503 else
3504 {
3505 a = copyout_p->a;
3506 b = copyout_p->b;
3507 c = copyout_p->c;
3508 }
3509 GOMP_barrier ();
3510 }
50674e96
DN
3511
3512 FIXME. It may be better to delay expanding the logic of this until
3513 pass_expand_omp. The expanded logic may make the job more difficult
3514 to a synchronization analysis pass. */
953ff289
DN
3515
3516static void
50674e96 3517lower_omp_single_copy (tree single_stmt, tree *pre_p, omp_context *ctx)
953ff289
DN
3518{
3519 tree ptr_type, t, args, l0, l1, l2, copyin_seq;
3520
3521 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
3522
3523 ptr_type = build_pointer_type (ctx->record_type);
3524 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
3525
3526 l0 = create_artificial_label ();
3527 l1 = create_artificial_label ();
3528 l2 = create_artificial_label ();
3529
3530 t = built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START];
3531 t = build_function_call_expr (t, NULL);
3532 t = fold_convert (ptr_type, t);
3533 t = build2 (MODIFY_EXPR, void_type_node, ctx->receiver_decl, t);
3534 gimplify_and_add (t, pre_p);
3535
3536 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
3537 build_int_cst (ptr_type, 0));
3538 t = build3 (COND_EXPR, void_type_node, t,
3539 build_and_jump (&l0), build_and_jump (&l1));
3540 gimplify_and_add (t, pre_p);
3541
3542 t = build1 (LABEL_EXPR, void_type_node, l0);
3543 gimplify_and_add (t, pre_p);
3544
3545 append_to_statement_list (OMP_SINGLE_BODY (single_stmt), pre_p);
3546
3547 copyin_seq = NULL;
50674e96 3548 lower_copyprivate_clauses (OMP_SINGLE_CLAUSES (single_stmt), pre_p,
953ff289
DN
3549 &copyin_seq, ctx);
3550
3551 t = build_fold_addr_expr (ctx->sender_decl);
3552 args = tree_cons (NULL, t, NULL);
3553 t = built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_END];
3554 t = build_function_call_expr (t, args);
3555 gimplify_and_add (t, pre_p);
3556
3557 t = build_and_jump (&l2);
3558 gimplify_and_add (t, pre_p);
3559
3560 t = build1 (LABEL_EXPR, void_type_node, l1);
3561 gimplify_and_add (t, pre_p);
3562
3563 append_to_statement_list (copyin_seq, pre_p);
3564
3565 t = build1 (LABEL_EXPR, void_type_node, l2);
3566 gimplify_and_add (t, pre_p);
953ff289
DN
3567}
3568
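/* The labels built above give the copyprivate expansion its goto
   form.  As a sketch (illustration only, not emitted verbatim):

     .omp_copy_i = GOMP_single_copy_start ();
     if (.omp_copy_i == NULL) goto l0; else goto l1;
     l0: BODY; <fill .omp_copy_o>; GOMP_single_copy_end (&.omp_copy_o);
	 goto l2;
     l1: <copy fields out of *.omp_copy_i>;
     l2: ;  */
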

/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (tree *stmt_p, omp_context *ctx)
{
  tree t, bind, block, single_stmt = *stmt_p, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  lower_rec_input_clauses (OMP_SINGLE_CLAUSES (single_stmt),
			   &BIND_EXPR_BODY (bind), &dlist, ctx);
  lower_omp (&OMP_SINGLE_BODY (single_stmt), ctx);

  append_to_statement_list (single_stmt, &BIND_EXPR_BODY (bind));

  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &BIND_EXPR_BODY (bind), ctx);
  else
    lower_omp_single_simple (single_stmt, &BIND_EXPR_BODY (bind));

  OMP_SINGLE_BODY (single_stmt) = NULL;

  append_to_statement_list (dlist, &BIND_EXPR_BODY (bind));

  maybe_catch_exception (&BIND_EXPR_BODY (bind));

  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SINGLE_CLAUSES (single_stmt),
					     OMP_CLAUSE_NOWAIT);
  append_to_statement_list (t, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);

  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}


/* Expand code for an OpenMP master directive.  */

static void
lower_omp_master (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p, lab = NULL, x;

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  x = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
  x = build_function_call_expr (x, NULL);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_MASTER_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_MASTER_BODY (stmt));
  append_to_statement_list (OMP_MASTER_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_MASTER_BODY (stmt) = NULL;

  x = build1 (LABEL_EXPR, void_type_node, lab);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  x = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (x) = 1;
  append_to_statement_list (x, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);

  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}

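/* For illustration only:  a user-level

     #pragma omp master
       stmt;

   is lowered to approximately

     if (omp_get_thread_num () == 0)
       stmt;

   i.e. the body is guarded by a thread-number test rather than by any
   locking, and no barrier is implied (hence OMP_RETURN_NOWAIT above).  */
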

/* Expand code for an OpenMP ordered directive.  */

static void
lower_omp_ordered (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p, x;

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  x = built_in_decls[BUILT_IN_GOMP_ORDERED_START];
  x = build_function_call_expr (x, NULL);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_ORDERED_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_ORDERED_BODY (stmt));
  append_to_statement_list (OMP_ORDERED_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_ORDERED_BODY (stmt) = NULL;

  x = built_in_decls[BUILT_IN_GOMP_ORDERED_END];
  x = build_function_call_expr (x, NULL);
  gimplify_and_add (x, &BIND_EXPR_BODY (bind));

  x = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (x) = 1;
  append_to_statement_list (x, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);

  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}

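/* For illustration only:  the body of

     #pragma omp ordered
       stmt;

   is simply bracketed by runtime calls,

     GOMP_ordered_start ();
     stmt;
     GOMP_ordered_end ();

   leaving it to libgomp to serialize the bodies in iteration order.  */
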

/* Gimplify an OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   it requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

static GTY((param1_is (tree), param2_is (tree)))
  splay_tree critical_name_mutexes;

static void
lower_omp_critical (tree *stmt_p, omp_context *ctx)
{
  tree bind, block, stmt = *stmt_p;
  tree t, lock, unlock, name;

  name = OMP_CRITICAL_NAME (stmt);
  if (name)
    {
      tree decl, args;
      splay_tree_node n;

      if (!critical_name_mutexes)
	critical_name_mutexes
	  = splay_tree_new_ggc (splay_tree_compare_pointers);

      n = splay_tree_lookup (critical_name_mutexes, (splay_tree_key) name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node, NULL);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  cgraph_varpool_finalize_decl (decl);

	  splay_tree_insert (critical_name_mutexes, (splay_tree_key) name,
			     (splay_tree_value) decl);
	}
      else
	decl = (tree) n->value;

      args = tree_cons (NULL, build_fold_addr_expr (decl), NULL);
      lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_START];
      lock = build_function_call_expr (lock, args);

      args = tree_cons (NULL, build_fold_addr_expr (decl), NULL);
      unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_NAME_END];
      unlock = build_function_call_expr (unlock, args);
    }
  else
    {
      lock = built_in_decls[BUILT_IN_GOMP_CRITICAL_START];
      lock = build_function_call_expr (lock, NULL);

      unlock = built_in_decls[BUILT_IN_GOMP_CRITICAL_END];
      unlock = build_function_call_expr (unlock, NULL);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
  TREE_SIDE_EFFECTS (bind) = 1;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));

  gimplify_and_add (lock, &BIND_EXPR_BODY (bind));

  lower_omp (&OMP_CRITICAL_BODY (stmt), ctx);
  maybe_catch_exception (&OMP_CRITICAL_BODY (stmt));
  append_to_statement_list (OMP_CRITICAL_BODY (stmt), &BIND_EXPR_BODY (bind));
  OMP_CRITICAL_BODY (stmt) = NULL;

  gimplify_and_add (unlock, &BIND_EXPR_BODY (bind));

  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = 1;
  append_to_statement_list (t, &BIND_EXPR_BODY (bind));

  pop_gimplify_context (bind);
  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
}

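/* For illustration only:  given

     #pragma omp critical (io)
       stmt;

   the code above produces approximately

     GOMP_critical_name_start (&.gomp_critical_user_io);
     stmt;
     GOMP_critical_name_end (&.gomp_critical_user_io);

   where .gomp_critical_user_io is emitted as a common symbol, so every
   translation unit that names the same critical region shares one
   mutex.  An unnamed critical uses GOMP_critical_start/end instead.
   The region name 'io' is made up for the example.  */
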

/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *BODY_P.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
			   struct omp_context *ctx)
{
  tree clauses, cond;
  enum tree_code cond_code;

  cond_code = fd->cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (host_integerp (fd->step, 0))
    {
      HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  cond = build2 (cond_code, boolean_type_node, fd->v, fd->n2);

  clauses = OMP_FOR_CLAUSES (fd->for_stmt);
  lower_lastprivate_clauses (clauses, cond, body_p, ctx);
}

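/* For illustration only:  for a loop such as

     #pragma omp for lastprivate (x)
     for (i = 0; i < n; i++)
       ...

   the gating predicate built above is (i >= n), narrowed to (i == n)
   when the step is a literal +1 or -1, so the lastprivate copy-out
   runs only in the thread that executed the final iteration.  */
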

/* Lower code for an OpenMP loop directive.  */

static void
lower_omp_for (tree *stmt_p, omp_context *ctx)
{
  tree t, stmt, ilist, dlist, new_stmt, *body_p, *rhs_p;
  struct omp_for_data fd;

  stmt = *stmt_p;

  push_gimplify_context ();

  lower_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
  lower_omp (&OMP_FOR_BODY (stmt), ctx);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  if (TREE_CODE (OMP_FOR_BODY (stmt)) == BIND_EXPR)
    record_vars_into (BIND_EXPR_VARS (OMP_FOR_BODY (stmt)), ctx->cb.dst_fn);

  new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (new_stmt) = 1;
  body_p = &BIND_EXPR_BODY (new_stmt);

  /* The pre-body and input clauses go before the lowered OMP_FOR.  */
  ilist = NULL;
  dlist = NULL;
  append_to_statement_list (OMP_FOR_PRE_BODY (stmt), body_p);
  lower_rec_input_clauses (OMP_FOR_CLAUSES (stmt), body_p, &dlist, ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  rhs_p = &TREE_OPERAND (OMP_FOR_INIT (stmt), 1);
  if (!is_gimple_min_invariant (*rhs_p))
    *rhs_p = get_formal_tmp_var (*rhs_p, body_p);

  rhs_p = &TREE_OPERAND (OMP_FOR_COND (stmt), 1);
  if (!is_gimple_min_invariant (*rhs_p))
    *rhs_p = get_formal_tmp_var (*rhs_p, body_p);

  rhs_p = &TREE_OPERAND (TREE_OPERAND (OMP_FOR_INCR (stmt), 1), 1);
  if (!is_gimple_min_invariant (*rhs_p))
    *rhs_p = get_formal_tmp_var (*rhs_p, body_p);

  /* Once lowered, extract the bounds and clauses.  */
  extract_omp_for_data (stmt, &fd);

  append_to_statement_list (stmt, body_p);

  append_to_statement_list (OMP_FOR_BODY (stmt), body_p);

  t = make_node (OMP_CONTINUE);
  append_to_statement_list (t, body_p);

  /* After the loop, add exit clauses.  */
  lower_omp_for_lastprivate (&fd, &dlist, ctx);
  lower_reduction_clauses (OMP_FOR_CLAUSES (stmt), body_p, ctx);
  append_to_statement_list (dlist, body_p);

  maybe_catch_exception (body_p);

  /* Region exit marker goes at the end of the loop body.  */
  t = make_node (OMP_RETURN);
  OMP_RETURN_NOWAIT (t) = fd.have_nowait;
  append_to_statement_list (t, body_p);

  pop_gimplify_context (NULL_TREE);
  record_vars_into (ctx->block_vars, ctx->cb.dst_fn);

  OMP_FOR_BODY (stmt) = NULL_TREE;
  OMP_FOR_PRE_BODY (stmt) = NULL_TREE;
  *stmt_p = new_stmt;
}

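/* After this routine, the lowered form is, schematically (illustration
   only):

     <pre-body, firstprivate/private setup>
     OMP_FOR
     <loop body>
     OMP_CONTINUE
     <reduction and lastprivate finalization from dlist>
     OMP_RETURN [nowait]

   The OMP_CONTINUE and OMP_RETURN markers only delimit the region;
   pass_expand_omp later replaces them with the real iteration and
   scheduling code.  */
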

/* Lower the OpenMP parallel directive in *STMT_P.  CTX holds context
   information for the directive.  */

static void
lower_omp_parallel (tree *stmt_p, omp_context *ctx)
{
  tree clauses, par_bind, par_body, new_body, bind;
  tree olist, ilist, par_olist, par_ilist;
  tree stmt, child_fn, t;

  stmt = *stmt_p;

  clauses = OMP_PARALLEL_CLAUSES (stmt);
  par_bind = OMP_PARALLEL_BODY (stmt);
  par_body = BIND_EXPR_BODY (par_bind);
  child_fn = ctx->cb.dst_fn;

  push_gimplify_context ();

  par_olist = NULL_TREE;
  par_ilist = NULL_TREE;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
  lower_omp (&par_body, ctx);
  lower_reduction_clauses (clauses, &par_olist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  record_vars_into (BIND_EXPR_VARS (par_bind), child_fn);

  if (ctx->record_type)
    {
      ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_data_o");
      OMP_PARALLEL_DATA_ARG (stmt) = ctx->sender_decl;
    }

  olist = NULL_TREE;
  ilist = NULL_TREE;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  /* Once all the expansions are done, sequence all the different
     fragments inside OMP_PARALLEL_BODY.  */
  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  append_to_statement_list (ilist, &BIND_EXPR_BODY (bind));

  new_body = alloc_stmt_list ();

  if (ctx->record_type)
    {
      t = build_fold_addr_expr (ctx->sender_decl);
      t = build2 (MODIFY_EXPR, void_type_node, ctx->receiver_decl, t);
      append_to_statement_list (t, &new_body);
    }

  append_to_statement_list (par_ilist, &new_body);
  append_to_statement_list (par_body, &new_body);
  append_to_statement_list (par_olist, &new_body);
  maybe_catch_exception (&new_body);
  t = make_node (OMP_RETURN);
  append_to_statement_list (t, &new_body);
  OMP_PARALLEL_BODY (stmt) = new_body;

  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
  append_to_statement_list (olist, &BIND_EXPR_BODY (bind));

  *stmt_p = bind;

  pop_gimplify_context (NULL_TREE);
}

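/* For illustration only:  for

     #pragma omp parallel shared (a) firstprivate (b)

   the sender side above fills a local struct .omp_data_o (the
   sender_decl) with &a and the value of b, and its address is stored
   into ctx->receiver_decl, through which the lowered body reads the
   shared data.  pass_expand_omp later turns that body into the child
   function handed to the libgomp runtime.  */
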

/* Pass *TP back through the gimplifier within the context determined by WI.
   This handles replacement of DECL_VALUE_EXPR, as well as adjusting the
   flags on ADDR_EXPR.  */

static void
lower_regimplify (tree *tp, struct walk_stmt_info *wi)
{
  enum gimplify_status gs;
  tree pre = NULL;

  if (wi->is_lhs)
    gs = gimplify_expr (tp, &pre, NULL, is_gimple_lvalue, fb_lvalue);
  else if (wi->val_only)
    gs = gimplify_expr (tp, &pre, NULL, is_gimple_val, fb_rvalue);
  else
    gs = gimplify_expr (tp, &pre, NULL, is_gimple_formal_tmp_var, fb_rvalue);
  gcc_assert (gs == GS_ALL_DONE);

  if (pre)
    tsi_link_before (&wi->tsi, pre, TSI_SAME_STMT);
}


/* Callback for walk_stmts.  Lower the OpenMP directive pointed to by TP.  */

static tree
lower_omp_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  omp_context *ctx = wi->info;
  tree t = *tp;

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OpenMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (errorcount && OMP_DIRECTIVE_P (*tp))
    {
      *tp = build_empty_stmt ();
      return NULL_TREE;
    }

  *walk_subtrees = 0;
  switch (TREE_CODE (*tp))
    {
    case OMP_PARALLEL:
      ctx = maybe_lookup_ctx (t);
      lower_omp_parallel (tp, ctx);
      break;

    case OMP_FOR:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_for (tp, ctx);
      break;

    case OMP_SECTIONS:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_sections (tp, ctx);
      break;

    case OMP_SINGLE:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_single (tp, ctx);
      break;

    case OMP_MASTER:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_master (tp, ctx);
      break;

    case OMP_ORDERED:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_ordered (tp, ctx);
      break;

    case OMP_CRITICAL:
      ctx = maybe_lookup_ctx (t);
      gcc_assert (ctx);
      lower_omp_critical (tp, ctx);
      break;

    case VAR_DECL:
      if (ctx && DECL_HAS_VALUE_EXPR_P (t))
	lower_regimplify (tp, wi);
      break;

    case ADDR_EXPR:
      if (ctx)
	lower_regimplify (tp, wi);
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case VIEW_CONVERT_EXPR:
      if (ctx)
	lower_regimplify (tp, wi);
      break;

    case INDIRECT_REF:
      if (ctx)
	{
	  wi->is_lhs = false;
	  wi->val_only = true;
	  lower_regimplify (&TREE_OPERAND (t, 0), wi);
	}
      break;

    default:
      if (!TYPE_P (t) && !DECL_P (t))
	*walk_subtrees = 1;
      break;
    }

  return NULL_TREE;
}

static void
lower_omp (tree *stmt_p, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.callback = lower_omp_1;
  wi.info = ctx;
  wi.val_only = true;
  wi.want_locations = true;

  walk_stmts (&wi, stmt_p);
}
\f
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  scan_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
  gcc_assert (parallel_nesting_level == 0);

  if (all_contexts->root)
    lower_omp (&DECL_SAVED_TREE (current_function_decl), NULL);

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  return 0;
}
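
/* Note (summary, not authoritative):  as the pass descriptor below
   records, "omplower" requires plain gimple (PROP_gimple_any) and
   provides PROP_gimple_lomp; the OMP_RETURN and OMP_CONTINUE markers
   it leaves behind are consumed later by pass_expand_omp.  */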

static bool
gate_lower_omp (void)
{
  return flag_openmp != 0;
}

struct tree_opt_pass pass_lower_omp =
{
  "omplower",				/* name */
  gate_lower_omp,			/* gate */
  execute_lower_omp,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lomp,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
\f
/* The following is a utility to diagnose OpenMP structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (tree *stmt_p, tree branch_ctx, tree label_ctx)
{
  bool exit_p = true;

  if ((label_ctx ? TREE_VALUE (label_ctx) : NULL) == branch_ctx)
    return false;

  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from OpenMP structured block");
  else
    error ("invalid entry to OpenMP structured block");

  *stmt_p = build_empty_stmt ();
  return true;
}

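/* For illustration only:  the kind of code rejected here is

     #pragma omp parallel
     {
       if (cond)
	 goto out;		-- "invalid exit from OpenMP structured block"
     }
     out: ;

   since OpenMP forbids branching into or out of a structured block.
   The reverse direction triggers the "invalid entry" message.  */
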
/* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
   where in the tree each label is found.  */

static tree
diagnose_sb_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  tree context = (tree) wi->info;
  tree inner_context;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_SINGLE:
      walk_tree (&OMP_CLAUSES (t), diagnose_sb_1, wi, NULL);
      /* FALLTHRU */
    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      /* The minimal context here is just a tree of statements.  */
      inner_context = tree_cons (NULL, t, context);
      wi->info = inner_context;
      walk_stmts (wi, &OMP_BODY (t));
      wi->info = context;
      break;

    case OMP_FOR:
      walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_1, wi, NULL);
      inner_context = tree_cons (NULL, t, context);
      wi->info = inner_context;
      walk_tree (&OMP_FOR_INIT (t), diagnose_sb_1, wi, NULL);
      walk_tree (&OMP_FOR_COND (t), diagnose_sb_1, wi, NULL);
      walk_tree (&OMP_FOR_INCR (t), diagnose_sb_1, wi, NULL);
      walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
      walk_stmts (wi, &OMP_FOR_BODY (t));
      wi->info = context;
      break;

    case LABEL_EXPR:
      splay_tree_insert (all_labels, (splay_tree_key) LABEL_EXPR_LABEL (t),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = data;
  tree context = (tree) wi->info;
  splay_tree_node n;
  tree t = *tp;

  *walk_subtrees = 0;
  switch (TREE_CODE (t))
    {
    case OMP_PARALLEL:
    case OMP_SECTIONS:
    case OMP_SINGLE:
      walk_tree (&OMP_CLAUSES (t), diagnose_sb_2, wi, NULL);
      /* FALLTHRU */
    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      wi->info = t;
      walk_stmts (wi, &OMP_BODY (t));
      wi->info = context;
      break;

    case OMP_FOR:
      walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_2, wi, NULL);
      wi->info = t;
      walk_tree (&OMP_FOR_INIT (t), diagnose_sb_2, wi, NULL);
      walk_tree (&OMP_FOR_COND (t), diagnose_sb_2, wi, NULL);
      walk_tree (&OMP_FOR_INCR (t), diagnose_sb_2, wi, NULL);
      walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
      walk_stmts (wi, &OMP_FOR_BODY (t));
      wi->info = context;
      break;

    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (t);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (tp, context, n ? (tree) n->value : NULL_TREE);
      }
      break;

    case SWITCH_EXPR:
      {
	tree vec = SWITCH_LABELS (t);
	int i, len = TREE_VEC_LENGTH (vec);
	for (i = 0; i < len; ++i)
	  {
	    tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (diagnose_sb_0 (tp, context, (tree) n->value))
	      break;
	  }
      }
      break;

    case RETURN_EXPR:
      diagnose_sb_0 (tp, context, NULL_TREE);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

void
diagnose_omp_structured_block_errors (tree fndecl)
{
  tree save_current = current_function_decl;
  struct walk_stmt_info wi;

  current_function_decl = fndecl;

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  wi.callback = diagnose_sb_1;
  walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));

  memset (&wi, 0, sizeof (wi));
  wi.callback = diagnose_sb_2;
  wi.want_locations = true;
  wi.want_return_expr = true;
  walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));

  splay_tree_delete (all_labels);
  all_labels = NULL;

  current_function_decl = save_current;
}

#include "gt-omp-low.h"