gcc/fortran/trans-openmp.c
1 /* OpenMP directive translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
3 Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "ggc.h"
28 #include "toplev.h"
29 #include "real.h"
30 #include "gfortran.h"
31 #include "trans.h"
32 #include "trans-stmt.h"
33 #include "trans-types.h"
34 #include "trans-array.h"
35 #include "trans-const.h"
36 #include "arith.h"
37
38
39 /* True if OpenMP should privatize what this DECL points to rather
40 than the DECL itself. */
41
42 bool
43 gfc_omp_privatize_by_reference (const_tree decl)
44 {
45 tree type = TREE_TYPE (decl);
46
47 if (TREE_CODE (type) == REFERENCE_TYPE
48 && (!DECL_ARTIFICIAL (decl) || TREE_CODE (decl) == PARM_DECL))
49 return true;
50
51 if (TREE_CODE (type) == POINTER_TYPE)
52 {
 53           /* Array POINTER/ALLOCATABLE have aggregate types; all user variables
 54              that have POINTER_TYPE and don't have GFC_POINTER_TYPE_P set
 55              are supposed to be privatized by reference.  */
56 if (GFC_POINTER_TYPE_P (type))
57 return false;
58
59 if (!DECL_ARTIFICIAL (decl))
60 return true;
61
62 /* Some arrays are expanded as DECL_ARTIFICIAL pointers
63 by the frontend. */
64 if (DECL_LANG_SPECIFIC (decl)
65 && GFC_DECL_SAVED_DESCRIPTOR (decl))
66 return true;
67 }
68
69 return false;
70 }
71
 72 /* Return the predetermined OpenMP sharing attribute of DECL, if any.  */
73
74 enum omp_clause_default_kind
75 gfc_omp_predetermined_sharing (tree decl)
76 {
77 if (DECL_ARTIFICIAL (decl) && ! GFC_DECL_RESULT (decl))
78 return OMP_CLAUSE_DEFAULT_SHARED;
79
 80   /* Cray pointees shouldn't be listed in any clauses and should be
 81      gimplified to a dereference of the corresponding Cray pointer.
 82      Make them all private, so that they are emitted in the debug
 83      information.  */
84 if (GFC_DECL_CRAY_POINTEE (decl))
85 return OMP_CLAUSE_DEFAULT_PRIVATE;
86
87 /* Assumed-size arrays are predetermined to inherit sharing
88 attributes of the associated actual argument, which is shared
89 for all we care. */
90 if (TREE_CODE (decl) == PARM_DECL
91 && GFC_ARRAY_TYPE_P (TREE_TYPE (decl))
92 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_UNKNOWN
93 && GFC_TYPE_ARRAY_UBOUND (TREE_TYPE (decl),
94 GFC_TYPE_ARRAY_RANK (TREE_TYPE (decl)) - 1)
95 == NULL)
96 return OMP_CLAUSE_DEFAULT_SHARED;
97
98 /* COMMON and EQUIVALENCE decls are shared. They
99 are only referenced through DECL_VALUE_EXPR of the variables
100 contained in them. If those are privatized, they will not be
101 gimplified to the COMMON or EQUIVALENCE decls. */
102 if (GFC_DECL_COMMON_OR_EQUIV (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
103 return OMP_CLAUSE_DEFAULT_SHARED;
104
105 if (GFC_DECL_RESULT (decl) && ! DECL_HAS_VALUE_EXPR_P (decl))
106 return OMP_CLAUSE_DEFAULT_SHARED;
107
108 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
109 }
110
111
 112 /* Return true if DECL in a private clause needs
 113    OMP_CLAUSE_PRIVATE_OUTER_REF on the private clause.  */
114 bool
115 gfc_omp_private_outer_ref (tree decl)
116 {
117 tree type = TREE_TYPE (decl);
118
119 if (GFC_DESCRIPTOR_TYPE_P (type)
120 && GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
121 return true;
122
123 return false;
124 }
125
126 /* Return code to initialize DECL with its default constructor, or
127 NULL if there's nothing to do. */
128
129 tree
130 gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
131 {
132 tree type = TREE_TYPE (decl), rank, size, esize, ptr, cond, then_b, else_b;
133 stmtblock_t block, cond_block;
134
135 if (! GFC_DESCRIPTOR_TYPE_P (type)
136 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
137 return NULL;
138
139 gcc_assert (outer != NULL);
140 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_PRIVATE
141 || OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_LASTPRIVATE);
142
 143   /* Allocatable arrays in PRIVATE clauses need to be set to
 144      "not currently allocated" allocation status if the outer
 145      array is "not currently allocated"; otherwise they need to be allocated.  */
146 gfc_start_block (&block);
147
148 gfc_init_block (&cond_block);
149
150 gfc_add_modify (&cond_block, decl, outer);
151 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
152 size = gfc_conv_descriptor_ubound (decl, rank);
153 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
154 gfc_conv_descriptor_lbound (decl, rank));
155 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
156 gfc_index_one_node);
157 if (GFC_TYPE_ARRAY_RANK (type) > 1)
158 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
159 gfc_conv_descriptor_stride (decl, rank));
160 esize = fold_convert (gfc_array_index_type,
161 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
162 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
163 size = gfc_evaluate_now (fold_convert (size_type_node, size), &cond_block);
164 ptr = gfc_allocate_array_with_status (&cond_block,
165 build_int_cst (pvoid_type_node, 0),
166 size, NULL);
167 gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
168 then_b = gfc_finish_block (&cond_block);
169
170 gfc_init_block (&cond_block);
171 gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
172 else_b = gfc_finish_block (&cond_block);
173
174 cond = fold_build2 (NE_EXPR, boolean_type_node,
175 fold_convert (pvoid_type_node,
176 gfc_conv_descriptor_data_get (outer)),
177 null_pointer_node);
178 gfc_add_expr_to_block (&block, build3 (COND_EXPR, void_type_node,
179 cond, then_b, else_b));
180
181 return gfc_finish_block (&block);
182 }
183
184 /* Build and return code for a copy constructor from SRC to DEST. */
185
186 tree
187 gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
188 {
189 tree type = TREE_TYPE (dest), ptr, size, esize, rank, call;
190 stmtblock_t block;
191
192 if (! GFC_DESCRIPTOR_TYPE_P (type)
193 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
194 return build2_v (MODIFY_EXPR, dest, src);
195
196 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
197
198 /* Allocatable arrays in FIRSTPRIVATE clauses need to be allocated
199 and copied from SRC. */
200 gfc_start_block (&block);
201
202 gfc_add_modify (&block, dest, src);
203 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
204 size = gfc_conv_descriptor_ubound (dest, rank);
205 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
206 gfc_conv_descriptor_lbound (dest, rank));
207 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
208 gfc_index_one_node);
209 if (GFC_TYPE_ARRAY_RANK (type) > 1)
210 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
211 gfc_conv_descriptor_stride (dest, rank));
212 esize = fold_convert (gfc_array_index_type,
213 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
214 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
215 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
216 ptr = gfc_allocate_array_with_status (&block,
217 build_int_cst (pvoid_type_node, 0),
218 size, NULL);
219 gfc_conv_descriptor_data_set (&block, dest, ptr);
220 call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
221 fold_convert (pvoid_type_node,
222 gfc_conv_descriptor_data_get (src)),
223 size);
224 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
225
226 return gfc_finish_block (&block);
227 }
228
229 /* Similarly, except use an assignment operator instead. */
230
231 tree
232 gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
233 {
234 tree type = TREE_TYPE (dest), rank, size, esize, call;
235 stmtblock_t block;
236
237 if (! GFC_DESCRIPTOR_TYPE_P (type)
238 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
239 return build2_v (MODIFY_EXPR, dest, src);
240
241 /* Handle copying allocatable arrays. */
242 gfc_start_block (&block);
243
244 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
245 size = gfc_conv_descriptor_ubound (dest, rank);
246 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
247 gfc_conv_descriptor_lbound (dest, rank));
248 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
249 gfc_index_one_node);
250 if (GFC_TYPE_ARRAY_RANK (type) > 1)
251 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
252 gfc_conv_descriptor_stride (dest, rank));
253 esize = fold_convert (gfc_array_index_type,
254 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
255 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
256 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
257 call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
258 fold_convert (pvoid_type_node,
259 gfc_conv_descriptor_data_get (dest)),
260 fold_convert (pvoid_type_node,
261 gfc_conv_descriptor_data_get (src)),
262 size);
263 gfc_add_expr_to_block (&block, fold_convert (void_type_node, call));
264
265 return gfc_finish_block (&block);
266 }
267
 268 /* Build and return code for destructing DECL.  Return NULL if there is
 269    nothing to be done.  */
270
271 tree
272 gfc_omp_clause_dtor (tree clause ATTRIBUTE_UNUSED, tree decl)
273 {
274 tree type = TREE_TYPE (decl);
275
276 if (! GFC_DESCRIPTOR_TYPE_P (type)
277 || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
278 return NULL;
279
280 /* Allocatable arrays in FIRSTPRIVATE/LASTPRIVATE etc. clauses need
281 to be deallocated if they were allocated. */
282 return gfc_trans_dealloc_allocated (decl);
283 }
284
285
286 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
 287    disregarded in an OpenMP construct, because it is going to be
288 remapped during OpenMP lowering. SHARED is true if DECL
289 is going to be shared, false if it is going to be privatized. */
290
291 bool
292 gfc_omp_disregard_value_expr (tree decl, bool shared)
293 {
294 if (GFC_DECL_COMMON_OR_EQUIV (decl)
295 && DECL_HAS_VALUE_EXPR_P (decl))
296 {
297 tree value = DECL_VALUE_EXPR (decl);
298
299 if (TREE_CODE (value) == COMPONENT_REF
300 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
301 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
302 {
303 /* If variable in COMMON or EQUIVALENCE is privatized, return
304 true, as just that variable is supposed to be privatized,
305 not the whole COMMON or whole EQUIVALENCE.
306 For shared variables in COMMON or EQUIVALENCE, let them be
307 gimplified to DECL_VALUE_EXPR, so that for multiple shared vars
308 from the same COMMON or EQUIVALENCE just one sharing of the
309 whole COMMON or EQUIVALENCE is enough. */
310 return ! shared;
311 }
312 }
313
314 if (GFC_DECL_RESULT (decl) && DECL_HAS_VALUE_EXPR_P (decl))
315 return ! shared;
316
317 return false;
318 }
319
 320 /* Return true if DECL, which is shared iff SHARED is true, should
 321    be put into OMP_CLAUSE_PRIVATE with the OMP_CLAUSE_PRIVATE_DEBUG
 322    flag set.  */
323
324 bool
325 gfc_omp_private_debug_clause (tree decl, bool shared)
326 {
327 if (GFC_DECL_CRAY_POINTEE (decl))
328 return true;
329
330 if (GFC_DECL_COMMON_OR_EQUIV (decl)
331 && DECL_HAS_VALUE_EXPR_P (decl))
332 {
333 tree value = DECL_VALUE_EXPR (decl);
334
335 if (TREE_CODE (value) == COMPONENT_REF
336 && TREE_CODE (TREE_OPERAND (value, 0)) == VAR_DECL
337 && GFC_DECL_COMMON_OR_EQUIV (TREE_OPERAND (value, 0)))
338 return shared;
339 }
340
341 return false;
342 }
343
344 /* Register language specific type size variables as potentially OpenMP
345 firstprivate variables. */
346
347 void
348 gfc_omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
349 {
350 if (GFC_ARRAY_TYPE_P (type) || GFC_DESCRIPTOR_TYPE_P (type))
351 {
352 int r;
353
354 gcc_assert (TYPE_LANG_SPECIFIC (type) != NULL);
355 for (r = 0; r < GFC_TYPE_ARRAY_RANK (type); r++)
356 {
357 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_LBOUND (type, r));
358 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_UBOUND (type, r));
359 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_STRIDE (type, r));
360 }
361 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_SIZE (type));
362 omp_firstprivatize_variable (ctx, GFC_TYPE_ARRAY_OFFSET (type));
363 }
364 }
365
366
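/* Chain the OpenMP clause NODE in front of TAIL and return NODE.  */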
367 static inline tree
368 gfc_trans_add_clause (tree node, tree tail)
369 {
370 OMP_CLAUSE_CHAIN (node) = tail;
371 return node;
372 }
373
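/* Return the backend decl to use for symbol SYM in OpenMP clauses,
   substituting the fake result decl for function results and alternate
   entry points.  */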
374 static tree
375 gfc_trans_omp_variable (gfc_symbol *sym)
376 {
377 tree t = gfc_get_symbol_decl (sym);
378 tree parent_decl;
379 int parent_flag;
380 bool return_value;
381 bool alternate_entry;
382 bool entry_master;
383
384 return_value = sym->attr.function && sym->result == sym;
385 alternate_entry = sym->attr.function && sym->attr.entry
386 && sym->result == sym;
387 entry_master = sym->attr.result
388 && sym->ns->proc_name->attr.entry_master
389 && !gfc_return_by_reference (sym->ns->proc_name);
390 parent_decl = DECL_CONTEXT (current_function_decl);
391
392 if ((t == parent_decl && return_value)
393 || (sym->ns && sym->ns->proc_name
394 && sym->ns->proc_name->backend_decl == parent_decl
395 && (alternate_entry || entry_master)))
396 parent_flag = 1;
397 else
398 parent_flag = 0;
399
400 /* Special case for assigning the return value of a function.
401 Self recursive functions must have an explicit return value. */
402 if (return_value && (t == current_function_decl || parent_flag))
403 t = gfc_get_fake_result_decl (sym, parent_flag);
404
405 /* Similarly for alternate entry points. */
406 else if (alternate_entry
407 && (sym->ns->proc_name->backend_decl == current_function_decl
408 || parent_flag))
409 {
410 gfc_entry_list *el = NULL;
411
412 for (el = sym->ns->entries; el; el = el->next)
413 if (sym == el->sym)
414 {
415 t = gfc_get_fake_result_decl (sym, parent_flag);
416 break;
417 }
418 }
419
420 else if (entry_master
421 && (sym->ns->proc_name->backend_decl == current_function_decl
422 || parent_flag))
423 t = gfc_get_fake_result_decl (sym, parent_flag);
424
425 return t;
426 }
427
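/* Build an OpenMP clause of kind CODE for each referenced symbol in
   NAMELIST and add it to the clause chain LIST.  */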
428 static tree
429 gfc_trans_omp_variable_list (enum omp_clause_code code, gfc_namelist *namelist,
430 tree list)
431 {
432 for (; namelist != NULL; namelist = namelist->next)
433 if (namelist->sym->attr.referenced)
434 {
435 tree t = gfc_trans_omp_variable (namelist->sym);
436 if (t != error_mark_node)
437 {
438 tree node = build_omp_clause (code);
439 OMP_CLAUSE_DECL (node) = t;
440 list = gfc_trans_add_clause (node, list);
441 }
442 }
443 return list;
444 }
445
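/* Fill in the OMP_CLAUSE_REDUCTION_INIT, OMP_CLAUSE_REDUCTION_MERGE and
   OMP_CLAUSE_REDUCTION_PLACEHOLDER parts of the array reduction clause C
   for symbol SYM.  WHERE is the location of the clause.  */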
446 static void
447 gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
448 {
449 gfc_symtree *root1 = NULL, *root2 = NULL, *root3 = NULL, *root4 = NULL;
450 gfc_symtree *symtree1, *symtree2, *symtree3, *symtree4 = NULL;
451 gfc_symbol init_val_sym, outer_sym, intrinsic_sym;
452 gfc_expr *e1, *e2, *e3, *e4;
453 gfc_ref *ref;
454 tree decl, backend_decl, stmt;
455 locus old_loc = gfc_current_locus;
456 const char *iname;
457 try t;
458
459 decl = OMP_CLAUSE_DECL (c);
460 gfc_current_locus = where;
461
462 /* Create a fake symbol for init value. */
463 memset (&init_val_sym, 0, sizeof (init_val_sym));
464 init_val_sym.ns = sym->ns;
465 init_val_sym.name = sym->name;
466 init_val_sym.ts = sym->ts;
467 init_val_sym.attr.referenced = 1;
468 init_val_sym.declared_at = where;
469 init_val_sym.attr.flavor = FL_VARIABLE;
470 backend_decl = omp_reduction_init (c, gfc_sym_type (&init_val_sym));
471 init_val_sym.backend_decl = backend_decl;
472
473 /* Create a fake symbol for the outer array reference. */
474 outer_sym = *sym;
475 outer_sym.as = gfc_copy_array_spec (sym->as);
476 outer_sym.attr.dummy = 0;
477 outer_sym.attr.result = 0;
478 outer_sym.attr.flavor = FL_VARIABLE;
479 outer_sym.backend_decl = create_tmp_var_raw (TREE_TYPE (decl), NULL);
480
481 /* Create fake symtrees for it. */
482 symtree1 = gfc_new_symtree (&root1, sym->name);
483 symtree1->n.sym = sym;
484 gcc_assert (symtree1 == root1);
485
486 symtree2 = gfc_new_symtree (&root2, sym->name);
487 symtree2->n.sym = &init_val_sym;
488 gcc_assert (symtree2 == root2);
489
490 symtree3 = gfc_new_symtree (&root3, sym->name);
491 symtree3->n.sym = &outer_sym;
492 gcc_assert (symtree3 == root3);
493
494 /* Create expressions. */
495 e1 = gfc_get_expr ();
496 e1->expr_type = EXPR_VARIABLE;
497 e1->where = where;
498 e1->symtree = symtree1;
499 e1->ts = sym->ts;
500 e1->ref = ref = gfc_get_ref ();
501 ref->u.ar.where = where;
502 ref->u.ar.as = sym->as;
503 ref->u.ar.type = AR_FULL;
504 ref->u.ar.dimen = 0;
505 t = gfc_resolve_expr (e1);
506 gcc_assert (t == SUCCESS);
507
508 e2 = gfc_get_expr ();
509 e2->expr_type = EXPR_VARIABLE;
510 e2->where = where;
511 e2->symtree = symtree2;
512 e2->ts = sym->ts;
513 t = gfc_resolve_expr (e2);
514 gcc_assert (t == SUCCESS);
515
516 e3 = gfc_copy_expr (e1);
517 e3->symtree = symtree3;
518 t = gfc_resolve_expr (e3);
519 gcc_assert (t == SUCCESS);
520
521 iname = NULL;
522 switch (OMP_CLAUSE_REDUCTION_CODE (c))
523 {
524 case PLUS_EXPR:
525 case MINUS_EXPR:
526 e4 = gfc_add (e3, e1);
527 break;
528 case MULT_EXPR:
529 e4 = gfc_multiply (e3, e1);
530 break;
531 case TRUTH_ANDIF_EXPR:
532 e4 = gfc_and (e3, e1);
533 break;
534 case TRUTH_ORIF_EXPR:
535 e4 = gfc_or (e3, e1);
536 break;
537 case EQ_EXPR:
538 e4 = gfc_eqv (e3, e1);
539 break;
540 case NE_EXPR:
541 e4 = gfc_neqv (e3, e1);
542 break;
543 case MIN_EXPR:
544 iname = "min";
545 break;
546 case MAX_EXPR:
547 iname = "max";
548 break;
549 case BIT_AND_EXPR:
550 iname = "iand";
551 break;
552 case BIT_IOR_EXPR:
553 iname = "ior";
554 break;
555 case BIT_XOR_EXPR:
556 iname = "ieor";
557 break;
558 default:
559 gcc_unreachable ();
560 }
561 if (iname != NULL)
562 {
563 memset (&intrinsic_sym, 0, sizeof (intrinsic_sym));
564 intrinsic_sym.ns = sym->ns;
565 intrinsic_sym.name = iname;
566 intrinsic_sym.ts = sym->ts;
567 intrinsic_sym.attr.referenced = 1;
568 intrinsic_sym.attr.intrinsic = 1;
569 intrinsic_sym.attr.function = 1;
570 intrinsic_sym.result = &intrinsic_sym;
571 intrinsic_sym.declared_at = where;
572
573 symtree4 = gfc_new_symtree (&root4, iname);
574 symtree4->n.sym = &intrinsic_sym;
575 gcc_assert (symtree4 == root4);
576
577 e4 = gfc_get_expr ();
578 e4->expr_type = EXPR_FUNCTION;
579 e4->where = where;
580 e4->symtree = symtree4;
581 e4->value.function.isym = gfc_find_function (iname);
582 e4->value.function.actual = gfc_get_actual_arglist ();
583 e4->value.function.actual->expr = e3;
584 e4->value.function.actual->next = gfc_get_actual_arglist ();
585 e4->value.function.actual->next->expr = e1;
586 }
587 /* e1 and e3 have been stored as arguments of e4, avoid sharing. */
588 e1 = gfc_copy_expr (e1);
589 e3 = gfc_copy_expr (e3);
590 t = gfc_resolve_expr (e4);
591 gcc_assert (t == SUCCESS);
592
593 /* Create the init statement list. */
594 pushlevel (0);
595 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
596 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
597 {
598 /* If decl is an allocatable array, it needs to be allocated
599 with the same bounds as the outer var. */
600 tree type = TREE_TYPE (decl), rank, size, esize, ptr;
601 stmtblock_t block;
602
603 gfc_start_block (&block);
604
605 gfc_add_modify (&block, decl, outer_sym.backend_decl);
606 rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
607 size = gfc_conv_descriptor_ubound (decl, rank);
608 size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
609 gfc_conv_descriptor_lbound (decl, rank));
610 size = fold_build2 (PLUS_EXPR, gfc_array_index_type, size,
611 gfc_index_one_node);
612 if (GFC_TYPE_ARRAY_RANK (type) > 1)
613 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size,
614 gfc_conv_descriptor_stride (decl, rank));
615 esize = fold_convert (gfc_array_index_type,
616 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
617 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, esize);
618 size = gfc_evaluate_now (fold_convert (size_type_node, size), &block);
619 ptr = gfc_allocate_array_with_status (&block,
620 build_int_cst (pvoid_type_node, 0),
621 size, NULL);
622 gfc_conv_descriptor_data_set (&block, decl, ptr);
623 gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false));
624 stmt = gfc_finish_block (&block);
625 }
626 else
627 stmt = gfc_trans_assignment (e1, e2, false);
628 if (TREE_CODE (stmt) != BIND_EXPR)
629 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
630 else
631 poplevel (0, 0, 0);
632 OMP_CLAUSE_REDUCTION_INIT (c) = stmt;
633
634 /* Create the merge statement list. */
635 pushlevel (0);
636 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (decl))
637 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (decl)) == GFC_ARRAY_ALLOCATABLE)
638 {
639 /* If decl is an allocatable array, it needs to be deallocated
640 afterwards. */
641 stmtblock_t block;
642
643 gfc_start_block (&block);
644 gfc_add_expr_to_block (&block, gfc_trans_assignment (e3, e4, false));
645 gfc_add_expr_to_block (&block, gfc_trans_dealloc_allocated (decl));
646 stmt = gfc_finish_block (&block);
647 }
648 else
649 stmt = gfc_trans_assignment (e3, e4, false);
650 if (TREE_CODE (stmt) != BIND_EXPR)
651 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
652 else
653 poplevel (0, 0, 0);
654 OMP_CLAUSE_REDUCTION_MERGE (c) = stmt;
655
656 /* And stick the placeholder VAR_DECL into the clause as well. */
657 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = outer_sym.backend_decl;
658
659 gfc_current_locus = old_loc;
660
661 gfc_free_expr (e1);
662 gfc_free_expr (e2);
663 gfc_free_expr (e3);
664 gfc_free_expr (e4);
665 gfc_free (symtree1);
666 gfc_free (symtree2);
667 gfc_free (symtree3);
668 if (symtree4)
669 gfc_free (symtree4);
670 gfc_free_array_spec (outer_sym.as);
671 }
672
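/* Build an OMP_CLAUSE_REDUCTION clause with REDUCTION_CODE for each
   referenced symbol in NAMELIST and add it to the clause chain LIST.  */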
673 static tree
674 gfc_trans_omp_reduction_list (gfc_namelist *namelist, tree list,
675 enum tree_code reduction_code, locus where)
676 {
677 for (; namelist != NULL; namelist = namelist->next)
678 if (namelist->sym->attr.referenced)
679 {
680 tree t = gfc_trans_omp_variable (namelist->sym);
681 if (t != error_mark_node)
682 {
683 tree node = build_omp_clause (OMP_CLAUSE_REDUCTION);
684 OMP_CLAUSE_DECL (node) = t;
685 OMP_CLAUSE_REDUCTION_CODE (node) = reduction_code;
686 if (namelist->sym->attr.dimension)
687 gfc_trans_omp_array_reduction (node, namelist->sym, where);
688 list = gfc_trans_add_clause (node, list);
689 }
690 }
691 return list;
692 }
693
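/* Translate the frontend clause representation CLAUSES into a chain of
   OMP_CLAUSE_* trees; any code needed to evaluate clause expressions is
   added to BLOCK.  WHERE is the location of the directive.  */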
694 static tree
695 gfc_trans_omp_clauses (stmtblock_t *block, gfc_omp_clauses *clauses,
696 locus where)
697 {
698 tree omp_clauses = NULL_TREE, chunk_size, c, old_clauses;
699 int list;
700 enum omp_clause_code clause_code;
701 gfc_se se;
702
703 if (clauses == NULL)
704 return NULL_TREE;
705
706 for (list = 0; list < OMP_LIST_NUM; list++)
707 {
708 gfc_namelist *n = clauses->lists[list];
709
710 if (n == NULL)
711 continue;
712 if (list >= OMP_LIST_REDUCTION_FIRST
713 && list <= OMP_LIST_REDUCTION_LAST)
714 {
715 enum tree_code reduction_code;
716 switch (list)
717 {
718 case OMP_LIST_PLUS:
719 reduction_code = PLUS_EXPR;
720 break;
721 case OMP_LIST_MULT:
722 reduction_code = MULT_EXPR;
723 break;
724 case OMP_LIST_SUB:
725 reduction_code = MINUS_EXPR;
726 break;
727 case OMP_LIST_AND:
728 reduction_code = TRUTH_ANDIF_EXPR;
729 break;
730 case OMP_LIST_OR:
731 reduction_code = TRUTH_ORIF_EXPR;
732 break;
733 case OMP_LIST_EQV:
734 reduction_code = EQ_EXPR;
735 break;
736 case OMP_LIST_NEQV:
737 reduction_code = NE_EXPR;
738 break;
739 case OMP_LIST_MAX:
740 reduction_code = MAX_EXPR;
741 break;
742 case OMP_LIST_MIN:
743 reduction_code = MIN_EXPR;
744 break;
745 case OMP_LIST_IAND:
746 reduction_code = BIT_AND_EXPR;
747 break;
748 case OMP_LIST_IOR:
749 reduction_code = BIT_IOR_EXPR;
750 break;
751 case OMP_LIST_IEOR:
752 reduction_code = BIT_XOR_EXPR;
753 break;
754 default:
755 gcc_unreachable ();
756 }
757 old_clauses = omp_clauses;
758 omp_clauses
759 = gfc_trans_omp_reduction_list (n, omp_clauses, reduction_code,
760 where);
761 continue;
762 }
763 switch (list)
764 {
765 case OMP_LIST_PRIVATE:
766 clause_code = OMP_CLAUSE_PRIVATE;
767 goto add_clause;
768 case OMP_LIST_SHARED:
769 clause_code = OMP_CLAUSE_SHARED;
770 goto add_clause;
771 case OMP_LIST_FIRSTPRIVATE:
772 clause_code = OMP_CLAUSE_FIRSTPRIVATE;
773 goto add_clause;
774 case OMP_LIST_LASTPRIVATE:
775 clause_code = OMP_CLAUSE_LASTPRIVATE;
776 goto add_clause;
777 case OMP_LIST_COPYIN:
778 clause_code = OMP_CLAUSE_COPYIN;
779 goto add_clause;
780 case OMP_LIST_COPYPRIVATE:
781 clause_code = OMP_CLAUSE_COPYPRIVATE;
782 /* FALLTHROUGH */
783 add_clause:
784 omp_clauses
785 = gfc_trans_omp_variable_list (clause_code, n, omp_clauses);
786 break;
787 default:
788 break;
789 }
790 }
791
792 if (clauses->if_expr)
793 {
794 tree if_var;
795
796 gfc_init_se (&se, NULL);
797 gfc_conv_expr (&se, clauses->if_expr);
798 gfc_add_block_to_block (block, &se.pre);
799 if_var = gfc_evaluate_now (se.expr, block);
800 gfc_add_block_to_block (block, &se.post);
801
802 c = build_omp_clause (OMP_CLAUSE_IF);
803 OMP_CLAUSE_IF_EXPR (c) = if_var;
804 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
805 }
806
807 if (clauses->num_threads)
808 {
809 tree num_threads;
810
811 gfc_init_se (&se, NULL);
812 gfc_conv_expr (&se, clauses->num_threads);
813 gfc_add_block_to_block (block, &se.pre);
814 num_threads = gfc_evaluate_now (se.expr, block);
815 gfc_add_block_to_block (block, &se.post);
816
817 c = build_omp_clause (OMP_CLAUSE_NUM_THREADS);
818 OMP_CLAUSE_NUM_THREADS_EXPR (c) = num_threads;
819 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
820 }
821
822 chunk_size = NULL_TREE;
823 if (clauses->chunk_size)
824 {
825 gfc_init_se (&se, NULL);
826 gfc_conv_expr (&se, clauses->chunk_size);
827 gfc_add_block_to_block (block, &se.pre);
828 chunk_size = gfc_evaluate_now (se.expr, block);
829 gfc_add_block_to_block (block, &se.post);
830 }
831
832 if (clauses->sched_kind != OMP_SCHED_NONE)
833 {
834 c = build_omp_clause (OMP_CLAUSE_SCHEDULE);
835 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = chunk_size;
836 switch (clauses->sched_kind)
837 {
838 case OMP_SCHED_STATIC:
839 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_STATIC;
840 break;
841 case OMP_SCHED_DYNAMIC:
842 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_DYNAMIC;
843 break;
844 case OMP_SCHED_GUIDED:
845 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_GUIDED;
846 break;
847 case OMP_SCHED_RUNTIME:
848 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_RUNTIME;
849 break;
850 case OMP_SCHED_AUTO:
851 OMP_CLAUSE_SCHEDULE_KIND (c) = OMP_CLAUSE_SCHEDULE_AUTO;
852 break;
853 default:
854 gcc_unreachable ();
855 }
856 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
857 }
858
859 if (clauses->default_sharing != OMP_DEFAULT_UNKNOWN)
860 {
861 c = build_omp_clause (OMP_CLAUSE_DEFAULT);
862 switch (clauses->default_sharing)
863 {
864 case OMP_DEFAULT_NONE:
865 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_NONE;
866 break;
867 case OMP_DEFAULT_SHARED:
868 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_SHARED;
869 break;
870 case OMP_DEFAULT_PRIVATE:
871 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_PRIVATE;
872 break;
873 case OMP_DEFAULT_FIRSTPRIVATE:
874 OMP_CLAUSE_DEFAULT_KIND (c) = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
875 break;
876 default:
877 gcc_unreachable ();
878 }
879 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
880 }
881
882 if (clauses->nowait)
883 {
884 c = build_omp_clause (OMP_CLAUSE_NOWAIT);
885 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
886 }
887
888 if (clauses->ordered)
889 {
890 c = build_omp_clause (OMP_CLAUSE_ORDERED);
891 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
892 }
893
894 if (clauses->untied)
895 {
896 c = build_omp_clause (OMP_CLAUSE_UNTIED);
897 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
898 }
899
900 if (clauses->collapse)
901 {
902 c = build_omp_clause (OMP_CLAUSE_COLLAPSE);
903 OMP_CLAUSE_COLLAPSE_EXPR (c) = build_int_cst (NULL, clauses->collapse);
904 omp_clauses = gfc_trans_add_clause (c, omp_clauses);
905 }
906
907 return omp_clauses;
908 }
909
910 /* Like gfc_trans_code, but force creation of a BIND_EXPR around it. */
911
912 static tree
913 gfc_trans_omp_code (gfc_code *code, bool force_empty)
914 {
915 tree stmt;
916
917 pushlevel (0);
918 stmt = gfc_trans_code (code);
919 if (TREE_CODE (stmt) != BIND_EXPR)
920 {
921 if (!IS_EMPTY_STMT (stmt) || force_empty)
922 {
923 tree block = poplevel (1, 0, 0);
924 stmt = build3_v (BIND_EXPR, NULL, stmt, block);
925 }
926 else
927 poplevel (0, 0, 0);
928 }
929 else
930 poplevel (0, 0, 0);
931 return stmt;
932 }
933
934
935 static tree gfc_trans_omp_sections (gfc_code *, gfc_omp_clauses *);
936 static tree gfc_trans_omp_workshare (gfc_code *, gfc_omp_clauses *);
937
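/* Translate an !$OMP ATOMIC construct, which contains a single assignment
   statement updating a scalar variable.  */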
938 static tree
939 gfc_trans_omp_atomic (gfc_code *code)
940 {
941 gfc_se lse;
942 gfc_se rse;
943 gfc_expr *expr2, *e;
944 gfc_symbol *var;
945 stmtblock_t block;
946 tree lhsaddr, type, rhs, x;
947 enum tree_code op = ERROR_MARK;
948 bool var_on_left = false;
949
950 code = code->block->next;
951 gcc_assert (code->op == EXEC_ASSIGN);
952 gcc_assert (code->next == NULL);
953 var = code->expr->symtree->n.sym;
954
955 gfc_init_se (&lse, NULL);
956 gfc_init_se (&rse, NULL);
957 gfc_start_block (&block);
958
959 gfc_conv_expr (&lse, code->expr);
960 gfc_add_block_to_block (&block, &lse.pre);
961 type = TREE_TYPE (lse.expr);
962 lhsaddr = gfc_build_addr_expr (NULL, lse.expr);
963
964 expr2 = code->expr2;
965 if (expr2->expr_type == EXPR_FUNCTION
966 && expr2->value.function.isym->id == GFC_ISYM_CONVERSION)
967 expr2 = expr2->value.function.actual->expr;
968
969 if (expr2->expr_type == EXPR_OP)
970 {
971 gfc_expr *e;
972 switch (expr2->value.op.op)
973 {
974 case INTRINSIC_PLUS:
975 op = PLUS_EXPR;
976 break;
977 case INTRINSIC_TIMES:
978 op = MULT_EXPR;
979 break;
980 case INTRINSIC_MINUS:
981 op = MINUS_EXPR;
982 break;
983 case INTRINSIC_DIVIDE:
984 if (expr2->ts.type == BT_INTEGER)
985 op = TRUNC_DIV_EXPR;
986 else
987 op = RDIV_EXPR;
988 break;
989 case INTRINSIC_AND:
990 op = TRUTH_ANDIF_EXPR;
991 break;
992 case INTRINSIC_OR:
993 op = TRUTH_ORIF_EXPR;
994 break;
995 case INTRINSIC_EQV:
996 op = EQ_EXPR;
997 break;
998 case INTRINSIC_NEQV:
999 op = NE_EXPR;
1000 break;
1001 default:
1002 gcc_unreachable ();
1003 }
1004 e = expr2->value.op.op1;
1005 if (e->expr_type == EXPR_FUNCTION
1006 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1007 e = e->value.function.actual->expr;
1008 if (e->expr_type == EXPR_VARIABLE
1009 && e->symtree != NULL
1010 && e->symtree->n.sym == var)
1011 {
1012 expr2 = expr2->value.op.op2;
1013 var_on_left = true;
1014 }
1015 else
1016 {
1017 e = expr2->value.op.op2;
1018 if (e->expr_type == EXPR_FUNCTION
1019 && e->value.function.isym->id == GFC_ISYM_CONVERSION)
1020 e = e->value.function.actual->expr;
1021 gcc_assert (e->expr_type == EXPR_VARIABLE
1022 && e->symtree != NULL
1023 && e->symtree->n.sym == var);
1024 expr2 = expr2->value.op.op1;
1025 var_on_left = false;
1026 }
1027 gfc_conv_expr (&rse, expr2);
1028 gfc_add_block_to_block (&block, &rse.pre);
1029 }
1030 else
1031 {
1032 gcc_assert (expr2->expr_type == EXPR_FUNCTION);
1033 switch (expr2->value.function.isym->id)
1034 {
1035 case GFC_ISYM_MIN:
1036 op = MIN_EXPR;
1037 break;
1038 case GFC_ISYM_MAX:
1039 op = MAX_EXPR;
1040 break;
1041 case GFC_ISYM_IAND:
1042 op = BIT_AND_EXPR;
1043 break;
1044 case GFC_ISYM_IOR:
1045 op = BIT_IOR_EXPR;
1046 break;
1047 case GFC_ISYM_IEOR:
1048 op = BIT_XOR_EXPR;
1049 break;
1050 default:
1051 gcc_unreachable ();
1052 }
1053 e = expr2->value.function.actual->expr;
1054 gcc_assert (e->expr_type == EXPR_VARIABLE
1055 && e->symtree != NULL
1056 && e->symtree->n.sym == var);
1057
1058 gfc_conv_expr (&rse, expr2->value.function.actual->next->expr);
1059 gfc_add_block_to_block (&block, &rse.pre);
1060 if (expr2->value.function.actual->next->next != NULL)
1061 {
1062 tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
1063 gfc_actual_arglist *arg;
1064
1065 gfc_add_modify (&block, accum, rse.expr);
1066 for (arg = expr2->value.function.actual->next->next; arg;
1067 arg = arg->next)
1068 {
1069 gfc_init_block (&rse.pre);
1070 gfc_conv_expr (&rse, arg->expr);
1071 gfc_add_block_to_block (&block, &rse.pre);
1072 x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
1073 gfc_add_modify (&block, accum, x);
1074 }
1075
1076 rse.expr = accum;
1077 }
1078
1079 expr2 = expr2->value.function.actual->next->expr;
1080 }
1081
1082 lhsaddr = save_expr (lhsaddr);
1083 rhs = gfc_evaluate_now (rse.expr, &block);
1084 x = convert (TREE_TYPE (rhs), build_fold_indirect_ref (lhsaddr));
1085
1086 if (var_on_left)
1087 x = fold_build2 (op, TREE_TYPE (rhs), x, rhs);
1088 else
1089 x = fold_build2 (op, TREE_TYPE (rhs), rhs, x);
1090
1091 if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE
1092 && TREE_CODE (type) != COMPLEX_TYPE)
1093 x = fold_build1 (REALPART_EXPR, TREE_TYPE (TREE_TYPE (rhs)), x);
1094
1095 x = build2_v (OMP_ATOMIC, lhsaddr, convert (type, x));
1096 gfc_add_expr_to_block (&block, x);
1097
1098 gfc_add_block_to_block (&block, &lse.pre);
1099 gfc_add_block_to_block (&block, &rse.pre);
1100
1101 return gfc_finish_block (&block);
1102 }
1103
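/* Translate an !$OMP BARRIER directive into a call to the GOMP barrier
   built-in.  */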
1104 static tree
1105 gfc_trans_omp_barrier (void)
1106 {
1107 tree decl = built_in_decls [BUILT_IN_GOMP_BARRIER];
1108 return build_call_expr (decl, 0);
1109 }
1110
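/* Translate an !$OMP CRITICAL construct, keeping its optional name.  */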
1111 static tree
1112 gfc_trans_omp_critical (gfc_code *code)
1113 {
1114 tree name = NULL_TREE, stmt;
1115 if (code->ext.omp_name != NULL)
1116 name = get_identifier (code->ext.omp_name);
1117 stmt = gfc_trans_code (code->block->next);
1118 return build2 (OMP_CRITICAL, void_type_node, stmt, name);
1119 }
1120
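/* Translate an !$OMP DO loop, possibly collapsing several nested DO loops.
   DO_CLAUSES are the clauses to put on the OMP_FOR; PAR_CLAUSES, if non-NULL,
   are the clauses of an enclosing combined parallel construct.  */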
1121 static tree
1122 gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
1123 gfc_omp_clauses *do_clauses, tree par_clauses)
1124 {
1125 gfc_se se;
1126 tree dovar, stmt, from, to, step, type, init, cond, incr;
1127 tree count = NULL_TREE, cycle_label, tmp, omp_clauses;
1128 stmtblock_t block;
1129 stmtblock_t body;
1130 gfc_omp_clauses *clauses = code->ext.omp_clauses;
1131 gfc_code *outermost;
1132 int i, collapse = clauses->collapse;
1133 tree dovar_init = NULL_TREE;
1134
1135 if (collapse <= 0)
1136 collapse = 1;
1137
1138 outermost = code = code->block->next;
1139 gcc_assert (code->op == EXEC_DO);
1140
1141 init = make_tree_vec (collapse);
1142 cond = make_tree_vec (collapse);
1143 incr = make_tree_vec (collapse);
1144
1145 if (pblock == NULL)
1146 {
1147 gfc_start_block (&block);
1148 pblock = &block;
1149 }
1150
1151 omp_clauses = gfc_trans_omp_clauses (pblock, do_clauses, code->loc);
1152
1153 for (i = 0; i < collapse; i++)
1154 {
1155 int simple = 0;
1156 int dovar_found = 0;
1157
1158 if (clauses)
1159 {
1160 gfc_namelist *n;
1161 for (n = clauses->lists[OMP_LIST_LASTPRIVATE]; n != NULL;
1162 n = n->next)
1163 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1164 break;
1165 if (n != NULL)
1166 dovar_found = 1;
1167 else if (n == NULL)
1168 for (n = clauses->lists[OMP_LIST_PRIVATE]; n != NULL; n = n->next)
1169 if (code->ext.iterator->var->symtree->n.sym == n->sym)
1170 break;
1171 if (n != NULL)
1172 dovar_found++;
1173 }
1174
1175 /* Evaluate all the expressions in the iterator. */
1176 gfc_init_se (&se, NULL);
1177 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
1178 gfc_add_block_to_block (pblock, &se.pre);
1179 dovar = se.expr;
1180 type = TREE_TYPE (dovar);
1181 gcc_assert (TREE_CODE (type) == INTEGER_TYPE);
1182
1183 gfc_init_se (&se, NULL);
1184 gfc_conv_expr_val (&se, code->ext.iterator->start);
1185 gfc_add_block_to_block (pblock, &se.pre);
1186 from = gfc_evaluate_now (se.expr, pblock);
1187
1188 gfc_init_se (&se, NULL);
1189 gfc_conv_expr_val (&se, code->ext.iterator->end);
1190 gfc_add_block_to_block (pblock, &se.pre);
1191 to = gfc_evaluate_now (se.expr, pblock);
1192
1193 gfc_init_se (&se, NULL);
1194 gfc_conv_expr_val (&se, code->ext.iterator->step);
1195 gfc_add_block_to_block (pblock, &se.pre);
1196 step = gfc_evaluate_now (se.expr, pblock);
1197
1198 /* Special case simple loops. */
1199 if (integer_onep (step))
1200 simple = 1;
1201 else if (tree_int_cst_equal (step, integer_minus_one_node))
1202 simple = -1;
1203
1204 /* Loop body. */
1205 if (simple)
1206 {
1207 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
1208 TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
1209 boolean_type_node, dovar, to);
1210 TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
1211 TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
1212 TREE_VEC_ELT (incr, i));
1213 }
1214 else
1215 {
1216 /* STEP is not 1 or -1. Use:
1217 for (count = 0; count < (to + step - from) / step; count++)
1218 {
1219 dovar = from + count * step;
1220 body;
1221 cycle_label:;
1222 } */
1223 tmp = fold_build2 (MINUS_EXPR, type, step, from);
1224 tmp = fold_build2 (PLUS_EXPR, type, to, tmp);
1225 tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
1226 tmp = gfc_evaluate_now (tmp, pblock);
1227 count = gfc_create_var (type, "count");
1228 TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
1229 build_int_cst (type, 0));
1230 TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
1231 count, tmp);
1232 TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
1233 build_int_cst (type, 1));
1234 TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
1235 count, TREE_VEC_ELT (incr, i));
1236
1237 /* Initialize DOVAR. */
1238 tmp = fold_build2 (MULT_EXPR, type, count, step);
1239 tmp = fold_build2 (PLUS_EXPR, type, from, tmp);
1240 dovar_init = tree_cons (dovar, tmp, dovar_init);
1241 }
1242
1243 if (!dovar_found)
1244 {
1245 tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
1246 OMP_CLAUSE_DECL (tmp) = dovar;
1247 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1248 }
1249 else if (dovar_found == 2)
1250 {
1251 tree c = NULL;
1252
1253 tmp = NULL;
1254 if (!simple)
1255 {
 1256                  /* If dovar is lastprivate, but a different counter is used,
 1257                     dovar += step needs to be added to
 1258                     OMP_CLAUSE_LASTPRIVATE_STMT; otherwise the copied dovar
 1259                     will have the value on entry to the last iteration rather
 1260                     than the value after the iterator increment.  */
1261 tmp = gfc_evaluate_now (step, pblock);
1262 tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
1263 tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
1264 for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1265 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1266 && OMP_CLAUSE_DECL (c) == dovar)
1267 {
1268 OMP_CLAUSE_LASTPRIVATE_STMT (c) = tmp;
1269 break;
1270 }
1271 }
1272 if (c == NULL && par_clauses != NULL)
1273 {
1274 for (c = par_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
1275 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1276 && OMP_CLAUSE_DECL (c) == dovar)
1277 {
1278 tree l = build_omp_clause (OMP_CLAUSE_LASTPRIVATE);
1279 OMP_CLAUSE_DECL (l) = dovar;
1280 OMP_CLAUSE_CHAIN (l) = omp_clauses;
1281 OMP_CLAUSE_LASTPRIVATE_STMT (l) = tmp;
1282 omp_clauses = l;
1283 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_SHARED);
1284 break;
1285 }
1286 }
1287 gcc_assert (simple || c != NULL);
1288 }
1289 if (!simple)
1290 {
1291 tmp = build_omp_clause (OMP_CLAUSE_PRIVATE);
1292 OMP_CLAUSE_DECL (tmp) = count;
1293 omp_clauses = gfc_trans_add_clause (tmp, omp_clauses);
1294 }
1295
1296 if (i + 1 < collapse)
1297 code = code->block->next;
1298 }
1299
1300 if (pblock != &block)
1301 {
1302 pushlevel (0);
1303 gfc_start_block (&block);
1304 }
1305
1306 gfc_start_block (&body);
1307
1308 dovar_init = nreverse (dovar_init);
1309 while (dovar_init)
1310 {
1311 gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
1312 TREE_VALUE (dovar_init));
1313 dovar_init = TREE_CHAIN (dovar_init);
1314 }
1315
 1316   /* The CYCLE statement is implemented with a goto.  An EXIT statement
 1317      must not be present for this loop.  */
1318 cycle_label = gfc_build_label_decl (NULL_TREE);
1319
1320 /* Put these labels where they can be found later. We put the
1321 labels in a TREE_LIST node (because TREE_CHAIN is already
1322 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1323 label in TREE_VALUE (backend_decl). */
1324
1325 code->block->backend_decl = tree_cons (cycle_label, NULL, NULL);
1326
1327 /* Main loop body. */
1328 tmp = gfc_trans_omp_code (code->block->next, true);
1329 gfc_add_expr_to_block (&body, tmp);
1330
1331 /* Label for cycle statements (if needed). */
1332 if (TREE_USED (cycle_label))
1333 {
1334 tmp = build1_v (LABEL_EXPR, cycle_label);
1335 gfc_add_expr_to_block (&body, tmp);
1336 }
1337
1338 /* End of loop body. */
1339 stmt = make_node (OMP_FOR);
1340
1341 TREE_TYPE (stmt) = void_type_node;
1342 OMP_FOR_BODY (stmt) = gfc_finish_block (&body);
1343 OMP_FOR_CLAUSES (stmt) = omp_clauses;
1344 OMP_FOR_INIT (stmt) = init;
1345 OMP_FOR_COND (stmt) = cond;
1346 OMP_FOR_INCR (stmt) = incr;
1347 gfc_add_expr_to_block (&block, stmt);
1348
1349 return gfc_finish_block (&block);
1350 }
1351
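/* Translate an !$OMP FLUSH directive into a call to the synchronization
   built-in.  */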
1352 static tree
1353 gfc_trans_omp_flush (void)
1354 {
1355 tree decl = built_in_decls [BUILT_IN_SYNCHRONIZE];
1356 return build_call_expr (decl, 0);
1357 }
1358
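/* Translate an !$OMP MASTER construct.  */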
1359 static tree
1360 gfc_trans_omp_master (gfc_code *code)
1361 {
1362 tree stmt = gfc_trans_code (code->block->next);
1363 if (IS_EMPTY_STMT (stmt))
1364 return stmt;
1365 return build1_v (OMP_MASTER, stmt);
1366 }
1367
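/* Translate an !$OMP ORDERED construct.  */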
1368 static tree
1369 gfc_trans_omp_ordered (gfc_code *code)
1370 {
1371 return build1_v (OMP_ORDERED, gfc_trans_code (code->block->next));
1372 }
1373
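/* Translate an !$OMP PARALLEL construct.  */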
1374 static tree
1375 gfc_trans_omp_parallel (gfc_code *code)
1376 {
1377 stmtblock_t block;
1378 tree stmt, omp_clauses;
1379
1380 gfc_start_block (&block);
1381 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1382 code->loc);
1383 stmt = gfc_trans_omp_code (code->block->next, true);
1384 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1385 gfc_add_expr_to_block (&block, stmt);
1386 return gfc_finish_block (&block);
1387 }
1388
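/* Translate a combined !$OMP PARALLEL DO construct, splitting the clauses
   between the parallel region and the loop.  */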
1389 static tree
1390 gfc_trans_omp_parallel_do (gfc_code *code)
1391 {
1392 stmtblock_t block, *pblock = NULL;
1393 gfc_omp_clauses parallel_clauses, do_clauses;
1394 tree stmt, omp_clauses = NULL_TREE;
1395
1396 gfc_start_block (&block);
1397
1398 memset (&do_clauses, 0, sizeof (do_clauses));
1399 if (code->ext.omp_clauses != NULL)
1400 {
1401 memcpy (&parallel_clauses, code->ext.omp_clauses,
1402 sizeof (parallel_clauses));
1403 do_clauses.sched_kind = parallel_clauses.sched_kind;
1404 do_clauses.chunk_size = parallel_clauses.chunk_size;
1405 do_clauses.ordered = parallel_clauses.ordered;
1406 do_clauses.collapse = parallel_clauses.collapse;
1407 parallel_clauses.sched_kind = OMP_SCHED_NONE;
1408 parallel_clauses.chunk_size = NULL;
1409 parallel_clauses.ordered = false;
1410 parallel_clauses.collapse = 0;
1411 omp_clauses = gfc_trans_omp_clauses (&block, &parallel_clauses,
1412 code->loc);
1413 }
1414 do_clauses.nowait = true;
1415 if (!do_clauses.ordered && do_clauses.sched_kind != OMP_SCHED_STATIC)
1416 pblock = &block;
1417 else
1418 pushlevel (0);
1419 stmt = gfc_trans_omp_do (code, pblock, &do_clauses, omp_clauses);
1420 if (TREE_CODE (stmt) != BIND_EXPR)
1421 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1422 else
1423 poplevel (0, 0, 0);
1424 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1425 OMP_PARALLEL_COMBINED (stmt) = 1;
1426 gfc_add_expr_to_block (&block, stmt);
1427 return gfc_finish_block (&block);
1428 }
1429
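/* Translate a combined !$OMP PARALLEL SECTIONS construct.  */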
1430 static tree
1431 gfc_trans_omp_parallel_sections (gfc_code *code)
1432 {
1433 stmtblock_t block;
1434 gfc_omp_clauses section_clauses;
1435 tree stmt, omp_clauses;
1436
1437 memset (&section_clauses, 0, sizeof (section_clauses));
1438 section_clauses.nowait = true;
1439
1440 gfc_start_block (&block);
1441 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1442 code->loc);
1443 pushlevel (0);
1444 stmt = gfc_trans_omp_sections (code, &section_clauses);
1445 if (TREE_CODE (stmt) != BIND_EXPR)
1446 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1447 else
1448 poplevel (0, 0, 0);
1449 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1450 OMP_PARALLEL_COMBINED (stmt) = 1;
1451 gfc_add_expr_to_block (&block, stmt);
1452 return gfc_finish_block (&block);
1453 }
1454
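/* Translate a combined !$OMP PARALLEL WORKSHARE construct.  */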
1455 static tree
1456 gfc_trans_omp_parallel_workshare (gfc_code *code)
1457 {
1458 stmtblock_t block;
1459 gfc_omp_clauses workshare_clauses;
1460 tree stmt, omp_clauses;
1461
1462 memset (&workshare_clauses, 0, sizeof (workshare_clauses));
1463 workshare_clauses.nowait = true;
1464
1465 gfc_start_block (&block);
1466 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1467 code->loc);
1468 pushlevel (0);
1469 stmt = gfc_trans_omp_workshare (code, &workshare_clauses);
1470 if (TREE_CODE (stmt) != BIND_EXPR)
1471 stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
1472 else
1473 poplevel (0, 0, 0);
1474 stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
1475 OMP_PARALLEL_COMBINED (stmt) = 1;
1476 gfc_add_expr_to_block (&block, stmt);
1477 return gfc_finish_block (&block);
1478 }
1479
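/* Translate an !$OMP SECTIONS construct with clauses CLAUSES.  */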
1480 static tree
1481 gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
1482 {
1483 stmtblock_t block, body;
1484 tree omp_clauses, stmt;
1485 bool has_lastprivate = clauses->lists[OMP_LIST_LASTPRIVATE] != NULL;
1486
1487 gfc_start_block (&block);
1488
1489 omp_clauses = gfc_trans_omp_clauses (&block, clauses, code->loc);
1490
1491 gfc_init_block (&body);
1492 for (code = code->block; code; code = code->block)
1493 {
1494 /* Last section is special because of lastprivate, so even if it
1495 is empty, chain it in. */
1496 stmt = gfc_trans_omp_code (code->next,
1497 has_lastprivate && code->block == NULL);
1498 if (! IS_EMPTY_STMT (stmt))
1499 {
1500 stmt = build1_v (OMP_SECTION, stmt);
1501 gfc_add_expr_to_block (&body, stmt);
1502 }
1503 }
1504 stmt = gfc_finish_block (&body);
1505
1506 stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses);
1507 gfc_add_expr_to_block (&block, stmt);
1508
1509 return gfc_finish_block (&block);
1510 }
1511
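/* Translate an !$OMP SINGLE construct with clauses CLAUSES.  */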
1512 static tree
1513 gfc_trans_omp_single (gfc_code *code, gfc_omp_clauses *clauses)
1514 {
1515 tree omp_clauses = gfc_trans_omp_clauses (NULL, clauses, code->loc);
1516 tree stmt = gfc_trans_omp_code (code->block->next, true);
1517 stmt = build2 (OMP_SINGLE, void_type_node, stmt, omp_clauses);
1518 return stmt;
1519 }
1520
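/* Translate an !$OMP TASK construct.  */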
1521 static tree
1522 gfc_trans_omp_task (gfc_code *code)
1523 {
1524 stmtblock_t block;
1525 tree stmt, omp_clauses;
1526
1527 gfc_start_block (&block);
1528 omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
1529 code->loc);
1530 stmt = gfc_trans_omp_code (code->block->next, true);
1531 stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses);
1532 gfc_add_expr_to_block (&block, stmt);
1533 return gfc_finish_block (&block);
1534 }
1535
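/* Translate an !$OMP TASKWAIT directive into a call to the GOMP taskwait
   built-in.  */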
1536 static tree
1537 gfc_trans_omp_taskwait (void)
1538 {
1539 tree decl = built_in_decls [BUILT_IN_GOMP_TASKWAIT];
1540 return build_call_expr (decl, 0);
1541 }
1542
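/* Translate an !$OMP WORKSHARE construct.  For now this is simply treated
   like !$OMP SINGLE.  */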
1543 static tree
1544 gfc_trans_omp_workshare (gfc_code *code, gfc_omp_clauses *clauses)
1545 {
1546 /* XXX */
1547 return gfc_trans_omp_single (code, clauses);
1548 }
1549
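/* Translate the OpenMP directive represented by CODE.  */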
1550 tree
1551 gfc_trans_omp_directive (gfc_code *code)
1552 {
1553 switch (code->op)
1554 {
1555 case EXEC_OMP_ATOMIC:
1556 return gfc_trans_omp_atomic (code);
1557 case EXEC_OMP_BARRIER:
1558 return gfc_trans_omp_barrier ();
1559 case EXEC_OMP_CRITICAL:
1560 return gfc_trans_omp_critical (code);
1561 case EXEC_OMP_DO:
1562 return gfc_trans_omp_do (code, NULL, code->ext.omp_clauses, NULL);
1563 case EXEC_OMP_FLUSH:
1564 return gfc_trans_omp_flush ();
1565 case EXEC_OMP_MASTER:
1566 return gfc_trans_omp_master (code);
1567 case EXEC_OMP_ORDERED:
1568 return gfc_trans_omp_ordered (code);
1569 case EXEC_OMP_PARALLEL:
1570 return gfc_trans_omp_parallel (code);
1571 case EXEC_OMP_PARALLEL_DO:
1572 return gfc_trans_omp_parallel_do (code);
1573 case EXEC_OMP_PARALLEL_SECTIONS:
1574 return gfc_trans_omp_parallel_sections (code);
1575 case EXEC_OMP_PARALLEL_WORKSHARE:
1576 return gfc_trans_omp_parallel_workshare (code);
1577 case EXEC_OMP_SECTIONS:
1578 return gfc_trans_omp_sections (code, code->ext.omp_clauses);
1579 case EXEC_OMP_SINGLE:
1580 return gfc_trans_omp_single (code, code->ext.omp_clauses);
1581 case EXEC_OMP_TASK:
1582 return gfc_trans_omp_task (code);
1583 case EXEC_OMP_TASKWAIT:
1584 return gfc_trans_omp_taskwait ();
1585 case EXEC_OMP_WORKSHARE:
1586 return gfc_trans_omp_workshare (code, code->ext.omp_clauses);
1587 default:
1588 gcc_unreachable ();
1589 }
1590 }