1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "tree-gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
41
42 typedef struct iter_info
43 {
44 tree var;
45 tree start;
46 tree end;
47 tree step;
48 struct iter_info *next;
49 }
50 iter_info;
51
52 typedef struct forall_info
53 {
54 iter_info *this_loop;
55 tree mask;
56 tree maskindex;
57 int nvar;
58 tree size;
59 struct forall_info *prev_nest;
60 }
61 forall_info;
62
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
65
66 /* Translate an F95 label number to a LABEL_EXPR. */
67
68 tree
69 gfc_trans_label_here (gfc_code * code)
70 {
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
72 }
73
74
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
77 is a field_decl. */
78
79 void
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
81 {
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deals with variable in common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deals with dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
90 }
91
92 /* Translate a label assignment statement. */
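/* As an illustration (not an exhaustive description), the obsolescent
   statement

       ASSIGN 100 TO LAB

   stores into the auxiliary variables of LAB either the address of the
   code label 100 together with a length of -1 (when 100 labels a branch
   target), or the address and length of the FORMAT string (when 100
   labels a FORMAT statement).  */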
93
94 tree
95 gfc_trans_label_assign (gfc_code * code)
96 {
97 tree label_tree;
98 gfc_se se;
99 tree len;
100 tree addr;
101 tree len_tree;
102 char *label_str;
103 int label_len;
104
105 /* Start a new block. */
106 gfc_init_se (&se, NULL);
107 gfc_start_block (&se.pre);
108 gfc_conv_label_variable (&se, code->expr);
109
110 len = GFC_DECL_STRING_LEN (se.expr);
111 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
112
113 label_tree = gfc_get_label_decl (code->label);
114
115 if (code->label->defined == ST_LABEL_TARGET)
116 {
117 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
118 len_tree = integer_minus_one_node;
119 }
120 else
121 {
122 label_str = code->label->format->value.character.string;
123 label_len = code->label->format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_string_const (label_len + 1, label_str);
126 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
127 }
128
129 gfc_add_modify_expr (&se.pre, len, len_tree);
130 gfc_add_modify_expr (&se.pre, addr, label_tree);
131
132 return gfc_finish_block (&se.pre);
133 }
134
135 /* Translate a GOTO statement. */
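/* For the assigned form -- e.g. (illustrative only)

       GO TO LAB, (100, 200, 300)

   the stored string length is first checked at run time to be -1, i.e.
   that LAB was assigned a branch target and not a FORMAT label, and if
   a label list is present each listed label's address is compared with
   the stored address, jumping to the first match.  */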
136
137 tree
138 gfc_trans_goto (gfc_code * code)
139 {
140 locus loc = code->loc;
141 tree assigned_goto;
142 tree target;
143 tree tmp;
144 gfc_se se;
145
146 if (code->label != NULL)
147 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
148
149 /* ASSIGNED GOTO. */
150 gfc_init_se (&se, NULL);
151 gfc_start_block (&se.pre);
152 gfc_conv_label_variable (&se, code->expr);
153 tmp = GFC_DECL_STRING_LEN (se.expr);
154 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
155 build_int_cst (TREE_TYPE (tmp), -1));
156 gfc_trans_runtime_check (tmp, &se.pre, &loc,
157 "Assigned label is not a target label");
158
159 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
160
161 code = code->block;
162 if (code == NULL)
163 {
164 target = build1 (GOTO_EXPR, void_type_node, assigned_goto);
165 gfc_add_expr_to_block (&se.pre, target);
166 return gfc_finish_block (&se.pre);
167 }
168
169 /* Check the label list. */
170 do
171 {
172 target = gfc_get_label_decl (code->label);
173 tmp = gfc_build_addr_expr (pvoid_type_node, target);
174 tmp = build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
175 tmp = build3_v (COND_EXPR, tmp,
176 build1 (GOTO_EXPR, void_type_node, target),
177 build_empty_stmt ());
178 gfc_add_expr_to_block (&se.pre, tmp);
179 code = code->block;
180 }
181 while (code != NULL);
182 gfc_trans_runtime_check (boolean_true_node, &se.pre, &loc,
183 "Assigned label is not in the list");
184
185 return gfc_finish_block (&se.pre);
186 }
187
188
189 /* Translate an ENTRY statement. Just adds a label for this entry point. */
190 tree
191 gfc_trans_entry (gfc_code * code)
192 {
193 return build1_v (LABEL_EXPR, code->ext.entry->label);
194 }
195
196
197 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
198 elemental subroutines. Make temporaries for output arguments if any such
199 dependencies are found. Output arguments are chosen because internal_unpack
200 can be used, as is, to copy the result back to the variable. */
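/* A sketch of the case handled here (hypothetical example): for an
   elemental subroutine

       ELEMENTAL SUBROUTINE esub (out, in)
         INTEGER, INTENT(OUT) :: out
         INTEGER, INTENT(IN)  :: in

       CALL esub (a(1:5), a(2:6))

   the INTENT(OUT) actual argument overlaps an INTENT(IN) one, so the
   result is first written to a temporary array and copied back to
   a(1:5) with internal_unpack after the scalarized loop.  */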
201 static void
202 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
203 gfc_symbol * sym, gfc_actual_arglist * arg)
204 {
205 gfc_actual_arglist *arg0;
206 gfc_expr *e;
207 gfc_formal_arglist *formal;
208 gfc_loopinfo tmp_loop;
209 gfc_se parmse;
210 gfc_ss *ss;
211 gfc_ss_info *info;
212 gfc_symbol *fsym;
213 int n;
214 stmtblock_t block;
215 tree data;
216 tree offset;
217 tree size;
218 tree tmp;
219
220 if (loopse->ss == NULL)
221 return;
222
223 ss = loopse->ss;
224 arg0 = arg;
225 formal = sym->formal;
226
227 /* Loop over all the arguments testing for dependencies. */
228 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
229 {
230 e = arg->expr;
231 if (e == NULL)
232 continue;
233
234 /* Obtain the info structure for the current argument. */
235 info = NULL;
236 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
237 {
238 if (ss->expr != e)
239 continue;
240 info = &ss->data.info;
241 break;
242 }
243
244 /* If there is a dependency, create a temporary and use it
245 instead of the variable. */
246 fsym = formal ? formal->sym : NULL;
247 if (e->expr_type == EXPR_VARIABLE
248 && e->rank && fsym
249 && fsym->attr.intent == INTENT_OUT
250 && gfc_check_fncall_dependency (e, INTENT_OUT, sym, arg0))
251 {
252 /* Make a local loopinfo for the temporary creation, so that
253 none of the other ss->info's have to be renormalized. */
254 gfc_init_loopinfo (&tmp_loop);
255 for (n = 0; n < info->dimen; n++)
256 {
257 tmp_loop.to[n] = loopse->loop->to[n];
258 tmp_loop.from[n] = loopse->loop->from[n];
259 tmp_loop.order[n] = loopse->loop->order[n];
260 }
261
262 /* Generate the temporary. Merge the block so that the
263 declarations are put at the right binding level. */
264 size = gfc_create_var (gfc_array_index_type, NULL);
265 data = gfc_create_var (pvoid_type_node, NULL);
266 gfc_start_block (&block);
267 tmp = gfc_typenode_for_spec (&e->ts);
268 tmp = gfc_trans_create_temp_array (&se->pre, &se->post,
269 &tmp_loop, info, tmp,
270 false, true, false);
271 gfc_add_modify_expr (&se->pre, size, tmp);
272 tmp = fold_convert (pvoid_type_node, info->data);
273 gfc_add_modify_expr (&se->pre, data, tmp);
274 gfc_merge_block_scope (&block);
275
276 /* Obtain the argument descriptor for unpacking. */
277 gfc_init_se (&parmse, NULL);
278 parmse.want_pointer = 1;
279 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
280 gfc_add_block_to_block (&se->pre, &parmse.pre);
281
282 /* Calculate the offset for the temporary. */
283 offset = gfc_index_zero_node;
284 for (n = 0; n < info->dimen; n++)
285 {
286 tmp = gfc_conv_descriptor_stride (info->descriptor,
287 gfc_rank_cst[n]);
288 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
289 loopse->loop->from[n], tmp);
290 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
291 offset, tmp);
292 }
293 info->offset = gfc_create_var (gfc_array_index_type, NULL);
294 gfc_add_modify_expr (&se->pre, info->offset, offset);
295
296 /* Copy the result back using unpack. */
297 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
298 gfc_add_expr_to_block (&se->post, tmp);
299
300 gfc_add_block_to_block (&se->post, &parmse.post);
301 }
302 }
303 }
304
305
306 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
307
308 tree
309 gfc_trans_call (gfc_code * code, bool dependency_check)
310 {
311 gfc_se se;
312 gfc_ss * ss;
313 int has_alternate_specifier;
314
315 /* A CALL starts a new block because the actual arguments may have to
316 be evaluated first. */
317 gfc_init_se (&se, NULL);
318 gfc_start_block (&se.pre);
319
320 gcc_assert (code->resolved_sym);
321
322 ss = gfc_ss_terminator;
323 if (code->resolved_sym->attr.elemental)
324 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
325
326 /* Not an elemental subroutine call with array valued arguments. */
327 if (ss == gfc_ss_terminator)
328 {
329
330 /* Translate the call. */
331 has_alternate_specifier
332 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
333 NULL_TREE);
334
335 /* A subroutine without side-effect, by definition, does nothing! */
336 TREE_SIDE_EFFECTS (se.expr) = 1;
337
338 /* Chain the pieces together and return the block. */
339 if (has_alternate_specifier)
340 {
341 gfc_code *select_code;
342 gfc_symbol *sym;
343 select_code = code->next;
344 gcc_assert(select_code->op == EXEC_SELECT);
345 sym = select_code->expr->symtree->n.sym;
346 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
347 if (sym->backend_decl == NULL)
348 sym->backend_decl = gfc_get_symbol_decl (sym);
349 gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
350 }
351 else
352 gfc_add_expr_to_block (&se.pre, se.expr);
353
354 gfc_add_block_to_block (&se.pre, &se.post);
355 }
356
357 else
358 {
359 /* An elemental subroutine call with array valued arguments has
360 to be scalarized. */
361 gfc_loopinfo loop;
362 stmtblock_t body;
363 stmtblock_t block;
364 gfc_se loopse;
365
366 /* gfc_walk_elemental_function_args renders the ss chain in the
367 reverse order to the actual argument order. */
368 ss = gfc_reverse_ss (ss);
369
370 /* Initialize the loop. */
371 gfc_init_se (&loopse, NULL);
372 gfc_init_loopinfo (&loop);
373 gfc_add_ss_to_loop (&loop, ss);
374
375 gfc_conv_ss_startstride (&loop);
376 gfc_conv_loop_setup (&loop);
377 gfc_mark_ss_chain_used (ss, 1);
378
379 /* Convert the arguments, checking for dependencies. */
380 gfc_copy_loopinfo_to_se (&loopse, &loop);
381 loopse.ss = ss;
382
383 /* For operator assignment, we need to do dependency checking.
384 We also check the intent of the parameters. */
385 if (dependency_check)
386 {
387 gfc_symbol *sym;
388 sym = code->resolved_sym;
389 gcc_assert (sym->formal->sym->attr.intent == INTENT_OUT);
390 gcc_assert (sym->formal->next->sym->attr.intent == INTENT_IN);
391 gfc_conv_elemental_dependencies (&se, &loopse, sym,
392 code->ext.actual);
393 }
394
395 /* Generate the loop body. */
396 gfc_start_scalarized_body (&loop, &body);
397 gfc_init_block (&block);
398
399 /* Add the subroutine call to the block. */
400 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
401 NULL_TREE);
402 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
403
404 gfc_add_block_to_block (&block, &loopse.pre);
405 gfc_add_block_to_block (&block, &loopse.post);
406
407 /* Finish up the loop block and the loop. */
408 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
409 gfc_trans_scalarizing_loops (&loop, &body);
410 gfc_add_block_to_block (&se.pre, &loop.pre);
411 gfc_add_block_to_block (&se.pre, &loop.post);
412 gfc_add_block_to_block (&se.pre, &se.post);
413 gfc_cleanup_loop (&loop);
414 }
415
416 return gfc_finish_block (&se.pre);
417 }
418
419
420 /* Translate the RETURN statement. */
421
422 tree
423 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
424 {
425 if (code->expr)
426 {
427 gfc_se se;
428 tree tmp;
429 tree result;
430
431 /* If code->expr is not NULL, this return statement must appear
432 in a subroutine and current_fake_result_decl has already
433 been generated. */
434
435 result = gfc_get_fake_result_decl (NULL, 0);
436 if (!result)
437 {
438 gfc_warning ("An alternate return at %L without a * dummy argument",
439 &code->expr->where);
440 return build1_v (GOTO_EXPR, gfc_get_return_label ());
441 }
442
443 /* Start a new block for this statement. */
444 gfc_init_se (&se, NULL);
445 gfc_start_block (&se.pre);
446
447 gfc_conv_expr (&se, code->expr);
448
449 tmp = build2 (MODIFY_EXPR, TREE_TYPE (result), result,
450 fold_convert (TREE_TYPE (result), se.expr));
451 gfc_add_expr_to_block (&se.pre, tmp);
452
453 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
454 gfc_add_expr_to_block (&se.pre, tmp);
455 gfc_add_block_to_block (&se.pre, &se.post);
456 return gfc_finish_block (&se.pre);
457 }
458 else
459 return build1_v (GOTO_EXPR, gfc_get_return_label ());
460 }
461
462
463 /* Translate the PAUSE statement. We have to translate this statement
464 to a runtime library call. */
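/* Roughly, PAUSE 42 becomes a call to the routine bound to
   gfor_fndecl_pause_numeric with the integer stop code, while
   PAUSE 'message' passes the character expression and its length to
   the routine bound to gfor_fndecl_pause_string.  */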
465
466 tree
467 gfc_trans_pause (gfc_code * code)
468 {
469 tree gfc_int4_type_node = gfc_get_int_type (4);
470 gfc_se se;
471 tree tmp;
472
473 /* Start a new block for this statement. */
474 gfc_init_se (&se, NULL);
475 gfc_start_block (&se.pre);
476
477
478 if (code->expr == NULL)
479 {
480 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
481 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
482 }
483 else
484 {
485 gfc_conv_expr_reference (&se, code->expr);
486 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
487 se.expr, se.string_length);
488 }
489
490 gfc_add_expr_to_block (&se.pre, tmp);
491
492 gfc_add_block_to_block (&se.pre, &se.post);
493
494 return gfc_finish_block (&se.pre);
495 }
496
497
498 /* Translate the STOP statement. We have to translate this statement
499 to a runtime library call. */
500
501 tree
502 gfc_trans_stop (gfc_code * code)
503 {
504 tree gfc_int4_type_node = gfc_get_int_type (4);
505 gfc_se se;
506 tree tmp;
507
508 /* Start a new block for this statement. */
509 gfc_init_se (&se, NULL);
510 gfc_start_block (&se.pre);
511
512
513 if (code->expr == NULL)
514 {
515 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
516 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
517 }
518 else
519 {
520 gfc_conv_expr_reference (&se, code->expr);
521 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
522 se.expr, se.string_length);
523 }
524
525 gfc_add_expr_to_block (&se.pre, tmp);
526
527 gfc_add_block_to_block (&se.pre, &se.post);
528
529 return gfc_finish_block (&se.pre);
530 }
531
532
533 /* Generate GENERIC for the IF construct. This function also deals with
534 the simple IF statement, because the front end translates the IF
535 statement into an IF construct.
536
537 We translate:
538
539 IF (cond) THEN
540 then_clause
541 ELSEIF (cond2)
542 elseif_clause
543 ELSE
544 else_clause
545 ENDIF
546
547 into:
548
549 pre_cond_s;
550 if (cond_s)
551 {
552 then_clause;
553 }
554 else
555 {
556 pre_cond_s
557 if (cond_s)
558 {
559 elseif_clause
560 }
561 else
562 {
563 else_clause;
564 }
565 }
566
567 where COND_S is the simplified version of the predicate. PRE_COND_S
568 are the pre side-effects produced by the translation of the
569 conditional.
570 We need to build the chain recursively otherwise we run into
571 problems with folding incomplete statements. */
572
573 static tree
574 gfc_trans_if_1 (gfc_code * code)
575 {
576 gfc_se if_se;
577 tree stmt, elsestmt;
578
579 /* Check for an unconditional ELSE clause. */
580 if (!code->expr)
581 return gfc_trans_code (code->next);
582
583 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
584 gfc_init_se (&if_se, NULL);
585 gfc_start_block (&if_se.pre);
586
587 /* Calculate the IF condition expression. */
588 gfc_conv_expr_val (&if_se, code->expr);
589
590 /* Translate the THEN clause. */
591 stmt = gfc_trans_code (code->next);
592
593 /* Translate the ELSE clause. */
594 if (code->block)
595 elsestmt = gfc_trans_if_1 (code->block);
596 else
597 elsestmt = build_empty_stmt ();
598
599 /* Build the condition expression and add it to the condition block. */
600 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
601
602 gfc_add_expr_to_block (&if_se.pre, stmt);
603
604 /* Finish off this statement. */
605 return gfc_finish_block (&if_se.pre);
606 }
607
608 tree
609 gfc_trans_if (gfc_code * code)
610 {
611 /* Ignore the top EXEC_IF, it only announces an IF construct. The
612 actual code we must translate is in code->block. */
613
614 return gfc_trans_if_1 (code->block);
615 }
616
617
618 /* Translate an arithmetic IF expression.
619
620 IF (cond) label1, label2, label3 translates to
621
622 if (cond <= 0)
623 {
624 if (cond < 0)
625 goto label1;
626 else // cond == 0
627 goto label2;
628 }
629 else // cond > 0
630 goto label3;
631
632 An optimized version can be generated in case of equal labels.
633 E.g., if label1 is equal to label2, we can translate it to
634
635 if (cond <= 0)
636 goto label1;
637 else
638 goto label3;
639 */
640
641 tree
642 gfc_trans_arithmetic_if (gfc_code * code)
643 {
644 gfc_se se;
645 tree tmp;
646 tree branch1;
647 tree branch2;
648 tree zero;
649
650 /* Start a new block. */
651 gfc_init_se (&se, NULL);
652 gfc_start_block (&se.pre);
653
654 /* Pre-evaluate COND. */
655 gfc_conv_expr_val (&se, code->expr);
656 se.expr = gfc_evaluate_now (se.expr, &se.pre);
657
658 /* Build something to compare with. */
659 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
660
661 if (code->label->value != code->label2->value)
662 {
663 /* If (cond < 0) take branch1 else take branch2.
664 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
665 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
666 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
667
668 if (code->label->value != code->label3->value)
669 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
670 else
671 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
672
673 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
674 }
675 else
676 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
677
678 if (code->label->value != code->label3->value
679 && code->label2->value != code->label3->value)
680 {
681 /* if (cond <= 0) take branch1 else take branch2. */
682 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
683 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
684 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
685 }
686
687 /* Append the COND_EXPR to the evaluation of COND, and return. */
688 gfc_add_expr_to_block (&se.pre, branch1);
689 return gfc_finish_block (&se.pre);
690 }
691
692
693 /* Translate the simple DO construct. This is where the loop variable has
694 integer type and step +-1. We can't use this in the general case
695 because integer overflow and floating point errors could give incorrect
696 results.
697 We translate a do loop from:
698
699 DO dovar = from, to, step
700 body
701 END DO
702
703 to:
704
705 [Evaluate loop bounds and step]
706 dovar = from;
707 if ((step > 0) ? (dovar <= to) : (dovar >= to))
708 {
709 for (;;)
710 {
711 body;
712 cycle_label:
713 cond = (dovar == to);
714 dovar += step;
715 if (cond) goto end_label;
716 }
717 }
718 end_label:
719
720 This helps the optimizers by avoiding the extra induction variable
721 used in the general case. */
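/* For instance (informally), for DO i = 1, HUGE(i) the exit test compares
   i with "to" before the increment, so the decision to leave the loop
   never depends on the possibly overflowed value of i + step.  */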
722
723 static tree
724 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
725 tree from, tree to, tree step)
726 {
727 stmtblock_t body;
728 tree type;
729 tree cond;
730 tree tmp;
731 tree cycle_label;
732 tree exit_label;
733
734 type = TREE_TYPE (dovar);
735
736 /* Initialize the DO variable: dovar = from. */
737 gfc_add_modify_expr (pblock, dovar, from);
738
739 /* Cycle and exit statements are implemented with gotos. */
740 cycle_label = gfc_build_label_decl (NULL_TREE);
741 exit_label = gfc_build_label_decl (NULL_TREE);
742
743 /* Put the labels where they can be found later. See gfc_trans_do(). */
744 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
745
746 /* Loop body. */
747 gfc_start_block (&body);
748
749 /* Main loop body. */
750 tmp = gfc_trans_code (code->block->next);
751 gfc_add_expr_to_block (&body, tmp);
752
753 /* Label for cycle statements (if needed). */
754 if (TREE_USED (cycle_label))
755 {
756 tmp = build1_v (LABEL_EXPR, cycle_label);
757 gfc_add_expr_to_block (&body, tmp);
758 }
759
760 /* Evaluate the loop condition. */
761 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
762 cond = gfc_evaluate_now (cond, &body);
763
764 /* Increment the loop variable. */
765 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
766 gfc_add_modify_expr (&body, dovar, tmp);
767
768 /* The loop exit. */
769 tmp = build1_v (GOTO_EXPR, exit_label);
770 TREE_USED (exit_label) = 1;
771 tmp = fold_build3 (COND_EXPR, void_type_node,
772 cond, tmp, build_empty_stmt ());
773 gfc_add_expr_to_block (&body, tmp);
774
775 /* Finish the loop body. */
776 tmp = gfc_finish_block (&body);
777 tmp = build1_v (LOOP_EXPR, tmp);
778
779 /* Only execute the loop if the number of iterations is positive. */
780 if (tree_int_cst_sgn (step) > 0)
781 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
782 else
783 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
784 tmp = fold_build3 (COND_EXPR, void_type_node,
785 cond, tmp, build_empty_stmt ());
786 gfc_add_expr_to_block (pblock, tmp);
787
788 /* Add the exit label. */
789 tmp = build1_v (LABEL_EXPR, exit_label);
790 gfc_add_expr_to_block (pblock, tmp);
791
792 return gfc_finish_block (pblock);
793 }
794
795 /* Translate the DO construct. This obviously is one of the most
796 important ones to get right with any compiler, but especially
797 so for Fortran.
798
799 We special case some loop forms as described in gfc_trans_simple_do.
800 For other cases we implement them with a separate loop count,
801 as described in the standard.
802
803 We translate a do loop from:
804
805 DO dovar = from, to, step
806 body
807 END DO
808
809 to:
810
811 [evaluate loop bounds and step]
812 empty = (step > 0 ? to < from : to > from);
813 countm1 = (to - from) / step;
814 dovar = from;
815 if (empty) goto exit_label;
816 for (;;)
817 {
818 body;
819 cycle_label:
820 dovar += step;
821 if (countm1 == 0) goto exit_label;
822 countm1--;
823 }
824 exit_label:
825
826 countm1 is an unsigned integer. It is equal to the loop count minus one,
827 because the loop count itself can overflow. */
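/* As a hypothetical example, DO i = 1, 10, 3 gives
   countm1 = (10 - 1) / 3 = 3 in the unsigned type, and the body runs for
   i = 1, 4, 7, 10 -- four iterations, i.e. countm1 + 1.  */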
828
829 tree
830 gfc_trans_do (gfc_code * code)
831 {
832 gfc_se se;
833 tree dovar;
834 tree from;
835 tree to;
836 tree step;
837 tree empty;
838 tree countm1;
839 tree type;
840 tree utype;
841 tree cond;
842 tree cycle_label;
843 tree exit_label;
844 tree tmp;
845 tree pos_step;
846 stmtblock_t block;
847 stmtblock_t body;
848
849 gfc_start_block (&block);
850
851 /* Evaluate all the expressions in the iterator. */
852 gfc_init_se (&se, NULL);
853 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
854 gfc_add_block_to_block (&block, &se.pre);
855 dovar = se.expr;
856 type = TREE_TYPE (dovar);
857
858 gfc_init_se (&se, NULL);
859 gfc_conv_expr_val (&se, code->ext.iterator->start);
860 gfc_add_block_to_block (&block, &se.pre);
861 from = gfc_evaluate_now (se.expr, &block);
862
863 gfc_init_se (&se, NULL);
864 gfc_conv_expr_val (&se, code->ext.iterator->end);
865 gfc_add_block_to_block (&block, &se.pre);
866 to = gfc_evaluate_now (se.expr, &block);
867
868 gfc_init_se (&se, NULL);
869 gfc_conv_expr_val (&se, code->ext.iterator->step);
870 gfc_add_block_to_block (&block, &se.pre);
871 step = gfc_evaluate_now (se.expr, &block);
872
873 /* Special case simple loops. */
874 if (TREE_CODE (type) == INTEGER_TYPE
875 && (integer_onep (step)
876 || tree_int_cst_equal (step, integer_minus_one_node)))
877 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
878
879 /* We need a special check for empty loops:
880 empty = (step > 0 ? to < from : to > from); */
881 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
882 fold_convert (type, integer_zero_node));
883 empty = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
884 fold_build2 (LT_EXPR, boolean_type_node, to, from),
885 fold_build2 (GT_EXPR, boolean_type_node, to, from));
886
887 /* Initialize loop count. This code is executed before we enter the
888 loop body. We generate: countm1 = abs(to - from) / abs(step). */
889 if (TREE_CODE (type) == INTEGER_TYPE)
890 {
891 tree ustep;
892
893 utype = unsigned_type_for (type);
894
895 /* tmp = abs(to - from) / abs(step) */
896 ustep = fold_convert (utype, fold_build1 (ABS_EXPR, type, step));
897 tmp = fold_build3 (COND_EXPR, type, pos_step,
898 fold_build2 (MINUS_EXPR, type, to, from),
899 fold_build2 (MINUS_EXPR, type, from, to));
900 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, fold_convert (utype, tmp),
901 ustep);
902 }
903 else
904 {
905 /* TODO: We could use the same width as the real type.
906 This would probably cause more problems than it solves
907 when we implement "long double" types. */
908 utype = unsigned_type_for (gfc_array_index_type);
909 tmp = fold_build2 (MINUS_EXPR, type, to, from);
910 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
911 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
912 }
913 countm1 = gfc_create_var (utype, "countm1");
914 gfc_add_modify_expr (&block, countm1, tmp);
915
916 /* Cycle and exit statements are implemented with gotos. */
917 cycle_label = gfc_build_label_decl (NULL_TREE);
918 exit_label = gfc_build_label_decl (NULL_TREE);
919 TREE_USED (exit_label) = 1;
920
921 /* Initialize the DO variable: dovar = from. */
922 gfc_add_modify_expr (&block, dovar, from);
923
924 /* If the loop is empty, go directly to the exit label. */
925 tmp = fold_build3 (COND_EXPR, void_type_node, empty,
926 build1_v (GOTO_EXPR, exit_label), build_empty_stmt ());
927 gfc_add_expr_to_block (&block, tmp);
928
929 /* Loop body. */
930 gfc_start_block (&body);
931
932 /* Put these labels where they can be found later. We put the
933 labels in a TREE_LIST node (because TREE_CHAIN is already
934 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
935 label in TREE_VALUE (backend_decl). */
936
937 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
938
939 /* Main loop body. */
940 tmp = gfc_trans_code (code->block->next);
941 gfc_add_expr_to_block (&body, tmp);
942
943 /* Label for cycle statements (if needed). */
944 if (TREE_USED (cycle_label))
945 {
946 tmp = build1_v (LABEL_EXPR, cycle_label);
947 gfc_add_expr_to_block (&body, tmp);
948 }
949
950 /* Increment the loop variable. */
951 tmp = build2 (PLUS_EXPR, type, dovar, step);
952 gfc_add_modify_expr (&body, dovar, tmp);
953
954 /* End with the loop condition. Loop until countm1 == 0. */
955 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
956 build_int_cst (utype, 0));
957 tmp = build1_v (GOTO_EXPR, exit_label);
958 tmp = fold_build3 (COND_EXPR, void_type_node,
959 cond, tmp, build_empty_stmt ());
960 gfc_add_expr_to_block (&body, tmp);
961
962 /* Decrement the loop count. */
963 tmp = build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
964 gfc_add_modify_expr (&body, countm1, tmp);
965
966 /* End of loop body. */
967 tmp = gfc_finish_block (&body);
968
969 /* The for loop itself. */
970 tmp = build1_v (LOOP_EXPR, tmp);
971 gfc_add_expr_to_block (&block, tmp);
972
973 /* Add the exit label. */
974 tmp = build1_v (LABEL_EXPR, exit_label);
975 gfc_add_expr_to_block (&block, tmp);
976
977 return gfc_finish_block (&block);
978 }
979
980
981 /* Translate the DO WHILE construct.
982
983 We translate
984
985 DO WHILE (cond)
986 body
987 END DO
988
989 to:
990
991 for ( ; ; )
992 {
993 pre_cond;
994 if (! cond) goto exit_label;
995 body;
996 cycle_label:
997 }
998 exit_label:
999
1000 Because the evaluation of the exit condition `cond' may have side
1001 effects, we can't do much for empty loop bodies. The backend optimizers
1002 should be smart enough to eliminate any dead loops. */
1003
1004 tree
1005 gfc_trans_do_while (gfc_code * code)
1006 {
1007 gfc_se cond;
1008 tree tmp;
1009 tree cycle_label;
1010 tree exit_label;
1011 stmtblock_t block;
1012
1013 /* Everything we build here is part of the loop body. */
1014 gfc_start_block (&block);
1015
1016 /* Cycle and exit statements are implemented with gotos. */
1017 cycle_label = gfc_build_label_decl (NULL_TREE);
1018 exit_label = gfc_build_label_decl (NULL_TREE);
1019
1020 /* Put the labels where they can be found later. See gfc_trans_do(). */
1021 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1022
1023 /* Create a GIMPLE version of the exit condition. */
1024 gfc_init_se (&cond, NULL);
1025 gfc_conv_expr_val (&cond, code->expr);
1026 gfc_add_block_to_block (&block, &cond.pre);
1027 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1028
1029 /* Build "IF (! cond) GOTO exit_label". */
1030 tmp = build1_v (GOTO_EXPR, exit_label);
1031 TREE_USED (exit_label) = 1;
1032 tmp = fold_build3 (COND_EXPR, void_type_node,
1033 cond.expr, tmp, build_empty_stmt ());
1034 gfc_add_expr_to_block (&block, tmp);
1035
1036 /* The main body of the loop. */
1037 tmp = gfc_trans_code (code->block->next);
1038 gfc_add_expr_to_block (&block, tmp);
1039
1040 /* Label for cycle statements (if needed). */
1041 if (TREE_USED (cycle_label))
1042 {
1043 tmp = build1_v (LABEL_EXPR, cycle_label);
1044 gfc_add_expr_to_block (&block, tmp);
1045 }
1046
1047 /* End of loop body. */
1048 tmp = gfc_finish_block (&block);
1049
1050 gfc_init_block (&block);
1051 /* Build the loop. */
1052 tmp = build1_v (LOOP_EXPR, tmp);
1053 gfc_add_expr_to_block (&block, tmp);
1054
1055 /* Add the exit label. */
1056 tmp = build1_v (LABEL_EXPR, exit_label);
1057 gfc_add_expr_to_block (&block, tmp);
1058
1059 return gfc_finish_block (&block);
1060 }
1061
1062
1063 /* Translate the SELECT CASE construct for INTEGER case expressions,
1064 without killing all potential optimizations. The problem is that
1065 Fortran allows unbounded cases, but the back-end does not, so we
1066 need to intercept those before we enter the equivalent SWITCH_EXPR
1067 we can build.
1068
1069 For example, we translate this,
1070
1071 SELECT CASE (expr)
1072 CASE (:100,101,105:115)
1073 block_1
1074 CASE (190:199,200:)
1075 block_2
1076 CASE (300)
1077 block_3
1078 CASE DEFAULT
1079 block_4
1080 END SELECT
1081
1082 to the GENERIC equivalent,
1083
1084 switch (expr)
1085 {
1086 case (minimum value for typeof(expr)) ... 100:
1087 case 101:
1088 case 105 ... 115:
1089 block_1;
1090 goto end_label;
1091
1092 case 200 ... (maximum value for typeof(expr)):
1093 case 190 ... 199:
1094 block_2;
1095 goto end_label;
1096
1097 case 300:
1098 block_3;
1099 goto end_label;
1100
1101 default:
1102 block_4;
1103 goto end_label;
1104 }
1105
1106 end_label: */
1107
1108 static tree
1109 gfc_trans_integer_select (gfc_code * code)
1110 {
1111 gfc_code *c;
1112 gfc_case *cp;
1113 tree end_label;
1114 tree tmp;
1115 gfc_se se;
1116 stmtblock_t block;
1117 stmtblock_t body;
1118
1119 gfc_start_block (&block);
1120
1121 /* Calculate the switch expression. */
1122 gfc_init_se (&se, NULL);
1123 gfc_conv_expr_val (&se, code->expr);
1124 gfc_add_block_to_block (&block, &se.pre);
1125
1126 end_label = gfc_build_label_decl (NULL_TREE);
1127
1128 gfc_init_block (&body);
1129
1130 for (c = code->block; c; c = c->block)
1131 {
1132 for (cp = c->ext.case_list; cp; cp = cp->next)
1133 {
1134 tree low, high;
1135 tree label;
1136
1137 /* Assume it's the default case. */
1138 low = high = NULL_TREE;
1139
1140 if (cp->low)
1141 {
1142 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1143 cp->low->ts.kind);
1144
1145 /* If there's only a lower bound, set the high bound to the
1146 maximum value of the case expression. */
1147 if (!cp->high)
1148 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1149 }
1150
1151 if (cp->high)
1152 {
1153 /* Three cases are possible here:
1154
1155 1) There is no lower bound, e.g. CASE (:N).
1156 2) There is a lower bound .NE. high bound, that is
1157 a case range, e.g. CASE (N:M) where M>N (we make
1158 sure that M>N during type resolution).
1159 3) There is a lower bound, and it has the same value
1160 as the high bound, e.g. CASE (N:N). This is our
1161 internal representation of CASE(N).
1162
1163 In the first and second case, we need to set a value for
1164 high. In the third case, we don't because the GCC middle
1165 end represents a single case value by just letting high be
1166 a NULL_TREE. We can't do that because we need to be able
1167 to represent unbounded cases. */
1168
1169 if (!cp->low
1170 || (cp->low
1171 && mpz_cmp (cp->low->value.integer,
1172 cp->high->value.integer) != 0))
1173 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1174 cp->high->ts.kind);
1175
1176 /* Unbounded case. */
1177 if (!cp->low)
1178 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1179 }
1180
1181 /* Build a label. */
1182 label = gfc_build_label_decl (NULL_TREE);
1183
1184 /* Add this case label.
1185 The extra 'label' parameter makes it match the GCC back end. */
1186 tmp = build3 (CASE_LABEL_EXPR, void_type_node, low, high, label);
1187 gfc_add_expr_to_block (&body, tmp);
1188 }
1189
1190 /* Add the statements for this case. */
1191 tmp = gfc_trans_code (c->next);
1192 gfc_add_expr_to_block (&body, tmp);
1193
1194 /* Break to the end of the construct. */
1195 tmp = build1_v (GOTO_EXPR, end_label);
1196 gfc_add_expr_to_block (&body, tmp);
1197 }
1198
1199 tmp = gfc_finish_block (&body);
1200 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1201 gfc_add_expr_to_block (&block, tmp);
1202
1203 tmp = build1_v (LABEL_EXPR, end_label);
1204 gfc_add_expr_to_block (&block, tmp);
1205
1206 return gfc_finish_block (&block);
1207 }
1208
1209
1210 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1211
1212 There are only two cases possible here, even though the standard
1213 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1214 .FALSE., and DEFAULT.
1215
1216 We never generate more than two blocks here. Instead, we always
1217 try to eliminate the DEFAULT case. This way, we can translate this
1218 kind of SELECT construct to a simple
1219
1220 if {} else {};
1221
1222 expression in GENERIC. */
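/* E.g. a construct such as (illustrative only)

       SELECT CASE (flag)
       CASE (.TRUE.)
         block_t
       CASE DEFAULT
         block_d
       END SELECT

   is emitted as a single COND_EXPR, with the DEFAULT block serving as
   the missing .FALSE. branch.  */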
1223
1224 static tree
1225 gfc_trans_logical_select (gfc_code * code)
1226 {
1227 gfc_code *c;
1228 gfc_code *t, *f, *d;
1229 gfc_case *cp;
1230 gfc_se se;
1231 stmtblock_t block;
1232
1233 /* Assume we don't have any cases at all. */
1234 t = f = d = NULL;
1235
1236 /* Now see which ones we actually do have. We can have at most two
1237 cases in a single case list: one for .TRUE. and one for .FALSE.
1238 The default case is always separate. If the cases for .TRUE. and
1239 .FALSE. are in the same case list, the block for that case list is
1240 always executed, and we don't generate a COND_EXPR. */
1241 for (c = code->block; c; c = c->block)
1242 {
1243 for (cp = c->ext.case_list; cp; cp = cp->next)
1244 {
1245 if (cp->low)
1246 {
1247 if (cp->low->value.logical == 0) /* .FALSE. */
1248 f = c;
1249 else /* if (cp->low->value.logical != 0), thus .TRUE. */
1250 t = c;
1251 }
1252 else
1253 d = c;
1254 }
1255 }
1256
1257 /* Start a new block. */
1258 gfc_start_block (&block);
1259
1260 /* Calculate the switch expression. We always need to do this
1261 because it may have side effects. */
1262 gfc_init_se (&se, NULL);
1263 gfc_conv_expr_val (&se, code->expr);
1264 gfc_add_block_to_block (&block, &se.pre);
1265
1266 if (t == f && t != NULL)
1267 {
1268 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1269 translate the code for these cases, append it to the current
1270 block. */
1271 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1272 }
1273 else
1274 {
1275 tree true_tree, false_tree, stmt;
1276
1277 true_tree = build_empty_stmt ();
1278 false_tree = build_empty_stmt ();
1279
1280 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1281 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1282 make the missing case the default case. */
1283 if (t != NULL && f != NULL)
1284 d = NULL;
1285 else if (d != NULL)
1286 {
1287 if (t == NULL)
1288 t = d;
1289 else
1290 f = d;
1291 }
1292
1293 /* Translate the code for each of these blocks, and append it to
1294 the current block. */
1295 if (t != NULL)
1296 true_tree = gfc_trans_code (t->next);
1297
1298 if (f != NULL)
1299 false_tree = gfc_trans_code (f->next);
1300
1301 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1302 true_tree, false_tree);
1303 gfc_add_expr_to_block (&block, stmt);
1304 }
1305
1306 return gfc_finish_block (&block);
1307 }
1308
1309
1310 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1311 Instead of generating compares and jumps, it is far simpler to
1312 generate a data structure describing the cases in order and call a
1313 library subroutine that locates the right case.
1314 This is particularly true because this is the only case where we
1315 might have to dispose of a temporary.
1316 The library subroutine returns the index of the matching case, which
1317 is then used to select the branch to be taken. */
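/* Concretely (a sketch of the mechanism used below), each case becomes
   one _jump_struct entry holding the low and high strings with their
   lengths and an integer target; the whole table, the number of entries
   and the selector string are passed to the routine bound to
   gfor_fndecl_select_string, and its result selects the branch in a
   SWITCH_EXPR.  */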
1318
1319 static tree
1320 gfc_trans_character_select (gfc_code *code)
1321 {
1322 tree init, node, end_label, tmp, type, case_num, label;
1323 stmtblock_t block, body;
1324 gfc_case *cp, *d;
1325 gfc_code *c;
1326 gfc_se se;
1327 int n;
1328
1329 static tree select_struct;
1330 static tree ss_string1, ss_string1_len;
1331 static tree ss_string2, ss_string2_len;
1332 static tree ss_target;
1333
1334 if (select_struct == NULL)
1335 {
1336 tree gfc_int4_type_node = gfc_get_int_type (4);
1337
1338 select_struct = make_node (RECORD_TYPE);
1339 TYPE_NAME (select_struct) = get_identifier ("_jump_struct");
1340
1341 #undef ADD_FIELD
1342 #define ADD_FIELD(NAME, TYPE) \
1343 ss_##NAME = gfc_add_field_to_struct \
1344 (&(TYPE_FIELDS (select_struct)), select_struct, \
1345 get_identifier (stringize(NAME)), TYPE)
1346
1347 ADD_FIELD (string1, pchar_type_node);
1348 ADD_FIELD (string1_len, gfc_int4_type_node);
1349
1350 ADD_FIELD (string2, pchar_type_node);
1351 ADD_FIELD (string2_len, gfc_int4_type_node);
1352
1353 ADD_FIELD (target, integer_type_node);
1354 #undef ADD_FIELD
1355
1356 gfc_finish_type (select_struct);
1357 }
1358
1359 cp = code->block->ext.case_list;
1360 while (cp->left != NULL)
1361 cp = cp->left;
1362
1363 n = 0;
1364 for (d = cp; d; d = d->right)
1365 d->n = n++;
1366
1367 end_label = gfc_build_label_decl (NULL_TREE);
1368
1369 /* Generate the body */
1370 gfc_start_block (&block);
1371 gfc_init_block (&body);
1372
1373 for (c = code->block; c; c = c->block)
1374 {
1375 for (d = c->ext.case_list; d; d = d->next)
1376 {
1377 label = gfc_build_label_decl (NULL_TREE);
1378 tmp = build3 (CASE_LABEL_EXPR, void_type_node,
1379 build_int_cst (NULL_TREE, d->n),
1380 build_int_cst (NULL_TREE, d->n), label);
1381 gfc_add_expr_to_block (&body, tmp);
1382 }
1383
1384 tmp = gfc_trans_code (c->next);
1385 gfc_add_expr_to_block (&body, tmp);
1386
1387 tmp = build1_v (GOTO_EXPR, end_label);
1388 gfc_add_expr_to_block (&body, tmp);
1389 }
1390
1391 /* Generate the structure describing the branches */
1392 init = NULL_TREE;
1393
1394 for(d = cp; d; d = d->right)
1395 {
1396 node = NULL_TREE;
1397
1398 gfc_init_se (&se, NULL);
1399
1400 if (d->low == NULL)
1401 {
1402 node = tree_cons (ss_string1, null_pointer_node, node);
1403 node = tree_cons (ss_string1_len, integer_zero_node, node);
1404 }
1405 else
1406 {
1407 gfc_conv_expr_reference (&se, d->low);
1408
1409 node = tree_cons (ss_string1, se.expr, node);
1410 node = tree_cons (ss_string1_len, se.string_length, node);
1411 }
1412
1413 if (d->high == NULL)
1414 {
1415 node = tree_cons (ss_string2, null_pointer_node, node);
1416 node = tree_cons (ss_string2_len, integer_zero_node, node);
1417 }
1418 else
1419 {
1420 gfc_init_se (&se, NULL);
1421 gfc_conv_expr_reference (&se, d->high);
1422
1423 node = tree_cons (ss_string2, se.expr, node);
1424 node = tree_cons (ss_string2_len, se.string_length, node);
1425 }
1426
1427 node = tree_cons (ss_target, build_int_cst (integer_type_node, d->n),
1428 node);
1429
1430 tmp = build_constructor_from_list (select_struct, nreverse (node));
1431 init = tree_cons (NULL_TREE, tmp, init);
1432 }
1433
1434 type = build_array_type (select_struct, build_index_type
1435 (build_int_cst (NULL_TREE, n - 1)));
1436
1437 init = build_constructor_from_list (type, nreverse(init));
1438 TREE_CONSTANT (init) = 1;
1439 TREE_INVARIANT (init) = 1;
1440 TREE_STATIC (init) = 1;
1441 /* Create a static variable to hold the jump table. */
1442 tmp = gfc_create_var (type, "jumptable");
1443 TREE_CONSTANT (tmp) = 1;
1444 TREE_INVARIANT (tmp) = 1;
1445 TREE_STATIC (tmp) = 1;
1446 TREE_READONLY (tmp) = 1;
1447 DECL_INITIAL (tmp) = init;
1448 init = tmp;
1449
1450 /* Build the library call */
1451 init = gfc_build_addr_expr (pvoid_type_node, init);
1452
1453 gfc_init_se (&se, NULL);
1454 gfc_conv_expr_reference (&se, code->expr);
1455
1456 gfc_add_block_to_block (&block, &se.pre);
1457
1458 tmp = build_call_expr (gfor_fndecl_select_string, 4, init,
1459 build_int_cst (NULL_TREE, n), se.expr,
1460 se.string_length);
1461 case_num = gfc_create_var (integer_type_node, "case_num");
1462 gfc_add_modify_expr (&block, case_num, tmp);
1463
1464 gfc_add_block_to_block (&block, &se.post);
1465
1466 tmp = gfc_finish_block (&body);
1467 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1468 gfc_add_expr_to_block (&block, tmp);
1469
1470 tmp = build1_v (LABEL_EXPR, end_label);
1471 gfc_add_expr_to_block (&block, tmp);
1472
1473 return gfc_finish_block (&block);
1474 }
1475
1476
1477 /* Translate the three variants of the SELECT CASE construct.
1478
1479 SELECT CASEs with INTEGER case expressions can be translated to an
1480 equivalent GENERIC switch statement, and for LOGICAL case
1481 expressions we build one or two if-else compares.
1482
1483 SELECT CASEs with CHARACTER case expressions are a whole different
1484 story, because they don't exist in GENERIC. So we sort them and
1485 do a binary search at runtime.
1486
1487 Fortran has no BREAK statement, and it does not allow jumps from
1488 one case block to another. That makes things a lot easier for
1489 the optimizers. */
1490
1491 tree
1492 gfc_trans_select (gfc_code * code)
1493 {
1494 gcc_assert (code && code->expr);
1495
1496 /* Empty SELECT constructs are legal. */
1497 if (code->block == NULL)
1498 return build_empty_stmt ();
1499
1500 /* Select the correct translation function. */
1501 switch (code->expr->ts.type)
1502 {
1503 case BT_LOGICAL: return gfc_trans_logical_select (code);
1504 case BT_INTEGER: return gfc_trans_integer_select (code);
1505 case BT_CHARACTER: return gfc_trans_character_select (code);
1506 default:
1507 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1508 /* Not reached */
1509 }
1510 }
1511
1512
1513 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1514 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1515 indicates whether we should generate code to test the FORALL's mask
1516 array. OUTER is the loop header to be used for initializing mask
1517 indices.
1518
1519 The generated loop format is:
1520 count = (end - start + step) / step
1521 loopvar = start
1522 while (1)
1523 {
1524 if (count <= 0)
1525 goto end_of_loop
1526 <body>
1527 loopvar += step
1528 count--
1529 }
1530 end_of_loop: */
1531
1532 static tree
1533 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1534 int mask_flag, stmtblock_t *outer)
1535 {
1536 int n, nvar;
1537 tree tmp;
1538 tree cond;
1539 stmtblock_t block;
1540 tree exit_label;
1541 tree count;
1542 tree var, start, end, step;
1543 iter_info *iter;
1544
1545 /* Initialize the mask index outside the FORALL nest. */
1546 if (mask_flag && forall_tmp->mask)
1547 gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
1548
1549 iter = forall_tmp->this_loop;
1550 nvar = forall_tmp->nvar;
1551 for (n = 0; n < nvar; n++)
1552 {
1553 var = iter->var;
1554 start = iter->start;
1555 end = iter->end;
1556 step = iter->step;
1557
1558 exit_label = gfc_build_label_decl (NULL_TREE);
1559 TREE_USED (exit_label) = 1;
1560
1561 /* The loop counter. */
1562 count = gfc_create_var (TREE_TYPE (var), "count");
1563
1564 /* The body of the loop. */
1565 gfc_init_block (&block);
1566
1567 /* The exit condition. */
1568 cond = fold_build2 (LE_EXPR, boolean_type_node,
1569 count, build_int_cst (TREE_TYPE (count), 0));
1570 tmp = build1_v (GOTO_EXPR, exit_label);
1571 tmp = fold_build3 (COND_EXPR, void_type_node,
1572 cond, tmp, build_empty_stmt ());
1573 gfc_add_expr_to_block (&block, tmp);
1574
1575 /* The main loop body. */
1576 gfc_add_expr_to_block (&block, body);
1577
1578 /* Increment the loop variable. */
1579 tmp = build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1580 gfc_add_modify_expr (&block, var, tmp);
1581
1582 /* Advance to the next mask element. Only do this for the
1583 innermost loop. */
1584 if (n == 0 && mask_flag && forall_tmp->mask)
1585 {
1586 tree maskindex = forall_tmp->maskindex;
1587 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
1588 maskindex, gfc_index_one_node);
1589 gfc_add_modify_expr (&block, maskindex, tmp);
1590 }
1591
1592 /* Decrement the loop counter. */
1593 tmp = build2 (MINUS_EXPR, TREE_TYPE (var), count,
1594 build_int_cst (TREE_TYPE (var), 1));
1595 gfc_add_modify_expr (&block, count, tmp);
1596
1597 body = gfc_finish_block (&block);
1598
1599 /* Loop var initialization. */
1600 gfc_init_block (&block);
1601 gfc_add_modify_expr (&block, var, start);
1602
1603
1604 /* Initialize the loop counter. */
1605 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1606 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1607 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1608 gfc_add_modify_expr (&block, count, tmp);
1609
1610 /* The loop expression. */
1611 tmp = build1_v (LOOP_EXPR, body);
1612 gfc_add_expr_to_block (&block, tmp);
1613
1614 /* The exit label. */
1615 tmp = build1_v (LABEL_EXPR, exit_label);
1616 gfc_add_expr_to_block (&block, tmp);
1617
1618 body = gfc_finish_block (&block);
1619 iter = iter->next;
1620 }
1621 return body;
1622 }
1623
1624
1625 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1626 is nonzero, the body is controlled by all masks in the forall nest.
1627 Otherwise, the innermost loop is not controlled by its mask. This
1628 is used for initializing that mask. */
1629
1630 static tree
1631 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1632 int mask_flag)
1633 {
1634 tree tmp;
1635 stmtblock_t header;
1636 forall_info *forall_tmp;
1637 tree mask, maskindex;
1638
1639 gfc_start_block (&header);
1640
1641 forall_tmp = nested_forall_info;
1642 while (forall_tmp != NULL)
1643 {
1644 /* Generate body with masks' control. */
1645 if (mask_flag)
1646 {
1647 mask = forall_tmp->mask;
1648 maskindex = forall_tmp->maskindex;
1649
1650 /* If a mask was specified make the assignment conditional. */
1651 if (mask)
1652 {
1653 tmp = gfc_build_array_ref (mask, maskindex);
1654 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1655 }
1656 }
1657 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1658 forall_tmp = forall_tmp->prev_nest;
1659 mask_flag = 1;
1660 }
1661
1662 gfc_add_expr_to_block (&header, body);
1663 return gfc_finish_block (&header);
1664 }
1665
1666
1667 /* Allocate data for holding a temporary array. Returns either a local
1668 temporary array or a pointer variable. */
1669
1670 static tree
1671 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
1672 tree elem_type)
1673 {
1674 tree tmpvar;
1675 tree type;
1676 tree tmp;
1677
1678 if (INTEGER_CST_P (size))
1679 {
1680 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
1681 gfc_index_one_node);
1682 }
1683 else
1684 tmp = NULL_TREE;
1685
1686 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
1687 type = build_array_type (elem_type, type);
1688 if (gfc_can_put_var_on_stack (bytesize))
1689 {
1690 gcc_assert (INTEGER_CST_P (size));
1691 tmpvar = gfc_create_var (type, "temp");
1692 *pdata = NULL_TREE;
1693 }
1694 else
1695 {
1696 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
1697 *pdata = convert (pvoid_type_node, tmpvar);
1698
1699 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
1700 gfc_add_modify_expr (pblock, tmpvar, tmp);
1701 }
1702 return tmpvar;
1703 }
1704
1705
1706 /* Generate code to copy the temporary to the actual lhs. */
1707
1708 static tree
1709 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
1710 tree count1, tree wheremask, bool invert)
1711 {
1712 gfc_ss *lss;
1713 gfc_se lse, rse;
1714 stmtblock_t block, body;
1715 gfc_loopinfo loop1;
1716 tree tmp;
1717 tree wheremaskexpr;
1718
1719 /* Walk the lhs. */
1720 lss = gfc_walk_expr (expr);
1721
1722 if (lss == gfc_ss_terminator)
1723 {
1724 gfc_start_block (&block);
1725
1726 gfc_init_se (&lse, NULL);
1727
1728 /* Translate the expression. */
1729 gfc_conv_expr (&lse, expr);
1730
1731 /* Form the expression for the temporary. */
1732 tmp = gfc_build_array_ref (tmp1, count1);
1733
1734 /* Use the scalar assignment as is. */
1735 gfc_add_block_to_block (&block, &lse.pre);
1736 gfc_add_modify_expr (&block, lse.expr, tmp);
1737 gfc_add_block_to_block (&block, &lse.post);
1738
1739 /* Increment the count1. */
1740 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1741 gfc_index_one_node);
1742 gfc_add_modify_expr (&block, count1, tmp);
1743
1744 tmp = gfc_finish_block (&block);
1745 }
1746 else
1747 {
1748 gfc_start_block (&block);
1749
1750 gfc_init_loopinfo (&loop1);
1751 gfc_init_se (&rse, NULL);
1752 gfc_init_se (&lse, NULL);
1753
1754 /* Associate the lss with the loop. */
1755 gfc_add_ss_to_loop (&loop1, lss);
1756
1757 /* Calculate the bounds of the scalarization. */
1758 gfc_conv_ss_startstride (&loop1);
1759 /* Setup the scalarizing loops. */
1760 gfc_conv_loop_setup (&loop1);
1761
1762 gfc_mark_ss_chain_used (lss, 1);
1763
1764 /* Start the scalarized loop body. */
1765 gfc_start_scalarized_body (&loop1, &body);
1766
1767 /* Setup the gfc_se structures. */
1768 gfc_copy_loopinfo_to_se (&lse, &loop1);
1769 lse.ss = lss;
1770
1771 /* Form the expression of the temporary. */
1772 if (lss != gfc_ss_terminator)
1773 rse.expr = gfc_build_array_ref (tmp1, count1);
1774 /* Translate expr. */
1775 gfc_conv_expr (&lse, expr);
1776
1777 /* Use the scalar assignment. */
1778 rse.string_length = lse.string_length;
1779 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
1780
1781 /* Form the mask expression according to the mask tree list. */
1782 if (wheremask)
1783 {
1784 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1785 if (invert)
1786 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1787 TREE_TYPE (wheremaskexpr),
1788 wheremaskexpr);
1789 tmp = fold_build3 (COND_EXPR, void_type_node,
1790 wheremaskexpr, tmp, build_empty_stmt ());
1791 }
1792
1793 gfc_add_expr_to_block (&body, tmp);
1794
1795 /* Increment count1. */
1796 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1797 count1, gfc_index_one_node);
1798 gfc_add_modify_expr (&body, count1, tmp);
1799
1800 /* Increment count3. */
1801 if (count3)
1802 {
1803 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1804 count3, gfc_index_one_node);
1805 gfc_add_modify_expr (&body, count3, tmp);
1806 }
1807
1808 /* Generate the copying loops. */
1809 gfc_trans_scalarizing_loops (&loop1, &body);
1810 gfc_add_block_to_block (&block, &loop1.pre);
1811 gfc_add_block_to_block (&block, &loop1.post);
1812 gfc_cleanup_loop (&loop1);
1813
1814 tmp = gfc_finish_block (&block);
1815 }
1816 return tmp;
1817 }
1818
1819
1820 /* Generate code to copy the rhs to the temporary. TMP1 is the address of
1821 the temporary; LSS and RSS are formed in compute_inner_temp_size ()
1822 and should not be freed. WHEREMASK is the conditional execution mask
1823 whose sense may be inverted by INVERT. */
1824
1825 static tree
1826 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
1827 tree count1, gfc_ss *lss, gfc_ss *rss,
1828 tree wheremask, bool invert)
1829 {
1830 stmtblock_t block, body1;
1831 gfc_loopinfo loop;
1832 gfc_se lse;
1833 gfc_se rse;
1834 tree tmp;
1835 tree wheremaskexpr;
1836
1837 gfc_start_block (&block);
1838
1839 gfc_init_se (&rse, NULL);
1840 gfc_init_se (&lse, NULL);
1841
1842 if (lss == gfc_ss_terminator)
1843 {
1844 gfc_init_block (&body1);
1845 gfc_conv_expr (&rse, expr2);
1846 lse.expr = gfc_build_array_ref (tmp1, count1);
1847 }
1848 else
1849 {
1850 /* Initialize the loop. */
1851 gfc_init_loopinfo (&loop);
1852
1853 /* We may need LSS to determine the shape of the expression. */
1854 gfc_add_ss_to_loop (&loop, lss);
1855 gfc_add_ss_to_loop (&loop, rss);
1856
1857 gfc_conv_ss_startstride (&loop);
1858 gfc_conv_loop_setup (&loop);
1859
1860 gfc_mark_ss_chain_used (rss, 1);
1861 /* Start the loop body. */
1862 gfc_start_scalarized_body (&loop, &body1);
1863
1864 /* Translate the expression. */
1865 gfc_copy_loopinfo_to_se (&rse, &loop);
1866 rse.ss = rss;
1867 gfc_conv_expr (&rse, expr2);
1868
1869 /* Form the expression of the temporary. */
1870 lse.expr = gfc_build_array_ref (tmp1, count1);
1871 }
1872
1873 /* Use the scalar assignment. */
1874 lse.string_length = rse.string_length;
1875 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
1876 expr2->expr_type == EXPR_VARIABLE);
1877
1878 /* Form the mask expression according to the mask tree list. */
1879 if (wheremask)
1880 {
1881 wheremaskexpr = gfc_build_array_ref (wheremask, count3);
1882 if (invert)
1883 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
1884 TREE_TYPE (wheremaskexpr),
1885 wheremaskexpr);
1886 tmp = fold_build3 (COND_EXPR, void_type_node,
1887 wheremaskexpr, tmp, build_empty_stmt ());
1888 }
1889
1890 gfc_add_expr_to_block (&body1, tmp);
1891
1892 if (lss == gfc_ss_terminator)
1893 {
1894 gfc_add_block_to_block (&block, &body1);
1895
1896 /* Increment count1. */
1897 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
1898 gfc_index_one_node);
1899 gfc_add_modify_expr (&block, count1, tmp);
1900 }
1901 else
1902 {
1903 /* Increment count1. */
1904 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1905 count1, gfc_index_one_node);
1906 gfc_add_modify_expr (&body1, count1, tmp);
1907
1908 /* Increment count3. */
1909 if (count3)
1910 {
1911 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1912 count3, gfc_index_one_node);
1913 gfc_add_modify_expr (&body1, count3, tmp);
1914 }
1915
1916 /* Generate the copying loops. */
1917 gfc_trans_scalarizing_loops (&loop, &body1);
1918
1919 gfc_add_block_to_block (&block, &loop.pre);
1920 gfc_add_block_to_block (&block, &loop.post);
1921
1922 gfc_cleanup_loop (&loop);
1923 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
1924 as tree nodes in SS may not be valid in different scope. */
1925 }
1926
1927 tmp = gfc_finish_block (&block);
1928 return tmp;
1929 }
1930
1931
1932 /* Calculate the size of the temporary needed in the assignment inside forall.
1933 LSS and RSS are filled in this function. */
1934
1935 static tree
1936 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
1937 stmtblock_t * pblock,
1938 gfc_ss **lss, gfc_ss **rss)
1939 {
1940 gfc_loopinfo loop;
1941 tree size;
1942 int i;
1943 int save_flag;
1944 tree tmp;
1945
1946 *lss = gfc_walk_expr (expr1);
1947 *rss = NULL;
1948
1949 size = gfc_index_one_node;
1950 if (*lss != gfc_ss_terminator)
1951 {
1952 gfc_init_loopinfo (&loop);
1953
1954 /* Walk the RHS of the expression. */
1955 *rss = gfc_walk_expr (expr2);
1956 if (*rss == gfc_ss_terminator)
1957 {
1958 /* The rhs is scalar. Add a ss for the expression. */
1959 *rss = gfc_get_ss ();
1960 (*rss)->next = gfc_ss_terminator;
1961 (*rss)->type = GFC_SS_SCALAR;
1962 (*rss)->expr = expr2;
1963 }
1964
1965 /* Associate the SS with the loop. */
1966 gfc_add_ss_to_loop (&loop, *lss);
1967 /* We don't actually need to add the rhs at this point, but it might
1968 make guessing the loop bounds a bit easier. */
1969 gfc_add_ss_to_loop (&loop, *rss);
1970
1971 /* We only want the shape of the expression, not the rest of the junk
1972 generated by the scalarizer. */
1973 loop.array_parameter = 1;
1974
1975 /* Calculate the bounds of the scalarization. */
1976 save_flag = flag_bounds_check;
1977 flag_bounds_check = 0;
1978 gfc_conv_ss_startstride (&loop);
1979 flag_bounds_check = save_flag;
1980 gfc_conv_loop_setup (&loop);
1981
1982 /* Figure out how many elements we need. */
1983 for (i = 0; i < loop.dimen; i++)
1984 {
1985 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
1986 gfc_index_one_node, loop.from[i]);
1987 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1988 tmp, loop.to[i]);
1989 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
1990 }
1991 gfc_add_block_to_block (pblock, &loop.pre);
1992 size = gfc_evaluate_now (size, pblock);
1993 gfc_add_block_to_block (pblock, &loop.post);
1994
1995 /* TODO: write a function that cleans up a loopinfo without freeing
1996 the SS chains. Currently a NOP. */
1997 }
1998
1999 return size;
2000 }
2001
2002
2003 /* Calculate the overall iterator number of the nested forall construct.
2004 This routine actually calculates the number of times the body of the
2005 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2006 that by the expression INNER_SIZE. The BLOCK argument specifies the
2007 block in which to calculate the result, and the optional INNER_SIZE_BODY
2008 argument contains any statements that need to be executed (inside the loop)
2009 to initialize or calculate INNER_SIZE. */
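/* A rough sketch of what this produces (illustrative, not the exact tree
   dump): for FORALL (i=1:n, j=1:m) with a non-constant INNER_SIZE S the
   generated code is roughly

     num = 0;
     for (i = 1; i <= n; i++)
       for (j = 1; j <= m; j++)
         num = num + S;

   When INNER_SIZE is an INTEGER_CST, the innermost unmasked levels with
   constant sizes are folded into it, and if no levels remain the product
   is returned directly without a loop.  */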
2010
2011 static tree
2012 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2013 stmtblock_t *inner_size_body, stmtblock_t *block)
2014 {
2015 forall_info *forall_tmp = nested_forall_info;
2016 tree tmp, number;
2017 stmtblock_t body;
2018
2019 /* We can eliminate the innermost unconditional loops with constant
2020 array bounds. */
2021 if (INTEGER_CST_P (inner_size))
2022 {
2023 while (forall_tmp
2024 && !forall_tmp->mask
2025 && INTEGER_CST_P (forall_tmp->size))
2026 {
2027 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2028 inner_size, forall_tmp->size);
2029 forall_tmp = forall_tmp->prev_nest;
2030 }
2031
2032 /* If there are no loops left, we have our constant result. */
2033 if (!forall_tmp)
2034 return inner_size;
2035 }
2036
2037 /* Otherwise, create a temporary variable to compute the result. */
2038 number = gfc_create_var (gfc_array_index_type, "num");
2039 gfc_add_modify_expr (block, number, gfc_index_zero_node);
2040
2041 gfc_start_block (&body);
2042 if (inner_size_body)
2043 gfc_add_block_to_block (&body, inner_size_body);
2044 if (forall_tmp)
2045 tmp = build2 (PLUS_EXPR, gfc_array_index_type, number,
2046 inner_size);
2047 else
2048 tmp = inner_size;
2049 gfc_add_modify_expr (&body, number, tmp);
2050 tmp = gfc_finish_block (&body);
2051
2052 /* Generate loops. */
2053 if (forall_tmp != NULL)
2054 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2055
2056 gfc_add_expr_to_block (block, tmp);
2057
2058 return number;
2059 }
2060
2061
2062 /* Allocate a temporary for the forall construct. SIZE is the size of the
2063 temporary needed. PTEMP1 is returned so the space can be freed later. */
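/* E.g. for SIZE elements of a 4-byte type this requests SIZE * 4 bytes via
   gfc_do_allocate; when a heap allocation was made, the dereferenced
   pointer is returned and *PTEMP1 keeps the raw pointer for the later
   free.  Otherwise *PTEMP1 stays NULL.  */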
2064
2065 static tree
2066 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2067 tree * ptemp1)
2068 {
2069 tree bytesize;
2070 tree unit;
2071 tree tmp;
2072
2073 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2074 if (!integer_onep (unit))
2075 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2076 else
2077 bytesize = size;
2078
2079 *ptemp1 = NULL;
2080 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2081
2082 if (*ptemp1)
2083 tmp = build_fold_indirect_ref (tmp);
2084 return tmp;
2085 }
2086
2087
2088 /* Allocate a temporary for the forall construct according to the information
2089 in nested_forall_info. INNER_SIZE is the size of the temporary needed in the
2090 assignment inside forall. PTEMP1 is returned so the space can be freed later. */
2091
2092 static tree
2093 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2094 tree inner_size, stmtblock_t * inner_size_body,
2095 stmtblock_t * block, tree * ptemp1)
2096 {
2097 tree size;
2098
2099 /* Calculate the total size of the temporary needed in the forall construct. */
2100 size = compute_overall_iter_number (nested_forall_info, inner_size,
2101 inner_size_body, block);
2102
2103 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2104 }
2105
2106
2107 /* Handle assignments inside forall which need a temporary.
2108
2109 forall (i=start:end:stride; maskexpr)
2110 e<i> = f<i>
2111 end forall
2112 (where e,f<i> are arbitrary expressions possibly involving i
2113 and there is a dependency between e<i> and f<i>)
2114 Translates to:
2115 masktmp(:) = maskexpr(:)
2116
2117 maskindex = 0;
2118 count1 = 0;
2119 num = 0;
2120 for (i = start; i <= end; i += stride)
2121 num += SIZE (f<i>)
2122 count1 = 0;
2123 ALLOCATE (tmp(num))
2124 for (i = start; i <= end; i += stride)
2125 {
2126 if (masktmp[maskindex++])
2127 tmp[count1++] = f<i>
2128 }
2129 maskindex = 0;
2130 count1 = 0;
2131 for (i = start; i <= end; i += stride)
2132 {
2133 if (masktmp[maskindex++])
2134 e<i> = tmp[count1++]
2135 }
2136 DEALLOCATE (tmp)
2137 */
2138 static void
2139 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2140 tree wheremask, bool invert,
2141 forall_info * nested_forall_info,
2142 stmtblock_t * block)
2143 {
2144 tree type;
2145 tree inner_size;
2146 gfc_ss *lss, *rss;
2147 tree count, count1;
2148 tree tmp, tmp1;
2149 tree ptemp1;
2150 stmtblock_t inner_size_body;
2151
2152 /* Create vars. count1 is the current iterator number of the nested
2153 forall. */
2154 count1 = gfc_create_var (gfc_array_index_type, "count1");
2155
2156 /* Count is the wheremask index. */
2157 if (wheremask)
2158 {
2159 count = gfc_create_var (gfc_array_index_type, "count");
2160 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2161 }
2162 else
2163 count = NULL;
2164
2165 /* Initialize count1. */
2166 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2167
2168 /* Calculate the size of the temporary needed in the assignment. LSS and
2169 RSS are returned for use in generate_loop_for_rhs_to_temp(). */
2170 gfc_init_block (&inner_size_body);
2171 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2172 &lss, &rss);
2173
2174 /* The type of the LHS. Used in allocate_temp_for_forall_nest. */
2175 type = gfc_typenode_for_spec (&expr1->ts);
2176
2177 /* Allocate temporary for nested forall construct according to the
2178 information in nested_forall_info and inner_size. */
2179 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2180 &inner_size_body, block, &ptemp1);
2181
2182 /* Generate code to copy the rhs to the temporary. */
2183 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2184 wheremask, invert);
2185
2186 /* Generate body and loops according to the information in
2187 nested_forall_info. */
2188 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2189 gfc_add_expr_to_block (block, tmp);
2190
2191 /* Reset count1. */
2192 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
2193
2194 /* Reset count. */
2195 if (wheremask)
2196 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2197
2198 /* Generate code to copy the temporary to the lhs. */
2199 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2200 wheremask, invert);
2201
2202 /* Generate body and loops according to the information in
2203 nested_forall_info. */
2204 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2205 gfc_add_expr_to_block (block, tmp);
2206
2207 if (ptemp1)
2208 {
2209 /* Free the temporary. */
2210 tmp = gfc_call_free (ptemp1);
2211 gfc_add_expr_to_block (block, tmp);
2212 }
2213 }
2214
2215
2216 /* Translate a pointer assignment inside FORALL which needs a temporary. */
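/* Illustrative only: for something like

     FORALL (i=1:n)
       p(i)%ptr => targ(idx(i))
     END FORALL

   the pointer targets (or, for the array case handled below, their
   descriptors) are gathered into a temporary in a first pass over the
   FORALL iteration space and then assigned to the lhs pointers in a
   second pass, mirroring gfc_trans_assign_need_temp above.  */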
2217
2218 static void
2219 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2220 forall_info * nested_forall_info,
2221 stmtblock_t * block)
2222 {
2223 tree type;
2224 tree inner_size;
2225 gfc_ss *lss, *rss;
2226 gfc_se lse;
2227 gfc_se rse;
2228 gfc_ss_info *info;
2229 gfc_loopinfo loop;
2230 tree desc;
2231 tree parm;
2232 tree parmtype;
2233 stmtblock_t body;
2234 tree count;
2235 tree tmp, tmp1, ptemp1;
2236
2237 count = gfc_create_var (gfc_array_index_type, "count");
2238 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2239
2240 inner_size = integer_one_node;
2241 lss = gfc_walk_expr (expr1);
2242 rss = gfc_walk_expr (expr2);
2243 if (lss == gfc_ss_terminator)
2244 {
2245 type = gfc_typenode_for_spec (&expr1->ts);
2246 type = build_pointer_type (type);
2247
2248 /* Allocate temporary for nested forall construct according to the
2249 information in nested_forall_info and inner_size. */
2250 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2251 inner_size, NULL, block, &ptemp1);
2252 gfc_start_block (&body);
2253 gfc_init_se (&lse, NULL);
2254 lse.expr = gfc_build_array_ref (tmp1, count);
2255 gfc_init_se (&rse, NULL);
2256 rse.want_pointer = 1;
2257 gfc_conv_expr (&rse, expr2);
2258 gfc_add_block_to_block (&body, &rse.pre);
2259 gfc_add_modify_expr (&body, lse.expr,
2260 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2261 gfc_add_block_to_block (&body, &rse.post);
2262
2263 /* Increment count. */
2264 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2265 count, gfc_index_one_node);
2266 gfc_add_modify_expr (&body, count, tmp);
2267
2268 tmp = gfc_finish_block (&body);
2269
2270 /* Generate body and loops according to the information in
2271 nested_forall_info. */
2272 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2273 gfc_add_expr_to_block (block, tmp);
2274
2275 /* Reset count. */
2276 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2277
2278 gfc_start_block (&body);
2279 gfc_init_se (&lse, NULL);
2280 gfc_init_se (&rse, NULL);
2281 rse.expr = gfc_build_array_ref (tmp1, count);
2282 lse.want_pointer = 1;
2283 gfc_conv_expr (&lse, expr1);
2284 gfc_add_block_to_block (&body, &lse.pre);
2285 gfc_add_modify_expr (&body, lse.expr, rse.expr);
2286 gfc_add_block_to_block (&body, &lse.post);
2287 /* Increment count. */
2288 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2289 count, gfc_index_one_node);
2290 gfc_add_modify_expr (&body, count, tmp);
2291 tmp = gfc_finish_block (&body);
2292
2293 /* Generate body and loops according to the information in
2294 nested_forall_info. */
2295 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2296 gfc_add_expr_to_block (block, tmp);
2297 }
2298 else
2299 {
2300 gfc_init_loopinfo (&loop);
2301
2302 /* Associate the SS with the loop. */
2303 gfc_add_ss_to_loop (&loop, rss);
2304
2305 /* Setup the scalarizing loops and bounds. */
2306 gfc_conv_ss_startstride (&loop);
2307
2308 gfc_conv_loop_setup (&loop);
2309
2310 info = &rss->data.info;
2311 desc = info->descriptor;
2312
2313 /* Make a new descriptor. */
2314 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2315 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2316 loop.from, loop.to, 1);
2317
2318 /* Allocate temporary for nested forall construct. */
2319 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2320 inner_size, NULL, block, &ptemp1);
2321 gfc_start_block (&body);
2322 gfc_init_se (&lse, NULL);
2323 lse.expr = gfc_build_array_ref (tmp1, count);
2324 lse.direct_byref = 1;
2325 rss = gfc_walk_expr (expr2);
2326 gfc_conv_expr_descriptor (&lse, expr2, rss);
2327
2328 gfc_add_block_to_block (&body, &lse.pre);
2329 gfc_add_block_to_block (&body, &lse.post);
2330
2331 /* Increment count. */
2332 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2333 count, gfc_index_one_node);
2334 gfc_add_modify_expr (&body, count, tmp);
2335
2336 tmp = gfc_finish_block (&body);
2337
2338 /* Generate body and loops according to the information in
2339 nested_forall_info. */
2340 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2341 gfc_add_expr_to_block (block, tmp);
2342
2343 /* Reset count. */
2344 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2345
2346 parm = gfc_build_array_ref (tmp1, count);
2347 lss = gfc_walk_expr (expr1);
2348 gfc_init_se (&lse, NULL);
2349 gfc_conv_expr_descriptor (&lse, expr1, lss);
2350 gfc_add_modify_expr (&lse.pre, lse.expr, parm);
2351 gfc_start_block (&body);
2352 gfc_add_block_to_block (&body, &lse.pre);
2353 gfc_add_block_to_block (&body, &lse.post);
2354
2355 /* Increment count. */
2356 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2357 count, gfc_index_one_node);
2358 gfc_add_modify_expr (&body, count, tmp);
2359
2360 tmp = gfc_finish_block (&body);
2361
2362 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2363 gfc_add_expr_to_block (block, tmp);
2364 }
2365 /* Free the temporary. */
2366 if (ptemp1)
2367 {
2368 tmp = gfc_call_free (ptemp1);
2369 gfc_add_expr_to_block (block, tmp);
2370 }
2371 }
2372
2373
2374 /* FORALL and WHERE statements are really nasty, especially when you nest
2375 them. The rhs of a forall assignment must be evaluated for every iteration
2376 before any of the actual assignments are performed. Presumably this also
2377 applies to all the assignments in an inner where statement. */
2378
2379 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2380 linear array, relying on the fact that we process in the same order in all
2381 loops.
2382
2383 forall (i=start:end:stride; maskexpr)
2384 e<i> = f<i>
2385 g<i> = h<i>
2386 end forall
2387 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2388 Translates to:
2389 count = ((end + 1 - start) / stride)
2390 masktmp(:) = maskexpr(:)
2391
2392 maskindex = 0;
2393 for (i = start; i <= end; i += stride)
2394 {
2395 if (masktmp[maskindex++])
2396 e<i> = f<i>
2397 }
2398 maskindex = 0;
2399 for (i = start; i <= end; i += stride)
2400 {
2401 if (masktmp[maskindex++])
2402 g<i> = h<i>
2403 }
2404
2405 Note that this code only works when there are no dependencies.
2406 Forall loops with array assignments and data dependencies are a real pain,
2407 because the size of the temporary cannot always be determined before the
2408 loop is executed. This problem is compounded by the presence of nested
2409 FORALL constructs.
2410 */
2411
2412 static tree
2413 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2414 {
2415 stmtblock_t block;
2416 stmtblock_t body;
2417 tree *var;
2418 tree *start;
2419 tree *end;
2420 tree *step;
2421 gfc_expr **varexpr;
2422 tree tmp;
2423 tree assign;
2424 tree size;
2425 tree maskindex;
2426 tree mask;
2427 tree pmask;
2428 int n;
2429 int nvar;
2430 int need_temp;
2431 gfc_forall_iterator *fa;
2432 gfc_se se;
2433 gfc_code *c;
2434 gfc_saved_var *saved_vars;
2435 iter_info *this_forall;
2436 forall_info *info;
2437 bool need_mask;
2438
2439 /* Do nothing if the mask is false. */
2440 if (code->expr
2441 && code->expr->expr_type == EXPR_CONSTANT
2442 && !code->expr->value.logical)
2443 return build_empty_stmt ();
2444
2445 n = 0;
2446 /* Count the number of FORALL indexes. */
2447 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2448 n++;
2449 nvar = n;
2450
2451 /* Allocate the space for var, start, end, step, varexpr. */
2452 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2453 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2454 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2455 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2456 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2457 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2458
2459 /* Allocate the space for info. */
2460 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2461
2462 gfc_start_block (&block);
2463
2464 n = 0;
2465 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2466 {
2467 gfc_symbol *sym = fa->var->symtree->n.sym;
2468
2469 /* Allocate space for this_forall. */
2470 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2471
2472 /* Create a temporary variable for the FORALL index. */
2473 tmp = gfc_typenode_for_spec (&sym->ts);
2474 var[n] = gfc_create_var (tmp, sym->name);
2475 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2476
2477 /* Record it in this_forall. */
2478 this_forall->var = var[n];
2479
2480 /* Replace the index symbol's backend_decl with the temporary decl. */
2481 sym->backend_decl = var[n];
2482
2483 /* Work out the start, end and stride for the loop. */
2484 gfc_init_se (&se, NULL);
2485 gfc_conv_expr_val (&se, fa->start);
2486 /* Record it in this_forall. */
2487 this_forall->start = se.expr;
2488 gfc_add_block_to_block (&block, &se.pre);
2489 start[n] = se.expr;
2490
2491 gfc_init_se (&se, NULL);
2492 gfc_conv_expr_val (&se, fa->end);
2493 /* Record it in this_forall. */
2494 this_forall->end = se.expr;
2495 gfc_make_safe_expr (&se);
2496 gfc_add_block_to_block (&block, &se.pre);
2497 end[n] = se.expr;
2498
2499 gfc_init_se (&se, NULL);
2500 gfc_conv_expr_val (&se, fa->stride);
2501 /* Record it in this_forall. */
2502 this_forall->step = se.expr;
2503 gfc_make_safe_expr (&se);
2504 gfc_add_block_to_block (&block, &se.pre);
2505 step[n] = se.expr;
2506
2507 /* Set the NEXT field of this_forall to NULL. */
2508 this_forall->next = NULL;
2509 /* Link this_forall to the info construct. */
2510 if (info->this_loop)
2511 {
2512 iter_info *iter_tmp = info->this_loop;
2513 while (iter_tmp->next != NULL)
2514 iter_tmp = iter_tmp->next;
2515 iter_tmp->next = this_forall;
2516 }
2517 else
2518 info->this_loop = this_forall;
2519
2520 n++;
2521 }
2522 nvar = n;
2523
2524 /* Calculate the size needed for the current forall level. */
2525 size = gfc_index_one_node;
2526 for (n = 0; n < nvar; n++)
2527 {
2528 /* size = (end + step - start) / step. */
2529 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2530 step[n], start[n]);
2531 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2532
2533 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2534 tmp = convert (gfc_array_index_type, tmp);
2535
2536 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2537 }
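/* A small worked example of the formula above: for i=1:10:3 the
   iterations are 1, 4, 7 and 10, and (10 + 3 - 1) / 3 = 4 as expected.  */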
2538
2539 /* Record the nvar and size of current forall level. */
2540 info->nvar = nvar;
2541 info->size = size;
2542
2543 if (code->expr)
2544 {
2545 /* If the mask is .true., consider the FORALL unconditional. */
2546 if (code->expr->expr_type == EXPR_CONSTANT
2547 && code->expr->value.logical)
2548 need_mask = false;
2549 else
2550 need_mask = true;
2551 }
2552 else
2553 need_mask = false;
2554
2555 /* First we need to allocate the mask. */
2556 if (need_mask)
2557 {
2558 /* As the mask array can be very big, prefer compact boolean types. */
2559 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2560 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2561 size, NULL, &block, &pmask);
2562 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2563
2564 /* Record them in the info structure. */
2565 info->maskindex = maskindex;
2566 info->mask = mask;
2567 }
2568 else
2569 {
2570 /* No mask was specified. */
2571 maskindex = NULL_TREE;
2572 mask = pmask = NULL_TREE;
2573 }
2574
2575 /* Link the current forall level to nested_forall_info. */
2576 info->prev_nest = nested_forall_info;
2577 nested_forall_info = info;
2578
2579 /* Copy the mask into a temporary variable if required.
2580 For now we assume a mask temporary is needed. */
2581 if (need_mask)
2582 {
2583 /* As the mask array can be very big, prefer compact boolean types. */
2584 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2585
2586 gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
2587
2588 /* Start of mask assignment loop body. */
2589 gfc_start_block (&body);
2590
2591 /* Evaluate the mask expression. */
2592 gfc_init_se (&se, NULL);
2593 gfc_conv_expr_val (&se, code->expr);
2594 gfc_add_block_to_block (&body, &se.pre);
2595
2596 /* Store the mask. */
2597 se.expr = convert (mask_type, se.expr);
2598
2599 tmp = gfc_build_array_ref (mask, maskindex);
2600 gfc_add_modify_expr (&body, tmp, se.expr);
2601
2602 /* Advance to the next mask element. */
2603 tmp = build2 (PLUS_EXPR, gfc_array_index_type,
2604 maskindex, gfc_index_one_node);
2605 gfc_add_modify_expr (&body, maskindex, tmp);
2606
2607 /* Generate the loops. */
2608 tmp = gfc_finish_block (&body);
2609 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2610 gfc_add_expr_to_block (&block, tmp);
2611 }
2612
2613 c = code->block->next;
2614
2615 /* TODO: loop merging in FORALL statements. */
2616 /* Now that we've got a copy of the mask, generate the assignment loops. */
2617 while (c)
2618 {
2619 switch (c->op)
2620 {
2621 case EXEC_ASSIGN:
2622 /* A scalar or array assignment. */
2623 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2624 /* Temporaries due to array assignment data dependencies introduce
2625 no end of problems. */
2626 if (need_temp)
2627 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2628 nested_forall_info, &block);
2629 else
2630 {
2631 /* Use the normal assignment copying routines. */
2632 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2633
2634 /* Generate body and loops. */
2635 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2636 assign, 1);
2637 gfc_add_expr_to_block (&block, tmp);
2638 }
2639
2640 break;
2641
2642 case EXEC_WHERE:
2643 /* Translate WHERE or WHERE construct nested in FORALL. */
2644 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
2645 break;
2646
2647 /* Pointer assignment inside FORALL. */
2648 case EXEC_POINTER_ASSIGN:
2649 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
2650 if (need_temp)
2651 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
2652 nested_forall_info, &block);
2653 else
2654 {
2655 /* Use the normal assignment copying routines. */
2656 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
2657
2658 /* Generate body and loops. */
2659 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2660 assign, 1);
2661 gfc_add_expr_to_block (&block, tmp);
2662 }
2663 break;
2664
2665 case EXEC_FORALL:
2666 tmp = gfc_trans_forall_1 (c, nested_forall_info);
2667 gfc_add_expr_to_block (&block, tmp);
2668 break;
2669
2670 /* Explicit subroutine calls are prevented by the frontend but interface
2671 assignments can legitimately produce them. */
2672 case EXEC_ASSIGN_CALL:
2673 assign = gfc_trans_call (c, true);
2674 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
2675 gfc_add_expr_to_block (&block, tmp);
2676 break;
2677
2678 default:
2679 gcc_unreachable ();
2680 }
2681
2682 c = c->next;
2683 }
2684
2685 /* Restore the original index variables. */
2686 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
2687 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
2688
2689 /* Free the space for var, start, end, step, varexpr. */
2690 gfc_free (var);
2691 gfc_free (start);
2692 gfc_free (end);
2693 gfc_free (step);
2694 gfc_free (varexpr);
2695 gfc_free (saved_vars);
2696
2697 /* Free the space for this forall_info. */
2698 gfc_free (info);
2699
2700 if (pmask)
2701 {
2702 /* Free the temporary for the mask. */
2703 tmp = gfc_call_free (pmask);
2704 gfc_add_expr_to_block (&block, tmp);
2705 }
2706 if (maskindex)
2707 pushdecl (maskindex);
2708
2709 return gfc_finish_block (&block);
2710 }
2711
2712
2713 /* Translate the FORALL statement or construct. */
2714
2715 tree gfc_trans_forall (gfc_code * code)
2716 {
2717 return gfc_trans_forall_1 (code, NULL);
2718 }
2719
2720
2721 /* Evaluate the WHERE mask expression and copy its value to a temporary.
2722 If the WHERE construct is nested in FORALL, compute the overall temporary
2723 needed: the size required by the WHERE mask expression multiplied by the
2724 iterator number of the nested forall.
2725 ME is the WHERE mask expression.
2726 MASK is the current execution mask upon input, whose sense may or may
2727 not be inverted as specified by the INVERT argument.
2728 CMASK is the updated execution mask on output, or NULL if not required.
2729 PMASK is the pending execution mask on output, or NULL if not required.
2730 BLOCK is the block in which to place the condition evaluation loops. */
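/* In rough terms, for a clause "ELSEWHERE (me)" reached with control mask
   MASK (whose sense may be inverted), each element is handled as

     cmask[count] = mask[count] .AND. me
     pmask[count] = mask[count] .AND. .NOT. me

   with the mask[count] factor dropped when MASK is NULL, i.e. for the
   outermost WHERE.  */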
2731
2732 static void
2733 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
2734 tree mask, bool invert, tree cmask, tree pmask,
2735 tree mask_type, stmtblock_t * block)
2736 {
2737 tree tmp, tmp1;
2738 gfc_ss *lss, *rss;
2739 gfc_loopinfo loop;
2740 stmtblock_t body, body1;
2741 tree count, cond, mtmp;
2742 gfc_se lse, rse;
2743
2744 gfc_init_loopinfo (&loop);
2745
2746 lss = gfc_walk_expr (me);
2747 rss = gfc_walk_expr (me);
2748
2749 /* Variable to index the temporary. */
2750 count = gfc_create_var (gfc_array_index_type, "count");
2751 /* Initialize count. */
2752 gfc_add_modify_expr (block, count, gfc_index_zero_node);
2753
2754 gfc_start_block (&body);
2755
2756 gfc_init_se (&rse, NULL);
2757 gfc_init_se (&lse, NULL);
2758
2759 if (lss == gfc_ss_terminator)
2760 {
2761 gfc_init_block (&body1);
2762 }
2763 else
2764 {
2765 /* Initialize the loop. */
2766 gfc_init_loopinfo (&loop);
2767
2768 /* We may need LSS to determine the shape of the expression. */
2769 gfc_add_ss_to_loop (&loop, lss);
2770 gfc_add_ss_to_loop (&loop, rss);
2771
2772 gfc_conv_ss_startstride (&loop);
2773 gfc_conv_loop_setup (&loop);
2774
2775 gfc_mark_ss_chain_used (rss, 1);
2776 /* Start the loop body. */
2777 gfc_start_scalarized_body (&loop, &body1);
2778
2779 /* Translate the expression. */
2780 gfc_copy_loopinfo_to_se (&rse, &loop);
2781 rse.ss = rss;
2782 gfc_conv_expr (&rse, me);
2783 }
2784
2785 /* Variable to hold the evaluated mask condition. */
2786 cond = gfc_create_var (mask_type, "cond");
2787 if (mask && (cmask || pmask))
2788 mtmp = gfc_create_var (mask_type, "mask");
2789 else mtmp = NULL_TREE;
2790
2791 gfc_add_block_to_block (&body1, &lse.pre);
2792 gfc_add_block_to_block (&body1, &rse.pre);
2793
2794 gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
2795
2796 if (mask && (cmask || pmask))
2797 {
2798 tmp = gfc_build_array_ref (mask, count);
2799 if (invert)
2800 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
2801 gfc_add_modify_expr (&body1, mtmp, tmp);
2802 }
2803
2804 if (cmask)
2805 {
2806 tmp1 = gfc_build_array_ref (cmask, count);
2807 tmp = cond;
2808 if (mask)
2809 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2810 gfc_add_modify_expr (&body1, tmp1, tmp);
2811 }
2812
2813 if (pmask)
2814 {
2815 tmp1 = gfc_build_array_ref (pmask, count);
2816 tmp = build1 (TRUTH_NOT_EXPR, mask_type, cond);
2817 if (mask)
2818 tmp = build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
2819 gfc_add_modify_expr (&body1, tmp1, tmp);
2820 }
2821
2822 gfc_add_block_to_block (&body1, &lse.post);
2823 gfc_add_block_to_block (&body1, &rse.post);
2824
2825 if (lss == gfc_ss_terminator)
2826 {
2827 gfc_add_block_to_block (&body, &body1);
2828 }
2829 else
2830 {
2831 /* Increment count. */
2832 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
2833 gfc_index_one_node);
2834 gfc_add_modify_expr (&body1, count, tmp1);
2835
2836 /* Generate the copying loops. */
2837 gfc_trans_scalarizing_loops (&loop, &body1);
2838
2839 gfc_add_block_to_block (&body, &loop.pre);
2840 gfc_add_block_to_block (&body, &loop.post);
2841
2842 gfc_cleanup_loop (&loop);
2843 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2844 as tree nodes in SS may not be valid in a different scope. */
2845 }
2846
2847 tmp1 = gfc_finish_block (&body);
2848 /* If the WHERE construct is inside FORALL, fill the full temporary. */
2849 if (nested_forall_info != NULL)
2850 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
2851
2852 gfc_add_expr_to_block (block, tmp1);
2853 }
2854
2855
2856 /* Translate an assignment statement in a WHERE statement or WHERE
2857 construct. The MASK expression is used to control which elements
2858 of EXPR1 shall be assigned. The sense of MASK is specified by
2859 INVERT. */
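/* A sketch of the generated code for the simple case, i.e. when the
   scalarizer needs no temporary:

     for each element (scalarized):
       if (mask[count1++])        (or !mask[...] when INVERT is set)
         lhs = rhs;

   When dependency analysis forces a temporary, the loop is split at the
   scalarizing boundary and COUNT2 indexes the copy-out of the temporary
   into the real lhs.  */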
2860
2861 static tree
2862 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
2863 tree mask, bool invert,
2864 tree count1, tree count2,
2865 gfc_symbol *sym)
2866 {
2867 gfc_se lse;
2868 gfc_se rse;
2869 gfc_ss *lss;
2870 gfc_ss *lss_section;
2871 gfc_ss *rss;
2872
2873 gfc_loopinfo loop;
2874 tree tmp;
2875 stmtblock_t block;
2876 stmtblock_t body;
2877 tree index, maskexpr;
2878
2879 #if 0
2880 /* TODO: handle this special case.
2881 Special case a single function returning an array. */
2882 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
2883 {
2884 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
2885 if (tmp)
2886 return tmp;
2887 }
2888 #endif
2889
2890 /* Assignment of the form lhs = rhs. */
2891 gfc_start_block (&block);
2892
2893 gfc_init_se (&lse, NULL);
2894 gfc_init_se (&rse, NULL);
2895
2896 /* Walk the lhs. */
2897 lss = gfc_walk_expr (expr1);
2898 rss = NULL;
2899
2900 /* In each where-assign-stmt, the mask-expr and the variable being
2901 defined shall be arrays of the same shape. */
2902 gcc_assert (lss != gfc_ss_terminator);
2903
2904 /* The assignment needs scalarization. */
2905 lss_section = lss;
2906
2907 /* Find a non-scalar SS from the lhs. */
2908 while (lss_section != gfc_ss_terminator
2909 && lss_section->type != GFC_SS_SECTION)
2910 lss_section = lss_section->next;
2911
2912 gcc_assert (lss_section != gfc_ss_terminator);
2913
2914 /* Initialize the scalarizer. */
2915 gfc_init_loopinfo (&loop);
2916
2917 /* Walk the rhs. */
2918 rss = gfc_walk_expr (expr2);
2919 if (rss == gfc_ss_terminator)
2920 {
2921 /* The rhs is scalar. Add a ss for the expression. */
2922 rss = gfc_get_ss ();
2923 rss->next = gfc_ss_terminator;
2924 rss->type = GFC_SS_SCALAR;
2925 rss->expr = expr2;
2926 }
2927
2928 /* Associate the SS with the loop. */
2929 gfc_add_ss_to_loop (&loop, lss);
2930 gfc_add_ss_to_loop (&loop, rss);
2931
2932 /* Calculate the bounds of the scalarization. */
2933 gfc_conv_ss_startstride (&loop);
2934
2935 /* Resolve any data dependencies in the statement. */
2936 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
2937
2938 /* Setup the scalarizing loops. */
2939 gfc_conv_loop_setup (&loop);
2940
2941 /* Setup the gfc_se structures. */
2942 gfc_copy_loopinfo_to_se (&lse, &loop);
2943 gfc_copy_loopinfo_to_se (&rse, &loop);
2944
2945 rse.ss = rss;
2946 gfc_mark_ss_chain_used (rss, 1);
2947 if (loop.temp_ss == NULL)
2948 {
2949 lse.ss = lss;
2950 gfc_mark_ss_chain_used (lss, 1);
2951 }
2952 else
2953 {
2954 lse.ss = loop.temp_ss;
2955 gfc_mark_ss_chain_used (lss, 3);
2956 gfc_mark_ss_chain_used (loop.temp_ss, 3);
2957 }
2958
2959 /* Start the scalarized loop body. */
2960 gfc_start_scalarized_body (&loop, &body);
2961
2962 /* Translate the expression. */
2963 gfc_conv_expr (&rse, expr2);
2964 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
2965 {
2966 gfc_conv_tmp_array_ref (&lse);
2967 gfc_advance_se_ss_chain (&lse);
2968 }
2969 else
2970 gfc_conv_expr (&lse, expr1);
2971
2972 /* Form the mask expression according to the mask. */
2973 index = count1;
2974 maskexpr = gfc_build_array_ref (mask, index);
2975 if (invert)
2976 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
2977
2978 /* Use the scalar assignment as is. */
2979 if (sym == NULL)
2980 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
2981 loop.temp_ss != NULL, false);
2982 else
2983 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
2984
2985 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
2986
2987 gfc_add_expr_to_block (&body, tmp);
2988
2989 if (lss == gfc_ss_terminator)
2990 {
2991 /* Increment count1. */
2992 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2993 count1, gfc_index_one_node);
2994 gfc_add_modify_expr (&body, count1, tmp);
2995
2996 /* Use the scalar assignment as is. */
2997 gfc_add_block_to_block (&block, &body);
2998 }
2999 else
3000 {
3001 gcc_assert (lse.ss == gfc_ss_terminator
3002 && rse.ss == gfc_ss_terminator);
3003
3004 if (loop.temp_ss != NULL)
3005 {
3006 /* Increment count1 before finishing the main body of a scalarized
3007 expression. */
3008 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3009 count1, gfc_index_one_node);
3010 gfc_add_modify_expr (&body, count1, tmp);
3011 gfc_trans_scalarized_loop_boundary (&loop, &body);
3012
3013 /* We need to copy the temporary to the actual lhs. */
3014 gfc_init_se (&lse, NULL);
3015 gfc_init_se (&rse, NULL);
3016 gfc_copy_loopinfo_to_se (&lse, &loop);
3017 gfc_copy_loopinfo_to_se (&rse, &loop);
3018
3019 rse.ss = loop.temp_ss;
3020 lse.ss = lss;
3021
3022 gfc_conv_tmp_array_ref (&rse);
3023 gfc_advance_se_ss_chain (&rse);
3024 gfc_conv_expr (&lse, expr1);
3025
3026 gcc_assert (lse.ss == gfc_ss_terminator
3027 && rse.ss == gfc_ss_terminator);
3028
3029 /* Form the mask expression according to the mask tree list. */
3030 index = count2;
3031 maskexpr = gfc_build_array_ref (mask, index);
3032 if (invert)
3033 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3034 maskexpr);
3035
3036 /* Use the scalar assignment as is. */
3037 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3038 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3039 gfc_add_expr_to_block (&body, tmp);
3040
3041 /* Increment count2. */
3042 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3043 count2, gfc_index_one_node);
3044 gfc_add_modify_expr (&body, count2, tmp);
3045 }
3046 else
3047 {
3048 /* Increment count1. */
3049 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3050 count1, gfc_index_one_node);
3051 gfc_add_modify_expr (&body, count1, tmp);
3052 }
3053
3054 /* Generate the copying loops. */
3055 gfc_trans_scalarizing_loops (&loop, &body);
3056
3057 /* Wrap the whole thing up. */
3058 gfc_add_block_to_block (&block, &loop.pre);
3059 gfc_add_block_to_block (&block, &loop.post);
3060 gfc_cleanup_loop (&loop);
3061 }
3062
3063 return gfc_finish_block (&block);
3064 }
3065
3066
3067 /* Translate the WHERE construct or statement.
3068 This function can be called iteratively to translate the nested WHERE
3069 construct or statement.
3070 MASK is the control mask. */
3071
3072 static void
3073 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3074 forall_info * nested_forall_info, stmtblock_t * block)
3075 {
3076 stmtblock_t inner_size_body;
3077 tree inner_size, size;
3078 gfc_ss *lss, *rss;
3079 tree mask_type;
3080 gfc_expr *expr1;
3081 gfc_expr *expr2;
3082 gfc_code *cblock;
3083 gfc_code *cnext;
3084 tree tmp;
3085 tree count1, count2;
3086 bool need_cmask;
3087 bool need_pmask;
3088 int need_temp;
3089 tree pcmask = NULL_TREE;
3090 tree ppmask = NULL_TREE;
3091 tree cmask = NULL_TREE;
3092 tree pmask = NULL_TREE;
3093 gfc_actual_arglist *arg;
3094
3095 /* The WHERE statement or the first clause of the WHERE construct. */
3096 cblock = code->block;
3097
3098 /* As the mask array can be very big, prefer compact boolean types. */
3099 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3100
3101 /* Determine which temporary masks are needed. */
3102 if (!cblock->block)
3103 {
3104 /* One clause: No ELSEWHEREs. */
3105 need_cmask = (cblock->next != 0);
3106 need_pmask = false;
3107 }
3108 else if (cblock->block->block)
3109 {
3110 /* Three or more clauses: Conditional ELSEWHEREs. */
3111 need_cmask = true;
3112 need_pmask = true;
3113 }
3114 else if (cblock->next)
3115 {
3116 /* Two clauses, the first non-empty. */
3117 need_cmask = true;
3118 need_pmask = (mask != NULL_TREE
3119 && cblock->block->next != 0);
3120 }
3121 else if (!cblock->block->next)
3122 {
3123 /* Two clauses, both empty. */
3124 need_cmask = false;
3125 need_pmask = false;
3126 }
3127 /* Two clauses, the first empty, the second non-empty. */
3128 else if (mask)
3129 {
3130 need_cmask = (cblock->block->expr != 0);
3131 need_pmask = true;
3132 }
3133 else
3134 {
3135 need_cmask = true;
3136 need_pmask = false;
3137 }
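/* Roughly, the cases above come down to:
     WHERE (m) x = y                          -> cmask only
     WHERE (m) ... ELSEWHERE ... END WHERE    -> cmask, plus pmask when this
                                                 WHERE is itself masked
     WHERE (m1) ... ELSEWHERE (m2) ... ELSEWHERE ... END WHERE
                                              -> both cmask and pmask  */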
3138
3139 if (need_cmask || need_pmask)
3140 {
3141 /* Calculate the size of temporary needed by the mask-expr. */
3142 gfc_init_block (&inner_size_body);
3143 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3144 &inner_size_body, &lss, &rss);
3145
3146 /* Calculate the total size of temporary needed. */
3147 size = compute_overall_iter_number (nested_forall_info, inner_size,
3148 &inner_size_body, block);
3149
3150 /* Allocate temporary for WHERE mask if needed. */
3151 if (need_cmask)
3152 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3153 &pcmask);
3154
3155 /* Allocate temporary for !mask if needed. */
3156 if (need_pmask)
3157 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3158 &ppmask);
3159 }
3160
3161 while (cblock)
3162 {
3163 /* Each time around this loop, the where clause is conditional
3164 on the value of mask and invert, which are updated at the
3165 bottom of the loop. */
3166
3167 /* Has mask-expr. */
3168 if (cblock->expr)
3169 {
3170 /* Ensure that the WHERE mask will be evaluated exactly once.
3171 If there are no statements in this WHERE/ELSEWHERE clause,
3172 then we don't need to update the control mask (cmask).
3173 If this is the last clause of the WHERE construct, then
3174 we don't need to update the pending control mask (pmask). */
3175 if (mask)
3176 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3177 mask, invert,
3178 cblock->next ? cmask : NULL_TREE,
3179 cblock->block ? pmask : NULL_TREE,
3180 mask_type, block);
3181 else
3182 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3183 NULL_TREE, false,
3184 (cblock->next || cblock->block)
3185 ? cmask : NULL_TREE,
3186 NULL_TREE, mask_type, block);
3187
3188 invert = false;
3189 }
3190 /* It's a final elsewhere-stmt. No mask-expr is present. */
3191 else
3192 cmask = mask;
3193
3194 /* The body of this where clause is controlled by cmask, with its
3195 sense specified by invert. */
3196
3197 /* Get the assignment statement of a WHERE statement, or the first
3198 statement in where-body-construct of a WHERE construct. */
3199 cnext = cblock->next;
3200 while (cnext)
3201 {
3202 switch (cnext->op)
3203 {
3204 /* WHERE assignment statement. */
3205 case EXEC_ASSIGN_CALL:
3206
3207 arg = cnext->ext.actual;
3208 expr1 = expr2 = NULL;
3209 for (; arg; arg = arg->next)
3210 {
3211 if (!arg->expr)
3212 continue;
3213 if (expr1 == NULL)
3214 expr1 = arg->expr;
3215 else
3216 expr2 = arg->expr;
3217 }
3218 goto evaluate;
3219
3220 case EXEC_ASSIGN:
3221 expr1 = cnext->expr;
3222 expr2 = cnext->expr2;
3223 evaluate:
3224 if (nested_forall_info != NULL)
3225 {
3226 need_temp = gfc_check_dependency (expr1, expr2, 0);
3227 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3228 gfc_trans_assign_need_temp (expr1, expr2,
3229 cmask, invert,
3230 nested_forall_info, block);
3231 else
3232 {
3233 /* Variables used to index the mask. */
3234 count1 = gfc_create_var (gfc_array_index_type, "count1");
3235 count2 = gfc_create_var (gfc_array_index_type, "count2");
3236 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3237 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3238
3239 tmp = gfc_trans_where_assign (expr1, expr2,
3240 cmask, invert,
3241 count1, count2,
3242 cnext->resolved_sym);
3243
3244 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3245 tmp, 1);
3246 gfc_add_expr_to_block (block, tmp);
3247 }
3248 }
3249 else
3250 {
3251 /* Variables used to index the mask. */
3252 count1 = gfc_create_var (gfc_array_index_type, "count1");
3253 count2 = gfc_create_var (gfc_array_index_type, "count2");
3254 gfc_add_modify_expr (block, count1, gfc_index_zero_node);
3255 gfc_add_modify_expr (block, count2, gfc_index_zero_node);
3256
3257 tmp = gfc_trans_where_assign (expr1, expr2,
3258 cmask, invert,
3259 count1, count2,
3260 cnext->resolved_sym);
3261 gfc_add_expr_to_block (block, tmp);
3262
3263 }
3264 break;
3265
3266 /* WHERE or WHERE construct is part of a where-body-construct. */
3267 case EXEC_WHERE:
3268 gfc_trans_where_2 (cnext, cmask, invert,
3269 nested_forall_info, block);
3270 break;
3271
3272 default:
3273 gcc_unreachable ();
3274 }
3275
3276 /* The next statement within the same where-body-construct. */
3277 cnext = cnext->next;
3278 }
3279 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3280 cblock = cblock->block;
3281 if (mask == NULL_TREE)
3282 {
3283 /* If we're the initial WHERE, we can simply invert the sense
3284 of the current mask to obtain the "mask" for the remaining
3285 ELSEWHEREs. */
3286 invert = true;
3287 mask = cmask;
3288 }
3289 else
3290 {
3291 /* Otherwise, for nested WHEREs we need to use the pending mask. */
3292 invert = false;
3293 mask = pmask;
3294 }
3295 }
3296
3297 /* If we allocated a pending mask array, deallocate it now. */
3298 if (ppmask)
3299 {
3300 tmp = gfc_call_free (ppmask);
3301 gfc_add_expr_to_block (block, tmp);
3302 }
3303
3304 /* If we allocated a current mask array, deallocate it now. */
3305 if (pcmask)
3306 {
3307 tmp = gfc_call_free (pcmask);
3308 gfc_add_expr_to_block (block, tmp);
3309 }
3310 }
3311
3312 /* Translate a simple WHERE construct or statement without dependencies.
3313 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3314 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3315 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
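/* For instance, a dependence-free "WHERE (a > 0) b = a" collapses into a
   single scalarized loop of the shape

     for each element:
       if (a > 0)  b = a;

   with no mask temporary allocated at all.  */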
3316
3317 static tree
3318 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3319 {
3320 stmtblock_t block, body;
3321 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3322 tree tmp, cexpr, tstmt, estmt;
3323 gfc_ss *css, *tdss, *tsss;
3324 gfc_se cse, tdse, tsse, edse, esse;
3325 gfc_loopinfo loop;
3326 gfc_ss *edss = 0;
3327 gfc_ss *esss = 0;
3328
3329 cond = cblock->expr;
3330 tdst = cblock->next->expr;
3331 tsrc = cblock->next->expr2;
3332 edst = eblock ? eblock->next->expr : NULL;
3333 esrc = eblock ? eblock->next->expr2 : NULL;
3334
3335 gfc_start_block (&block);
3336 gfc_init_loopinfo (&loop);
3337
3338 /* Handle the condition. */
3339 gfc_init_se (&cse, NULL);
3340 css = gfc_walk_expr (cond);
3341 gfc_add_ss_to_loop (&loop, css);
3342
3343 /* Handle the then-clause. */
3344 gfc_init_se (&tdse, NULL);
3345 gfc_init_se (&tsse, NULL);
3346 tdss = gfc_walk_expr (tdst);
3347 tsss = gfc_walk_expr (tsrc);
3348 if (tsss == gfc_ss_terminator)
3349 {
3350 tsss = gfc_get_ss ();
3351 tsss->next = gfc_ss_terminator;
3352 tsss->type = GFC_SS_SCALAR;
3353 tsss->expr = tsrc;
3354 }
3355 gfc_add_ss_to_loop (&loop, tdss);
3356 gfc_add_ss_to_loop (&loop, tsss);
3357
3358 if (eblock)
3359 {
3360 /* Handle the else clause. */
3361 gfc_init_se (&edse, NULL);
3362 gfc_init_se (&esse, NULL);
3363 edss = gfc_walk_expr (edst);
3364 esss = gfc_walk_expr (esrc);
3365 if (esss == gfc_ss_terminator)
3366 {
3367 esss = gfc_get_ss ();
3368 esss->next = gfc_ss_terminator;
3369 esss->type = GFC_SS_SCALAR;
3370 esss->expr = esrc;
3371 }
3372 gfc_add_ss_to_loop (&loop, edss);
3373 gfc_add_ss_to_loop (&loop, esss);
3374 }
3375
3376 gfc_conv_ss_startstride (&loop);
3377 gfc_conv_loop_setup (&loop);
3378
3379 gfc_mark_ss_chain_used (css, 1);
3380 gfc_mark_ss_chain_used (tdss, 1);
3381 gfc_mark_ss_chain_used (tsss, 1);
3382 if (eblock)
3383 {
3384 gfc_mark_ss_chain_used (edss, 1);
3385 gfc_mark_ss_chain_used (esss, 1);
3386 }
3387
3388 gfc_start_scalarized_body (&loop, &body);
3389
3390 gfc_copy_loopinfo_to_se (&cse, &loop);
3391 gfc_copy_loopinfo_to_se (&tdse, &loop);
3392 gfc_copy_loopinfo_to_se (&tsse, &loop);
3393 cse.ss = css;
3394 tdse.ss = tdss;
3395 tsse.ss = tsss;
3396 if (eblock)
3397 {
3398 gfc_copy_loopinfo_to_se (&edse, &loop);
3399 gfc_copy_loopinfo_to_se (&esse, &loop);
3400 edse.ss = edss;
3401 esse.ss = esss;
3402 }
3403
3404 gfc_conv_expr (&cse, cond);
3405 gfc_add_block_to_block (&body, &cse.pre);
3406 cexpr = cse.expr;
3407
3408 gfc_conv_expr (&tsse, tsrc);
3409 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3410 {
3411 gfc_conv_tmp_array_ref (&tdse);
3412 gfc_advance_se_ss_chain (&tdse);
3413 }
3414 else
3415 gfc_conv_expr (&tdse, tdst);
3416
3417 if (eblock)
3418 {
3419 gfc_conv_expr (&esse, esrc);
3420 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3421 {
3422 gfc_conv_tmp_array_ref (&edse);
3423 gfc_advance_se_ss_chain (&edse);
3424 }
3425 else
3426 gfc_conv_expr (&edse, edst);
3427 }
3428
3429 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3430 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3431 : build_empty_stmt ();
3432 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3433 gfc_add_expr_to_block (&body, tmp);
3434 gfc_add_block_to_block (&body, &cse.post);
3435
3436 gfc_trans_scalarizing_loops (&loop, &body);
3437 gfc_add_block_to_block (&block, &loop.pre);
3438 gfc_add_block_to_block (&block, &loop.post);
3439 gfc_cleanup_loop (&loop);
3440
3441 return gfc_finish_block (&block);
3442 }
3443
3444 /* As the WHERE or WHERE construct statement can be nested, we call
3445 gfc_trans_where_2 to do the translation, and pass the initial
3446 NULL values for both the control mask and the pending control mask. */
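/* For example, a WHERE whose mask references its own assignment target,
   such as "WHERE (a > 0) a = a - 1", is conservatively kept out of the
   fast path below, whereas the dependence-free "WHERE (a > 0) b = a" is
   handed to gfc_trans_where_3.  */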
3447
3448 tree
3449 gfc_trans_where (gfc_code * code)
3450 {
3451 stmtblock_t block;
3452 gfc_code *cblock;
3453 gfc_code *eblock;
3454
3455 cblock = code->block;
3456 if (cblock->next
3457 && cblock->next->op == EXEC_ASSIGN
3458 && !cblock->next->next)
3459 {
3460 eblock = cblock->block;
3461 if (!eblock)
3462 {
3463 /* A simple "WHERE (cond) x = y" statement or block is
3464 dependence free if cond is not dependent upon writing x,
3465 and the source y is unaffected by the destination x. */
3466 if (!gfc_check_dependency (cblock->next->expr,
3467 cblock->expr, 0)
3468 && !gfc_check_dependency (cblock->next->expr,
3469 cblock->next->expr2, 0))
3470 return gfc_trans_where_3 (cblock, NULL);
3471 }
3472 else if (!eblock->expr
3473 && !eblock->block
3474 && eblock->next
3475 && eblock->next->op == EXEC_ASSIGN
3476 && !eblock->next->next)
3477 {
3478 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3479 block is dependence free if cond is not dependent on writes
3480 to x1 and x2, y1 is not dependent on writes to x2, and y2
3481 is not dependent on writes to x1, and both y's are not
3482 dependent upon their own x's. */
3483 if (!gfc_check_dependency(cblock->next->expr,
3484 cblock->expr, 0)
3485 && !gfc_check_dependency(eblock->next->expr,
3486 cblock->expr, 0)
3487 && !gfc_check_dependency(cblock->next->expr,
3488 eblock->next->expr2, 0)
3489 && !gfc_check_dependency(eblock->next->expr,
3490 cblock->next->expr2, 0)
3491 && !gfc_check_dependency(cblock->next->expr,
3492 cblock->next->expr2, 0)
3493 && !gfc_check_dependency(eblock->next->expr,
3494 eblock->next->expr2, 0))
3495 return gfc_trans_where_3 (cblock, eblock);
3496 }
3497 }
3498
3499 gfc_start_block (&block);
3500
3501 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3502
3503 return gfc_finish_block (&block);
3504 }
3505
3506
3507 /* CYCLE a DO loop. The label decl has already been created by
3508 gfc_trans_do(); it is in TREE_PURPOSE (backend_decl) of the gfc_code
3509 node at the head of the loop. We must mark the label as used. */
3510
3511 tree
3512 gfc_trans_cycle (gfc_code * code)
3513 {
3514 tree cycle_label;
3515
3516 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3517 TREE_USED (cycle_label) = 1;
3518 return build1_v (GOTO_EXPR, cycle_label);
3519 }
3520
3521
3522 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3523 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3524 loop. */
3525
3526 tree
3527 gfc_trans_exit (gfc_code * code)
3528 {
3529 tree exit_label;
3530
3531 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3532 TREE_USED (exit_label) = 1;
3533 return build1_v (GOTO_EXPR, exit_label);
3534 }
3535
3536
3537 /* Translate the ALLOCATE statement. */
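/* A sketch of the structure generated for ALLOCATE (x, y, STAT=i), scalar
   case shown and names illustrative; array objects go through
   gfc_array_allocate instead:

     x = <runtime allocation, status stored in stat>
     if (stat != 0) goto error_label;
     y = <runtime allocation, status stored in stat>
     if (stat != 0) goto error_label;
   error_label:
     i = stat;

   Without STAT= the checks, the label and the final assignment are
   omitted.  */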
3538
3539 tree
3540 gfc_trans_allocate (gfc_code * code)
3541 {
3542 gfc_alloc *al;
3543 gfc_expr *expr;
3544 gfc_se se;
3545 tree tmp;
3546 tree parm;
3547 tree stat;
3548 tree pstat;
3549 tree error_label;
3550 stmtblock_t block;
3551
3552 if (!code->ext.alloc_list)
3553 return NULL_TREE;
3554
3555 gfc_start_block (&block);
3556
3557 if (code->expr)
3558 {
3559 tree gfc_int4_type_node = gfc_get_int_type (4);
3560
3561 stat = gfc_create_var (gfc_int4_type_node, "stat");
3562 pstat = build_fold_addr_expr (stat);
3563
3564 error_label = gfc_build_label_decl (NULL_TREE);
3565 TREE_USED (error_label) = 1;
3566 }
3567 else
3568 pstat = stat = error_label = NULL_TREE;
3569
3570 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3571 {
3572 expr = al->expr;
3573
3574 gfc_init_se (&se, NULL);
3575 gfc_start_block (&se.pre);
3576
3577 se.want_pointer = 1;
3578 se.descriptor_only = 1;
3579 gfc_conv_expr (&se, expr);
3580
3581 if (!gfc_array_allocate (&se, expr, pstat))
3582 {
3583 /* A scalar or derived type. */
3584 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3585
3586 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3587 tmp = se.string_length;
3588
3589 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3590 tmp = build2 (MODIFY_EXPR, void_type_node, se.expr,
3591 fold_convert (TREE_TYPE (se.expr), tmp));
3592 gfc_add_expr_to_block (&se.pre, tmp);
3593
3594 if (code->expr)
3595 {
3596 tmp = build1_v (GOTO_EXPR, error_label);
3597 parm = fold_build2 (NE_EXPR, boolean_type_node,
3598 stat, build_int_cst (TREE_TYPE (stat), 0));
3599 tmp = fold_build3 (COND_EXPR, void_type_node,
3600 parm, tmp, build_empty_stmt ());
3601 gfc_add_expr_to_block (&se.pre, tmp);
3602 }
3603
3604 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3605 {
3606 tmp = build_fold_indirect_ref (se.expr);
3607 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3608 gfc_add_expr_to_block (&se.pre, tmp);
3609 }
3610
3611 }
3612
3613 tmp = gfc_finish_block (&se.pre);
3614 gfc_add_expr_to_block (&block, tmp);
3615 }
3616
3617 /* Assign the value to the status variable. */
3618 if (code->expr)
3619 {
3620 tmp = build1_v (LABEL_EXPR, error_label);
3621 gfc_add_expr_to_block (&block, tmp);
3622
3623 gfc_init_se (&se, NULL);
3624 gfc_conv_expr_lhs (&se, code->expr);
3625 tmp = convert (TREE_TYPE (se.expr), stat);
3626 gfc_add_modify_expr (&block, se.expr, tmp);
3627 }
3628
3629 return gfc_finish_block (&block);
3630 }
3631
3632
3633 /* Translate a DEALLOCATE statement.
3634 There are two cases within the for loop:
3635 (1) deallocate(a1, a2, a3) is translated into the following sequence
3636 _gfortran_deallocate(a1, 0B)
3637 _gfortran_deallocate(a2, 0B)
3638 _gfortran_deallocate(a3, 0B)
3639 where the STAT= variable is passed a NULL pointer.
3640 (2) deallocate(a1, a2, a3, stat=i) is translated into the following
3641 astat = 0
3642 _gfortran_deallocate(a1, &stat)
3643 astat = astat + stat
3644 _gfortran_deallocate(a2, &stat)
3645 astat = astat + stat
3646 _gfortran_deallocate(a3, &stat)
3647 astat = astat + stat
3648 In case (1), we simply return at the end of the for loop. In case (2)
3649 we set the STAT= variable to astat. */
3650 tree
3651 gfc_trans_deallocate (gfc_code * code)
3652 {
3653 gfc_se se;
3654 gfc_alloc *al;
3655 gfc_expr *expr;
3656 tree apstat, astat, pstat, stat, tmp;
3657 stmtblock_t block;
3658
3659 gfc_start_block (&block);
3660
3661 /* Set up the optional STAT= variable. */
3662 if (code->expr)
3663 {
3664 tree gfc_int4_type_node = gfc_get_int_type (4);
3665
3666 /* Variable used with the library call. */
3667 stat = gfc_create_var (gfc_int4_type_node, "stat");
3668 pstat = build_fold_addr_expr (stat);
3669
3670 /* Running total of possible deallocation failures. */
3671 astat = gfc_create_var (gfc_int4_type_node, "astat");
3672 apstat = build_fold_addr_expr (astat);
3673
3674 /* Initialize astat to 0. */
3675 gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
3676 }
3677 else
3678 pstat = apstat = stat = astat = NULL_TREE;
3679
3680 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3681 {
3682 expr = al->expr;
3683 gcc_assert (expr->expr_type == EXPR_VARIABLE);
3684
3685 gfc_init_se (&se, NULL);
3686 gfc_start_block (&se.pre);
3687
3688 se.want_pointer = 1;
3689 se.descriptor_only = 1;
3690 gfc_conv_expr (&se, expr);
3691
3692 if (expr->ts.type == BT_DERIVED
3693 && expr->ts.derived->attr.alloc_comp)
3694 {
3695 gfc_ref *ref;
3696 gfc_ref *last = NULL;
3697 for (ref = expr->ref; ref; ref = ref->next)
3698 if (ref->type == REF_COMPONENT)
3699 last = ref;
3700
3701 /* Do not deallocate the components of a derived type
3702 ultimate pointer component. */
3703 if (!(last && last->u.c.component->pointer)
3704 && !(!last && expr->symtree->n.sym->attr.pointer))
3705 {
3706 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
3707 expr->rank);
3708 gfc_add_expr_to_block (&se.pre, tmp);
3709 }
3710 }
3711
3712 if (expr->rank)
3713 tmp = gfc_array_deallocate (se.expr, pstat);
3714 else
3715 {
3716 tmp = gfc_deallocate_with_status (se.expr, pstat, false);
3717 gfc_add_expr_to_block (&se.pre, tmp);
3718
3719 tmp = build2 (MODIFY_EXPR, void_type_node,
3720 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
3721 }
3722
3723 gfc_add_expr_to_block (&se.pre, tmp);
3724
3725 /* Keep track of the number of failed deallocations by adding stat
3726 of the last deallocation to the running total. */
3727 if (code->expr)
3728 {
3729 apstat = build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
3730 gfc_add_modify_expr (&se.pre, astat, apstat);
3731 }
3732
3733 tmp = gfc_finish_block (&se.pre);
3734 gfc_add_expr_to_block (&block, tmp);
3735
3736 }
3737
3738 /* Assign the value to the status variable. */
3739 if (code->expr)
3740 {
3741 gfc_init_se (&se, NULL);
3742 gfc_conv_expr_lhs (&se, code->expr);
3743 tmp = convert (TREE_TYPE (se.expr), astat);
3744 gfc_add_modify_expr (&block, se.expr, tmp);
3745 }
3746
3747 return gfc_finish_block (&block);
3748 }
3749