1 /* Statement translation -- generate GCC trees from gfc_code.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Paul Brook <paul@nowt.org>
5 and Steven Bosscher <s.bosscher@student.tudelft.nl>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ggc.h"
30 #include "toplev.h"
31 #include "real.h"
32 #include "gfortran.h"
33 #include "flags.h"
34 #include "trans.h"
35 #include "trans-stmt.h"
36 #include "trans-types.h"
37 #include "trans-array.h"
38 #include "trans-const.h"
39 #include "arith.h"
40 #include "dependency.h"
41
42 typedef struct iter_info
43 {
44 tree var;
45 tree start;
46 tree end;
47 tree step;
48 struct iter_info *next;
49 }
50 iter_info;
51
52 typedef struct forall_info
53 {
54 iter_info *this_loop;
55 tree mask;
56 tree maskindex;
57 int nvar;
58 tree size;
59 struct forall_info *prev_nest;
60 }
61 forall_info;
62
63 static void gfc_trans_where_2 (gfc_code *, tree, bool,
64 forall_info *, stmtblock_t *);
65
66 /* Translate an F95 label number to a LABEL_EXPR. */
67
68 tree
69 gfc_trans_label_here (gfc_code * code)
70 {
71 return build1_v (LABEL_EXPR, gfc_get_label_decl (code->here));
72 }
73
74
75 /* Given a variable expression which has been ASSIGNed to, find the decl
76 containing the auxiliary variables. For variables in common blocks this
77 is a field_decl. */
78
79 void
80 gfc_conv_label_variable (gfc_se * se, gfc_expr * expr)
81 {
82 gcc_assert (expr->symtree->n.sym->attr.assign == 1);
83 gfc_conv_expr (se, expr);
84 /* Deal with a variable in a common block. Get the field declaration. */
85 if (TREE_CODE (se->expr) == COMPONENT_REF)
86 se->expr = TREE_OPERAND (se->expr, 1);
87 /* Deal with a dummy argument. Get the parameter declaration. */
88 else if (TREE_CODE (se->expr) == INDIRECT_REF)
89 se->expr = TREE_OPERAND (se->expr, 0);
90 }
91
92 /* Translate a label assignment statement. */
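/* As an illustration (this example is not from the original sources), the
   obsolescent statement

     ASSIGN 100 TO ilabel

   stores either the address of statement label 100 (if it is a branch
   target) or the FORMAT string attached to label 100 in the auxiliary
   variables associated with 'ilabel'; the length/address pair set up
   below implements exactly that.  */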
93
94 tree
95 gfc_trans_label_assign (gfc_code * code)
96 {
97 tree label_tree;
98 gfc_se se;
99 tree len;
100 tree addr;
101 tree len_tree;
102 int label_len;
103
104 /* Start a new block. */
105 gfc_init_se (&se, NULL);
106 gfc_start_block (&se.pre);
107 gfc_conv_label_variable (&se, code->expr);
108
109 len = GFC_DECL_STRING_LEN (se.expr);
110 addr = GFC_DECL_ASSIGN_ADDR (se.expr);
111
112 label_tree = gfc_get_label_decl (code->label);
113
114 if (code->label->defined == ST_LABEL_TARGET)
115 {
116 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
117 len_tree = integer_minus_one_node;
118 }
119 else
120 {
121 gfc_expr *format = code->label->format;
122
123 label_len = format->value.character.length;
124 len_tree = build_int_cst (NULL_TREE, label_len);
125 label_tree = gfc_build_wide_string_const (format->ts.kind, label_len + 1,
126 format->value.character.string);
127 label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
128 }
129
130 gfc_add_modify (&se.pre, len, len_tree);
131 gfc_add_modify (&se.pre, addr, label_tree);
132
133 return gfc_finish_block (&se.pre);
134 }
135
136 /* Translate a GOTO statement. */
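/* As an illustration (not from the original sources), an assigned GOTO
   such as

     ASSIGN 20 TO ilabel
     GO TO ilabel, (10, 20, 30)

   first checks at runtime that 'ilabel' currently holds a target label,
   then compares the stored address against each label in the optional
   list, branching to the match or reporting a runtime error.  */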
137
138 tree
139 gfc_trans_goto (gfc_code * code)
140 {
141 locus loc = code->loc;
142 tree assigned_goto;
143 tree target;
144 tree tmp;
145 gfc_se se;
146
147 if (code->label != NULL)
148 return build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
149
150 /* ASSIGNED GOTO. */
151 gfc_init_se (&se, NULL);
152 gfc_start_block (&se.pre);
153 gfc_conv_label_variable (&se, code->expr);
154 tmp = GFC_DECL_STRING_LEN (se.expr);
155 tmp = fold_build2 (NE_EXPR, boolean_type_node, tmp,
156 build_int_cst (TREE_TYPE (tmp), -1));
157 gfc_trans_runtime_check (true, false, tmp, &se.pre, &loc,
158 "Assigned label is not a target label");
159
160 assigned_goto = GFC_DECL_ASSIGN_ADDR (se.expr);
161
162 code = code->block;
163 if (code == NULL)
164 {
165 target = fold_build1 (GOTO_EXPR, void_type_node, assigned_goto);
166 gfc_add_expr_to_block (&se.pre, target);
167 return gfc_finish_block (&se.pre);
168 }
169
170 /* Check the label list. */
171 do
172 {
173 target = gfc_get_label_decl (code->label);
174 tmp = gfc_build_addr_expr (pvoid_type_node, target);
175 tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, assigned_goto);
176 tmp = build3_v (COND_EXPR, tmp,
177 fold_build1 (GOTO_EXPR, void_type_node, target),
178 build_empty_stmt ());
179 gfc_add_expr_to_block (&se.pre, tmp);
180 code = code->block;
181 }
182 while (code != NULL);
183 gfc_trans_runtime_check (true, false, boolean_true_node, &se.pre, &loc,
184 "Assigned label is not in the list");
185
186 return gfc_finish_block (&se.pre);
187 }
188
189
190 /* Translate an ENTRY statement. Just adds a label for this entry point. */
191 tree
192 gfc_trans_entry (gfc_code * code)
193 {
194 return build1_v (LABEL_EXPR, code->ext.entry->label);
195 }
196
197
198 /* Check for dependencies between INTENT(IN) and INTENT(OUT) arguments of
199 elemental subroutines. Make temporaries for output arguments if any such
200 dependencies are found. Output arguments are chosen because internal_unpack
201 can be used, as is, to copy the result back to the variable. */
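/* A minimal illustration (the names in this example are made up):

     ELEMENTAL SUBROUTINE set (out, in)
       INTEGER, INTENT(OUT) :: out
       INTEGER, INTENT(IN)  :: in
     END SUBROUTINE

     INTEGER :: a(5)
     CALL set (a(1:4), a(2:5))

   The INTENT(OUT) section overlaps the INTENT(IN) section, so the result
   is computed into a temporary and unpacked back into a(1:4) afterwards.  */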
202 static void
203 gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
204 gfc_symbol * sym, gfc_actual_arglist * arg,
205 gfc_dep_check check_variable)
206 {
207 gfc_actual_arglist *arg0;
208 gfc_expr *e;
209 gfc_formal_arglist *formal;
210 gfc_loopinfo tmp_loop;
211 gfc_se parmse;
212 gfc_ss *ss;
213 gfc_ss_info *info;
214 gfc_symbol *fsym;
215 int n;
216 tree data;
217 tree offset;
218 tree size;
219 tree tmp;
220
221 if (loopse->ss == NULL)
222 return;
223
224 ss = loopse->ss;
225 arg0 = arg;
226 formal = sym->formal;
227
228 /* Loop over all the arguments testing for dependencies. */
229 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
230 {
231 e = arg->expr;
232 if (e == NULL)
233 continue;
234
235 /* Obtain the info structure for the current argument. */
236 info = NULL;
237 for (ss = loopse->ss; ss && ss != gfc_ss_terminator; ss = ss->next)
238 {
239 if (ss->expr != e)
240 continue;
241 info = &ss->data.info;
242 break;
243 }
244
245 /* If there is a dependency, create a temporary and use it
246 instead of the variable. */
247 fsym = formal ? formal->sym : NULL;
248 if (e->expr_type == EXPR_VARIABLE
249 && e->rank && fsym
250 && fsym->attr.intent != INTENT_IN
251 && gfc_check_fncall_dependency (e, fsym->attr.intent,
252 sym, arg0, check_variable))
253 {
254 tree initial, temptype;
255 stmtblock_t temp_post;
256
257 /* Make a local loopinfo for the temporary creation, so that
258 none of the other ss->info's have to be renormalized. */
259 gfc_init_loopinfo (&tmp_loop);
260 for (n = 0; n < info->dimen; n++)
261 {
262 tmp_loop.to[n] = loopse->loop->to[n];
263 tmp_loop.from[n] = loopse->loop->from[n];
264 tmp_loop.order[n] = loopse->loop->order[n];
265 }
266
267 /* Obtain the argument descriptor for unpacking. */
268 gfc_init_se (&parmse, NULL);
269 parmse.want_pointer = 1;
270 gfc_conv_expr_descriptor (&parmse, e, gfc_walk_expr (e));
271 gfc_add_block_to_block (&se->pre, &parmse.pre);
272
273 /* If we've got INTENT(INOUT), initialize the array temporary with
274 a copy of the values. */
275 if (fsym->attr.intent == INTENT_INOUT)
276 initial = parmse.expr;
277 else
278 initial = NULL_TREE;
279
280 /* Find the type of the temporary to create; we don't use the type
281 of e itself as this breaks for subcomponent-references in e (where
282 the type of e is that of the final reference, but parmse.expr's
283 type corresponds to the full derived-type). */
284 /* TODO: Fix this somehow so we don't need a temporary of the whole
285 array but instead only the components referenced. */
286 temptype = TREE_TYPE (parmse.expr); /* Pointer to descriptor. */
287 gcc_assert (TREE_CODE (temptype) == POINTER_TYPE);
288 temptype = TREE_TYPE (temptype);
289 temptype = gfc_get_element_type (temptype);
290
291 /* Generate the temporary. Cleaning up the temporary should be the
292 very last thing done, so we add the code to a new block and add it
293 to se->post as last instructions. */
294 size = gfc_create_var (gfc_array_index_type, NULL);
295 data = gfc_create_var (pvoid_type_node, NULL);
296 gfc_init_block (&temp_post);
297 tmp = gfc_trans_create_temp_array (&se->pre, &temp_post,
298 &tmp_loop, info, temptype,
299 initial,
300 false, true, false,
301 &arg->expr->where);
302 gfc_add_modify (&se->pre, size, tmp);
303 tmp = fold_convert (pvoid_type_node, info->data);
304 gfc_add_modify (&se->pre, data, tmp);
305
306 /* Calculate the offset for the temporary. */
307 offset = gfc_index_zero_node;
308 for (n = 0; n < info->dimen; n++)
309 {
310 tmp = gfc_conv_descriptor_stride (info->descriptor,
311 gfc_rank_cst[n]);
312 tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
313 loopse->loop->from[n], tmp);
314 offset = fold_build2 (MINUS_EXPR, gfc_array_index_type,
315 offset, tmp);
316 }
317 info->offset = gfc_create_var (gfc_array_index_type, NULL);
318 gfc_add_modify (&se->pre, info->offset, offset);
319
320 /* Copy the result back using unpack. */
321 tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
322 gfc_add_expr_to_block (&se->post, tmp);
323
324 /* parmse.pre is already added above. */
325 gfc_add_block_to_block (&se->post, &parmse.post);
326 gfc_add_block_to_block (&se->post, &temp_post);
327 }
328 }
329 }
330
331
332 /* Translate the CALL statement. Builds a call to an F95 subroutine. */
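/* For example (illustrative only), a call with alternate return
   specifiers

     CALL sub (x, *10, *20)

   is resolved by the front end into a call whose integer result selects
   one of the labels via a following EXEC_SELECT; that is the
   'has_alternate_specifier' path handled below.  */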
333
334 tree
335 gfc_trans_call (gfc_code * code, bool dependency_check)
336 {
337 gfc_se se;
338 gfc_ss * ss;
339 int has_alternate_specifier;
340 gfc_dep_check check_variable;
341
342 /* A CALL starts a new block because the actual arguments may have to
343 be evaluated first. */
344 gfc_init_se (&se, NULL);
345 gfc_start_block (&se.pre);
346
347 gcc_assert (code->resolved_sym);
348
349 ss = gfc_ss_terminator;
350 if (code->resolved_sym->attr.elemental)
351 ss = gfc_walk_elemental_function_args (ss, code->ext.actual, GFC_SS_REFERENCE);
352
353 /* This is not an elemental subroutine call with array valued arguments. */
354 if (ss == gfc_ss_terminator)
355 {
356
357 /* Translate the call. */
358 has_alternate_specifier
359 = gfc_conv_function_call (&se, code->resolved_sym, code->ext.actual,
360 NULL_TREE);
361
362 /* A subroutine without side-effect, by definition, does nothing! */
363 TREE_SIDE_EFFECTS (se.expr) = 1;
364
365 /* Chain the pieces together and return the block. */
366 if (has_alternate_specifier)
367 {
368 gfc_code *select_code;
369 gfc_symbol *sym;
370 select_code = code->next;
371 gcc_assert(select_code->op == EXEC_SELECT);
372 sym = select_code->expr->symtree->n.sym;
373 se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
374 if (sym->backend_decl == NULL)
375 sym->backend_decl = gfc_get_symbol_decl (sym);
376 gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
377 }
378 else
379 gfc_add_expr_to_block (&se.pre, se.expr);
380
381 gfc_add_block_to_block (&se.pre, &se.post);
382 }
383
384 else
385 {
386 /* An elemental subroutine call with array valued arguments has
387 to be scalarized. */
388 gfc_loopinfo loop;
389 stmtblock_t body;
390 stmtblock_t block;
391 gfc_se loopse;
392 gfc_se depse;
393
394 /* gfc_walk_elemental_function_args renders the ss chain in the
395 reverse order to the actual argument order. */
396 ss = gfc_reverse_ss (ss);
397
398 /* Initialize the loop. */
399 gfc_init_se (&loopse, NULL);
400 gfc_init_loopinfo (&loop);
401 gfc_add_ss_to_loop (&loop, ss);
402
403 gfc_conv_ss_startstride (&loop);
404 /* TODO: gfc_conv_loop_setup generates a temporary for vector
405 subscripts. This could be prevented in the elemental case
406 as temporaries are handled separately
407 (below in gfc_conv_elemental_dependencies). */
408 gfc_conv_loop_setup (&loop, &code->expr->where);
409 gfc_mark_ss_chain_used (ss, 1);
410
411 /* Convert the arguments, checking for dependencies. */
412 gfc_copy_loopinfo_to_se (&loopse, &loop);
413 loopse.ss = ss;
414
415 /* For operator assignment, do dependency checking. */
416 if (dependency_check)
417 check_variable = ELEM_CHECK_VARIABLE;
418 else
419 check_variable = ELEM_DONT_CHECK_VARIABLE;
420
421 gfc_init_se (&depse, NULL);
422 gfc_conv_elemental_dependencies (&depse, &loopse, code->resolved_sym,
423 code->ext.actual, check_variable);
424
425 gfc_add_block_to_block (&loop.pre, &depse.pre);
426 gfc_add_block_to_block (&loop.post, &depse.post);
427
428 /* Generate the loop body. */
429 gfc_start_scalarized_body (&loop, &body);
430 gfc_init_block (&block);
431
432 /* Add the subroutine call to the block. */
433 gfc_conv_function_call (&loopse, code->resolved_sym, code->ext.actual,
434 NULL_TREE);
435 gfc_add_expr_to_block (&loopse.pre, loopse.expr);
436
437 gfc_add_block_to_block (&block, &loopse.pre);
438 gfc_add_block_to_block (&block, &loopse.post);
439
440 /* Finish up the loop block and the loop. */
441 gfc_add_expr_to_block (&body, gfc_finish_block (&block));
442 gfc_trans_scalarizing_loops (&loop, &body);
443 gfc_add_block_to_block (&se.pre, &loop.pre);
444 gfc_add_block_to_block (&se.pre, &loop.post);
445 gfc_add_block_to_block (&se.pre, &se.post);
446 gfc_cleanup_loop (&loop);
447 }
448
449 return gfc_finish_block (&se.pre);
450 }
451
452
453 /* Translate the RETURN statement. */
454
455 tree
456 gfc_trans_return (gfc_code * code ATTRIBUTE_UNUSED)
457 {
458 if (code->expr)
459 {
460 gfc_se se;
461 tree tmp;
462 tree result;
463
464 /* If code->expr is not NULL, this return statement must appear
465 in a subroutine and current_fake_result_decl has already
466 been generated. */
467
468 result = gfc_get_fake_result_decl (NULL, 0);
469 if (!result)
470 {
471 gfc_warning ("An alternate return at %L without a * dummy argument",
472 &code->expr->where);
473 return build1_v (GOTO_EXPR, gfc_get_return_label ());
474 }
475
476 /* Start a new block for this statement. */
477 gfc_init_se (&se, NULL);
478 gfc_start_block (&se.pre);
479
480 gfc_conv_expr (&se, code->expr);
481
482 tmp = fold_build2 (MODIFY_EXPR, TREE_TYPE (result), result,
483 fold_convert (TREE_TYPE (result), se.expr));
484 gfc_add_expr_to_block (&se.pre, tmp);
485
486 tmp = build1_v (GOTO_EXPR, gfc_get_return_label ());
487 gfc_add_expr_to_block (&se.pre, tmp);
488 gfc_add_block_to_block (&se.pre, &se.post);
489 return gfc_finish_block (&se.pre);
490 }
491 else
492 return build1_v (GOTO_EXPR, gfc_get_return_label ());
493 }
494
495
496 /* Translate the PAUSE statement. We have to translate this statement
497 to a runtime library call. */
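/* A rough sketch of the calls generated below (the actual library entry
   points are whatever gfor_fndecl_pause_numeric and
   gfor_fndecl_pause_string are bound to; their exact signatures are not
   spelled out here):

     PAUSE 1234     ->  pause_numeric (1234)
     PAUSE 'ready'  ->  pause_string ("ready", 5)
*/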
498
499 tree
500 gfc_trans_pause (gfc_code * code)
501 {
502 tree gfc_int4_type_node = gfc_get_int_type (4);
503 gfc_se se;
504 tree tmp;
505
506 /* Start a new block for this statement. */
507 gfc_init_se (&se, NULL);
508 gfc_start_block (&se.pre);
509
510
511 if (code->expr == NULL)
512 {
513 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
514 tmp = build_call_expr (gfor_fndecl_pause_numeric, 1, tmp);
515 }
516 else
517 {
518 gfc_conv_expr_reference (&se, code->expr);
519 tmp = build_call_expr (gfor_fndecl_pause_string, 2,
520 se.expr, se.string_length);
521 }
522
523 gfc_add_expr_to_block (&se.pre, tmp);
524
525 gfc_add_block_to_block (&se.pre, &se.post);
526
527 return gfc_finish_block (&se.pre);
528 }
529
530
531 /* Translate the STOP statement. We have to translate this statement
532 to a runtime library call. */
533
534 tree
535 gfc_trans_stop (gfc_code * code)
536 {
537 tree gfc_int4_type_node = gfc_get_int_type (4);
538 gfc_se se;
539 tree tmp;
540
541 /* Start a new block for this statement. */
542 gfc_init_se (&se, NULL);
543 gfc_start_block (&se.pre);
544
545
546 if (code->expr == NULL)
547 {
548 tmp = build_int_cst (gfc_int4_type_node, code->ext.stop_code);
549 tmp = build_call_expr (gfor_fndecl_stop_numeric, 1, tmp);
550 }
551 else
552 {
553 gfc_conv_expr_reference (&se, code->expr);
554 tmp = build_call_expr (gfor_fndecl_stop_string, 2,
555 se.expr, se.string_length);
556 }
557
558 gfc_add_expr_to_block (&se.pre, tmp);
559
560 gfc_add_block_to_block (&se.pre, &se.post);
561
562 return gfc_finish_block (&se.pre);
563 }
564
565
566 /* Generate GENERIC for the IF construct. This function also deals with
567 the simple IF statement, because the front end translates the IF
568 statement into an IF construct.
569
570 We translate:
571
572 IF (cond) THEN
573 then_clause
574 ELSEIF (cond2)
575 elseif_clause
576 ELSE
577 else_clause
578 ENDIF
579
580 into:
581
582 pre_cond_s;
583 if (cond_s)
584 {
585 then_clause;
586 }
587 else
588 {
589 pre_cond_s
590 if (cond_s)
591 {
592 elseif_clause
593 }
594 else
595 {
596 else_clause;
597 }
598 }
599
600 where COND_S is the simplified version of the predicate. PRE_COND_S
601 are the pre side-effects produced by the translation of the
602 conditional.
603 We need to build the chain recursively; otherwise we run into
604 problems with folding incomplete statements. */
605
606 static tree
607 gfc_trans_if_1 (gfc_code * code)
608 {
609 gfc_se if_se;
610 tree stmt, elsestmt;
611
612 /* Check for an unconditional ELSE clause. */
613 if (!code->expr)
614 return gfc_trans_code (code->next);
615
616 /* Initialize a statement builder for each block. Puts in NULL_TREEs. */
617 gfc_init_se (&if_se, NULL);
618 gfc_start_block (&if_se.pre);
619
620 /* Calculate the IF condition expression. */
621 gfc_conv_expr_val (&if_se, code->expr);
622
623 /* Translate the THEN clause. */
624 stmt = gfc_trans_code (code->next);
625
626 /* Translate the ELSE clause. */
627 if (code->block)
628 elsestmt = gfc_trans_if_1 (code->block);
629 else
630 elsestmt = build_empty_stmt ();
631
632 /* Build the condition expression and add it to the condition block. */
633 stmt = fold_build3 (COND_EXPR, void_type_node, if_se.expr, stmt, elsestmt);
634
635 gfc_add_expr_to_block (&if_se.pre, stmt);
636
637 /* Finish off this statement. */
638 return gfc_finish_block (&if_se.pre);
639 }
640
641 tree
642 gfc_trans_if (gfc_code * code)
643 {
644 /* Ignore the top EXEC_IF, it only announces an IF construct. The
645 actual code we must translate is in code->block. */
646
647 return gfc_trans_if_1 (code->block);
648 }
649
650
651 /* Translate an arithmetic IF expression.
652
653 IF (cond) label1, label2, label3 translates to
654
655 if (cond <= 0)
656 {
657 if (cond < 0)
658 goto label1;
659 else // cond == 0
660 goto label2;
661 }
662 else // cond > 0
663 goto label3;
664
665 An optimized version can be generated in case of equal labels.
666 E.g., if label1 is equal to label2, we can translate it to
667
668 if (cond <= 0)
669 goto label1;
670 else
671 goto label3;
672 */
673
674 tree
675 gfc_trans_arithmetic_if (gfc_code * code)
676 {
677 gfc_se se;
678 tree tmp;
679 tree branch1;
680 tree branch2;
681 tree zero;
682
683 /* Start a new block. */
684 gfc_init_se (&se, NULL);
685 gfc_start_block (&se.pre);
686
687 /* Pre-evaluate COND. */
688 gfc_conv_expr_val (&se, code->expr);
689 se.expr = gfc_evaluate_now (se.expr, &se.pre);
690
691 /* Build something to compare with. */
692 zero = gfc_build_const (TREE_TYPE (se.expr), integer_zero_node);
693
694 if (code->label->value != code->label2->value)
695 {
696 /* If (cond < 0) take branch1 else take branch2.
697 First build jumps to the COND .LT. 0 and the COND .EQ. 0 cases. */
698 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
699 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label2));
700
701 if (code->label->value != code->label3->value)
702 tmp = fold_build2 (LT_EXPR, boolean_type_node, se.expr, zero);
703 else
704 tmp = fold_build2 (NE_EXPR, boolean_type_node, se.expr, zero);
705
706 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
707 }
708 else
709 branch1 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label));
710
711 if (code->label->value != code->label3->value
712 && code->label2->value != code->label3->value)
713 {
714 /* if (cond <= 0) take branch1 else take branch2. */
715 branch2 = build1_v (GOTO_EXPR, gfc_get_label_decl (code->label3));
716 tmp = fold_build2 (LE_EXPR, boolean_type_node, se.expr, zero);
717 branch1 = fold_build3 (COND_EXPR, void_type_node, tmp, branch1, branch2);
718 }
719
720 /* Append the COND_EXPR to the evaluation of COND, and return. */
721 gfc_add_expr_to_block (&se.pre, branch1);
722 return gfc_finish_block (&se.pre);
723 }
724
725
726 /* Translate the simple DO construct. This is where the loop variable has
727 integer type and step +-1. We can't use this in the general case
728 because integer overflow and floating point errors could give incorrect
729 results.
730 We translate a do loop from:
731
732 DO dovar = from, to, step
733 body
734 END DO
735
736 to:
737
738 [Evaluate loop bounds and step]
739 dovar = from;
740 if ((step > 0) ? (dovar <= to) : (dovar >= to))
741 {
742 for (;;)
743 {
744 body;
745 cycle_label:
746 cond = (dovar == to);
747 dovar += step;
748 if (cond) goto end_label;
749 }
750 }
751 end_label:
752
753 This helps the optimizers by avoiding the extra induction variable
754 used in the general case. */
755
756 static tree
757 gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
758 tree from, tree to, tree step)
759 {
760 stmtblock_t body;
761 tree type;
762 tree cond;
763 tree tmp;
764 tree saved_dovar = NULL;
765 tree cycle_label;
766 tree exit_label;
767
768 type = TREE_TYPE (dovar);
769
770 /* Initialize the DO variable: dovar = from. */
771 gfc_add_modify (pblock, dovar, from);
772
773 /* Save value for do-tinkering checking. */
774 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
775 {
776 saved_dovar = gfc_create_var (type, ".saved_dovar");
777 gfc_add_modify (pblock, saved_dovar, dovar);
778 }
779
780 /* Cycle and exit statements are implemented with gotos. */
781 cycle_label = gfc_build_label_decl (NULL_TREE);
782 exit_label = gfc_build_label_decl (NULL_TREE);
783
784 /* Put the labels where they can be found later. See gfc_trans_do(). */
785 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
786
787 /* Loop body. */
788 gfc_start_block (&body);
789
790 /* Main loop body. */
791 tmp = gfc_trans_code (code->block->next);
792 gfc_add_expr_to_block (&body, tmp);
793
794 /* Label for cycle statements (if needed). */
795 if (TREE_USED (cycle_label))
796 {
797 tmp = build1_v (LABEL_EXPR, cycle_label);
798 gfc_add_expr_to_block (&body, tmp);
799 }
800
801 /* Check whether someone has modified the loop variable. */
802 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
803 {
804 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
805 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
806 "Loop variable has been modified");
807 }
808
809 /* Evaluate the loop condition. */
810 cond = fold_build2 (EQ_EXPR, boolean_type_node, dovar, to);
811 cond = gfc_evaluate_now (cond, &body);
812
813 /* Increment the loop variable. */
814 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
815 gfc_add_modify (&body, dovar, tmp);
816
817 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
818 gfc_add_modify (&body, saved_dovar, dovar);
819
820 /* The loop exit. */
821 tmp = build1_v (GOTO_EXPR, exit_label);
822 TREE_USED (exit_label) = 1;
823 tmp = fold_build3 (COND_EXPR, void_type_node,
824 cond, tmp, build_empty_stmt ());
825 gfc_add_expr_to_block (&body, tmp);
826
827 /* Finish the loop body. */
828 tmp = gfc_finish_block (&body);
829 tmp = build1_v (LOOP_EXPR, tmp);
830
831 /* Only execute the loop if the number of iterations is positive. */
832 if (tree_int_cst_sgn (step) > 0)
833 cond = fold_build2 (LE_EXPR, boolean_type_node, dovar, to);
834 else
835 cond = fold_build2 (GE_EXPR, boolean_type_node, dovar, to);
836 tmp = fold_build3 (COND_EXPR, void_type_node,
837 cond, tmp, build_empty_stmt ());
838 gfc_add_expr_to_block (pblock, tmp);
839
840 /* Add the exit label. */
841 tmp = build1_v (LABEL_EXPR, exit_label);
842 gfc_add_expr_to_block (pblock, tmp);
843
844 return gfc_finish_block (pblock);
845 }
846
847 /* Translate the DO construct. This obviously is one of the most
848 important ones to get right with any compiler, but especially
849 so for Fortran.
850
851 We special case some loop forms as described in gfc_trans_simple_do.
852 For other cases we implement them with a separate loop count,
853 as described in the standard.
854
855 We translate a do loop from:
856
857 DO dovar = from, to, step
858 body
859 END DO
860
861 to:
862
863 [evaluate loop bounds and step]
864 empty = (step > 0 ? to < from : to > from);
865 countm1 = (to - from) / step;
866 dovar = from;
867 if (empty) goto exit_label;
868 for (;;)
869 {
870 body;
871 cycle_label:
872 dovar += step;
873 if (countm1 == 0) goto exit_label;
874 countm1--;
875 }
876 exit_label:
877
878 countm1 is an unsigned integer. It is equal to the loop count minus one,
879 because the loop count itself can overflow. */
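/* Illustrative example of why the count is kept as countm1 in an
   unsigned type: with default INTEGER(4),

     DO i = -HUGE(i), HUGE(i)

   executes 2**32 - 1 iterations. That count overflows a signed 32-bit
   variable, but countm1 = 2**32 - 2 still fits in the unsigned type
   chosen below.  */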
880
881 tree
882 gfc_trans_do (gfc_code * code)
883 {
884 gfc_se se;
885 tree dovar;
886 tree saved_dovar = NULL;
887 tree from;
888 tree to;
889 tree step;
890 tree countm1;
891 tree type;
892 tree utype;
893 tree cond;
894 tree cycle_label;
895 tree exit_label;
896 tree tmp;
897 tree pos_step;
898 stmtblock_t block;
899 stmtblock_t body;
900
901 gfc_start_block (&block);
902
903 /* Evaluate all the expressions in the iterator. */
904 gfc_init_se (&se, NULL);
905 gfc_conv_expr_lhs (&se, code->ext.iterator->var);
906 gfc_add_block_to_block (&block, &se.pre);
907 dovar = se.expr;
908 type = TREE_TYPE (dovar);
909
910 gfc_init_se (&se, NULL);
911 gfc_conv_expr_val (&se, code->ext.iterator->start);
912 gfc_add_block_to_block (&block, &se.pre);
913 from = gfc_evaluate_now (se.expr, &block);
914
915 gfc_init_se (&se, NULL);
916 gfc_conv_expr_val (&se, code->ext.iterator->end);
917 gfc_add_block_to_block (&block, &se.pre);
918 to = gfc_evaluate_now (se.expr, &block);
919
920 gfc_init_se (&se, NULL);
921 gfc_conv_expr_val (&se, code->ext.iterator->step);
922 gfc_add_block_to_block (&block, &se.pre);
923 step = gfc_evaluate_now (se.expr, &block);
924
925 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
926 {
927 tmp = fold_build2 (EQ_EXPR, boolean_type_node, step,
928 fold_convert (type, integer_zero_node));
929 gfc_trans_runtime_check (true, false, tmp, &block, &code->loc,
930 "DO step value is zero");
931 }
932
933 /* Special case simple loops. */
934 if (TREE_CODE (type) == INTEGER_TYPE
935 && (integer_onep (step)
936 || tree_int_cst_equal (step, integer_minus_one_node)))
937 return gfc_trans_simple_do (code, &block, dovar, from, to, step);
938
939 pos_step = fold_build2 (GT_EXPR, boolean_type_node, step,
940 fold_convert (type, integer_zero_node));
941
942 if (TREE_CODE (type) == INTEGER_TYPE)
943 utype = unsigned_type_for (type);
944 else
945 utype = unsigned_type_for (gfc_array_index_type);
946 countm1 = gfc_create_var (utype, "countm1");
947
948 /* Cycle and exit statements are implemented with gotos. */
949 cycle_label = gfc_build_label_decl (NULL_TREE);
950 exit_label = gfc_build_label_decl (NULL_TREE);
951 TREE_USED (exit_label) = 1;
952
953 /* Initialize the DO variable: dovar = from. */
954 gfc_add_modify (&block, dovar, from);
955
956 /* Save value for do-tinkering checking. */
957 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
958 {
959 saved_dovar = gfc_create_var (type, ".saved_dovar");
960 gfc_add_modify (&block, saved_dovar, dovar);
961 }
962
963 /* Initialize loop count and jump to exit label if the loop is empty.
964 This code is executed before we enter the loop body. We generate:
965 if (step > 0)
966 {
967 if (to < from) goto exit_label;
968 countm1 = (to - from) / step;
969 }
970 else
971 {
972 if (to > from) goto exit_label;
973 countm1 = (from - to) / -step;
974 } */
975 if (TREE_CODE (type) == INTEGER_TYPE)
976 {
977 tree pos, neg;
978
979 tmp = fold_build2 (LT_EXPR, boolean_type_node, to, from);
980 pos = fold_build3 (COND_EXPR, void_type_node, tmp,
981 build1_v (GOTO_EXPR, exit_label),
982 build_empty_stmt ());
983 tmp = fold_build2 (MINUS_EXPR, type, to, from);
984 tmp = fold_convert (utype, tmp);
985 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
986 fold_convert (utype, step));
987 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
988 pos = build2 (COMPOUND_EXPR, void_type_node, pos, tmp);
989
990 tmp = fold_build2 (GT_EXPR, boolean_type_node, to, from);
991 neg = fold_build3 (COND_EXPR, void_type_node, tmp,
992 build1_v (GOTO_EXPR, exit_label),
993 build_empty_stmt ());
994 tmp = fold_build2 (MINUS_EXPR, type, from, to);
995 tmp = fold_convert (utype, tmp);
996 tmp = fold_build2 (TRUNC_DIV_EXPR, utype, tmp,
997 fold_convert (utype, fold_build1 (NEGATE_EXPR,
998 type, step)));
999 tmp = build2 (MODIFY_EXPR, void_type_node, countm1, tmp);
1000 neg = build2 (COMPOUND_EXPR, void_type_node, neg, tmp);
1001
1002 tmp = fold_build3 (COND_EXPR, void_type_node, pos_step, pos, neg);
1003 gfc_add_expr_to_block (&block, tmp);
1004 }
1005 else
1006 {
1007 /* TODO: We could use the same width as the real type.
1008 This would probably cause more problems than it solves
1009 when we implement "long double" types. */
1010
1011 tmp = fold_build2 (MINUS_EXPR, type, to, from);
1012 tmp = fold_build2 (RDIV_EXPR, type, tmp, step);
1013 tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
1014 gfc_add_modify (&block, countm1, tmp);
1015
1016 /* We need a special check for empty loops:
1017 empty = (step > 0 ? to < from : to > from); */
1018 tmp = fold_build3 (COND_EXPR, boolean_type_node, pos_step,
1019 fold_build2 (LT_EXPR, boolean_type_node, to, from),
1020 fold_build2 (GT_EXPR, boolean_type_node, to, from));
1021 /* If the loop is empty, go directly to the exit label. */
1022 tmp = fold_build3 (COND_EXPR, void_type_node, tmp,
1023 build1_v (GOTO_EXPR, exit_label),
1024 build_empty_stmt ());
1025 gfc_add_expr_to_block (&block, tmp);
1026 }
1027
1028 /* Loop body. */
1029 gfc_start_block (&body);
1030
1031 /* Put these labels where they can be found later. We put the
1032 labels in a TREE_LIST node (because TREE_CHAIN is already
1033 used). cycle_label goes in TREE_PURPOSE (backend_decl), exit
1034 label in TREE_VALUE (backend_decl). */
1035
1036 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1037
1038 /* Main loop body. */
1039 tmp = gfc_trans_code (code->block->next);
1040 gfc_add_expr_to_block (&body, tmp);
1041
1042 /* Label for cycle statements (if needed). */
1043 if (TREE_USED (cycle_label))
1044 {
1045 tmp = build1_v (LABEL_EXPR, cycle_label);
1046 gfc_add_expr_to_block (&body, tmp);
1047 }
1048
1049 /* Check whether someone has modified the loop variable. */
1050 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1051 {
1052 tmp = fold_build2 (NE_EXPR, boolean_type_node, dovar, saved_dovar);
1053 gfc_trans_runtime_check (true, false, tmp, &body, &code->loc,
1054 "Loop variable has been modified");
1055 }
1056
1057 /* Increment the loop variable. */
1058 tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
1059 gfc_add_modify (&body, dovar, tmp);
1060
1061 if (gfc_option.rtcheck & GFC_RTCHECK_DO)
1062 gfc_add_modify (&body, saved_dovar, dovar);
1063
1064 /* End with the loop condition. Loop until countm1 == 0. */
1065 cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
1066 build_int_cst (utype, 0));
1067 tmp = build1_v (GOTO_EXPR, exit_label);
1068 tmp = fold_build3 (COND_EXPR, void_type_node,
1069 cond, tmp, build_empty_stmt ());
1070 gfc_add_expr_to_block (&body, tmp);
1071
1072 /* Decrement the loop count. */
1073 tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
1074 gfc_add_modify (&body, countm1, tmp);
1075
1076 /* End of loop body. */
1077 tmp = gfc_finish_block (&body);
1078
1079 /* The for loop itself. */
1080 tmp = build1_v (LOOP_EXPR, tmp);
1081 gfc_add_expr_to_block (&block, tmp);
1082
1083 /* Add the exit label. */
1084 tmp = build1_v (LABEL_EXPR, exit_label);
1085 gfc_add_expr_to_block (&block, tmp);
1086
1087 return gfc_finish_block (&block);
1088 }
1089
1090
1091 /* Translate the DO WHILE construct.
1092
1093 We translate
1094
1095 DO WHILE (cond)
1096 body
1097 END DO
1098
1099 to:
1100
1101 for ( ; ; )
1102 {
1103 pre_cond;
1104 if (! cond) goto exit_label;
1105 body;
1106 cycle_label:
1107 }
1108 exit_label:
1109
1110 Because the evaluation of the exit condition `cond' may have side
1111 effects, we can't do much for empty loop bodies. The backend optimizers
1112 should be smart enough to eliminate any dead loops. */
1113
1114 tree
1115 gfc_trans_do_while (gfc_code * code)
1116 {
1117 gfc_se cond;
1118 tree tmp;
1119 tree cycle_label;
1120 tree exit_label;
1121 stmtblock_t block;
1122
1123 /* Everything we build here is part of the loop body. */
1124 gfc_start_block (&block);
1125
1126 /* Cycle and exit statements are implemented with gotos. */
1127 cycle_label = gfc_build_label_decl (NULL_TREE);
1128 exit_label = gfc_build_label_decl (NULL_TREE);
1129
1130 /* Put the labels where they can be found later. See gfc_trans_do(). */
1131 code->block->backend_decl = tree_cons (cycle_label, exit_label, NULL);
1132
1133 /* Create a GIMPLE version of the exit condition. */
1134 gfc_init_se (&cond, NULL);
1135 gfc_conv_expr_val (&cond, code->expr);
1136 gfc_add_block_to_block (&block, &cond.pre);
1137 cond.expr = fold_build1 (TRUTH_NOT_EXPR, boolean_type_node, cond.expr);
1138
1139 /* Build "IF (! cond) GOTO exit_label". */
1140 tmp = build1_v (GOTO_EXPR, exit_label);
1141 TREE_USED (exit_label) = 1;
1142 tmp = fold_build3 (COND_EXPR, void_type_node,
1143 cond.expr, tmp, build_empty_stmt ());
1144 gfc_add_expr_to_block (&block, tmp);
1145
1146 /* The main body of the loop. */
1147 tmp = gfc_trans_code (code->block->next);
1148 gfc_add_expr_to_block (&block, tmp);
1149
1150 /* Label for cycle statements (if needed). */
1151 if (TREE_USED (cycle_label))
1152 {
1153 tmp = build1_v (LABEL_EXPR, cycle_label);
1154 gfc_add_expr_to_block (&block, tmp);
1155 }
1156
1157 /* End of loop body. */
1158 tmp = gfc_finish_block (&block);
1159
1160 gfc_init_block (&block);
1161 /* Build the loop. */
1162 tmp = build1_v (LOOP_EXPR, tmp);
1163 gfc_add_expr_to_block (&block, tmp);
1164
1165 /* Add the exit label. */
1166 tmp = build1_v (LABEL_EXPR, exit_label);
1167 gfc_add_expr_to_block (&block, tmp);
1168
1169 return gfc_finish_block (&block);
1170 }
1171
1172
1173 /* Translate the SELECT CASE construct for INTEGER case expressions,
1174 without killing all potential optimizations. The problem is that
1175 Fortran allows unbounded cases, but the back-end does not, so we
1176 need to intercept those before we enter the equivalent SWITCH_EXPR
1177 that we build.
1178
1179 For example, we translate this,
1180
1181 SELECT CASE (expr)
1182 CASE (:100,101,105:115)
1183 block_1
1184 CASE (190:199,200:)
1185 block_2
1186 CASE (300)
1187 block_3
1188 CASE DEFAULT
1189 block_4
1190 END SELECT
1191
1192 to the GENERIC equivalent,
1193
1194 switch (expr)
1195 {
1196 case (minimum value for typeof(expr)) ... 100:
1197 case 101:
1198 case 105 ... 115:
1199 block_1;
1200 goto end_label;
1201 
1202 case 200 ... (maximum value for typeof(expr)):
1203 case 190 ... 199:
1204 block_2;
1205 goto end_label;
1206
1207 case 300:
1208 block_3;
1209 goto end_label;
1210
1211 default:
1212 block_4;
1213 goto end_label;
1214 }
1215
1216 end_label: */
1217
1218 static tree
1219 gfc_trans_integer_select (gfc_code * code)
1220 {
1221 gfc_code *c;
1222 gfc_case *cp;
1223 tree end_label;
1224 tree tmp;
1225 gfc_se se;
1226 stmtblock_t block;
1227 stmtblock_t body;
1228
1229 gfc_start_block (&block);
1230
1231 /* Calculate the switch expression. */
1232 gfc_init_se (&se, NULL);
1233 gfc_conv_expr_val (&se, code->expr);
1234 gfc_add_block_to_block (&block, &se.pre);
1235
1236 end_label = gfc_build_label_decl (NULL_TREE);
1237
1238 gfc_init_block (&body);
1239
1240 for (c = code->block; c; c = c->block)
1241 {
1242 for (cp = c->ext.case_list; cp; cp = cp->next)
1243 {
1244 tree low, high;
1245 tree label;
1246
1247 /* Assume it's the default case. */
1248 low = high = NULL_TREE;
1249
1250 if (cp->low)
1251 {
1252 low = gfc_conv_mpz_to_tree (cp->low->value.integer,
1253 cp->low->ts.kind);
1254
1255 /* If there's only a lower bound, set the high bound to the
1256 maximum value of the case expression. */
1257 if (!cp->high)
1258 high = TYPE_MAX_VALUE (TREE_TYPE (se.expr));
1259 }
1260
1261 if (cp->high)
1262 {
1263 /* Three cases are possible here:
1264
1265 1) There is no lower bound, e.g. CASE (:N).
1266 2) There is a lower bound .NE. high bound, that is
1267 a case range, e.g. CASE (N:M) where M>N (we make
1268 sure that M>N during type resolution).
1269 3) There is a lower bound, and it has the same value
1270 as the high bound, e.g. CASE (N:N). This is our
1271 internal representation of CASE(N).
1272
1273 In the first and second case, we need to set a value for
1274 high. In the third case, we don't because the GCC middle
1275 end represents a single case value by just letting high be
1276 a NULL_TREE. We can't do that because we need to be able
1277 to represent unbounded cases. */
1278
1279 if (!cp->low
1280 || (cp->low
1281 && mpz_cmp (cp->low->value.integer,
1282 cp->high->value.integer) != 0))
1283 high = gfc_conv_mpz_to_tree (cp->high->value.integer,
1284 cp->high->ts.kind);
1285
1286 /* Unbounded case. */
1287 if (!cp->low)
1288 low = TYPE_MIN_VALUE (TREE_TYPE (se.expr));
1289 }
1290
1291 /* Build a label. */
1292 label = gfc_build_label_decl (NULL_TREE);
1293
1294 /* Add this case label.
1295 Pass 'label' as the third operand of the CASE_LABEL_EXPR, as the GCC back end expects. */
1296 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1297 low, high, label);
1298 gfc_add_expr_to_block (&body, tmp);
1299 }
1300
1301 /* Add the statements for this case. */
1302 tmp = gfc_trans_code (c->next);
1303 gfc_add_expr_to_block (&body, tmp);
1304
1305 /* Break to the end of the construct. */
1306 tmp = build1_v (GOTO_EXPR, end_label);
1307 gfc_add_expr_to_block (&body, tmp);
1308 }
1309
1310 tmp = gfc_finish_block (&body);
1311 tmp = build3_v (SWITCH_EXPR, se.expr, tmp, NULL_TREE);
1312 gfc_add_expr_to_block (&block, tmp);
1313
1314 tmp = build1_v (LABEL_EXPR, end_label);
1315 gfc_add_expr_to_block (&block, tmp);
1316
1317 return gfc_finish_block (&block);
1318 }
1319
1320
1321 /* Translate the SELECT CASE construct for LOGICAL case expressions.
1322
1323 There are only two cases possible here, even though the standard
1324 does allow three cases in a LOGICAL SELECT CASE construct: .TRUE.,
1325 .FALSE., and DEFAULT.
1326
1327 We never generate more than two blocks here. Instead, we always
1328 try to eliminate the DEFAULT case. This way, we can translate this
1329 kind of SELECT construct to a simple
1330
1331 if {} else {};
1332
1333 expression in GENERIC. */
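/* For example (illustrative only):

     SELECT CASE (flag)
     CASE (.TRUE.)
       block_t
     CASE DEFAULT
       block_f
     END SELECT

   becomes "if (flag) { block_t } else { block_f }", because the DEFAULT
   case can only be reached when flag is .FALSE..  */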
1334
1335 static tree
1336 gfc_trans_logical_select (gfc_code * code)
1337 {
1338 gfc_code *c;
1339 gfc_code *t, *f, *d;
1340 gfc_case *cp;
1341 gfc_se se;
1342 stmtblock_t block;
1343
1344 /* Assume we don't have any cases at all. */
1345 t = f = d = NULL;
1346
1347 /* Now see which ones we actually do have. We can have at most two
1348 cases in a single case list: one for .TRUE. and one for .FALSE.
1349 The default case is always separate. If the cases for .TRUE. and
1350 .FALSE. are in the same case list, the block for that case list
1351 is always executed, and we don't generate a COND_EXPR.
1352 for (c = code->block; c; c = c->block)
1353 {
1354 for (cp = c->ext.case_list; cp; cp = cp->next)
1355 {
1356 if (cp->low)
1357 {
1358 if (cp->low->value.logical == 0) /* .FALSE. */
1359 f = c;
1360 else /* if (cp->low->value.logical != 0), thus .TRUE. */
1361 t = c;
1362 }
1363 else
1364 d = c;
1365 }
1366 }
1367
1368 /* Start a new block. */
1369 gfc_start_block (&block);
1370
1371 /* Calculate the switch expression. We always need to do this
1372 because it may have side effects. */
1373 gfc_init_se (&se, NULL);
1374 gfc_conv_expr_val (&se, code->expr);
1375 gfc_add_block_to_block (&block, &se.pre);
1376
1377 if (t == f && t != NULL)
1378 {
1379 /* Cases for .TRUE. and .FALSE. are in the same block. Just
1380 translate the code for these cases, append it to the current
1381 block. */
1382 gfc_add_expr_to_block (&block, gfc_trans_code (t->next));
1383 }
1384 else
1385 {
1386 tree true_tree, false_tree, stmt;
1387
1388 true_tree = build_empty_stmt ();
1389 false_tree = build_empty_stmt ();
1390
1391 /* If we have a case for .TRUE. and for .FALSE., discard the default case.
1392 Otherwise, if .TRUE. or .FALSE. is missing and there is a default case,
1393 make the missing case the default case. */
1394 if (t != NULL && f != NULL)
1395 d = NULL;
1396 else if (d != NULL)
1397 {
1398 if (t == NULL)
1399 t = d;
1400 else
1401 f = d;
1402 }
1403
1404 /* Translate the code for each of these blocks, and append it to
1405 the current block. */
1406 if (t != NULL)
1407 true_tree = gfc_trans_code (t->next);
1408
1409 if (f != NULL)
1410 false_tree = gfc_trans_code (f->next);
1411
1412 stmt = fold_build3 (COND_EXPR, void_type_node, se.expr,
1413 true_tree, false_tree);
1414 gfc_add_expr_to_block (&block, stmt);
1415 }
1416
1417 return gfc_finish_block (&block);
1418 }
1419
1420
1421 /* Translate the SELECT CASE construct for CHARACTER case expressions.
1422 Instead of generating compares and jumps, it is far simpler to
1423 generate a data structure describing the cases in order and call a
1424 library subroutine that locates the right case.
1425 This is particularly true because this is the only case where we
1426 might have to dispose of a temporary.
1427 The library subroutine returns the number of the selected case, which
1428 is then dispatched through an ordinary SWITCH_EXPR. */
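/* A rough sketch of the generated shape (struct and variable names are
   the ones created below; the runtime routine is whatever
   gfor_fndecl_select_string is bound to, its signature is not spelled
   out here):

     static const struct _jump_struct_char1 jumptable[] = { ... };
     case_num = select_string (&jumptable, n, expr, expr_len);
     switch (case_num)
       {
       case 0: ... goto end_label;
       case 1: ... goto end_label;
       }
     end_label:
*/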
1429
1430 static tree
1431 gfc_trans_character_select (gfc_code *code)
1432 {
1433 tree init, node, end_label, tmp, type, case_num, label, fndecl;
1434 stmtblock_t block, body;
1435 gfc_case *cp, *d;
1436 gfc_code *c;
1437 gfc_se se;
1438 int n, k;
1439
1440 /* The jump table types are stored in static variables to avoid
1441 constructing them from scratch every single time. */
1442 static tree select_struct[2];
1443 static tree ss_string1[2], ss_string1_len[2];
1444 static tree ss_string2[2], ss_string2_len[2];
1445 static tree ss_target[2];
1446
1447 tree pchartype = gfc_get_pchar_type (code->expr->ts.kind);
1448
1449 if (code->expr->ts.kind == 1)
1450 k = 0;
1451 else if (code->expr->ts.kind == 4)
1452 k = 1;
1453 else
1454 gcc_unreachable ();
1455
1456 if (select_struct[k] == NULL)
1457 {
1458 select_struct[k] = make_node (RECORD_TYPE);
1459
1460 if (code->expr->ts.kind == 1)
1461 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char1");
1462 else if (code->expr->ts.kind == 4)
1463 TYPE_NAME (select_struct[k]) = get_identifier ("_jump_struct_char4");
1464 else
1465 gcc_unreachable ();
1466
1467 #undef ADD_FIELD
1468 #define ADD_FIELD(NAME, TYPE) \
1469 ss_##NAME[k] = gfc_add_field_to_struct \
1470 (&(TYPE_FIELDS (select_struct[k])), select_struct[k], \
1471 get_identifier (stringize(NAME)), TYPE)
1472
1473 ADD_FIELD (string1, pchartype);
1474 ADD_FIELD (string1_len, gfc_charlen_type_node);
1475
1476 ADD_FIELD (string2, pchartype);
1477 ADD_FIELD (string2_len, gfc_charlen_type_node);
1478
1479 ADD_FIELD (target, integer_type_node);
1480 #undef ADD_FIELD
1481
1482 gfc_finish_type (select_struct[k]);
1483 }
1484
1485 cp = code->block->ext.case_list;
1486 while (cp->left != NULL)
1487 cp = cp->left;
1488
1489 n = 0;
1490 for (d = cp; d; d = d->right)
1491 d->n = n++;
1492
1493 end_label = gfc_build_label_decl (NULL_TREE);
1494
1495 /* Generate the body */
1496 gfc_start_block (&block);
1497 gfc_init_block (&body);
1498
1499 for (c = code->block; c; c = c->block)
1500 {
1501 for (d = c->ext.case_list; d; d = d->next)
1502 {
1503 label = gfc_build_label_decl (NULL_TREE);
1504 tmp = fold_build3 (CASE_LABEL_EXPR, void_type_node,
1505 build_int_cst (NULL_TREE, d->n),
1506 build_int_cst (NULL_TREE, d->n), label);
1507 gfc_add_expr_to_block (&body, tmp);
1508 }
1509
1510 tmp = gfc_trans_code (c->next);
1511 gfc_add_expr_to_block (&body, tmp);
1512
1513 tmp = build1_v (GOTO_EXPR, end_label);
1514 gfc_add_expr_to_block (&body, tmp);
1515 }
1516
1517 /* Generate the structure describing the branches */
1518 init = NULL_TREE;
1519
1520 for(d = cp; d; d = d->right)
1521 {
1522 node = NULL_TREE;
1523
1524 gfc_init_se (&se, NULL);
1525
1526 if (d->low == NULL)
1527 {
1528 node = tree_cons (ss_string1[k], null_pointer_node, node);
1529 node = tree_cons (ss_string1_len[k], integer_zero_node, node);
1530 }
1531 else
1532 {
1533 gfc_conv_expr_reference (&se, d->low);
1534
1535 node = tree_cons (ss_string1[k], se.expr, node);
1536 node = tree_cons (ss_string1_len[k], se.string_length, node);
1537 }
1538
1539 if (d->high == NULL)
1540 {
1541 node = tree_cons (ss_string2[k], null_pointer_node, node);
1542 node = tree_cons (ss_string2_len[k], integer_zero_node, node);
1543 }
1544 else
1545 {
1546 gfc_init_se (&se, NULL);
1547 gfc_conv_expr_reference (&se, d->high);
1548
1549 node = tree_cons (ss_string2[k], se.expr, node);
1550 node = tree_cons (ss_string2_len[k], se.string_length, node);
1551 }
1552
1553 node = tree_cons (ss_target[k], build_int_cst (integer_type_node, d->n),
1554 node);
1555
1556 tmp = build_constructor_from_list (select_struct[k], nreverse (node));
1557 init = tree_cons (NULL_TREE, tmp, init);
1558 }
1559
1560 type = build_array_type (select_struct[k],
1561 build_index_type (build_int_cst (NULL_TREE, n-1)));
1562
1563 init = build_constructor_from_list (type, nreverse(init));
1564 TREE_CONSTANT (init) = 1;
1565 TREE_STATIC (init) = 1;
1566 /* Create a static variable to hold the jump table. */
1567 tmp = gfc_create_var (type, "jumptable");
1568 TREE_CONSTANT (tmp) = 1;
1569 TREE_STATIC (tmp) = 1;
1570 TREE_READONLY (tmp) = 1;
1571 DECL_INITIAL (tmp) = init;
1572 init = tmp;
1573
1574 /* Build the library call */
1575 init = gfc_build_addr_expr (pvoid_type_node, init);
1576
1577 gfc_init_se (&se, NULL);
1578 gfc_conv_expr_reference (&se, code->expr);
1579
1580 gfc_add_block_to_block (&block, &se.pre);
1581
1582 if (code->expr->ts.kind == 1)
1583 fndecl = gfor_fndecl_select_string;
1584 else if (code->expr->ts.kind == 4)
1585 fndecl = gfor_fndecl_select_string_char4;
1586 else
1587 gcc_unreachable ();
1588
1589 tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
1590 se.expr, se.string_length);
1591 case_num = gfc_create_var (integer_type_node, "case_num");
1592 gfc_add_modify (&block, case_num, tmp);
1593
1594 gfc_add_block_to_block (&block, &se.post);
1595
1596 tmp = gfc_finish_block (&body);
1597 tmp = build3_v (SWITCH_EXPR, case_num, tmp, NULL_TREE);
1598 gfc_add_expr_to_block (&block, tmp);
1599
1600 tmp = build1_v (LABEL_EXPR, end_label);
1601 gfc_add_expr_to_block (&block, tmp);
1602
1603 return gfc_finish_block (&block);
1604 }
1605
1606
1607 /* Translate the three variants of the SELECT CASE construct.
1608
1609 SELECT CASEs with INTEGER case expressions can be translated to an
1610 equivalent GENERIC switch statement, and for LOGICAL case
1611 expressions we build one or two if-else compares.
1612
1613 SELECT CASEs with CHARACTER case expressions are a whole different
1614 story, because they don't exist in GENERIC. So we sort them and
1615 do a binary search at runtime.
1616
1617 Fortran has no BREAK statement, and it does not allow jumps from
1618 one case block to another. That makes things a lot easier for
1619 the optimizers. */
1620
1621 tree
1622 gfc_trans_select (gfc_code * code)
1623 {
1624 gcc_assert (code && code->expr);
1625
1626 /* Empty SELECT constructs are legal. */
1627 if (code->block == NULL)
1628 return build_empty_stmt ();
1629
1630 /* Select the correct translation function. */
1631 switch (code->expr->ts.type)
1632 {
1633 case BT_LOGICAL: return gfc_trans_logical_select (code);
1634 case BT_INTEGER: return gfc_trans_integer_select (code);
1635 case BT_CHARACTER: return gfc_trans_character_select (code);
1636 default:
1637 gfc_internal_error ("gfc_trans_select(): Bad type for case expr.");
1638 /* Not reached */
1639 }
1640 }
1641
1642
1643 /* Traversal function to substitute a replacement symtree if the symbol
1644 in the expression is the same as that passed. f == 2 signals that
1645 the variable itself is not to be checked - only the references.
1646 This group of functions is used when the variable expression in a
1647 FORALL assignment has internal references. For example:
1648 FORALL (i = 1:4) p(p(i)) = i
1649 The only recourse here is to store a copy of 'p' for the index
1650 expression. */
1651
1652 static gfc_symtree *new_symtree;
1653 static gfc_symtree *old_symtree;
1654
1655 static bool
1656 forall_replace (gfc_expr *expr, gfc_symbol *sym, int *f)
1657 {
1658 if (expr->expr_type != EXPR_VARIABLE)
1659 return false;
1660
1661 if (*f == 2)
1662 *f = 1;
1663 else if (expr->symtree->n.sym == sym)
1664 expr->symtree = new_symtree;
1665
1666 return false;
1667 }
1668
1669 static void
1670 forall_replace_symtree (gfc_expr *e, gfc_symbol *sym, int f)
1671 {
1672 gfc_traverse_expr (e, sym, forall_replace, f);
1673 }
1674
1675 static bool
1676 forall_restore (gfc_expr *expr,
1677 gfc_symbol *sym ATTRIBUTE_UNUSED,
1678 int *f ATTRIBUTE_UNUSED)
1679 {
1680 if (expr->expr_type != EXPR_VARIABLE)
1681 return false;
1682
1683 if (expr->symtree == new_symtree)
1684 expr->symtree = old_symtree;
1685
1686 return false;
1687 }
1688
1689 static void
1690 forall_restore_symtree (gfc_expr *e)
1691 {
1692 gfc_traverse_expr (e, NULL, forall_restore, 0);
1693 }
1694
1695 static void
1696 forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1697 {
1698 gfc_se tse;
1699 gfc_se rse;
1700 gfc_expr *e;
1701 gfc_symbol *new_sym;
1702 gfc_symbol *old_sym;
1703 gfc_symtree *root;
1704 tree tmp;
1705
1706 /* Build a copy of the lvalue. */
1707 old_symtree = c->expr->symtree;
1708 old_sym = old_symtree->n.sym;
1709 e = gfc_lval_expr_from_sym (old_sym);
1710 if (old_sym->attr.dimension)
1711 {
1712 gfc_init_se (&tse, NULL);
1713 gfc_conv_subref_array_arg (&tse, e, 0, INTENT_IN);
1714 gfc_add_block_to_block (pre, &tse.pre);
1715 gfc_add_block_to_block (post, &tse.post);
1716 tse.expr = build_fold_indirect_ref (tse.expr);
1717
1718 if (e->ts.type != BT_CHARACTER)
1719 {
1720 /* Use the variable offset for the temporary. */
1721 tmp = gfc_conv_descriptor_offset (tse.expr);
1722 gfc_add_modify (pre, tmp,
1723 gfc_conv_array_offset (old_sym->backend_decl));
1724 }
1725 }
1726 else
1727 {
1728 gfc_init_se (&tse, NULL);
1729 gfc_init_se (&rse, NULL);
1730 gfc_conv_expr (&rse, e);
1731 if (e->ts.type == BT_CHARACTER)
1732 {
1733 tse.string_length = rse.string_length;
1734 tmp = gfc_get_character_type_len (gfc_default_character_kind,
1735 tse.string_length);
1736 tse.expr = gfc_conv_string_tmp (&tse, build_pointer_type (tmp),
1737 rse.string_length);
1738 gfc_add_block_to_block (pre, &tse.pre);
1739 gfc_add_block_to_block (post, &tse.post);
1740 }
1741 else
1742 {
1743 tmp = gfc_typenode_for_spec (&e->ts);
1744 tse.expr = gfc_create_var (tmp, "temp");
1745 }
1746
1747 tmp = gfc_trans_scalar_assign (&tse, &rse, e->ts, true,
1748 e->expr_type == EXPR_VARIABLE);
1749 gfc_add_expr_to_block (pre, tmp);
1750 }
1751 gfc_free_expr (e);
1752
1753 /* Create a new symbol to represent the lvalue. */
1754 new_sym = gfc_new_symbol (old_sym->name, NULL);
1755 new_sym->ts = old_sym->ts;
1756 new_sym->attr.referenced = 1;
1757 new_sym->attr.dimension = old_sym->attr.dimension;
1758 new_sym->attr.flavor = old_sym->attr.flavor;
1759
1760 /* Use the temporary as the backend_decl. */
1761 new_sym->backend_decl = tse.expr;
1762
1763 /* Create a fake symtree for it. */
1764 root = NULL;
1765 new_symtree = gfc_new_symtree (&root, old_sym->name);
1766 new_symtree->n.sym = new_sym;
1767 gcc_assert (new_symtree == root);
1768
1769 /* Go through the expression reference replacing the old_symtree
1770 with the new. */
1771 forall_replace_symtree (c->expr, old_sym, 2);
1772
1773 /* Now we have made this temporary, we might as well use it for
1774 the right hand side. */
1775 forall_replace_symtree (c->expr2, old_sym, 1);
1776 }
1777
1778
1779 /* Handles dependencies in forall assignments. */
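/* For example (illustrative only), in

     FORALL (i = 1:4) a(idx(i)) = a(i)

   an iteration may read an element of 'a' that another iteration
   assigns, so the right-hand side has to be evaluated into a temporary
   before any assignment takes place.  */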
1780 static int
1781 check_forall_dependencies (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
1782 {
1783 gfc_ref *lref;
1784 gfc_ref *rref;
1785 int need_temp;
1786 gfc_symbol *lsym;
1787
1788 lsym = c->expr->symtree->n.sym;
1789 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
1790
1791 /* Now check for dependencies within the 'variable'
1792 expression itself. These are treated by making a complete
1793 copy of the variable and redirecting all references to it to
1794 the copy instead. Note that the shallow copy of
1795 the variable will not suffice for derived types with
1796 pointer components. We therefore leave these to their
1797 own devices. */
1798 if (lsym->ts.type == BT_DERIVED
1799 && lsym->ts.derived->attr.pointer_comp)
1800 return need_temp;
1801
1802 new_symtree = NULL;
1803 if (find_forall_index (c->expr, lsym, 2) == SUCCESS)
1804 {
1805 forall_make_variable_temp (c, pre, post);
1806 need_temp = 0;
1807 }
1808
1809 /* Substrings with dependencies are treated in the same
1810 way. */
1811 if (c->expr->ts.type == BT_CHARACTER
1812 && c->expr->ref
1813 && c->expr2->expr_type == EXPR_VARIABLE
1814 && lsym == c->expr2->symtree->n.sym)
1815 {
1816 for (lref = c->expr->ref; lref; lref = lref->next)
1817 if (lref->type == REF_SUBSTRING)
1818 break;
1819 for (rref = c->expr2->ref; rref; rref = rref->next)
1820 if (rref->type == REF_SUBSTRING)
1821 break;
1822
1823 if (rref && lref
1824 && gfc_dep_compare_expr (rref->u.ss.start, lref->u.ss.start) < 0)
1825 {
1826 forall_make_variable_temp (c, pre, post);
1827 need_temp = 0;
1828 }
1829 }
1830 return need_temp;
1831 }
1832
1833
1834 static void
1835 cleanup_forall_symtrees (gfc_code *c)
1836 {
1837 forall_restore_symtree (c->expr);
1838 forall_restore_symtree (c->expr2);
1839 gfc_free (new_symtree->n.sym);
1840 gfc_free (new_symtree);
1841 }
1842
1843
1844 /* Generate the loops for a FORALL block, specified by FORALL_TMP. BODY
1845 is the contents of the FORALL block/stmt to be iterated. MASK_FLAG
1846 indicates whether we should generate code to test the FORALLs mask
1847 array. OUTER is the loop header to be used for initializing mask
1848 indices.
1849
1850 The generated loop format is:
1851 count = (end - start + step) / step
1852 loopvar = start
1853 while (1)
1854 {
1855 if (count <= 0)
1856 goto end_of_loop
1857 <body>
1858 loopvar += step
1859 count--
1860 }
1861 end_of_loop: */
1862
1863 static tree
1864 gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
1865 int mask_flag, stmtblock_t *outer)
1866 {
1867 int n, nvar;
1868 tree tmp;
1869 tree cond;
1870 stmtblock_t block;
1871 tree exit_label;
1872 tree count;
1873 tree var, start, end, step;
1874 iter_info *iter;
1875
1876 /* Initialize the mask index outside the FORALL nest. */
1877 if (mask_flag && forall_tmp->mask)
1878 gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
1879
1880 iter = forall_tmp->this_loop;
1881 nvar = forall_tmp->nvar;
1882 for (n = 0; n < nvar; n++)
1883 {
1884 var = iter->var;
1885 start = iter->start;
1886 end = iter->end;
1887 step = iter->step;
1888
1889 exit_label = gfc_build_label_decl (NULL_TREE);
1890 TREE_USED (exit_label) = 1;
1891
1892 /* The loop counter. */
1893 count = gfc_create_var (TREE_TYPE (var), "count");
1894
1895 /* The body of the loop. */
1896 gfc_init_block (&block);
1897
1898 /* The exit condition. */
1899 cond = fold_build2 (LE_EXPR, boolean_type_node,
1900 count, build_int_cst (TREE_TYPE (count), 0));
1901 tmp = build1_v (GOTO_EXPR, exit_label);
1902 tmp = fold_build3 (COND_EXPR, void_type_node,
1903 cond, tmp, build_empty_stmt ());
1904 gfc_add_expr_to_block (&block, tmp);
1905
1906 /* The main loop body. */
1907 gfc_add_expr_to_block (&block, body);
1908
1909 /* Increment the loop variable. */
1910 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
1911 gfc_add_modify (&block, var, tmp);
1912
1913 /* Advance to the next mask element. Only do this for the
1914 innermost loop. */
1915 if (n == 0 && mask_flag && forall_tmp->mask)
1916 {
1917 tree maskindex = forall_tmp->maskindex;
1918 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
1919 maskindex, gfc_index_one_node);
1920 gfc_add_modify (&block, maskindex, tmp);
1921 }
1922
1923 /* Decrement the loop counter. */
1924 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
1925 build_int_cst (TREE_TYPE (var), 1));
1926 gfc_add_modify (&block, count, tmp);
1927
1928 body = gfc_finish_block (&block);
1929
1930 /* Loop var initialization. */
1931 gfc_init_block (&block);
1932 gfc_add_modify (&block, var, start);
1933
1934
1935 /* Initialize the loop counter. */
1936 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
1937 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
1938 tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
1939 gfc_add_modify (&block, count, tmp);
1940
1941 /* The loop expression. */
1942 tmp = build1_v (LOOP_EXPR, body);
1943 gfc_add_expr_to_block (&block, tmp);
1944
1945 /* The exit label. */
1946 tmp = build1_v (LABEL_EXPR, exit_label);
1947 gfc_add_expr_to_block (&block, tmp);
1948
1949 body = gfc_finish_block (&block);
1950 iter = iter->next;
1951 }
1952 return body;
1953 }
1954
1955
1956 /* Generate the body and loops according to MASK_FLAG. If MASK_FLAG
1957 is nonzero, the body is controlled by all masks in the forall nest.
1958 Otherwise, the innermost loop is not controlled by its mask. This
1959 is used for initializing that mask. */
1960
1961 static tree
1962 gfc_trans_nested_forall_loop (forall_info * nested_forall_info, tree body,
1963 int mask_flag)
1964 {
1965 tree tmp;
1966 stmtblock_t header;
1967 forall_info *forall_tmp;
1968 tree mask, maskindex;
1969
1970 gfc_start_block (&header);
1971
1972 forall_tmp = nested_forall_info;
1973 while (forall_tmp != NULL)
1974 {
1975 /* Generate body with masks' control. */
1976 if (mask_flag)
1977 {
1978 mask = forall_tmp->mask;
1979 maskindex = forall_tmp->maskindex;
1980
1981 /* If a mask was specified make the assignment conditional. */
1982 if (mask)
1983 {
1984 tmp = gfc_build_array_ref (mask, maskindex, NULL);
1985 body = build3_v (COND_EXPR, tmp, body, build_empty_stmt ());
1986 }
1987 }
1988 body = gfc_trans_forall_loop (forall_tmp, body, mask_flag, &header);
1989 forall_tmp = forall_tmp->prev_nest;
1990 mask_flag = 1;
1991 }
1992
1993 gfc_add_expr_to_block (&header, body);
1994 return gfc_finish_block (&header);
1995 }
1996
1997
1998 /* Allocate data for holding a temporary array. Returns either a local
1999 temporary array or a pointer variable. */
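/* Roughly speaking: when BYTESIZE is a compile-time constant small enough
   for the stack, this yields a local array temporary ("temp") and *PDATA
   is left NULL_TREE; otherwise it yields a pointer temporary initialized
   via gfc_call_malloc, with *PDATA set so the caller can free the
   storage later.  */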
2000
2001 static tree
2002 gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
2003 tree elem_type)
2004 {
2005 tree tmpvar;
2006 tree type;
2007 tree tmp;
2008
2009 if (INTEGER_CST_P (size))
2010 {
2011 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
2012 gfc_index_one_node);
2013 }
2014 else
2015 tmp = NULL_TREE;
2016
2017 type = build_range_type (gfc_array_index_type, gfc_index_zero_node, tmp);
2018 type = build_array_type (elem_type, type);
2019 if (gfc_can_put_var_on_stack (bytesize))
2020 {
2021 gcc_assert (INTEGER_CST_P (size));
2022 tmpvar = gfc_create_var (type, "temp");
2023 *pdata = NULL_TREE;
2024 }
2025 else
2026 {
2027 tmpvar = gfc_create_var (build_pointer_type (type), "temp");
2028 *pdata = convert (pvoid_type_node, tmpvar);
2029
2030 tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
2031 gfc_add_modify (pblock, tmpvar, tmp);
2032 }
2033 return tmpvar;
2034 }
2035
2036
2037 /* Generate code to copy the temporary to the actual lhs. */
2038
2039 static tree
2040 generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
2041 tree count1, tree wheremask, bool invert)
2042 {
2043 gfc_ss *lss;
2044 gfc_se lse, rse;
2045 stmtblock_t block, body;
2046 gfc_loopinfo loop1;
2047 tree tmp;
2048 tree wheremaskexpr;
2049
2050 /* Walk the lhs. */
2051 lss = gfc_walk_expr (expr);
2052
2053 if (lss == gfc_ss_terminator)
2054 {
2055 gfc_start_block (&block);
2056
2057 gfc_init_se (&lse, NULL);
2058
2059 /* Translate the expression. */
2060 gfc_conv_expr (&lse, expr);
2061
2062 /* Form the expression for the temporary. */
2063 tmp = gfc_build_array_ref (tmp1, count1, NULL);
2064
2065 /* Use the scalar assignment as is. */
2066 gfc_add_block_to_block (&block, &lse.pre);
2067 gfc_add_modify (&block, lse.expr, tmp);
2068 gfc_add_block_to_block (&block, &lse.post);
2069
2070 /* Increment the count1. */
2071 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2072 gfc_index_one_node);
2073 gfc_add_modify (&block, count1, tmp);
2074
2075 tmp = gfc_finish_block (&block);
2076 }
2077 else
2078 {
2079 gfc_start_block (&block);
2080
2081 gfc_init_loopinfo (&loop1);
2082 gfc_init_se (&rse, NULL);
2083 gfc_init_se (&lse, NULL);
2084
2085 /* Associate the lss with the loop. */
2086 gfc_add_ss_to_loop (&loop1, lss);
2087
2088 /* Calculate the bounds of the scalarization. */
2089 gfc_conv_ss_startstride (&loop1);
2090 /* Setup the scalarizing loops. */
2091 gfc_conv_loop_setup (&loop1, &expr->where);
2092
2093 gfc_mark_ss_chain_used (lss, 1);
2094
2095 /* Start the scalarized loop body. */
2096 gfc_start_scalarized_body (&loop1, &body);
2097
2098 /* Setup the gfc_se structures. */
2099 gfc_copy_loopinfo_to_se (&lse, &loop1);
2100 lse.ss = lss;
2101
2102 /* Form the expression of the temporary. */
2103 if (lss != gfc_ss_terminator)
2104 rse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2105 /* Translate expr. */
2106 gfc_conv_expr (&lse, expr);
2107
2108 /* Use the scalar assignment. */
2109 rse.string_length = lse.string_length;
2110 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, false, false);
2111
2112 /* Form the mask expression according to the mask tree list. */
2113 if (wheremask)
2114 {
2115 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2116 if (invert)
2117 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2118 TREE_TYPE (wheremaskexpr),
2119 wheremaskexpr);
2120 tmp = fold_build3 (COND_EXPR, void_type_node,
2121 wheremaskexpr, tmp, build_empty_stmt ());
2122 }
2123
2124 gfc_add_expr_to_block (&body, tmp);
2125
2126 /* Increment count1. */
2127 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2128 count1, gfc_index_one_node);
2129 gfc_add_modify (&body, count1, tmp);
2130
2131 /* Increment count3. */
2132 if (count3)
2133 {
2134 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2135 count3, gfc_index_one_node);
2136 gfc_add_modify (&body, count3, tmp);
2137 }
2138
2139 /* Generate the copying loops. */
2140 gfc_trans_scalarizing_loops (&loop1, &body);
2141 gfc_add_block_to_block (&block, &loop1.pre);
2142 gfc_add_block_to_block (&block, &loop1.post);
2143 gfc_cleanup_loop (&loop1);
2144
2145 tmp = gfc_finish_block (&block);
2146 }
2147 return tmp;
2148 }
2149
2150
2151 /* Generate code to copy the rhs into the temporary. TMP1 is the address of
2152 the temporary; LSS and RSS are built by compute_inner_temp_size() and
2153 should not be freed here. WHEREMASK is the conditional execution mask
2154 whose sense may be inverted by INVERT. */
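/* As a sketch, for an array-valued rhs the generated (scalarized) body is
   roughly:
       if (wheremask[count3])      (sense inverted when INVERT is set)
         tmp1[count1] = <rhs element>;
       count1++;
       count3++;
   When there is nothing to scalarize (LSS is the terminator), only the
   single guarded store and the count1 increment are emitted.  */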
2155
2156 static tree
2157 generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
2158 tree count1, gfc_ss *lss, gfc_ss *rss,
2159 tree wheremask, bool invert)
2160 {
2161 stmtblock_t block, body1;
2162 gfc_loopinfo loop;
2163 gfc_se lse;
2164 gfc_se rse;
2165 tree tmp;
2166 tree wheremaskexpr;
2167
2168 gfc_start_block (&block);
2169
2170 gfc_init_se (&rse, NULL);
2171 gfc_init_se (&lse, NULL);
2172
2173 if (lss == gfc_ss_terminator)
2174 {
2175 gfc_init_block (&body1);
2176 gfc_conv_expr (&rse, expr2);
2177 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2178 }
2179 else
2180 {
2181 /* Initialize the loop. */
2182 gfc_init_loopinfo (&loop);
2183
2184 /* We may need LSS to determine the shape of the expression. */
2185 gfc_add_ss_to_loop (&loop, lss);
2186 gfc_add_ss_to_loop (&loop, rss);
2187
2188 gfc_conv_ss_startstride (&loop);
2189 gfc_conv_loop_setup (&loop, &expr2->where);
2190
2191 gfc_mark_ss_chain_used (rss, 1);
2192 /* Start the loop body. */
2193 gfc_start_scalarized_body (&loop, &body1);
2194
2195 /* Translate the expression. */
2196 gfc_copy_loopinfo_to_se (&rse, &loop);
2197 rse.ss = rss;
2198 gfc_conv_expr (&rse, expr2);
2199
2200 /* Form the expression of the temporary. */
2201 lse.expr = gfc_build_array_ref (tmp1, count1, NULL);
2202 }
2203
2204 /* Use the scalar assignment. */
2205 lse.string_length = rse.string_length;
2206 tmp = gfc_trans_scalar_assign (&lse, &rse, expr2->ts, true,
2207 expr2->expr_type == EXPR_VARIABLE);
2208
2209 /* Form the mask expression according to the mask tree list. */
2210 if (wheremask)
2211 {
2212 wheremaskexpr = gfc_build_array_ref (wheremask, count3, NULL);
2213 if (invert)
2214 wheremaskexpr = fold_build1 (TRUTH_NOT_EXPR,
2215 TREE_TYPE (wheremaskexpr),
2216 wheremaskexpr);
2217 tmp = fold_build3 (COND_EXPR, void_type_node,
2218 wheremaskexpr, tmp, build_empty_stmt ());
2219 }
2220
2221 gfc_add_expr_to_block (&body1, tmp);
2222
2223 if (lss == gfc_ss_terminator)
2224 {
2225 gfc_add_block_to_block (&block, &body1);
2226
2227 /* Increment count1. */
2228 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
2229 gfc_index_one_node);
2230 gfc_add_modify (&block, count1, tmp);
2231 }
2232 else
2233 {
2234 /* Increment count1. */
2235 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2236 count1, gfc_index_one_node);
2237 gfc_add_modify (&body1, count1, tmp);
2238
2239 /* Increment count3. */
2240 if (count3)
2241 {
2242 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2243 count3, gfc_index_one_node);
2244 gfc_add_modify (&body1, count3, tmp);
2245 }
2246
2247 /* Generate the copying loops. */
2248 gfc_trans_scalarizing_loops (&loop, &body1);
2249
2250 gfc_add_block_to_block (&block, &loop.pre);
2251 gfc_add_block_to_block (&block, &loop.post);
2252
2253 gfc_cleanup_loop (&loop);
2254 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
2255 as tree nodes in SS may not be valid in different scope. */
2256 }
2257
2258 tmp = gfc_finish_block (&block);
2259 return tmp;
2260 }
2261
2262
2263 /* Calculate the size of the temporary needed by the assignment inside a
2264 FORALL. LSS and RSS are filled in by this function. */
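/* As a rough example, for an lhs section such as a(2:11, 1:5) the
   scalarizer yields two dimensions and the size evaluates to the number
   of lhs elements, (11-2+1) * (5-1+1) = 50; a scalar lhs simply gives 1.  */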
2265
2266 static tree
2267 compute_inner_temp_size (gfc_expr *expr1, gfc_expr *expr2,
2268 stmtblock_t * pblock,
2269 gfc_ss **lss, gfc_ss **rss)
2270 {
2271 gfc_loopinfo loop;
2272 tree size;
2273 int i;
2274 int save_flag;
2275 tree tmp;
2276
2277 *lss = gfc_walk_expr (expr1);
2278 *rss = NULL;
2279
2280 size = gfc_index_one_node;
2281 if (*lss != gfc_ss_terminator)
2282 {
2283 gfc_init_loopinfo (&loop);
2284
2285 /* Walk the RHS of the expression. */
2286 *rss = gfc_walk_expr (expr2);
2287 if (*rss == gfc_ss_terminator)
2288 {
2289 /* The rhs is scalar. Add a ss for the expression. */
2290 *rss = gfc_get_ss ();
2291 (*rss)->next = gfc_ss_terminator;
2292 (*rss)->type = GFC_SS_SCALAR;
2293 (*rss)->expr = expr2;
2294 }
2295
2296 /* Associate the SS with the loop. */
2297 gfc_add_ss_to_loop (&loop, *lss);
2298 /* We don't actually need to add the rhs at this point, but it might
2299 make guessing the loop bounds a bit easier. */
2300 gfc_add_ss_to_loop (&loop, *rss);
2301
2302 /* We only want the shape of the expression, not the rest of the junk
2303 generated by the scalarizer. */
2304 loop.array_parameter = 1;
2305
2306 /* Calculate the bounds of the scalarization. */
2307 save_flag = gfc_option.rtcheck;
2308 gfc_option.rtcheck &= !GFC_RTCHECK_BOUNDS;
2309 gfc_conv_ss_startstride (&loop);
2310 gfc_option.rtcheck = save_flag;
2311 gfc_conv_loop_setup (&loop, &expr2->where);
2312
2313 /* Figure out how many elements we need. */
2314 for (i = 0; i < loop.dimen; i++)
2315 {
2316 tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
2317 gfc_index_one_node, loop.from[i]);
2318 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2319 tmp, loop.to[i]);
2320 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2321 }
2322 gfc_add_block_to_block (pblock, &loop.pre);
2323 size = gfc_evaluate_now (size, pblock);
2324 gfc_add_block_to_block (pblock, &loop.post);
2325
2326 /* TODO: write a function that cleans up a loopinfo without freeing
2327 the SS chains. Currently a NOP. */
2328 }
2329
2330 return size;
2331 }
2332
2333
2334 /* Calculate the overall iterator number of the nested forall construct.
2335 This routine actually calculates the number of times the body of the
2336 nested forall specified by NESTED_FORALL_INFO is executed and multiplies
2337 that by the expression INNER_SIZE. The BLOCK argument specifies the
2338 block in which to calculate the result, and the optional INNER_SIZE_BODY
2339 argument contains any statements that need to be executed (inside the loop)
2340 to initialize or calculate INNER_SIZE. */
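/* For instance, for FORALL (i=1:10, j=1:20) wrapped around an assignment
   whose INNER_SIZE is 5, the overall number is 10 * 20 * 5 = 1000.  With
   constant bounds and no masks this folds to a constant; otherwise a
   counting loop ("num += inner_size") is generated below.  */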
2341
2342 static tree
2343 compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
2344 stmtblock_t *inner_size_body, stmtblock_t *block)
2345 {
2346 forall_info *forall_tmp = nested_forall_info;
2347 tree tmp, number;
2348 stmtblock_t body;
2349
2350 /* We can eliminate the innermost unconditional loops with constant
2351 array bounds. */
2352 if (INTEGER_CST_P (inner_size))
2353 {
2354 while (forall_tmp
2355 && !forall_tmp->mask
2356 && INTEGER_CST_P (forall_tmp->size))
2357 {
2358 inner_size = fold_build2 (MULT_EXPR, gfc_array_index_type,
2359 inner_size, forall_tmp->size);
2360 forall_tmp = forall_tmp->prev_nest;
2361 }
2362
2363 /* If there are no loops left, we have our constant result. */
2364 if (!forall_tmp)
2365 return inner_size;
2366 }
2367
2368 /* Otherwise, create a temporary variable to compute the result. */
2369 number = gfc_create_var (gfc_array_index_type, "num");
2370 gfc_add_modify (block, number, gfc_index_zero_node);
2371
2372 gfc_start_block (&body);
2373 if (inner_size_body)
2374 gfc_add_block_to_block (&body, inner_size_body);
2375 if (forall_tmp)
2376 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2377 number, inner_size);
2378 else
2379 tmp = inner_size;
2380 gfc_add_modify (&body, number, tmp);
2381 tmp = gfc_finish_block (&body);
2382
2383 /* Generate loops. */
2384 if (forall_tmp != NULL)
2385 tmp = gfc_trans_nested_forall_loop (forall_tmp, tmp, 1);
2386
2387 gfc_add_expr_to_block (block, tmp);
2388
2389 return number;
2390 }
2391
2392
2393 /* Allocate a temporary for the forall construct. SIZE is the size of the
2394 temporary needed. PTEMP1 is returned so the space can be freed later. */
2395
2396 static tree
2397 allocate_temp_for_forall_nest_1 (tree type, tree size, stmtblock_t * block,
2398 tree * ptemp1)
2399 {
2400 tree bytesize;
2401 tree unit;
2402 tree tmp;
2403
2404 unit = fold_convert (gfc_array_index_type, TYPE_SIZE_UNIT (type));
2405 if (!integer_onep (unit))
2406 bytesize = fold_build2 (MULT_EXPR, gfc_array_index_type, size, unit);
2407 else
2408 bytesize = size;
2409
2410 *ptemp1 = NULL;
2411 tmp = gfc_do_allocate (bytesize, size, ptemp1, block, type);
2412
2413 if (*ptemp1)
2414 tmp = build_fold_indirect_ref (tmp);
2415 return tmp;
2416 }
2417
2418
2419 /* Allocate a temporary for the forall construct according to the information
2420 in nested_forall_info. INNER_SIZE is the size of the temporary needed by the
2421 assignment inside the forall. PTEMP1 is returned so the space can be freed. */
2422
2423 static tree
2424 allocate_temp_for_forall_nest (forall_info * nested_forall_info, tree type,
2425 tree inner_size, stmtblock_t * inner_size_body,
2426 stmtblock_t * block, tree * ptemp1)
2427 {
2428 tree size;
2429
2430 /* Calculate the total size of temporary needed in forall construct. */
2431 size = compute_overall_iter_number (nested_forall_info, inner_size,
2432 inner_size_body, block);
2433
2434 return allocate_temp_for_forall_nest_1 (type, size, block, ptemp1);
2435 }
2436
2437
2438 /* Handle assignments inside forall which need temporary.
2439
2440 forall (i=start:end:stride; maskexpr)
2441 e<i> = f<i>
2442 end forall
2443 (where e,f<i> are arbitrary expressions possibly involving i
2444 and there is a dependency between e<i> and f<i>)
2445 Translates to:
2446 masktmp(:) = maskexpr(:)
2447
2448 maskindex = 0;
2449 count1 = 0;
2450 num = 0;
2451 for (i = start; i <= end; i += stride)
2452 num += SIZE (f<i>)
2453 count1 = 0;
2454 ALLOCATE (tmp(num))
2455 for (i = start; i <= end; i += stride)
2456 {
2457 if (masktmp[maskindex++])
2458 tmp[count1++] = f<i>
2459 }
2460 maskindex = 0;
2461 count1 = 0;
2462 for (i = start; i <= end; i += stride)
2463 {
2464 if (masktmp[maskindex++])
2465 e<i> = tmp[count1++]
2466 }
2467 DEALLOCATE (tmp)
2468 */
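/* A typical assignment requiring this scheme would be something like
       forall (i=1:n) a(i) = a(n+1-i)
   where every rhs element must be fetched into TMP before any lhs
   element may be overwritten.  */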
2469 static void
2470 gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2471 tree wheremask, bool invert,
2472 forall_info * nested_forall_info,
2473 stmtblock_t * block)
2474 {
2475 tree type;
2476 tree inner_size;
2477 gfc_ss *lss, *rss;
2478 tree count, count1;
2479 tree tmp, tmp1;
2480 tree ptemp1;
2481 stmtblock_t inner_size_body;
2482
2483 /* Create vars. count1 is the current iterator number of the nested
2484 forall. */
2485 count1 = gfc_create_var (gfc_array_index_type, "count1");
2486
2487 /* Count is the wheremask index. */
2488 if (wheremask)
2489 {
2490 count = gfc_create_var (gfc_array_index_type, "count");
2491 gfc_add_modify (block, count, gfc_index_zero_node);
2492 }
2493 else
2494 count = NULL;
2495
2496 /* Initialize count1. */
2497 gfc_add_modify (block, count1, gfc_index_zero_node);
2498
2499 /* Calculate the size of the temporary needed in the assignment. Return lss
2500 and rss, which are used in function generate_loop_for_rhs_to_temp(). */
2501 gfc_init_block (&inner_size_body);
2502 inner_size = compute_inner_temp_size (expr1, expr2, &inner_size_body,
2503 &lss, &rss);
2504
2505 /* The type of the LHS. Used in function allocate_temp_for_forall_nest. */
2506 if (expr1->ts.type == BT_CHARACTER && expr1->ts.cl->length)
2507 {
2508 if (!expr1->ts.cl->backend_decl)
2509 {
2510 gfc_se tse;
2511 gfc_init_se (&tse, NULL);
2512 gfc_conv_expr (&tse, expr1->ts.cl->length);
2513 expr1->ts.cl->backend_decl = tse.expr;
2514 }
2515 type = gfc_get_character_type_len (gfc_default_character_kind,
2516 expr1->ts.cl->backend_decl);
2517 }
2518 else
2519 type = gfc_typenode_for_spec (&expr1->ts);
2520
2521 /* Allocate temporary for nested forall construct according to the
2522 information in nested_forall_info and inner_size. */
2523 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type, inner_size,
2524 &inner_size_body, block, &ptemp1);
2525
2526 /* Generate code to copy the rhs to the temporary. */
2527 tmp = generate_loop_for_rhs_to_temp (expr2, tmp1, count, count1, lss, rss,
2528 wheremask, invert);
2529
2530 /* Generate body and loops according to the information in
2531 nested_forall_info. */
2532 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2533 gfc_add_expr_to_block (block, tmp);
2534
2535 /* Reset count1. */
2536 gfc_add_modify (block, count1, gfc_index_zero_node);
2537
2538 /* Reset count. */
2539 if (wheremask)
2540 gfc_add_modify (block, count, gfc_index_zero_node);
2541
2542 /* Generate code to copy the temporary to the lhs. */
2543 tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
2544 wheremask, invert);
2545
2546 /* Generate body and loops according to the information in
2547 nested_forall_info. */
2548 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2549 gfc_add_expr_to_block (block, tmp);
2550
2551 if (ptemp1)
2552 {
2553 /* Free the temporary. */
2554 tmp = gfc_call_free (ptemp1);
2555 gfc_add_expr_to_block (block, tmp);
2556 }
2557 }
2558
2559
2560 /* Translate a pointer assignment inside FORALL which needs a temporary. */
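/* An illustrative case would be something like
       forall (i=1:n) p(i)%q => p(n+1-i)%q
   where all the targets have to be captured in a temporary before any of
   the pointers are redirected.  */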
2561
2562 static void
2563 gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
2564 forall_info * nested_forall_info,
2565 stmtblock_t * block)
2566 {
2567 tree type;
2568 tree inner_size;
2569 gfc_ss *lss, *rss;
2570 gfc_se lse;
2571 gfc_se rse;
2572 gfc_ss_info *info;
2573 gfc_loopinfo loop;
2574 tree desc;
2575 tree parm;
2576 tree parmtype;
2577 stmtblock_t body;
2578 tree count;
2579 tree tmp, tmp1, ptemp1;
2580
2581 count = gfc_create_var (gfc_array_index_type, "count");
2582 gfc_add_modify (block, count, gfc_index_zero_node);
2583
2584 inner_size = integer_one_node;
2585 lss = gfc_walk_expr (expr1);
2586 rss = gfc_walk_expr (expr2);
2587 if (lss == gfc_ss_terminator)
2588 {
2589 type = gfc_typenode_for_spec (&expr1->ts);
2590 type = build_pointer_type (type);
2591
2592 /* Allocate temporary for nested forall construct according to the
2593 information in nested_forall_info and inner_size. */
2594 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, type,
2595 inner_size, NULL, block, &ptemp1);
2596 gfc_start_block (&body);
2597 gfc_init_se (&lse, NULL);
2598 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2599 gfc_init_se (&rse, NULL);
2600 rse.want_pointer = 1;
2601 gfc_conv_expr (&rse, expr2);
2602 gfc_add_block_to_block (&body, &rse.pre);
2603 gfc_add_modify (&body, lse.expr,
2604 fold_convert (TREE_TYPE (lse.expr), rse.expr));
2605 gfc_add_block_to_block (&body, &rse.post);
2606
2607 /* Increment count. */
2608 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2609 count, gfc_index_one_node);
2610 gfc_add_modify (&body, count, tmp);
2611
2612 tmp = gfc_finish_block (&body);
2613
2614 /* Generate body and loops according to the information in
2615 nested_forall_info. */
2616 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2617 gfc_add_expr_to_block (block, tmp);
2618
2619 /* Reset count. */
2620 gfc_add_modify (block, count, gfc_index_zero_node);
2621
2622 gfc_start_block (&body);
2623 gfc_init_se (&lse, NULL);
2624 gfc_init_se (&rse, NULL);
2625 rse.expr = gfc_build_array_ref (tmp1, count, NULL);
2626 lse.want_pointer = 1;
2627 gfc_conv_expr (&lse, expr1);
2628 gfc_add_block_to_block (&body, &lse.pre);
2629 gfc_add_modify (&body, lse.expr, rse.expr);
2630 gfc_add_block_to_block (&body, &lse.post);
2631 /* Increment count. */
2632 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2633 count, gfc_index_one_node);
2634 gfc_add_modify (&body, count, tmp);
2635 tmp = gfc_finish_block (&body);
2636
2637 /* Generate body and loops according to the information in
2638 nested_forall_info. */
2639 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2640 gfc_add_expr_to_block (block, tmp);
2641 }
2642 else
2643 {
2644 gfc_init_loopinfo (&loop);
2645
2646 /* Associate the SS with the loop. */
2647 gfc_add_ss_to_loop (&loop, rss);
2648
2649 /* Setup the scalarizing loops and bounds. */
2650 gfc_conv_ss_startstride (&loop);
2651
2652 gfc_conv_loop_setup (&loop, &expr2->where);
2653
2654 info = &rss->data.info;
2655 desc = info->descriptor;
2656
2657 /* Make a new descriptor. */
2658 parmtype = gfc_get_element_type (TREE_TYPE (desc));
2659 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen,
2660 loop.from, loop.to, 1,
2661 GFC_ARRAY_UNKNOWN);
2662
2663 /* Allocate temporary for nested forall construct. */
2664 tmp1 = allocate_temp_for_forall_nest (nested_forall_info, parmtype,
2665 inner_size, NULL, block, &ptemp1);
2666 gfc_start_block (&body);
2667 gfc_init_se (&lse, NULL);
2668 lse.expr = gfc_build_array_ref (tmp1, count, NULL);
2669 lse.direct_byref = 1;
2670 rss = gfc_walk_expr (expr2);
2671 gfc_conv_expr_descriptor (&lse, expr2, rss);
2672
2673 gfc_add_block_to_block (&body, &lse.pre);
2674 gfc_add_block_to_block (&body, &lse.post);
2675
2676 /* Increment count. */
2677 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2678 count, gfc_index_one_node);
2679 gfc_add_modify (&body, count, tmp);
2680
2681 tmp = gfc_finish_block (&body);
2682
2683 /* Generate body and loops according to the information in
2684 nested_forall_info. */
2685 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2686 gfc_add_expr_to_block (block, tmp);
2687
2688 /* Reset count. */
2689 gfc_add_modify (block, count, gfc_index_zero_node);
2690
2691 parm = gfc_build_array_ref (tmp1, count, NULL);
2692 lss = gfc_walk_expr (expr1);
2693 gfc_init_se (&lse, NULL);
2694 gfc_conv_expr_descriptor (&lse, expr1, lss);
2695 gfc_add_modify (&lse.pre, lse.expr, parm);
2696 gfc_start_block (&body);
2697 gfc_add_block_to_block (&body, &lse.pre);
2698 gfc_add_block_to_block (&body, &lse.post);
2699
2700 /* Increment count. */
2701 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2702 count, gfc_index_one_node);
2703 gfc_add_modify (&body, count, tmp);
2704
2705 tmp = gfc_finish_block (&body);
2706
2707 tmp = gfc_trans_nested_forall_loop (nested_forall_info, tmp, 1);
2708 gfc_add_expr_to_block (block, tmp);
2709 }
2710 /* Free the temporary. */
2711 if (ptemp1)
2712 {
2713 tmp = gfc_call_free (ptemp1);
2714 gfc_add_expr_to_block (block, tmp);
2715 }
2716 }
2717
2718
2719 /* FORALL and WHERE statements are really nasty, especially when you nest
2720 them. Every rhs of a forall assignment must be evaluated before any of the
2721 actual assignments are performed. Presumably this also applies to all the
2722 assignments in an inner where statement. */
2723
2724 /* Generate code for a FORALL statement. Any temporaries are allocated as a
2725 linear array, relying on the fact that we process in the same order in all
2726 loops.
2727
2728 forall (i=start:end:stride; maskexpr)
2729 e<i> = f<i>
2730 g<i> = h<i>
2731 end forall
2732 (where e,f,g,h<i> are arbitrary expressions possibly involving i)
2733 Translates to:
2734 count = ((end + 1 - start) / stride)
2735 masktmp(:) = maskexpr(:)
2736
2737 maskindex = 0;
2738 for (i = start; i <= end; i += stride)
2739 {
2740 if (masktmp[maskindex++])
2741 e<i> = f<i>
2742 }
2743 maskindex = 0;
2744 for (i = start; i <= end; i += stride)
2745 {
2746 if (masktmp[maskindex++])
2747 g<i> = h<i>
2748 }
2749
2750 Note that this code only works when there are no dependencies.
2751 Forall loops with array assignments and data dependencies are a real pain,
2752 because the size of the temporary cannot always be determined before the
2753 loop is executed. This problem is compounded by the presence of nested
2754 FORALL constructs.
2755 */
2756
2757 static tree
2758 gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
2759 {
2760 stmtblock_t pre;
2761 stmtblock_t post;
2762 stmtblock_t block;
2763 stmtblock_t body;
2764 tree *var;
2765 tree *start;
2766 tree *end;
2767 tree *step;
2768 gfc_expr **varexpr;
2769 tree tmp;
2770 tree assign;
2771 tree size;
2772 tree maskindex;
2773 tree mask;
2774 tree pmask;
2775 int n;
2776 int nvar;
2777 int need_temp;
2778 gfc_forall_iterator *fa;
2779 gfc_se se;
2780 gfc_code *c;
2781 gfc_saved_var *saved_vars;
2782 iter_info *this_forall;
2783 forall_info *info;
2784 bool need_mask;
2785
2786 /* Do nothing if the mask is false. */
2787 if (code->expr
2788 && code->expr->expr_type == EXPR_CONSTANT
2789 && !code->expr->value.logical)
2790 return build_empty_stmt ();
2791
2792 n = 0;
2793 /* Count the FORALL index number. */
2794 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2795 n++;
2796 nvar = n;
2797
2798 /* Allocate the space for var, start, end, step, varexpr. */
2799 var = (tree *) gfc_getmem (nvar * sizeof (tree));
2800 start = (tree *) gfc_getmem (nvar * sizeof (tree));
2801 end = (tree *) gfc_getmem (nvar * sizeof (tree));
2802 step = (tree *) gfc_getmem (nvar * sizeof (tree));
2803 varexpr = (gfc_expr **) gfc_getmem (nvar * sizeof (gfc_expr *));
2804 saved_vars = (gfc_saved_var *) gfc_getmem (nvar * sizeof (gfc_saved_var));
2805
2806 /* Allocate the space for info. */
2807 info = (forall_info *) gfc_getmem (sizeof (forall_info));
2808
2809 gfc_start_block (&pre);
2810 gfc_init_block (&post);
2811 gfc_init_block (&block);
2812
2813 n = 0;
2814 for (fa = code->ext.forall_iterator; fa; fa = fa->next)
2815 {
2816 gfc_symbol *sym = fa->var->symtree->n.sym;
2817
2818 /* Allocate space for this_forall. */
2819 this_forall = (iter_info *) gfc_getmem (sizeof (iter_info));
2820
2821 /* Create a temporary variable for the FORALL index. */
2822 tmp = gfc_typenode_for_spec (&sym->ts);
2823 var[n] = gfc_create_var (tmp, sym->name);
2824 gfc_shadow_sym (sym, var[n], &saved_vars[n]);
2825
2826 /* Record it in this_forall. */
2827 this_forall->var = var[n];
2828
2829 /* Replace the index symbol's backend_decl with the temporary decl. */
2830 sym->backend_decl = var[n];
2831
2832 /* Work out the start, end and stride for the loop. */
2833 gfc_init_se (&se, NULL);
2834 gfc_conv_expr_val (&se, fa->start);
2835 /* Record it in this_forall. */
2836 this_forall->start = se.expr;
2837 gfc_add_block_to_block (&block, &se.pre);
2838 start[n] = se.expr;
2839
2840 gfc_init_se (&se, NULL);
2841 gfc_conv_expr_val (&se, fa->end);
2842 /* Record it in this_forall. */
2843 this_forall->end = se.expr;
2844 gfc_make_safe_expr (&se);
2845 gfc_add_block_to_block (&block, &se.pre);
2846 end[n] = se.expr;
2847
2848 gfc_init_se (&se, NULL);
2849 gfc_conv_expr_val (&se, fa->stride);
2850 /* Record it in this_forall. */
2851 this_forall->step = se.expr;
2852 gfc_make_safe_expr (&se);
2853 gfc_add_block_to_block (&block, &se.pre);
2854 step[n] = se.expr;
2855
2856 /* Set the NEXT field of this_forall to NULL. */
2857 this_forall->next = NULL;
2858 /* Link this_forall to the info construct. */
2859 if (info->this_loop)
2860 {
2861 iter_info *iter_tmp = info->this_loop;
2862 while (iter_tmp->next != NULL)
2863 iter_tmp = iter_tmp->next;
2864 iter_tmp->next = this_forall;
2865 }
2866 else
2867 info->this_loop = this_forall;
2868
2869 n++;
2870 }
2871 nvar = n;
2872
2873 /* Calculate the size needed for the current forall level. */
2874 size = gfc_index_one_node;
2875 for (n = 0; n < nvar; n++)
2876 {
2877 /* size = (end + step - start) / step. */
2878 tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (start[n]),
2879 step[n], start[n]);
2880 tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (end[n]), end[n], tmp);
2881
2882 tmp = fold_build2 (FLOOR_DIV_EXPR, TREE_TYPE (tmp), tmp, step[n]);
2883 tmp = convert (gfc_array_index_type, tmp);
2884
2885 size = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
2886 }
2887
2888 /* Record the nvar and size of current forall level. */
2889 info->nvar = nvar;
2890 info->size = size;
2891
2892 if (code->expr)
2893 {
2894 /* If the mask is .true., consider the FORALL unconditional. */
2895 if (code->expr->expr_type == EXPR_CONSTANT
2896 && code->expr->value.logical)
2897 need_mask = false;
2898 else
2899 need_mask = true;
2900 }
2901 else
2902 need_mask = false;
2903
2904 /* First we need to allocate the mask. */
2905 if (need_mask)
2906 {
2907 /* As the mask array can be very big, prefer compact boolean types. */
2908 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2909 mask = allocate_temp_for_forall_nest (nested_forall_info, mask_type,
2910 size, NULL, &block, &pmask);
2911 maskindex = gfc_create_var_np (gfc_array_index_type, "mi");
2912
2913 /* Record them in the info structure. */
2914 info->maskindex = maskindex;
2915 info->mask = mask;
2916 }
2917 else
2918 {
2919 /* No mask was specified. */
2920 maskindex = NULL_TREE;
2921 mask = pmask = NULL_TREE;
2922 }
2923
2924 /* Link the current forall level to nested_forall_info. */
2925 info->prev_nest = nested_forall_info;
2926 nested_forall_info = info;
2927
2928 /* Copy the mask into a temporary variable if required.
2929 For now we assume a mask temporary is needed. */
2930 if (need_mask)
2931 {
2932 /* As the mask array can be very big, prefer compact boolean types. */
2933 tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
2934
2935 gfc_add_modify (&block, maskindex, gfc_index_zero_node);
2936
2937 /* Start of mask assignment loop body. */
2938 gfc_start_block (&body);
2939
2940 /* Evaluate the mask expression. */
2941 gfc_init_se (&se, NULL);
2942 gfc_conv_expr_val (&se, code->expr);
2943 gfc_add_block_to_block (&body, &se.pre);
2944
2945 /* Store the mask. */
2946 se.expr = convert (mask_type, se.expr);
2947
2948 tmp = gfc_build_array_ref (mask, maskindex, NULL);
2949 gfc_add_modify (&body, tmp, se.expr);
2950
2951 /* Advance to the next mask element. */
2952 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
2953 maskindex, gfc_index_one_node);
2954 gfc_add_modify (&body, maskindex, tmp);
2955
2956 /* Generate the loops. */
2957 tmp = gfc_finish_block (&body);
2958 tmp = gfc_trans_nested_forall_loop (info, tmp, 0);
2959 gfc_add_expr_to_block (&block, tmp);
2960 }
2961
2962 c = code->block->next;
2963
2964 /* TODO: loop merging in FORALL statements. */
2965 /* Now that we've got a copy of the mask, generate the assignment loops. */
2966 while (c)
2967 {
2968 switch (c->op)
2969 {
2970 case EXEC_ASSIGN:
2971 /* A scalar or array assignment. Do the simple check for
2972 lhs-to-rhs dependencies. These make a temporary for the
2973 rhs and form a second forall block to copy it to the variable. */
2974 need_temp = check_forall_dependencies(c, &pre, &post);
2975
2976 /* Temporaries due to array assignment data dependencies introduce
2977 no end of problems. */
2978 if (need_temp)
2979 gfc_trans_assign_need_temp (c->expr, c->expr2, NULL, false,
2980 nested_forall_info, &block);
2981 else
2982 {
2983 /* Use the normal assignment copying routines. */
2984 assign = gfc_trans_assignment (c->expr, c->expr2, false);
2985
2986 /* Generate body and loops. */
2987 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
2988 assign, 1);
2989 gfc_add_expr_to_block (&block, tmp);
2990 }
2991
2992 /* Cleanup any temporary symtrees that have been made to deal
2993 with dependencies. */
2994 if (new_symtree)
2995 cleanup_forall_symtrees (c);
2996
2997 break;
2998
2999 case EXEC_WHERE:
3000 /* Translate WHERE or WHERE construct nested in FORALL. */
3001 gfc_trans_where_2 (c, NULL, false, nested_forall_info, &block);
3002 break;
3003
3004 /* Pointer assignment inside FORALL. */
3005 case EXEC_POINTER_ASSIGN:
3006 need_temp = gfc_check_dependency (c->expr, c->expr2, 0);
3007 if (need_temp)
3008 gfc_trans_pointer_assign_need_temp (c->expr, c->expr2,
3009 nested_forall_info, &block);
3010 else
3011 {
3012 /* Use the normal assignment copying routines. */
3013 assign = gfc_trans_pointer_assignment (c->expr, c->expr2);
3014
3015 /* Generate body and loops. */
3016 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3017 assign, 1);
3018 gfc_add_expr_to_block (&block, tmp);
3019 }
3020 break;
3021
3022 case EXEC_FORALL:
3023 tmp = gfc_trans_forall_1 (c, nested_forall_info);
3024 gfc_add_expr_to_block (&block, tmp);
3025 break;
3026
3027 /* Explicit subroutine calls are prevented by the frontend but interface
3028 assignments can legitimately produce them. */
3029 case EXEC_ASSIGN_CALL:
3030 assign = gfc_trans_call (c, true);
3031 tmp = gfc_trans_nested_forall_loop (nested_forall_info, assign, 1);
3032 gfc_add_expr_to_block (&block, tmp);
3033 break;
3034
3035 default:
3036 gcc_unreachable ();
3037 }
3038
3039 c = c->next;
3040 }
3041
3042 /* Restore the original index variables. */
3043 for (fa = code->ext.forall_iterator, n = 0; fa; fa = fa->next, n++)
3044 gfc_restore_sym (fa->var->symtree->n.sym, &saved_vars[n]);
3045
3046 /* Free the space for var, start, end, step, varexpr. */
3047 gfc_free (var);
3048 gfc_free (start);
3049 gfc_free (end);
3050 gfc_free (step);
3051 gfc_free (varexpr);
3052 gfc_free (saved_vars);
3053
3054 /* Free the space for this forall_info. */
3055 gfc_free (info);
3056
3057 if (pmask)
3058 {
3059 /* Free the temporary for the mask. */
3060 tmp = gfc_call_free (pmask);
3061 gfc_add_expr_to_block (&block, tmp);
3062 }
3063 if (maskindex)
3064 pushdecl (maskindex);
3065
3066 gfc_add_block_to_block (&pre, &block);
3067 gfc_add_block_to_block (&pre, &post);
3068
3069 return gfc_finish_block (&pre);
3070 }
3071
3072
3073 /* Translate the FORALL statement or construct. */
3074
3075 tree gfc_trans_forall (gfc_code * code)
3076 {
3077 return gfc_trans_forall_1 (code, NULL);
3078 }
3079
3080
3081 /* Evaluate the WHERE mask expression, copy its value to a temporary.
3082 If the WHERE construct is nested in FORALL, compute the size of the
3083 overall temporary as the size of the WHERE mask expression multiplied
3084 by the iteration count of the nested forall.
3085 ME is the WHERE mask expression.
3086 MASK is the current execution mask upon input, whose sense may or may
3087 not be inverted as specified by the INVERT argument.
3088 CMASK is the updated execution mask on output, or NULL if not required.
3089 PMASK is the pending execution mask on output, or NULL if not required.
3090 BLOCK is the block in which to place the condition evaluation loops. */
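/* In rough terms, for each element this evaluates the mask expression
   into COND and stores
       cmask[i] = mask[i] && cond;      (when CMASK is requested)
       pmask[i] = mask[i] && !cond;     (when PMASK is requested)
   where mask[i] is negated first if INVERT is set, and the mask[i]
   factor is dropped when no outer MASK exists.  */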
3091
3092 static void
3093 gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
3094 tree mask, bool invert, tree cmask, tree pmask,
3095 tree mask_type, stmtblock_t * block)
3096 {
3097 tree tmp, tmp1;
3098 gfc_ss *lss, *rss;
3099 gfc_loopinfo loop;
3100 stmtblock_t body, body1;
3101 tree count, cond, mtmp;
3102 gfc_se lse, rse;
3103
3104 gfc_init_loopinfo (&loop);
3105
3106 lss = gfc_walk_expr (me);
3107 rss = gfc_walk_expr (me);
3108
3109 /* Variable to index the temporary. */
3110 count = gfc_create_var (gfc_array_index_type, "count");
3111 /* Initialize count. */
3112 gfc_add_modify (block, count, gfc_index_zero_node);
3113
3114 gfc_start_block (&body);
3115
3116 gfc_init_se (&rse, NULL);
3117 gfc_init_se (&lse, NULL);
3118
3119 if (lss == gfc_ss_terminator)
3120 {
3121 gfc_init_block (&body1);
3122 }
3123 else
3124 {
3125 /* Initialize the loop. */
3126 gfc_init_loopinfo (&loop);
3127
3128 /* We may need LSS to determine the shape of the expression. */
3129 gfc_add_ss_to_loop (&loop, lss);
3130 gfc_add_ss_to_loop (&loop, rss);
3131
3132 gfc_conv_ss_startstride (&loop);
3133 gfc_conv_loop_setup (&loop, &me->where);
3134
3135 gfc_mark_ss_chain_used (rss, 1);
3136 /* Start the loop body. */
3137 gfc_start_scalarized_body (&loop, &body1);
3138
3139 /* Translate the expression. */
3140 gfc_copy_loopinfo_to_se (&rse, &loop);
3141 rse.ss = rss;
3142 gfc_conv_expr (&rse, me);
3143 }
3144
3145 /* Variable to evaluate mask condition. */
3146 cond = gfc_create_var (mask_type, "cond");
3147 if (mask && (cmask || pmask))
3148 mtmp = gfc_create_var (mask_type, "mask");
3149 else
     mtmp = NULL_TREE;
3150
3151 gfc_add_block_to_block (&body1, &lse.pre);
3152 gfc_add_block_to_block (&body1, &rse.pre);
3153
3154 gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
3155
3156 if (mask && (cmask || pmask))
3157 {
3158 tmp = gfc_build_array_ref (mask, count, NULL);
3159 if (invert)
3160 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
3161 gfc_add_modify (&body1, mtmp, tmp);
3162 }
3163
3164 if (cmask)
3165 {
3166 tmp1 = gfc_build_array_ref (cmask, count, NULL);
3167 tmp = cond;
3168 if (mask)
3169 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3170 gfc_add_modify (&body1, tmp1, tmp);
3171 }
3172
3173 if (pmask)
3174 {
3175 tmp1 = gfc_build_array_ref (pmask, count, NULL);
3176 tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
3177 if (mask)
3178 tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
3179 gfc_add_modify (&body1, tmp1, tmp);
3180 }
3181
3182 gfc_add_block_to_block (&body1, &lse.post);
3183 gfc_add_block_to_block (&body1, &rse.post);
3184
3185 if (lss == gfc_ss_terminator)
3186 {
3187 gfc_add_block_to_block (&body, &body1);
3188 }
3189 else
3190 {
3191 /* Increment count. */
3192 tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
3193 gfc_index_one_node);
3194 gfc_add_modify (&body1, count, tmp1);
3195
3196 /* Generate the copying loops. */
3197 gfc_trans_scalarizing_loops (&loop, &body1);
3198
3199 gfc_add_block_to_block (&body, &loop.pre);
3200 gfc_add_block_to_block (&body, &loop.post);
3201
3202 gfc_cleanup_loop (&loop);
3203 /* TODO: Reuse lss and rss when copying temp->lhs. Need to be careful
3204 as tree nodes in SS may not be valid in different scope. */
3205 }
3206
3207 tmp1 = gfc_finish_block (&body);
3208 /* If the WHERE construct is inside FORALL, fill the full temporary. */
3209 if (nested_forall_info != NULL)
3210 tmp1 = gfc_trans_nested_forall_loop (nested_forall_info, tmp1, 1);
3211
3212 gfc_add_expr_to_block (block, tmp1);
3213 }
3214
3215
3216 /* Translate an assignment statement in a WHERE statement or construct
3217 statement. The MASK expression is used to control which elements
3218 of EXPR1 shall be assigned. The sense of MASK is specified by
3219 INVERT. */
3220
3221 static tree
3222 gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
3223 tree mask, bool invert,
3224 tree count1, tree count2,
3225 gfc_symbol *sym)
3226 {
3227 gfc_se lse;
3228 gfc_se rse;
3229 gfc_ss *lss;
3230 gfc_ss *lss_section;
3231 gfc_ss *rss;
3232
3233 gfc_loopinfo loop;
3234 tree tmp;
3235 stmtblock_t block;
3236 stmtblock_t body;
3237 tree index, maskexpr;
3238
3239 #if 0
3240 /* TODO: handle this special case.
3241 Special case a single function returning an array. */
3242 if (expr2->expr_type == EXPR_FUNCTION && expr2->rank > 0)
3243 {
3244 tmp = gfc_trans_arrayfunc_assign (expr1, expr2);
3245 if (tmp)
3246 return tmp;
3247 }
3248 #endif
3249
3250 /* Assignment of the form lhs = rhs. */
3251 gfc_start_block (&block);
3252
3253 gfc_init_se (&lse, NULL);
3254 gfc_init_se (&rse, NULL);
3255
3256 /* Walk the lhs. */
3257 lss = gfc_walk_expr (expr1);
3258 rss = NULL;
3259
3260 /* In each where-assign-stmt, the mask-expr and the variable being
3261 defined shall be arrays of the same shape. */
3262 gcc_assert (lss != gfc_ss_terminator);
3263
3264 /* The assignment needs scalarization. */
3265 lss_section = lss;
3266
3267 /* Find a non-scalar SS from the lhs. */
3268 while (lss_section != gfc_ss_terminator
3269 && lss_section->type != GFC_SS_SECTION)
3270 lss_section = lss_section->next;
3271
3272 gcc_assert (lss_section != gfc_ss_terminator);
3273
3274 /* Initialize the scalarizer. */
3275 gfc_init_loopinfo (&loop);
3276
3277 /* Walk the rhs. */
3278 rss = gfc_walk_expr (expr2);
3279 if (rss == gfc_ss_terminator)
3280 {
3281 /* The rhs is scalar. Add a ss for the expression. */
3282 rss = gfc_get_ss ();
3283 rss->where = 1;
3284 rss->next = gfc_ss_terminator;
3285 rss->type = GFC_SS_SCALAR;
3286 rss->expr = expr2;
3287 }
3288
3289 /* Associate the SS with the loop. */
3290 gfc_add_ss_to_loop (&loop, lss);
3291 gfc_add_ss_to_loop (&loop, rss);
3292
3293 /* Calculate the bounds of the scalarization. */
3294 gfc_conv_ss_startstride (&loop);
3295
3296 /* Resolve any data dependencies in the statement. */
3297 gfc_conv_resolve_dependencies (&loop, lss_section, rss);
3298
3299 /* Setup the scalarizing loops. */
3300 gfc_conv_loop_setup (&loop, &expr2->where);
3301
3302 /* Setup the gfc_se structures. */
3303 gfc_copy_loopinfo_to_se (&lse, &loop);
3304 gfc_copy_loopinfo_to_se (&rse, &loop);
3305
3306 rse.ss = rss;
3307 gfc_mark_ss_chain_used (rss, 1);
3308 if (loop.temp_ss == NULL)
3309 {
3310 lse.ss = lss;
3311 gfc_mark_ss_chain_used (lss, 1);
3312 }
3313 else
3314 {
3315 lse.ss = loop.temp_ss;
3316 gfc_mark_ss_chain_used (lss, 3);
3317 gfc_mark_ss_chain_used (loop.temp_ss, 3);
3318 }
3319
3320 /* Start the scalarized loop body. */
3321 gfc_start_scalarized_body (&loop, &body);
3322
3323 /* Translate the expression. */
3324 gfc_conv_expr (&rse, expr2);
3325 if (lss != gfc_ss_terminator && loop.temp_ss != NULL)
3326 {
3327 gfc_conv_tmp_array_ref (&lse);
3328 gfc_advance_se_ss_chain (&lse);
3329 }
3330 else
3331 gfc_conv_expr (&lse, expr1);
3332
3333 /* Form the mask expression according to the mask. */
3334 index = count1;
3335 maskexpr = gfc_build_array_ref (mask, index, NULL);
3336 if (invert)
3337 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr), maskexpr);
3338
3339 /* Use the scalar assignment as is. */
3340 if (sym == NULL)
3341 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts,
3342 loop.temp_ss != NULL, false);
3343 else
3344 tmp = gfc_conv_operator_assign (&lse, &rse, sym);
3345
3346 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3347
3348 gfc_add_expr_to_block (&body, tmp);
3349
3350 if (lss == gfc_ss_terminator)
3351 {
3352 /* Increment count1. */
3353 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3354 count1, gfc_index_one_node);
3355 gfc_add_modify (&body, count1, tmp);
3356
3357 /* Use the scalar assignment as is. */
3358 gfc_add_block_to_block (&block, &body);
3359 }
3360 else
3361 {
3362 gcc_assert (lse.ss == gfc_ss_terminator
3363 && rse.ss == gfc_ss_terminator);
3364
3365 if (loop.temp_ss != NULL)
3366 {
3367 /* Increment count1 before finishing the main body of a scalarized
3368 expression. */
3369 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3370 count1, gfc_index_one_node);
3371 gfc_add_modify (&body, count1, tmp);
3372 gfc_trans_scalarized_loop_boundary (&loop, &body);
3373
3374 /* We need to copy the temporary to the actual lhs. */
3375 gfc_init_se (&lse, NULL);
3376 gfc_init_se (&rse, NULL);
3377 gfc_copy_loopinfo_to_se (&lse, &loop);
3378 gfc_copy_loopinfo_to_se (&rse, &loop);
3379
3380 rse.ss = loop.temp_ss;
3381 lse.ss = lss;
3382
3383 gfc_conv_tmp_array_ref (&rse);
3384 gfc_advance_se_ss_chain (&rse);
3385 gfc_conv_expr (&lse, expr1);
3386
3387 gcc_assert (lse.ss == gfc_ss_terminator
3388 && rse.ss == gfc_ss_terminator);
3389
3390 /* Form the mask expression according to the mask tree list. */
3391 index = count2;
3392 maskexpr = gfc_build_array_ref (mask, index, NULL);
3393 if (invert)
3394 maskexpr = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (maskexpr),
3395 maskexpr);
3396
3397 /* Use the scalar assignment as is. */
3398 tmp = gfc_trans_scalar_assign (&lse, &rse, expr1->ts, false, false);
3399 tmp = build3_v (COND_EXPR, maskexpr, tmp, build_empty_stmt ());
3400 gfc_add_expr_to_block (&body, tmp);
3401
3402 /* Increment count2. */
3403 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3404 count2, gfc_index_one_node);
3405 gfc_add_modify (&body, count2, tmp);
3406 }
3407 else
3408 {
3409 /* Increment count1. */
3410 tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
3411 count1, gfc_index_one_node);
3412 gfc_add_modify (&body, count1, tmp);
3413 }
3414
3415 /* Generate the copying loops. */
3416 gfc_trans_scalarizing_loops (&loop, &body);
3417
3418 /* Wrap the whole thing up. */
3419 gfc_add_block_to_block (&block, &loop.pre);
3420 gfc_add_block_to_block (&block, &loop.post);
3421 gfc_cleanup_loop (&loop);
3422 }
3423
3424 return gfc_finish_block (&block);
3425 }
3426
3427
3428 /* Translate the WHERE construct or statement.
3429 This function can be called recursively to translate nested WHERE
3430 constructs or statements.
3431 MASK is the control mask. */
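/* Roughly, for a construct such as
       WHERE (c1)
         a = b
       ELSEWHERE (c2)
         a = d
       END WHERE
   the first clause is executed under the control mask c1, while the
   ELSEWHERE body runs under .not. c1 .and. c2; for deeper chains the
   pending mask (pmask) carries the ".not." part forward.  */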
3432
3433 static void
3434 gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
3435 forall_info * nested_forall_info, stmtblock_t * block)
3436 {
3437 stmtblock_t inner_size_body;
3438 tree inner_size, size;
3439 gfc_ss *lss, *rss;
3440 tree mask_type;
3441 gfc_expr *expr1;
3442 gfc_expr *expr2;
3443 gfc_code *cblock;
3444 gfc_code *cnext;
3445 tree tmp;
3446 tree cond;
3447 tree count1, count2;
3448 bool need_cmask;
3449 bool need_pmask;
3450 int need_temp;
3451 tree pcmask = NULL_TREE;
3452 tree ppmask = NULL_TREE;
3453 tree cmask = NULL_TREE;
3454 tree pmask = NULL_TREE;
3455 gfc_actual_arglist *arg;
3456
3457 /* The WHERE statement or the WHERE construct statement. */
3458 cblock = code->block;
3459
3460 /* As the mask array can be very big, prefer compact boolean types. */
3461 mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
3462
3463 /* Determine which temporary masks are needed. */
3464 if (!cblock->block)
3465 {
3466 /* One clause: No ELSEWHEREs. */
3467 need_cmask = (cblock->next != 0);
3468 need_pmask = false;
3469 }
3470 else if (cblock->block->block)
3471 {
3472 /* Three or more clauses: Conditional ELSEWHEREs. */
3473 need_cmask = true;
3474 need_pmask = true;
3475 }
3476 else if (cblock->next)
3477 {
3478 /* Two clauses, the first non-empty. */
3479 need_cmask = true;
3480 need_pmask = (mask != NULL_TREE
3481 && cblock->block->next != 0);
3482 }
3483 else if (!cblock->block->next)
3484 {
3485 /* Two clauses, both empty. */
3486 need_cmask = false;
3487 need_pmask = false;
3488 }
3489 /* Two clauses, the first empty, the second non-empty. */
3490 else if (mask)
3491 {
3492 need_cmask = (cblock->block->expr != 0);
3493 need_pmask = true;
3494 }
3495 else
3496 {
3497 need_cmask = true;
3498 need_pmask = false;
3499 }
3500
3501 if (need_cmask || need_pmask)
3502 {
3503 /* Calculate the size of temporary needed by the mask-expr. */
3504 gfc_init_block (&inner_size_body);
3505 inner_size = compute_inner_temp_size (cblock->expr, cblock->expr,
3506 &inner_size_body, &lss, &rss);
3507
3508 /* Calculate the total size of temporary needed. */
3509 size = compute_overall_iter_number (nested_forall_info, inner_size,
3510 &inner_size_body, block);
3511
3512 /* Check whether the size is negative. */
3513 cond = fold_build2 (LE_EXPR, boolean_type_node, size,
3514 gfc_index_zero_node);
3515 size = fold_build3 (COND_EXPR, gfc_array_index_type, cond,
3516 gfc_index_zero_node, size);
3517 size = gfc_evaluate_now (size, block);
3518
3519 /* Allocate temporary for WHERE mask if needed. */
3520 if (need_cmask)
3521 cmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3522 &pcmask);
3523
3524 /* Allocate temporary for !mask if needed. */
3525 if (need_pmask)
3526 pmask = allocate_temp_for_forall_nest_1 (mask_type, size, block,
3527 &ppmask);
3528 }
3529
3530 while (cblock)
3531 {
3532 /* Each time around this loop, the where clause is conditional
3533 on the value of mask and invert, which are updated at the
3534 bottom of the loop. */
3535
3536 /* Has mask-expr. */
3537 if (cblock->expr)
3538 {
3539 /* Ensure that the WHERE mask will be evaluated exactly once.
3540 If there are no statements in this WHERE/ELSEWHERE clause,
3541 then we don't need to update the control mask (cmask).
3542 If this is the last clause of the WHERE construct, then
3543 we don't need to update the pending control mask (pmask). */
3544 if (mask)
3545 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3546 mask, invert,
3547 cblock->next ? cmask : NULL_TREE,
3548 cblock->block ? pmask : NULL_TREE,
3549 mask_type, block);
3550 else
3551 gfc_evaluate_where_mask (cblock->expr, nested_forall_info,
3552 NULL_TREE, false,
3553 (cblock->next || cblock->block)
3554 ? cmask : NULL_TREE,
3555 NULL_TREE, mask_type, block);
3556
3557 invert = false;
3558 }
3559 /* It's a final elsewhere-stmt. No mask-expr is present. */
3560 else
3561 cmask = mask;
3562
3563 /* The body of this where clause is controlled by cmask, with
3564 its sense specified by invert. */
3565
3566 /* Get the assignment statement of a WHERE statement, or the first
3567 statement in where-body-construct of a WHERE construct. */
3568 cnext = cblock->next;
3569 while (cnext)
3570 {
3571 switch (cnext->op)
3572 {
3573 /* WHERE assignment statement. */
3574 case EXEC_ASSIGN_CALL:
3575
3576 arg = cnext->ext.actual;
3577 expr1 = expr2 = NULL;
3578 for (; arg; arg = arg->next)
3579 {
3580 if (!arg->expr)
3581 continue;
3582 if (expr1 == NULL)
3583 expr1 = arg->expr;
3584 else
3585 expr2 = arg->expr;
3586 }
3587 goto evaluate;
3588
3589 case EXEC_ASSIGN:
3590 expr1 = cnext->expr;
3591 expr2 = cnext->expr2;
3592 evaluate:
3593 if (nested_forall_info != NULL)
3594 {
3595 need_temp = gfc_check_dependency (expr1, expr2, 0);
3596 if (need_temp && cnext->op != EXEC_ASSIGN_CALL)
3597 gfc_trans_assign_need_temp (expr1, expr2,
3598 cmask, invert,
3599 nested_forall_info, block);
3600 else
3601 {
3602 /* Variables to control maskexpr. */
3603 count1 = gfc_create_var (gfc_array_index_type, "count1");
3604 count2 = gfc_create_var (gfc_array_index_type, "count2");
3605 gfc_add_modify (block, count1, gfc_index_zero_node);
3606 gfc_add_modify (block, count2, gfc_index_zero_node);
3607
3608 tmp = gfc_trans_where_assign (expr1, expr2,
3609 cmask, invert,
3610 count1, count2,
3611 cnext->resolved_sym);
3612
3613 tmp = gfc_trans_nested_forall_loop (nested_forall_info,
3614 tmp, 1);
3615 gfc_add_expr_to_block (block, tmp);
3616 }
3617 }
3618 else
3619 {
3620 /* Variables to control maskexpr. */
3621 count1 = gfc_create_var (gfc_array_index_type, "count1");
3622 count2 = gfc_create_var (gfc_array_index_type, "count2");
3623 gfc_add_modify (block, count1, gfc_index_zero_node);
3624 gfc_add_modify (block, count2, gfc_index_zero_node);
3625
3626 tmp = gfc_trans_where_assign (expr1, expr2,
3627 cmask, invert,
3628 count1, count2,
3629 cnext->resolved_sym);
3630 gfc_add_expr_to_block (block, tmp);
3631
3632 }
3633 break;
3634
3635 /* WHERE or WHERE construct is part of a where-body-construct. */
3636 case EXEC_WHERE:
3637 gfc_trans_where_2 (cnext, cmask, invert,
3638 nested_forall_info, block);
3639 break;
3640
3641 default:
3642 gcc_unreachable ();
3643 }
3644
3645 /* The next statement within the same where-body-construct. */
3646 cnext = cnext->next;
3647 }
3648 /* The next masked-elsewhere-stmt, elsewhere-stmt, or end-where-stmt. */
3649 cblock = cblock->block;
3650 if (mask == NULL_TREE)
3651 {
3652 /* If we're the initial WHERE, we can simply invert the sense
3653 of the current mask to obtain the "mask" for the remaining
3654 ELSEWHEREs. */
3655 invert = true;
3656 mask = cmask;
3657 }
3658 else
3659 {
3660 /* Otherwise, for nested WHERE's we need to use the pending mask. */
3661 invert = false;
3662 mask = pmask;
3663 }
3664 }
3665
3666 /* If we allocated a pending mask array, deallocate it now. */
3667 if (ppmask)
3668 {
3669 tmp = gfc_call_free (ppmask);
3670 gfc_add_expr_to_block (block, tmp);
3671 }
3672
3673 /* If we allocated a current mask array, deallocate it now. */
3674 if (pcmask)
3675 {
3676 tmp = gfc_call_free (pcmask);
3677 gfc_add_expr_to_block (block, tmp);
3678 }
3679 }
3680
3681 /* Translate a simple WHERE construct or statement without dependencies.
3682 CBLOCK is the "then" clause of the WHERE statement, where CBLOCK->EXPR
3683 is the mask condition, and EBLOCK if non-NULL is the "else" clause.
3684 Currently both CBLOCK and EBLOCK are restricted to single assignments. */
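/* As a sketch, a dependence-free
       WHERE (a > 0)
         b = c
       ELSEWHERE
         b = d
       END WHERE
   is lowered into a single scalarized loop whose per-element body is
   essentially "b(i) = (a(i) > 0) ? c(i) : d(i)".  */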
3685
3686 static tree
3687 gfc_trans_where_3 (gfc_code * cblock, gfc_code * eblock)
3688 {
3689 stmtblock_t block, body;
3690 gfc_expr *cond, *tdst, *tsrc, *edst, *esrc;
3691 tree tmp, cexpr, tstmt, estmt;
3692 gfc_ss *css, *tdss, *tsss;
3693 gfc_se cse, tdse, tsse, edse, esse;
3694 gfc_loopinfo loop;
3695 gfc_ss *edss = 0;
3696 gfc_ss *esss = 0;
3697
3698 cond = cblock->expr;
3699 tdst = cblock->next->expr;
3700 tsrc = cblock->next->expr2;
3701 edst = eblock ? eblock->next->expr : NULL;
3702 esrc = eblock ? eblock->next->expr2 : NULL;
3703
3704 gfc_start_block (&block);
3705 gfc_init_loopinfo (&loop);
3706
3707 /* Handle the condition. */
3708 gfc_init_se (&cse, NULL);
3709 css = gfc_walk_expr (cond);
3710 gfc_add_ss_to_loop (&loop, css);
3711
3712 /* Handle the then-clause. */
3713 gfc_init_se (&tdse, NULL);
3714 gfc_init_se (&tsse, NULL);
3715 tdss = gfc_walk_expr (tdst);
3716 tsss = gfc_walk_expr (tsrc);
3717 if (tsss == gfc_ss_terminator)
3718 {
3719 tsss = gfc_get_ss ();
3720 tsss->where = 1;
3721 tsss->next = gfc_ss_terminator;
3722 tsss->type = GFC_SS_SCALAR;
3723 tsss->expr = tsrc;
3724 }
3725 gfc_add_ss_to_loop (&loop, tdss);
3726 gfc_add_ss_to_loop (&loop, tsss);
3727
3728 if (eblock)
3729 {
3730 /* Handle the else clause. */
3731 gfc_init_se (&edse, NULL);
3732 gfc_init_se (&esse, NULL);
3733 edss = gfc_walk_expr (edst);
3734 esss = gfc_walk_expr (esrc);
3735 if (esss == gfc_ss_terminator)
3736 {
3737 esss = gfc_get_ss ();
3738 esss->where = 1;
3739 esss->next = gfc_ss_terminator;
3740 esss->type = GFC_SS_SCALAR;
3741 esss->expr = esrc;
3742 }
3743 gfc_add_ss_to_loop (&loop, edss);
3744 gfc_add_ss_to_loop (&loop, esss);
3745 }
3746
3747 gfc_conv_ss_startstride (&loop);
3748 gfc_conv_loop_setup (&loop, &tdst->where);
3749
3750 gfc_mark_ss_chain_used (css, 1);
3751 gfc_mark_ss_chain_used (tdss, 1);
3752 gfc_mark_ss_chain_used (tsss, 1);
3753 if (eblock)
3754 {
3755 gfc_mark_ss_chain_used (edss, 1);
3756 gfc_mark_ss_chain_used (esss, 1);
3757 }
3758
3759 gfc_start_scalarized_body (&loop, &body);
3760
3761 gfc_copy_loopinfo_to_se (&cse, &loop);
3762 gfc_copy_loopinfo_to_se (&tdse, &loop);
3763 gfc_copy_loopinfo_to_se (&tsse, &loop);
3764 cse.ss = css;
3765 tdse.ss = tdss;
3766 tsse.ss = tsss;
3767 if (eblock)
3768 {
3769 gfc_copy_loopinfo_to_se (&edse, &loop);
3770 gfc_copy_loopinfo_to_se (&esse, &loop);
3771 edse.ss = edss;
3772 esse.ss = esss;
3773 }
3774
3775 gfc_conv_expr (&cse, cond);
3776 gfc_add_block_to_block (&body, &cse.pre);
3777 cexpr = cse.expr;
3778
3779 gfc_conv_expr (&tsse, tsrc);
3780 if (tdss != gfc_ss_terminator && loop.temp_ss != NULL)
3781 {
3782 gfc_conv_tmp_array_ref (&tdse);
3783 gfc_advance_se_ss_chain (&tdse);
3784 }
3785 else
3786 gfc_conv_expr (&tdse, tdst);
3787
3788 if (eblock)
3789 {
3790 gfc_conv_expr (&esse, esrc);
3791 if (edss != gfc_ss_terminator && loop.temp_ss != NULL)
3792 {
3793 gfc_conv_tmp_array_ref (&edse);
3794 gfc_advance_se_ss_chain (&edse);
3795 }
3796 else
3797 gfc_conv_expr (&edse, edst);
3798 }
3799
3800 tstmt = gfc_trans_scalar_assign (&tdse, &tsse, tdst->ts, false, false);
3801 estmt = eblock ? gfc_trans_scalar_assign (&edse, &esse, edst->ts, false, false)
3802 : build_empty_stmt ();
3803 tmp = build3_v (COND_EXPR, cexpr, tstmt, estmt);
3804 gfc_add_expr_to_block (&body, tmp);
3805 gfc_add_block_to_block (&body, &cse.post);
3806
3807 gfc_trans_scalarizing_loops (&loop, &body);
3808 gfc_add_block_to_block (&block, &loop.pre);
3809 gfc_add_block_to_block (&block, &loop.post);
3810 gfc_cleanup_loop (&loop);
3811
3812 return gfc_finish_block (&block);
3813 }
3814
3815 /* As the WHERE statement or WHERE construct can be nested, we call
3816 gfc_trans_where_2 to do the translation and pass the initial
3817 NULL values for both the control mask and the pending control mask. */
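/* For instance (illustrative only):

       WHERE (a > 0) b = c

   has no dependence between mask, destination and source, so the code
   below hands it to gfc_trans_where_3, while

       WHERE (b(2:n) > 0) b(1:n-1) = b(2:n)

   writes elements that the mask and the source still need to read, so it
   takes the general gfc_trans_where_2 path with explicit mask temporaries.  */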
3818
3819 tree
3820 gfc_trans_where (gfc_code * code)
3821 {
3822 stmtblock_t block;
3823 gfc_code *cblock;
3824 gfc_code *eblock;
3825
3826 cblock = code->block;
3827 if (cblock->next
3828 && cblock->next->op == EXEC_ASSIGN
3829 && !cblock->next->next)
3830 {
3831 eblock = cblock->block;
3832 if (!eblock)
3833 {
3834 /* A simple "WHERE (cond) x = y" statement or block is
3835 dependence free if cond is not dependent upon writing x,
3836 and the source y is unaffected by the destination x. */
3837 if (!gfc_check_dependency (cblock->next->expr,
3838 cblock->expr, 0)
3839 && !gfc_check_dependency (cblock->next->expr,
3840 cblock->next->expr2, 0))
3841 return gfc_trans_where_3 (cblock, NULL);
3842 }
3843 else if (!eblock->expr
3844 && !eblock->block
3845 && eblock->next
3846 && eblock->next->op == EXEC_ASSIGN
3847 && !eblock->next->next)
3848 {
3849 /* A simple "WHERE (cond) x1 = y1 ELSEWHERE x2 = y2 ENDWHERE"
3850 block is dependence free if cond is not dependent on writes
3851 to x1 and x2, y1 is not dependent on writes to x2, y2
3852 is not dependent on writes to x1, and neither y is
3853 dependent upon its own x. In addition to this, the
3854 final two dependency checks below exclude all but the same
3855 array reference if the where and elsewhere destinations
3856 are the same. In short, this is VERY conservative, which
3857 is needed because the two loops required by the standard
3858 are coalesced in gfc_trans_where_3. */
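/* E.g. (illustrative): a construct like

       WHERE (c > 0)
         a = b
       ELSEWHERE
         a = d
       END WHERE

   with non-overlapping a, b, c and d passes all of the checks below and
   is coalesced into one loop; any other overlap between the destinations,
   the mask and the sources sends it to gfc_trans_where_2 instead.  */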
3859 if (!gfc_check_dependency(cblock->next->expr,
3860 cblock->expr, 0)
3861 && !gfc_check_dependency(eblock->next->expr,
3862 cblock->expr, 0)
3863 && !gfc_check_dependency(cblock->next->expr,
3864 eblock->next->expr2, 1)
3865 && !gfc_check_dependency(eblock->next->expr,
3866 cblock->next->expr2, 1)
3867 && !gfc_check_dependency(cblock->next->expr,
3868 cblock->next->expr2, 1)
3869 && !gfc_check_dependency(eblock->next->expr,
3870 eblock->next->expr2, 1)
3871 && !gfc_check_dependency(cblock->next->expr,
3872 eblock->next->expr, 0)
3873 && !gfc_check_dependency(eblock->next->expr,
3874 cblock->next->expr, 0))
3875 return gfc_trans_where_3 (cblock, eblock);
3876 }
3877 }
3878
3879 gfc_start_block (&block);
3880
3881 gfc_trans_where_2 (code, NULL, false, NULL, &block);
3882
3883 return gfc_finish_block (&block);
3884 }
3885
3886
3887 /* CYCLE a DO loop. The label decl has already been created by
3888 gfc_trans_do(); it is in TREE_PURPOSE (backend_decl) of the gfc_code
3889 node at the head of the loop. We must mark the label as used. */
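/* Sketch of the effect (illustrative): in

       do i = 1, n
         if (skip(i)) cycle
         ...
       end do

   the CYCLE becomes a GOTO_EXPR to the label gfc_trans_do places at the
   end of the loop body, just before the loop variable is incremented and
   the termination test is re-run.  */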
3890
3891 tree
3892 gfc_trans_cycle (gfc_code * code)
3893 {
3894 tree cycle_label;
3895
3896 cycle_label = TREE_PURPOSE (code->ext.whichloop->backend_decl);
3897 TREE_USED (cycle_label) = 1;
3898 return build1_v (GOTO_EXPR, cycle_label);
3899 }
3900
3901
3902 /* EXIT a DO loop. Similar to CYCLE, but now the label is in
3903 TREE_VALUE (backend_decl) of the gfc_code node at the head of the
3904 loop. */
3905
3906 tree
3907 gfc_trans_exit (gfc_code * code)
3908 {
3909 tree exit_label;
3910
3911 exit_label = TREE_VALUE (code->ext.whichloop->backend_decl);
3912 TREE_USED (exit_label) = 1;
3913 return build1_v (GOTO_EXPR, exit_label);
3914 }
3915
3916
3917 /* Translate the ALLOCATE statement. */
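/* With STAT= present, the generated code has roughly this shape (a sketch
   only; "allocate obj" stands for whatever gfc_array_allocate or
   gfc_allocate_with_status emits):

       allocate obj1, recording the status in stat;
       if (stat != 0) goto error;
       allocate obj2, recording the status in stat;
       if (stat != 0) goto error;
       ...
     error:
       <the STAT variable> = stat;

   and, if ERRMSG= is also present, a message is copied into the ERRMSG
   variable when stat is nonzero.  */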
3918
3919 tree
3920 gfc_trans_allocate (gfc_code * code)
3921 {
3922 gfc_alloc *al;
3923 gfc_expr *expr;
3924 gfc_se se;
3925 tree tmp;
3926 tree parm;
3927 tree stat;
3928 tree pstat;
3929 tree error_label;
3930 stmtblock_t block;
3931
3932 if (!code->ext.alloc_list)
3933 return NULL_TREE;
3934
3935 pstat = stat = error_label = tmp = NULL_TREE;
3936
3937 gfc_start_block (&block);
3938
3939 /* STAT= and/or ERRMSG= is present. */
3940 if (code->expr || code->expr2)
3941 {
3942 tree gfc_int4_type_node = gfc_get_int_type (4);
3943
3944 stat = gfc_create_var (gfc_int4_type_node, "stat");
3945 pstat = gfc_build_addr_expr (NULL_TREE, stat);
3946
3947 error_label = gfc_build_label_decl (NULL_TREE);
3948 TREE_USED (error_label) = 1;
3949 }
3950
3951 for (al = code->ext.alloc_list; al != NULL; al = al->next)
3952 {
3953 expr = al->expr;
3954
3955 gfc_init_se (&se, NULL);
3956 gfc_start_block (&se.pre);
3957
3958 se.want_pointer = 1;
3959 se.descriptor_only = 1;
3960 gfc_conv_expr (&se, expr);
3961
3962 if (!gfc_array_allocate (&se, expr, pstat))
3963 {
3964 /* A scalar or derived type. */
3965 tmp = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (se.expr)));
3966
3967 if (expr->ts.type == BT_CHARACTER && tmp == NULL_TREE)
3968 tmp = se.string_length;
3969
3970 tmp = gfc_allocate_with_status (&se.pre, tmp, pstat);
3971 tmp = fold_build2 (MODIFY_EXPR, void_type_node, se.expr,
3972 fold_convert (TREE_TYPE (se.expr), tmp));
3973 gfc_add_expr_to_block (&se.pre, tmp);
3974
3975 if (code->expr || code->expr2)
3976 {
3977 tmp = build1_v (GOTO_EXPR, error_label);
3978 parm = fold_build2 (NE_EXPR, boolean_type_node,
3979 stat, build_int_cst (TREE_TYPE (stat), 0));
3980 tmp = fold_build3 (COND_EXPR, void_type_node,
3981 parm, tmp, build_empty_stmt ());
3982 gfc_add_expr_to_block (&se.pre, tmp);
3983 }
3984
3985 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
3986 {
3987 tmp = build_fold_indirect_ref (se.expr);
3988 tmp = gfc_nullify_alloc_comp (expr->ts.derived, tmp, 0);
3989 gfc_add_expr_to_block (&se.pre, tmp);
3990 }
3991
3992 }
3993
3994 tmp = gfc_finish_block (&se.pre);
3995 gfc_add_expr_to_block (&block, tmp);
3996 }
3997
3998 /* STAT block. */
3999 if (code->expr)
4000 {
4001 tmp = build1_v (LABEL_EXPR, error_label);
4002 gfc_add_expr_to_block (&block, tmp);
4003
4004 gfc_init_se (&se, NULL);
4005 gfc_conv_expr_lhs (&se, code->expr);
4006 tmp = convert (TREE_TYPE (se.expr), stat);
4007 gfc_add_modify (&block, se.expr, tmp);
4008 }
4009
4010 /* ERRMSG block. */
4011 if (code->expr2)
4012 {
4013 /* A better error message may be possible, but not required. */
4014 const char *msg = "Attempt to allocate an allocated object";
4015 tree errmsg, slen, dlen;
4016
4017 gfc_init_se (&se, NULL);
4018 gfc_conv_expr_lhs (&se, code->expr2);
4019
4020 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4021
4022 gfc_add_modify (&block, errmsg,
4023 gfc_build_addr_expr (pchar_type_node,
4024 gfc_build_localized_cstring_const (msg)));
4025
4026 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4027 dlen = gfc_get_expr_charlen (code->expr2);
4028 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4029
4030 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4031 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4032
4033 tmp = fold_build2 (NE_EXPR, boolean_type_node, stat,
4034 build_int_cst (TREE_TYPE (stat), 0));
4035
4036 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt ());
4037
4038 gfc_add_expr_to_block (&block, tmp);
4039 }
4040
4041 return gfc_finish_block (&block);
4042 }
4043
4044
4045 /* Translate a DEALLOCATE statement. */
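/* With STAT= present, the individual statuses are accumulated, roughly
   (a sketch of the generated shape):

       astat = 0;
       deallocate obj1, recording the status in stat;
       astat = astat + stat;
       deallocate obj2, recording the status in stat;
       astat = astat + stat;
       ...
       <the STAT variable> = astat;

   and, if ERRMSG= is also present, a message is copied into the ERRMSG
   variable when astat is nonzero.  */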
4046
4047 tree
4048 gfc_trans_deallocate (gfc_code *code)
4049 {
4050 gfc_se se;
4051 gfc_alloc *al;
4052 gfc_expr *expr;
4053 tree apstat, astat, pstat, stat, tmp;
4054 stmtblock_t block;
4055
4056 pstat = apstat = stat = astat = tmp = NULL_TREE;
4057
4058 gfc_start_block (&block);
4059
4060 /* Count the number of failed deallocations. If DEALLOCATE was
4061 called with STAT=, then set STAT to the count. If DEALLOCATE
4062 was called with ERRMSG=, then set ERRMSG to a string. */
4063 if (code->expr || code->expr2)
4064 {
4065 tree gfc_int4_type_node = gfc_get_int_type (4);
4066
4067 stat = gfc_create_var (gfc_int4_type_node, "stat");
4068 pstat = gfc_build_addr_expr (NULL_TREE, stat);
4069
4070 /* Running total of possible deallocation failures. */
4071 astat = gfc_create_var (gfc_int4_type_node, "astat");
4072 apstat = gfc_build_addr_expr (NULL_TREE, astat);
4073
4074 /* Initialize astat to 0. */
4075 gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
4076 }
4077
4078 for (al = code->ext.alloc_list; al != NULL; al = al->next)
4079 {
4080 expr = al->expr;
4081 gcc_assert (expr->expr_type == EXPR_VARIABLE);
4082
4083 gfc_init_se (&se, NULL);
4084 gfc_start_block (&se.pre);
4085
4086 se.want_pointer = 1;
4087 se.descriptor_only = 1;
4088 gfc_conv_expr (&se, expr);
4089
4090 if (expr->ts.type == BT_DERIVED && expr->ts.derived->attr.alloc_comp)
4091 {
4092 gfc_ref *ref;
4093 gfc_ref *last = NULL;
4094 for (ref = expr->ref; ref; ref = ref->next)
4095 if (ref->type == REF_COMPONENT)
4096 last = ref;
4097
4098 /* Do not deallocate the components of a derived type
4099 ultimate pointer component. */
4100 if (!(last && last->u.c.component->attr.pointer)
4101 && !(!last && expr->symtree->n.sym->attr.pointer))
4102 {
4103 tmp = gfc_deallocate_alloc_comp (expr->ts.derived, se.expr,
4104 expr->rank);
4105 gfc_add_expr_to_block (&se.pre, tmp);
4106 }
4107 }
4108
4109 if (expr->rank)
4110 tmp = gfc_array_deallocate (se.expr, pstat, expr);
4111 else
4112 {
4113 tmp = gfc_deallocate_with_status (se.expr, pstat, false, expr);
4114 gfc_add_expr_to_block (&se.pre, tmp);
4115
4116 tmp = fold_build2 (MODIFY_EXPR, void_type_node,
4117 se.expr, build_int_cst (TREE_TYPE (se.expr), 0));
4118 }
4119
4120 gfc_add_expr_to_block (&se.pre, tmp);
4121
4122 /* Keep track of the number of failed deallocations by adding the
4123 STAT of the last deallocation to the running total. */
4124 if (code->expr || code->expr2)
4125 {
4126 apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
4127 gfc_add_modify (&se.pre, astat, apstat);
4128 }
4129
4130 tmp = gfc_finish_block (&se.pre);
4131 gfc_add_expr_to_block (&block, tmp);
4132
4133 }
4134
4135 /* Set STAT. */
4136 if (code->expr)
4137 {
4138 gfc_init_se (&se, NULL);
4139 gfc_conv_expr_lhs (&se, code->expr);
4140 tmp = convert (TREE_TYPE (se.expr), astat);
4141 gfc_add_modify (&block, se.expr, tmp);
4142 }
4143
4144 /* Set ERRMSG. */
4145 if (code->expr2)
4146 {
4147 /* A better error message may be possible, but not required. */
4148 const char *msg = "Attempt to deallocate an unallocated object";
4149 tree errmsg, slen, dlen;
4150
4151 gfc_init_se (&se, NULL);
4152 gfc_conv_expr_lhs (&se, code->expr2);
4153
4154 errmsg = gfc_create_var (pchar_type_node, "ERRMSG");
4155
4156 gfc_add_modify (&block, errmsg,
4157 gfc_build_addr_expr (pchar_type_node,
4158 gfc_build_localized_cstring_const (msg)));
4159
4160 slen = build_int_cst (gfc_charlen_type_node, ((int) strlen (msg)));
4161 dlen = gfc_get_expr_charlen (code->expr2);
4162 slen = fold_build2 (MIN_EXPR, TREE_TYPE (slen), dlen, slen);
4163
4164 dlen = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3,
4165 gfc_build_addr_expr (pvoid_type_node, se.expr), errmsg, slen);
4166
4167 tmp = fold_build2 (NE_EXPR, boolean_type_node, astat,
4168 build_int_cst (TREE_TYPE (astat), 0));
4169
4170 tmp = build3_v (COND_EXPR, tmp, dlen, build_empty_stmt ());
4171
4172 gfc_add_expr_to_block (&block, tmp);
4173 }
4174
4175 return gfc_finish_block (&block);
4176 }
4177