1 /* Array translation routines
2 Copyright (C) 2002-2016 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* trans-array.c-- Various array related code, including scalarization,
23 allocation, initialization and other support routines. */
24
25 /* How the scalarizer works.
26 In gfortran, array expressions use the same core routines as scalar
27 expressions.
28 First, a Scalarization State (SS) chain is built. This is done by walking
29 the expression tree, and building a linear list of the terms in the
30 expression. As the tree is walked, scalar subexpressions are translated.
31
32 The scalarization parameters are stored in a gfc_loopinfo structure.
33 First the start and stride of each term is calculated by
34 gfc_conv_ss_startstride. During this process the expressions for the array
35 descriptors and data pointers are also translated.
36
37 If the expression is an assignment, we must then resolve any dependencies.
38 In Fortran all the rhs values of an assignment must be evaluated before
39 any assignments take place. This can require a temporary array to store the
40 values. We also require a temporary when we are passing array expressions
41 or vector subscripts as procedure parameters.
42
43 Array sections are passed without copying to a temporary. These use the
44 scalarizer to determine the shape of the section. The flag
45 loop->array_parameter tells the scalarizer that the actual values and loop
46 variables will not be required.
47
48 The function gfc_conv_loop_setup generates the scalarization setup code.
49 It determines the range of the scalarizing loop variables. If a temporary
50 is required, this is created and initialized. Code for scalar expressions
51 taken outside the loop is also generated at this time. Next the offset and
52 scaling required to translate from loop variables to array indices for each
53 term is calculated.
54
55 A call to gfc_start_scalarized_body marks the start of the scalarized
56 expression. This creates a scope and declares the loop variables. Before
57 calling this, gfc_mark_ss_chain_used must be used to indicate which terms
58 will be used inside this loop.
59
60 The scalar gfc_conv_* functions are then used to build the main body of the
61 scalarization loop. Scalarization loop variables and precalculated scalar
62 values are automatically substituted. Note that gfc_advance_se_ss_chain
63 must be used, rather than changing the se->ss directly.
64
65 For assignment expressions requiring a temporary, two sub-loops are
66 generated. The first stores the result of the expression in the temporary,
67 the second copies it to the result. A call to
68 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
69 the start of the copying loop. The temporary may be less than full rank.
70
71 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
72 loops. The loops are added to the pre chain of the loopinfo. The post
73 chain may still contain cleanup code.
74
75 After the loop code has been added into its parent scope gfc_cleanup_loop
76 is called to free all the SS allocated by the scalarizer. */
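
/* As a rough illustration only (not the exact trees the scalarizer builds),
   an array assignment such as

       a(2:n) = b(1:n-1) + 1.0

   ends up, after scalarization, as a loop of the general shape

       for (S = 0; S <= n - 2; S++)
         a.data[a.offset + (S + 2) * a_stride]
           = b.data[b.offset + (S + 1) * b_stride] + 1.0;

   where the SS chain records, for each term, the descriptor and the
   start/stride information needed to map the zero-based loop variable S
   back to real array indices.  The names a_stride and b_stride here are
   just placeholders for the descriptor stride fields.  */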
77
78 #include "config.h"
79 #include "system.h"
80 #include "coretypes.h"
81 #include "options.h"
82 #include "tree.h"
83 #include "gfortran.h"
84 #include "gimple-expr.h"
85 #include "trans.h"
86 #include "fold-const.h"
87 #include "constructor.h"
88 #include "trans-types.h"
89 #include "trans-array.h"
90 #include "trans-const.h"
91 #include "dependency.h"
92
93 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
94
95 /* The contents of this structure aren't actually used, just the address. */
96 static gfc_ss gfc_ss_terminator_var;
97 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
98
99
100 static tree
101 gfc_array_dataptr_type (tree desc)
102 {
103 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
104 }
105
106
107 /* Build expressions to access the members of an array descriptor.
108 It's surprisingly easy to mess up here, so never access
109 an array descriptor by "brute force"; always use these
110 functions. This also avoids problems if we change the format
111 of an array descriptor.
112
113 To understand these magic numbers, look at the comments
114 before gfc_build_array_type() in trans-types.c.
115
116 The code within these defines should be the only code which knows the format
117 of an array descriptor.
118
119 Any code that just needs to obtain the bounds of an array should use
120 gfc_conv_array_* rather than the following functions, as these will return
121 known constant values, and work with arrays which do not have descriptors.
122
123 Don't forget to #undef these! */
124
125 #define DATA_FIELD 0
126 #define OFFSET_FIELD 1
127 #define DTYPE_FIELD 2
128 #define DIMENSION_FIELD 3
129 #define CAF_TOKEN_FIELD 4
130
131 #define STRIDE_SUBFIELD 0
132 #define LBOUND_SUBFIELD 1
133 #define UBOUND_SUBFIELD 2
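
/* For orientation only: with the field indices above, the descriptor type
   built by gfc_build_array_type () in trans-types.c corresponds roughly to
   the following C sketch (the real type is constructed as GENERIC trees and
   the exact integer types are target-dependent):

       typedef struct descriptor_dimension
       {
         index_type stride;                   -- STRIDE_SUBFIELD
         index_type lbound;                   -- LBOUND_SUBFIELD
         index_type ubound;                   -- UBOUND_SUBFIELD
       } descriptor_dimension;

       struct gfc_array_descriptor
       {
         void *data;                          -- DATA_FIELD
         index_type offset;                   -- OFFSET_FIELD
         index_type dtype;                    -- DTYPE_FIELD
         descriptor_dimension dim[rank];      -- DIMENSION_FIELD
         void *caf_token;                     -- CAF_TOKEN_FIELD, -fcoarray=lib only
       };  */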
134
135 /* This provides READ-ONLY access to the data field. The field itself
136 doesn't have the proper type. */
137
138 tree
139 gfc_conv_descriptor_data_get (tree desc)
140 {
141 tree field, type, t;
142
143 type = TREE_TYPE (desc);
144 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
145
146 field = TYPE_FIELDS (type);
147 gcc_assert (DATA_FIELD == 0);
148
149 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
150 field, NULL_TREE);
151 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
152
153 return t;
154 }
155
156 /* This provides WRITE access to the data field.
157
158 VALUE is converted to the type of the data field before being stored,
159 so callers do not need to know the exact pointer type used inside the
160 descriptor.
161 
162 See also gfc_conv_descriptor_data_get and gfc_conv_descriptor_data_addr. */
163
164 void
165 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
166 {
167 tree field, type, t;
168
169 type = TREE_TYPE (desc);
170 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
171
172 field = TYPE_FIELDS (type);
173 gcc_assert (DATA_FIELD == 0);
174
175 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
176 field, NULL_TREE);
177 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
178 }
179
180
181 /* This provides access to the address of the data field. It should only be
182 used by array allocation code, which passes the address on to the runtime. */
183
184 tree
185 gfc_conv_descriptor_data_addr (tree desc)
186 {
187 tree field, type, t;
188
189 type = TREE_TYPE (desc);
190 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
191
192 field = TYPE_FIELDS (type);
193 gcc_assert (DATA_FIELD == 0);
194
195 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
196 field, NULL_TREE);
197 return gfc_build_addr_expr (NULL_TREE, t);
198 }
199
200 static tree
201 gfc_conv_descriptor_offset (tree desc)
202 {
203 tree type;
204 tree field;
205
206 type = TREE_TYPE (desc);
207 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
208
209 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
210 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
211
212 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
213 desc, field, NULL_TREE);
214 }
215
216 tree
217 gfc_conv_descriptor_offset_get (tree desc)
218 {
219 return gfc_conv_descriptor_offset (desc);
220 }
221
222 void
223 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
224 tree value)
225 {
226 tree t = gfc_conv_descriptor_offset (desc);
227 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
228 }
229
230
231 tree
232 gfc_conv_descriptor_dtype (tree desc)
233 {
234 tree field;
235 tree type;
236
237 type = TREE_TYPE (desc);
238 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
239
240 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
241 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
242
243 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
244 desc, field, NULL_TREE);
245 }
246
247
248 tree
249 gfc_conv_descriptor_rank (tree desc)
250 {
251 tree tmp;
252 tree dtype;
253
254 dtype = gfc_conv_descriptor_dtype (desc);
255 tmp = build_int_cst (TREE_TYPE (dtype), GFC_DTYPE_RANK_MASK);
256 tmp = fold_build2_loc (input_location, BIT_AND_EXPR, TREE_TYPE (dtype),
257 dtype, tmp);
258 return fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp);
259 }
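
/* The rank extraction above relies on the dtype encoding used by this
   descriptor format, roughly

       dtype = rank
               | (type << GFC_DTYPE_TYPE_SHIFT)
               | (elem_size << GFC_DTYPE_SIZE_SHIFT)

   so masking with GFC_DTYPE_RANK_MASK recovers the rank.  This is only a
   sketch; see libgfortran.h for the authoritative definitions.  */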
260
261
262 tree
263 gfc_get_descriptor_dimension (tree desc)
264 {
265 tree type, field;
266
267 type = TREE_TYPE (desc);
268 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
269
270 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
271 gcc_assert (field != NULL_TREE
272 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
273 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
274
275 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
276 desc, field, NULL_TREE);
277 }
278
279
280 static tree
281 gfc_conv_descriptor_dimension (tree desc, tree dim)
282 {
283 tree tmp;
284
285 tmp = gfc_get_descriptor_dimension (desc);
286
287 return gfc_build_array_ref (tmp, dim, NULL);
288 }
289
290
291 tree
292 gfc_conv_descriptor_token (tree desc)
293 {
294 tree type;
295 tree field;
296
297 type = TREE_TYPE (desc);
298 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
299 gcc_assert (flag_coarray == GFC_FCOARRAY_LIB);
300 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
301
302 /* Should be a restricted pointer - except in the finalization wrapper. */
303 gcc_assert (field != NULL_TREE
304 && (TREE_TYPE (field) == prvoid_type_node
305 || TREE_TYPE (field) == pvoid_type_node));
306
307 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
308 desc, field, NULL_TREE);
309 }
310
311
312 static tree
313 gfc_conv_descriptor_stride (tree desc, tree dim)
314 {
315 tree tmp;
316 tree field;
317
318 tmp = gfc_conv_descriptor_dimension (desc, dim);
319 field = TYPE_FIELDS (TREE_TYPE (tmp));
320 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
321 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
322
323 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
324 tmp, field, NULL_TREE);
325 return tmp;
326 }
327
328 tree
329 gfc_conv_descriptor_stride_get (tree desc, tree dim)
330 {
331 tree type = TREE_TYPE (desc);
332 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
333 if (integer_zerop (dim)
334 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
335 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
336 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_RANK_CONT
337 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
338 return gfc_index_one_node;
339
340 return gfc_conv_descriptor_stride (desc, dim);
341 }
342
343 void
344 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
345 tree dim, tree value)
346 {
347 tree t = gfc_conv_descriptor_stride (desc, dim);
348 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
349 }
350
351 static tree
352 gfc_conv_descriptor_lbound (tree desc, tree dim)
353 {
354 tree tmp;
355 tree field;
356
357 tmp = gfc_conv_descriptor_dimension (desc, dim);
358 field = TYPE_FIELDS (TREE_TYPE (tmp));
359 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
360 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
361
362 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
363 tmp, field, NULL_TREE);
364 return tmp;
365 }
366
367 tree
368 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
369 {
370 return gfc_conv_descriptor_lbound (desc, dim);
371 }
372
373 void
374 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
375 tree dim, tree value)
376 {
377 tree t = gfc_conv_descriptor_lbound (desc, dim);
378 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
379 }
380
381 static tree
382 gfc_conv_descriptor_ubound (tree desc, tree dim)
383 {
384 tree tmp;
385 tree field;
386
387 tmp = gfc_conv_descriptor_dimension (desc, dim);
388 field = TYPE_FIELDS (TREE_TYPE (tmp));
389 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
390 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
391
392 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
393 tmp, field, NULL_TREE);
394 return tmp;
395 }
396
397 tree
398 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
399 {
400 return gfc_conv_descriptor_ubound (desc, dim);
401 }
402
403 void
404 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
405 tree dim, tree value)
406 {
407 tree t = gfc_conv_descriptor_ubound (desc, dim);
408 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
409 }
410
411 /* Build a null array descriptor constructor. */
412
413 tree
414 gfc_build_null_descriptor (tree type)
415 {
416 tree field;
417 tree tmp;
418
419 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
420 gcc_assert (DATA_FIELD == 0);
421 field = TYPE_FIELDS (type);
422
423 /* Set a NULL data pointer. */
424 tmp = build_constructor_single (type, field, null_pointer_node);
425 TREE_CONSTANT (tmp) = 1;
426 /* All other fields are ignored. */
427
428 return tmp;
429 }
430
431
432 /* Modify a descriptor such that the lbound of a given dimension is the value
433 specified. This also updates ubound and offset accordingly. */
434
435 void
436 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
437 int dim, tree new_lbound)
438 {
439 tree offs, ubound, lbound, stride;
440 tree diff, offs_diff;
441
442 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
443
444 offs = gfc_conv_descriptor_offset_get (desc);
445 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
446 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
447 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
448
449 /* Get difference (new - old) by which to shift stuff. */
450 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
451 new_lbound, lbound);
452
453 /* Shift ubound and offset accordingly. This has to be done before
454 updating the lbound, as they depend on the lbound expression! */
455 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
456 ubound, diff);
457 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
458 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
459 diff, stride);
460 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
461 offs, offs_diff);
462 gfc_conv_descriptor_offset_set (block, desc, offs);
463
464 /* Finally set lbound to value we want. */
465 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
466 }
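
/* A worked example of the shift above (illustration only): for a dimension
   with lbound = 1, ubound = 10, stride = 1 and offset = -1, rebasing to
   new_lbound = 0 gives

       diff   = 0 - 1         = -1
       ubound = 10 + (-1)     =  9
       offset = -1 - (-1 * 1) =  0

   so that data[offset + i * stride] still reaches the same elements for the
   shifted index range 0..9.  */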
467
468
469 /* Cleanup those #defines. */
470
471 #undef DATA_FIELD
472 #undef OFFSET_FIELD
473 #undef DTYPE_FIELD
474 #undef DIMENSION_FIELD
475 #undef CAF_TOKEN_FIELD
476 #undef STRIDE_SUBFIELD
477 #undef LBOUND_SUBFIELD
478 #undef UBOUND_SUBFIELD
479
480
481 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
482 flags & 1 = Main loop body.
483 flags & 2 = temp copy loop. */
484
485 void
486 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
487 {
488 for (; ss != gfc_ss_terminator; ss = ss->next)
489 ss->info->useflags = flags;
490 }
491
492
493 /* Free a gfc_ss chain. */
494
495 void
496 gfc_free_ss_chain (gfc_ss * ss)
497 {
498 gfc_ss *next;
499
500 while (ss != gfc_ss_terminator)
501 {
502 gcc_assert (ss != NULL);
503 next = ss->next;
504 gfc_free_ss (ss);
505 ss = next;
506 }
507 }
508
509
510 static void
511 free_ss_info (gfc_ss_info *ss_info)
512 {
513 int n;
514
515 ss_info->refcount--;
516 if (ss_info->refcount > 0)
517 return;
518
519 gcc_assert (ss_info->refcount == 0);
520
521 switch (ss_info->type)
522 {
523 case GFC_SS_SECTION:
524 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
525 if (ss_info->data.array.subscript[n])
526 gfc_free_ss_chain (ss_info->data.array.subscript[n]);
527 break;
528
529 default:
530 break;
531 }
532
533 free (ss_info);
534 }
535
536
537 /* Free a SS. */
538
539 void
540 gfc_free_ss (gfc_ss * ss)
541 {
542 free_ss_info (ss->info);
543 free (ss);
544 }
545
546
547 /* Creates and initializes an array type gfc_ss struct. */
548
549 gfc_ss *
550 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
551 {
552 gfc_ss *ss;
553 gfc_ss_info *ss_info;
554 int i;
555
556 ss_info = gfc_get_ss_info ();
557 ss_info->refcount++;
558 ss_info->type = type;
559 ss_info->expr = expr;
560
561 ss = gfc_get_ss ();
562 ss->info = ss_info;
563 ss->next = next;
564 ss->dimen = dimen;
565 for (i = 0; i < ss->dimen; i++)
566 ss->dim[i] = i;
567
568 return ss;
569 }
570
571
572 /* Creates and initializes a temporary type gfc_ss struct. */
573
574 gfc_ss *
575 gfc_get_temp_ss (tree type, tree string_length, int dimen)
576 {
577 gfc_ss *ss;
578 gfc_ss_info *ss_info;
579 int i;
580
581 ss_info = gfc_get_ss_info ();
582 ss_info->refcount++;
583 ss_info->type = GFC_SS_TEMP;
584 ss_info->string_length = string_length;
585 ss_info->data.temp.type = type;
586
587 ss = gfc_get_ss ();
588 ss->info = ss_info;
589 ss->next = gfc_ss_terminator;
590 ss->dimen = dimen;
591 for (i = 0; i < ss->dimen; i++)
592 ss->dim[i] = i;
593
594 return ss;
595 }
596
597
598 /* Creates and initializes a scalar type gfc_ss struct. */
599
600 gfc_ss *
601 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
602 {
603 gfc_ss *ss;
604 gfc_ss_info *ss_info;
605
606 ss_info = gfc_get_ss_info ();
607 ss_info->refcount++;
608 ss_info->type = GFC_SS_SCALAR;
609 ss_info->expr = expr;
610
611 ss = gfc_get_ss ();
612 ss->info = ss_info;
613 ss->next = next;
614
615 return ss;
616 }
617
618
619 /* Free all the SS associated with a loop. */
620
621 void
622 gfc_cleanup_loop (gfc_loopinfo * loop)
623 {
624 gfc_loopinfo *loop_next, **ploop;
625 gfc_ss *ss;
626 gfc_ss *next;
627
628 ss = loop->ss;
629 while (ss != gfc_ss_terminator)
630 {
631 gcc_assert (ss != NULL);
632 next = ss->loop_chain;
633 gfc_free_ss (ss);
634 ss = next;
635 }
636
637 /* Remove reference to self in the parent loop. */
638 if (loop->parent)
639 for (ploop = &loop->parent->nested; *ploop; ploop = &(*ploop)->next)
640 if (*ploop == loop)
641 {
642 *ploop = loop->next;
643 break;
644 }
645
646 /* Free non-freed nested loops. */
647 for (loop = loop->nested; loop; loop = loop_next)
648 {
649 loop_next = loop->next;
650 gfc_cleanup_loop (loop);
651 free (loop);
652 }
653 }
654
655
656 static void
657 set_ss_loop (gfc_ss *ss, gfc_loopinfo *loop)
658 {
659 int n;
660
661 for (; ss != gfc_ss_terminator; ss = ss->next)
662 {
663 ss->loop = loop;
664
665 if (ss->info->type == GFC_SS_SCALAR
666 || ss->info->type == GFC_SS_REFERENCE
667 || ss->info->type == GFC_SS_TEMP)
668 continue;
669
670 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
671 if (ss->info->data.array.subscript[n] != NULL)
672 set_ss_loop (ss->info->data.array.subscript[n], loop);
673 }
674 }
675
676
677 /* Associate a SS chain with a loop. */
678
679 void
680 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
681 {
682 gfc_ss *ss;
683 gfc_loopinfo *nested_loop;
684
685 if (head == gfc_ss_terminator)
686 return;
687
688 set_ss_loop (head, loop);
689
690 ss = head;
691 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
692 {
693 if (ss->nested_ss)
694 {
695 nested_loop = ss->nested_ss->loop;
696
697 /* More than one ss can belong to the same loop. Hence, we add the
698 loop to the chain only if it is different from the previously
699 added one, to avoid duplicate nested loops. */
700 if (nested_loop != loop->nested)
701 {
702 gcc_assert (nested_loop->parent == NULL);
703 nested_loop->parent = loop;
704
705 gcc_assert (nested_loop->next == NULL);
706 nested_loop->next = loop->nested;
707 loop->nested = nested_loop;
708 }
709 else
710 gcc_assert (nested_loop->parent == loop);
711 }
712
713 if (ss->next == gfc_ss_terminator)
714 ss->loop_chain = loop->ss;
715 else
716 ss->loop_chain = ss->next;
717 }
718 gcc_assert (ss == gfc_ss_terminator);
719 loop->ss = head;
720 }
721
722
723 /* Generate an initializer for a static pointer or allocatable array. */
724
725 void
726 gfc_trans_static_array_pointer (gfc_symbol * sym)
727 {
728 tree type;
729
730 gcc_assert (TREE_STATIC (sym->backend_decl));
731 /* Just zero the data member. */
732 type = TREE_TYPE (sym->backend_decl);
733 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
734 }
735
736
737 /* If the bounds of SE's loop have not yet been set, see if they can be
738 determined from array spec AS, which is the array spec of a called
739 function. MAPPING maps the callee's dummy arguments to the values
740 that the caller is passing. Add any initialization and finalization
741 code to SE. */
742
743 void
744 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
745 gfc_se * se, gfc_array_spec * as)
746 {
747 int n, dim, total_dim;
748 gfc_se tmpse;
749 gfc_ss *ss;
750 tree lower;
751 tree upper;
752 tree tmp;
753
754 total_dim = 0;
755
756 if (!as || as->type != AS_EXPLICIT)
757 return;
758
759 for (ss = se->ss; ss; ss = ss->parent)
760 {
761 total_dim += ss->loop->dimen;
762 for (n = 0; n < ss->loop->dimen; n++)
763 {
764 /* The bound is known, nothing to do. */
765 if (ss->loop->to[n] != NULL_TREE)
766 continue;
767
768 dim = ss->dim[n];
769 gcc_assert (dim < as->rank);
770 gcc_assert (ss->loop->dimen <= as->rank);
771
772 /* Evaluate the lower bound. */
773 gfc_init_se (&tmpse, NULL);
774 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
775 gfc_add_block_to_block (&se->pre, &tmpse.pre);
776 gfc_add_block_to_block (&se->post, &tmpse.post);
777 lower = fold_convert (gfc_array_index_type, tmpse.expr);
778
779 /* ...and the upper bound. */
780 gfc_init_se (&tmpse, NULL);
781 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
782 gfc_add_block_to_block (&se->pre, &tmpse.pre);
783 gfc_add_block_to_block (&se->post, &tmpse.post);
784 upper = fold_convert (gfc_array_index_type, tmpse.expr);
785
786 /* Set the upper bound of the loop to UPPER - LOWER. */
787 tmp = fold_build2_loc (input_location, MINUS_EXPR,
788 gfc_array_index_type, upper, lower);
789 tmp = gfc_evaluate_now (tmp, &se->pre);
790 ss->loop->to[n] = tmp;
791 }
792 }
793
794 gcc_assert (total_dim == as->rank);
795 }
796
797
798 /* Generate code to allocate an array temporary, or create a variable to
799 hold the data. If size is NULL, zero the descriptor so that the
800 callee will allocate the array. If DEALLOC is true, also generate code to
801 free the array afterwards.
802
803 If INITIAL is not NULL, it is packed using internal_pack and the result used
804 as data instead of allocating a fresh, uninitialized area of memory.
805
806 Initialization code is added to PRE and finalization code to POST.
807 DYNAMIC is true if the caller may want to extend the array later
808 using realloc. This prevents us from putting the array on the stack. */
809
810 static void
811 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
812 gfc_array_info * info, tree size, tree nelem,
813 tree initial, bool dynamic, bool dealloc)
814 {
815 tree tmp;
816 tree desc;
817 bool onstack;
818
819 desc = info->descriptor;
820 info->offset = gfc_index_zero_node;
821 if (size == NULL_TREE || integer_zerop (size))
822 {
823 /* A callee allocated array. */
824 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
825 onstack = FALSE;
826 }
827 else
828 {
829 /* Allocate the temporary. */
830 onstack = !dynamic && initial == NULL_TREE
831 && (flag_stack_arrays
832 || gfc_can_put_var_on_stack (size));
833
834 if (onstack)
835 {
836 /* Make a temporary variable to hold the data. */
837 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
838 nelem, gfc_index_one_node);
839 tmp = gfc_evaluate_now (tmp, pre);
840 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
841 tmp);
842 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
843 tmp);
844 tmp = gfc_create_var (tmp, "A");
845 /* If we're here only because of -fstack-arrays we have to
846 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
847 if (!gfc_can_put_var_on_stack (size))
848 gfc_add_expr_to_block (pre,
849 fold_build1_loc (input_location,
850 DECL_EXPR, TREE_TYPE (tmp),
851 tmp));
852 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
853 gfc_conv_descriptor_data_set (pre, desc, tmp);
854 }
855 else
856 {
857 /* Allocate memory to hold the data or call internal_pack. */
858 if (initial == NULL_TREE)
859 {
860 tmp = gfc_call_malloc (pre, NULL, size);
861 tmp = gfc_evaluate_now (tmp, pre);
862 }
863 else
864 {
865 tree packed;
866 tree source_data;
867 tree was_packed;
868 stmtblock_t do_copying;
869
870 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
871 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
872 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
873 tmp = gfc_get_element_type (tmp);
874 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
875 packed = gfc_create_var (build_pointer_type (tmp), "data");
876
877 tmp = build_call_expr_loc (input_location,
878 gfor_fndecl_in_pack, 1, initial);
879 tmp = fold_convert (TREE_TYPE (packed), tmp);
880 gfc_add_modify (pre, packed, tmp);
881
882 tmp = build_fold_indirect_ref_loc (input_location,
883 initial);
884 source_data = gfc_conv_descriptor_data_get (tmp);
885
886 /* internal_pack may return source->data without any allocation
887 or copying if it is already packed. If that's the case, we
888 need to allocate and copy manually. */
889
890 gfc_start_block (&do_copying);
891 tmp = gfc_call_malloc (&do_copying, NULL, size);
892 tmp = fold_convert (TREE_TYPE (packed), tmp);
893 gfc_add_modify (&do_copying, packed, tmp);
894 tmp = gfc_build_memcpy_call (packed, source_data, size);
895 gfc_add_expr_to_block (&do_copying, tmp);
896
897 was_packed = fold_build2_loc (input_location, EQ_EXPR,
898 boolean_type_node, packed,
899 source_data);
900 tmp = gfc_finish_block (&do_copying);
901 tmp = build3_v (COND_EXPR, was_packed, tmp,
902 build_empty_stmt (input_location));
903 gfc_add_expr_to_block (pre, tmp);
904
905 tmp = fold_convert (pvoid_type_node, packed);
906 }
907
908 gfc_conv_descriptor_data_set (pre, desc, tmp);
909 }
910 }
911 info->data = gfc_conv_descriptor_data_get (desc);
912
913 /* The offset is zero because we create temporaries with a zero
914 lower bound. */
915 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
916
917 if (dealloc && !onstack)
918 {
919 /* Free the temporary. */
920 tmp = gfc_conv_descriptor_data_get (desc);
921 tmp = gfc_call_free (tmp);
922 gfc_add_expr_to_block (post, tmp);
923 }
924 }
925
926
927 /* Get the scalarizer array dimension corresponding to actual array dimension
928 given by ARRAY_DIM.
929
930 For example, if SS represents the array ref a(1,:,:,1), it is a
931 bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
932 and 1 for ARRAY_DIM=2.
933 If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
934 scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
935 ARRAY_DIM=3.
936 If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
937 array. If called on the inner ss, the result would be respectively 0,1,2 for
938 ARRAY_DIM=0,1,2. If called on the outer ss, the result would be 0,1
939 for ARRAY_DIM=1,2. */
940
941 static int
942 get_scalarizer_dim_for_array_dim (gfc_ss *ss, int array_dim)
943 {
944 int array_ref_dim;
945 int n;
946
947 array_ref_dim = 0;
948
949 for (; ss; ss = ss->parent)
950 for (n = 0; n < ss->dimen; n++)
951 if (ss->dim[n] < array_dim)
952 array_ref_dim++;
953
954 return array_ref_dim;
955 }
956
957
958 static gfc_ss *
959 innermost_ss (gfc_ss *ss)
960 {
961 while (ss->nested_ss != NULL)
962 ss = ss->nested_ss;
963
964 return ss;
965 }
966
967
968
969 /* Get the array reference dimension corresponding to the given loop dimension.
970 It is different from the true array dimension given by the dim array in
971 the case of a partial array reference (e.g. a(:,:,1,:)), and it is
972 different from the loop dimension in the case of a transposed array.
973 */
974
975 static int
976 get_array_ref_dim_for_loop_dim (gfc_ss *ss, int loop_dim)
977 {
978 return get_scalarizer_dim_for_array_dim (innermost_ss (ss),
979 ss->dim[loop_dim]);
980 }
981
982
983 /* Generate code to create and initialize the descriptor for a temporary
984 array. This is used for both temporaries needed by the scalarizer, and
985 functions returning arrays. Adjusts the loop variables to be
986 zero-based, and calculates the loop bounds for callee allocated arrays.
987 Allocate the array unless it's callee allocated (we have a callee
988 allocated array if 'callee_alloc' is true, or if loop->to[n] is
989 NULL_TREE for any n). Also fills in the descriptor, data and offset
990 fields of info if known. Returns the size of the array, or NULL for a
991 callee allocated array.
992
993 'eltype' == NULL signals that the temporary should be a class object.
994 The 'initial' expression is used to obtain the size of the dynamic
995 type; otherwise the allocation and initialization proceeds as for any
996 other expression.
997
998 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
999 gfc_trans_allocate_array_storage. */
1000
1001 tree
1002 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post, gfc_ss * ss,
1003 tree eltype, tree initial, bool dynamic,
1004 bool dealloc, bool callee_alloc, locus * where)
1005 {
1006 gfc_loopinfo *loop;
1007 gfc_ss *s;
1008 gfc_array_info *info;
1009 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
1010 tree type;
1011 tree desc;
1012 tree tmp;
1013 tree size;
1014 tree nelem;
1015 tree cond;
1016 tree or_expr;
1017 tree class_expr = NULL_TREE;
1018 int n, dim, tmp_dim;
1019 int total_dim = 0;
1020
1021 /* This signals a class array for which we need the size of the
1022 dynamic type. Generate an eltype and then the class expression. */
1023 if (eltype == NULL_TREE && initial)
1024 {
1025 gcc_assert (POINTER_TYPE_P (TREE_TYPE (initial)));
1026 class_expr = build_fold_indirect_ref_loc (input_location, initial);
1027 eltype = TREE_TYPE (class_expr);
1028 eltype = gfc_get_element_type (eltype);
1029 /* Obtain the structure (class) expression. */
1030 class_expr = TREE_OPERAND (class_expr, 0);
1031 gcc_assert (class_expr);
1032 }
1033
1034 memset (from, 0, sizeof (from));
1035 memset (to, 0, sizeof (to));
1036
1037 info = &ss->info->data.array;
1038
1039 gcc_assert (ss->dimen > 0);
1040 gcc_assert (ss->loop->dimen == ss->dimen);
1041
1042 if (warn_array_temporaries && where)
1043 gfc_warning (OPT_Warray_temporaries,
1044 "Creating array temporary at %L", where);
1045
1046 /* Set the lower bound to zero. */
1047 for (s = ss; s; s = s->parent)
1048 {
1049 loop = s->loop;
1050
1051 total_dim += loop->dimen;
1052 for (n = 0; n < loop->dimen; n++)
1053 {
1054 dim = s->dim[n];
1055
1056 /* Callee allocated arrays may not have a known bound yet. */
1057 if (loop->to[n])
1058 loop->to[n] = gfc_evaluate_now (
1059 fold_build2_loc (input_location, MINUS_EXPR,
1060 gfc_array_index_type,
1061 loop->to[n], loop->from[n]),
1062 pre);
1063 loop->from[n] = gfc_index_zero_node;
1064
1065 /* We have just changed the loop bounds; we must clear the
1066 corresponding specloop so that the delta calculation is not skipped
1067 later in gfc_set_delta. */
1068 loop->specloop[n] = NULL;
1069
1070 /* We are constructing the temporary's descriptor based on the loop
1071 dimensions. As the dimensions may be accessed in arbitrary order
1072 (think of transpose) the size taken from the n'th loop may not map
1073 to the n'th dimension of the array. We need to reconstruct loop
1074 infos in the right order before using it to set the descriptor
1075 bounds. */
1076 tmp_dim = get_scalarizer_dim_for_array_dim (ss, dim);
1077 from[tmp_dim] = loop->from[n];
1078 to[tmp_dim] = loop->to[n];
1079
1080 info->delta[dim] = gfc_index_zero_node;
1081 info->start[dim] = gfc_index_zero_node;
1082 info->end[dim] = gfc_index_zero_node;
1083 info->stride[dim] = gfc_index_one_node;
1084 }
1085 }
1086
1087 /* Initialize the descriptor. */
1088 type =
1089 gfc_get_array_type_bounds (eltype, total_dim, 0, from, to, 1,
1090 GFC_ARRAY_UNKNOWN, true);
1091 desc = gfc_create_var (type, "atmp");
1092 GFC_DECL_PACKED_ARRAY (desc) = 1;
1093
1094 info->descriptor = desc;
1095 size = gfc_index_one_node;
1096
1097 /* Fill in the array dtype. */
1098 tmp = gfc_conv_descriptor_dtype (desc);
1099 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
1100
1101 /*
1102 Fill in the bounds and stride. This is a packed array, so:
1103
1104 size = 1;
1105 for (n = 0; n < rank; n++)
1106 {
1107 stride[n] = size;
1108 delta = ubound[n] + 1 - lbound[n];
1109 size = size * delta;
1110 }
1111 size = size * sizeof(element);
1112 */
1113
1114 or_expr = NULL_TREE;
1115
1116 /* If there is at least one null loop->to[n], it is a callee allocated
1117 array. */
1118 for (n = 0; n < total_dim; n++)
1119 if (to[n] == NULL_TREE)
1120 {
1121 size = NULL_TREE;
1122 break;
1123 }
1124
1125 if (size == NULL_TREE)
1126 for (s = ss; s; s = s->parent)
1127 for (n = 0; n < s->loop->dimen; n++)
1128 {
1129 dim = get_scalarizer_dim_for_array_dim (ss, s->dim[n]);
1130
1131 /* For a callee allocated array express the loop bounds in terms
1132 of the descriptor fields. */
1133 tmp = fold_build2_loc (input_location,
1134 MINUS_EXPR, gfc_array_index_type,
1135 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
1136 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
1137 s->loop->to[n] = tmp;
1138 }
1139 else
1140 {
1141 for (n = 0; n < total_dim; n++)
1142 {
1143 /* Store the stride and bound components in the descriptor. */
1144 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
1145
1146 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
1147 gfc_index_zero_node);
1148
1149 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], to[n]);
1150
1151 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1152 gfc_array_index_type,
1153 to[n], gfc_index_one_node);
1154
1155 /* Check whether the size for this dimension is negative. */
1156 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1157 tmp, gfc_index_zero_node);
1158 cond = gfc_evaluate_now (cond, pre);
1159
1160 if (n == 0)
1161 or_expr = cond;
1162 else
1163 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
1164 boolean_type_node, or_expr, cond);
1165
1166 size = fold_build2_loc (input_location, MULT_EXPR,
1167 gfc_array_index_type, size, tmp);
1168 size = gfc_evaluate_now (size, pre);
1169 }
1170 }
1171
1172 /* Get the size of the array. */
1173 if (size && !callee_alloc)
1174 {
1175 tree elemsize;
1176 /* If or_expr is true, then the extent in at least one
1177 dimension is zero and the size is set to zero. */
1178 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
1179 or_expr, gfc_index_zero_node, size);
1180
1181 nelem = size;
1182 if (class_expr == NULL_TREE)
1183 elemsize = fold_convert (gfc_array_index_type,
1184 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
1185 else
1186 elemsize = gfc_class_vtab_size_get (class_expr);
1187
1188 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1189 size, elemsize);
1190 }
1191 else
1192 {
1193 nelem = size;
1194 size = NULL_TREE;
1195 }
1196
1197 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
1198 dynamic, dealloc);
1199
1200 while (ss->parent)
1201 ss = ss->parent;
1202
1203 if (ss->dimen > ss->loop->temp_dim)
1204 ss->loop->temp_dim = ss->dimen;
1205
1206 return size;
1207 }
1208
1209
1210 /* Return the number of iterations in a loop that starts at START,
1211 ends at END, and has step STEP. */
1212
1213 static tree
1214 gfc_get_iteration_count (tree start, tree end, tree step)
1215 {
1216 tree tmp;
1217 tree type;
1218
1219 type = TREE_TYPE (step);
1220 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1221 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1222 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1223 build_int_cst (type, 1));
1224 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1225 build_int_cst (type, 0));
1226 return fold_convert (gfc_array_index_type, tmp);
1227 }
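
/* In other words (illustration only):

       count = MAX ((end - start) / step + 1, 0)    -- with floor division

   e.g. start = 1, end = 10, step = 3 gives (10 - 1) / 3 + 1 = 4 iterations
   (1, 4, 7, 10), while bounds that are already crossed give 0.  */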
1228
1229
1230 /* Extend the data in array DESC by EXTRA elements. */
1231
1232 static void
1233 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
1234 {
1235 tree arg0, arg1;
1236 tree tmp;
1237 tree size;
1238 tree ubound;
1239
1240 if (integer_zerop (extra))
1241 return;
1242
1243 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1244
1245 /* Add EXTRA to the upper bound. */
1246 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1247 ubound, extra);
1248 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1249
1250 /* Get the value of the current data pointer. */
1251 arg0 = gfc_conv_descriptor_data_get (desc);
1252
1253 /* Calculate the new array size. */
1254 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
1255 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1256 ubound, gfc_index_one_node);
1257 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1258 fold_convert (size_type_node, tmp),
1259 fold_convert (size_type_node, size));
1260
1261 /* Call the realloc() function. */
1262 tmp = gfc_call_realloc (pblock, arg0, arg1);
1263 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1264 }
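
/* Because these temporaries are created with a zero lower bound, the byte
   count passed to realloc above is simply

       (new_ubound + 1) * sizeof (element)

   i.e. the full size of the grown array, not just the EXTRA elements.  */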
1265
1266
1267 /* Return true if the bounds of iterator I can only be determined
1268 at run time. */
1269
1270 static inline bool
1271 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1272 {
1273 return (i->start->expr_type != EXPR_CONSTANT
1274 || i->end->expr_type != EXPR_CONSTANT
1275 || i->step->expr_type != EXPR_CONSTANT);
1276 }
1277
1278
1279 /* Split the size of constructor element EXPR into the sum of two terms,
1280 one of which can be determined at compile time and one of which must
1281 be calculated at run time. Set *SIZE to the former and return true
1282 if the latter might be nonzero. */
1283
1284 static bool
1285 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1286 {
1287 if (expr->expr_type == EXPR_ARRAY)
1288 return gfc_get_array_constructor_size (size, expr->value.constructor);
1289 else if (expr->rank > 0)
1290 {
1291 /* Calculate everything at run time. */
1292 mpz_set_ui (*size, 0);
1293 return true;
1294 }
1295 else
1296 {
1297 /* A single element. */
1298 mpz_set_ui (*size, 1);
1299 return false;
1300 }
1301 }
1302
1303
1304 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1305 of array constructor C. */
1306
1307 static bool
1308 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1309 {
1310 gfc_constructor *c;
1311 gfc_iterator *i;
1312 mpz_t val;
1313 mpz_t len;
1314 bool dynamic;
1315
1316 mpz_set_ui (*size, 0);
1317 mpz_init (len);
1318 mpz_init (val);
1319
1320 dynamic = false;
1321 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1322 {
1323 i = c->iterator;
1324 if (i && gfc_iterator_has_dynamic_bounds (i))
1325 dynamic = true;
1326 else
1327 {
1328 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1329 if (i)
1330 {
1331 /* Multiply the static part of the element size by the
1332 number of iterations. */
1333 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1334 mpz_fdiv_q (val, val, i->step->value.integer);
1335 mpz_add_ui (val, val, 1);
1336 if (mpz_sgn (val) > 0)
1337 mpz_mul (len, len, val);
1338 else
1339 mpz_set_ui (len, 0);
1340 }
1341 mpz_add (*size, *size, len);
1342 }
1343 }
1344 mpz_clear (len);
1345 mpz_clear (val);
1346 return dynamic;
1347 }
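
/* An illustration (hypothetical constructor, not from a testcase): for

       (/ 1, 2, 3, (0, i = 1, n) /)

   the three leading constants contribute a compile-time size of 3, while
   the implied-do has non-constant bounds, so *SIZE is set to 3 and the
   function returns true: the rest must be counted and allocated at run
   time (see gfc_grow_array above).  */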
1348
1349
1350 /* Make sure offset is a variable. */
1351
1352 static void
1353 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1354 tree * offsetvar)
1355 {
1356 /* We should have already created the offset variable. We cannot
1357 create it here because we may be in an inner scope. */
1358 gcc_assert (*offsetvar != NULL_TREE);
1359 gfc_add_modify (pblock, *offsetvar, *poffset);
1360 *poffset = *offsetvar;
1361 TREE_USED (*offsetvar) = 1;
1362 }
1363
1364
1365 /* Variables needed for bounds-checking. */
1366 static bool first_len;
1367 static tree first_len_val;
1368 static bool typespec_chararray_ctor;
1369
1370 static void
1371 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1372 tree offset, gfc_se * se, gfc_expr * expr)
1373 {
1374 tree tmp;
1375
1376 gfc_conv_expr (se, expr);
1377
1378 /* Store the value. */
1379 tmp = build_fold_indirect_ref_loc (input_location,
1380 gfc_conv_descriptor_data_get (desc));
1381 tmp = gfc_build_array_ref (tmp, offset, NULL);
1382
1383 if (expr->ts.type == BT_CHARACTER)
1384 {
1385 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
1386 tree esize;
1387
1388 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1389 esize = fold_convert (gfc_charlen_type_node, esize);
1390 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1391 gfc_charlen_type_node, esize,
1392 build_int_cst (gfc_charlen_type_node,
1393 gfc_character_kinds[i].bit_size / 8));
1394
1395 gfc_conv_string_parameter (se);
1396 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1397 {
1398 /* The temporary is an array of pointers. */
1399 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1400 gfc_add_modify (&se->pre, tmp, se->expr);
1401 }
1402 else
1403 {
1404 /* The temporary is an array of string values. */
1405 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1406 /* We know the temporary and the value will be the same length,
1407 so we can use memcpy. */
1408 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1409 se->string_length, se->expr, expr->ts.kind);
1410 }
1411 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1412 {
1413 if (first_len)
1414 {
1415 gfc_add_modify (&se->pre, first_len_val,
1416 se->string_length);
1417 first_len = false;
1418 }
1419 else
1420 {
1421 /* Verify that all constructor elements are of the same
1422 length. */
1423 tree cond = fold_build2_loc (input_location, NE_EXPR,
1424 boolean_type_node, first_len_val,
1425 se->string_length);
1426 gfc_trans_runtime_check
1427 (true, false, cond, &se->pre, &expr->where,
1428 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1429 fold_convert (long_integer_type_node, first_len_val),
1430 fold_convert (long_integer_type_node, se->string_length));
1431 }
1432 }
1433 }
1434 else
1435 {
1436 /* TODO: Should the frontend already have done this conversion? */
1437 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1438 gfc_add_modify (&se->pre, tmp, se->expr);
1439 }
1440
1441 gfc_add_block_to_block (pblock, &se->pre);
1442 gfc_add_block_to_block (pblock, &se->post);
1443 }
1444
1445
1446 /* Add the contents of an array to the constructor. DYNAMIC is as for
1447 gfc_trans_array_constructor_value. */
1448
1449 static void
1450 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1451 tree type ATTRIBUTE_UNUSED,
1452 tree desc, gfc_expr * expr,
1453 tree * poffset, tree * offsetvar,
1454 bool dynamic)
1455 {
1456 gfc_se se;
1457 gfc_ss *ss;
1458 gfc_loopinfo loop;
1459 stmtblock_t body;
1460 tree tmp;
1461 tree size;
1462 int n;
1463
1464 /* We need this to be a variable so we can increment it. */
1465 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1466
1467 gfc_init_se (&se, NULL);
1468
1469 /* Walk the array expression. */
1470 ss = gfc_walk_expr (expr);
1471 gcc_assert (ss != gfc_ss_terminator);
1472
1473 /* Initialize the scalarizer. */
1474 gfc_init_loopinfo (&loop);
1475 gfc_add_ss_to_loop (&loop, ss);
1476
1477 /* Initialize the loop. */
1478 gfc_conv_ss_startstride (&loop);
1479 gfc_conv_loop_setup (&loop, &expr->where);
1480
1481 /* Make sure the constructed array has room for the new data. */
1482 if (dynamic)
1483 {
1484 /* Set SIZE to the total number of elements in the subarray. */
1485 size = gfc_index_one_node;
1486 for (n = 0; n < loop.dimen; n++)
1487 {
1488 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1489 gfc_index_one_node);
1490 size = fold_build2_loc (input_location, MULT_EXPR,
1491 gfc_array_index_type, size, tmp);
1492 }
1493
1494 /* Grow the constructed array by SIZE elements. */
1495 gfc_grow_array (&loop.pre, desc, size);
1496 }
1497
1498 /* Make the loop body. */
1499 gfc_mark_ss_chain_used (ss, 1);
1500 gfc_start_scalarized_body (&loop, &body);
1501 gfc_copy_loopinfo_to_se (&se, &loop);
1502 se.ss = ss;
1503
1504 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1505 gcc_assert (se.ss == gfc_ss_terminator);
1506
1507 /* Increment the offset. */
1508 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1509 *poffset, gfc_index_one_node);
1510 gfc_add_modify (&body, *poffset, tmp);
1511
1512 /* Finish the loop. */
1513 gfc_trans_scalarizing_loops (&loop, &body);
1514 gfc_add_block_to_block (&loop.pre, &loop.post);
1515 tmp = gfc_finish_block (&loop.pre);
1516 gfc_add_expr_to_block (pblock, tmp);
1517
1518 gfc_cleanup_loop (&loop);
1519 }
1520
1521
1522 /* Assign the values to the elements of an array constructor. DYNAMIC
1523 is true if descriptor DESC only contains enough data for the static
1524 size calculated by gfc_get_array_constructor_size. When true, memory
1525 for the dynamic parts must be allocated using realloc. */
1526
1527 static void
1528 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1529 tree desc, gfc_constructor_base base,
1530 tree * poffset, tree * offsetvar,
1531 bool dynamic)
1532 {
1533 tree tmp;
1534 tree start = NULL_TREE;
1535 tree end = NULL_TREE;
1536 tree step = NULL_TREE;
1537 stmtblock_t body;
1538 gfc_se se;
1539 mpz_t size;
1540 gfc_constructor *c;
1541
1542 tree shadow_loopvar = NULL_TREE;
1543 gfc_saved_var saved_loopvar;
1544
1545 mpz_init (size);
1546 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1547 {
1548 /* If this is an iterator or an array, the offset must be a variable. */
1549 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1550 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1551
1552 /* Shadowing the iterator avoids changing its value and saves us from
1553 keeping track of it. Further, it makes sure that there's always a
1554 backend-decl for the symbol, even if there wasn't one before,
1555 e.g. in the case of an iterator that appears in a specification
1556 expression in an interface mapping. */
1557 if (c->iterator)
1558 {
1559 gfc_symbol *sym;
1560 tree type;
1561
1562 /* Evaluate loop bounds before substituting the loop variable
1563 in case they depend on it. Such a case is invalid, but it is
1564 not more expensive to do the right thing here.
1565 See PR 44354. */
1566 gfc_init_se (&se, NULL);
1567 gfc_conv_expr_val (&se, c->iterator->start);
1568 gfc_add_block_to_block (pblock, &se.pre);
1569 start = gfc_evaluate_now (se.expr, pblock);
1570
1571 gfc_init_se (&se, NULL);
1572 gfc_conv_expr_val (&se, c->iterator->end);
1573 gfc_add_block_to_block (pblock, &se.pre);
1574 end = gfc_evaluate_now (se.expr, pblock);
1575
1576 gfc_init_se (&se, NULL);
1577 gfc_conv_expr_val (&se, c->iterator->step);
1578 gfc_add_block_to_block (pblock, &se.pre);
1579 step = gfc_evaluate_now (se.expr, pblock);
1580
1581 sym = c->iterator->var->symtree->n.sym;
1582 type = gfc_typenode_for_spec (&sym->ts);
1583
1584 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1585 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1586 }
1587
1588 gfc_start_block (&body);
1589
1590 if (c->expr->expr_type == EXPR_ARRAY)
1591 {
1592 /* Array constructors can be nested. */
1593 gfc_trans_array_constructor_value (&body, type, desc,
1594 c->expr->value.constructor,
1595 poffset, offsetvar, dynamic);
1596 }
1597 else if (c->expr->rank > 0)
1598 {
1599 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1600 poffset, offsetvar, dynamic);
1601 }
1602 else
1603 {
1604 /* This code really upsets the gimplifier so don't bother for now. */
1605 gfc_constructor *p;
1606 HOST_WIDE_INT n;
1607 HOST_WIDE_INT size;
1608
1609 p = c;
1610 n = 0;
1611 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1612 {
1613 p = gfc_constructor_next (p);
1614 n++;
1615 }
1616 if (n < 4)
1617 {
1618 /* Scalar values. */
1619 gfc_init_se (&se, NULL);
1620 gfc_trans_array_ctor_element (&body, desc, *poffset,
1621 &se, c->expr);
1622
1623 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1624 gfc_array_index_type,
1625 *poffset, gfc_index_one_node);
1626 }
1627 else
1628 {
1629 /* Collect multiple scalar constants into a constructor. */
1630 vec<constructor_elt, va_gc> *v = NULL;
1631 tree init;
1632 tree bound;
1633 tree tmptype;
1634 HOST_WIDE_INT idx = 0;
1635
1636 p = c;
1637 /* Walk the consecutive scalar constants again, appending their values. */
1638 while (p && !(p->iterator
1639 || p->expr->expr_type != EXPR_CONSTANT))
1640 {
1641 gfc_init_se (&se, NULL);
1642 gfc_conv_constant (&se, p->expr);
1643
1644 if (c->expr->ts.type != BT_CHARACTER)
1645 se.expr = fold_convert (type, se.expr);
1646 /* For constant character array constructors we build
1647 an array of pointers. */
1648 else if (POINTER_TYPE_P (type))
1649 se.expr = gfc_build_addr_expr
1650 (gfc_get_pchar_type (p->expr->ts.kind),
1651 se.expr);
1652
1653 CONSTRUCTOR_APPEND_ELT (v,
1654 build_int_cst (gfc_array_index_type,
1655 idx++),
1656 se.expr);
1657 c = p;
1658 p = gfc_constructor_next (p);
1659 }
1660
1661 bound = size_int (n - 1);
1662 /* Create an array type to hold them. */
1663 tmptype = build_range_type (gfc_array_index_type,
1664 gfc_index_zero_node, bound);
1665 tmptype = build_array_type (type, tmptype);
1666
1667 init = build_constructor (tmptype, v);
1668 TREE_CONSTANT (init) = 1;
1669 TREE_STATIC (init) = 1;
1670 /* Create a static variable to hold the data. */
1671 tmp = gfc_create_var (tmptype, "data");
1672 TREE_STATIC (tmp) = 1;
1673 TREE_CONSTANT (tmp) = 1;
1674 TREE_READONLY (tmp) = 1;
1675 DECL_INITIAL (tmp) = init;
1676 init = tmp;
1677
1678 /* Use BUILTIN_MEMCPY to assign the values. */
1679 tmp = gfc_conv_descriptor_data_get (desc);
1680 tmp = build_fold_indirect_ref_loc (input_location,
1681 tmp);
1682 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1683 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1684 init = gfc_build_addr_expr (NULL_TREE, init);
1685
1686 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1687 bound = build_int_cst (size_type_node, n * size);
1688 tmp = build_call_expr_loc (input_location,
1689 builtin_decl_explicit (BUILT_IN_MEMCPY),
1690 3, tmp, init, bound);
1691 gfc_add_expr_to_block (&body, tmp);
1692
1693 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1694 gfc_array_index_type, *poffset,
1695 build_int_cst (gfc_array_index_type, n));
1696 }
1697 if (!INTEGER_CST_P (*poffset))
1698 {
1699 gfc_add_modify (&body, *offsetvar, *poffset);
1700 *poffset = *offsetvar;
1701 }
1702 }
1703
1704 /* The frontend should already have done any expansions
1705 at compile-time. */
1706 if (!c->iterator)
1707 {
1708 /* Pass the code as is. */
1709 tmp = gfc_finish_block (&body);
1710 gfc_add_expr_to_block (pblock, tmp);
1711 }
1712 else
1713 {
1714 /* Build the implied do-loop. */
1715 stmtblock_t implied_do_block;
1716 tree cond;
1717 tree exit_label;
1718 tree loopbody;
1719 tree tmp2;
1720
1721 loopbody = gfc_finish_block (&body);
1722
1723 /* Create a new block that holds the implied-do loop. A temporary
1724 loop-variable is used. */
1725 gfc_start_block(&implied_do_block);
1726
1727 /* Initialize the loop. */
1728 gfc_add_modify (&implied_do_block, shadow_loopvar, start);
1729
1730 /* If this array expands dynamically, and the number of iterations
1731 is not constant, we won't have allocated space for the static
1732 part of C->EXPR's size. Do that now. */
1733 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1734 {
1735 /* Get the number of iterations. */
1736 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1737
1738 /* Get the static part of C->EXPR's size. */
1739 gfc_get_array_constructor_element_size (&size, c->expr);
1740 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1741
1742 /* Grow the array by TMP * TMP2 elements. */
1743 tmp = fold_build2_loc (input_location, MULT_EXPR,
1744 gfc_array_index_type, tmp, tmp2);
1745 gfc_grow_array (&implied_do_block, desc, tmp);
1746 }
1747
1748 /* Generate the loop body. */
1749 exit_label = gfc_build_label_decl (NULL_TREE);
1750 gfc_start_block (&body);
1751
1752 /* Generate the exit condition. Depending on the sign of
1753 the step variable we have to generate the correct
1754 comparison. */
1755 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1756 step, build_int_cst (TREE_TYPE (step), 0));
1757 cond = fold_build3_loc (input_location, COND_EXPR,
1758 boolean_type_node, tmp,
1759 fold_build2_loc (input_location, GT_EXPR,
1760 boolean_type_node, shadow_loopvar, end),
1761 fold_build2_loc (input_location, LT_EXPR,
1762 boolean_type_node, shadow_loopvar, end));
1763 tmp = build1_v (GOTO_EXPR, exit_label);
1764 TREE_USED (exit_label) = 1;
1765 tmp = build3_v (COND_EXPR, cond, tmp,
1766 build_empty_stmt (input_location));
1767 gfc_add_expr_to_block (&body, tmp);
1768
1769 /* The main loop body. */
1770 gfc_add_expr_to_block (&body, loopbody);
1771
1772 /* Increase loop variable by step. */
1773 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1774 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1775 step);
1776 gfc_add_modify (&body, shadow_loopvar, tmp);
1777
1778 /* Finish the loop. */
1779 tmp = gfc_finish_block (&body);
1780 tmp = build1_v (LOOP_EXPR, tmp);
1781 gfc_add_expr_to_block (&implied_do_block, tmp);
1782
1783 /* Add the exit label. */
1784 tmp = build1_v (LABEL_EXPR, exit_label);
1785 gfc_add_expr_to_block (&implied_do_block, tmp);
1786
1787 /* Finish the implied-do loop. */
1788 tmp = gfc_finish_block(&implied_do_block);
1789 gfc_add_expr_to_block(pblock, tmp);
1790
1791 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1792 }
1793 }
1794 mpz_clear (size);
1795 }
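
/* For an element governed by an iterator, the block emitted above has
   roughly this shape (a sketch, with S standing for the shadow loop
   variable that replaces the user's iterator):

       S = start;
       loop:
         if (step > 0 ? S > end : S < end)
           goto exit;
         ...store c->expr at *poffset and advance *poffset...
         S = S + step;
         goto loop;                    -- implicit in the LOOP_EXPR
       exit:

   preceded, for dynamic constructors with non-constant iteration counts,
   by a gfc_grow_array call that makes room for the new elements.  */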
1796
1797
1798 /* The array constructor code can create a string length with an operand
1799 in the form of a temporary variable. This variable will retain its
1800 context (current_function_decl). If we store this length tree in a
1801 gfc_charlen structure which is shared by a variable in another
1802 context, that variable would end up referencing a length tree from a
1803 different context, and we could trip the assertion in expand_expr_real_1
1804 when it sees that a variable has been created in one context and
1805 referenced in another.
1806
1807 If this might be the case, we create a new gfc_charlen structure and
1808 link it into the current namespace. */
1809
1810 static void
1811 store_backend_decl (gfc_charlen **clp, tree len, bool force_new_cl)
1812 {
1813 if (force_new_cl)
1814 {
1815 gfc_charlen *new_cl = gfc_new_charlen (gfc_current_ns, *clp);
1816 *clp = new_cl;
1817 }
1818 (*clp)->backend_decl = len;
1819 }
1820
1821 /* A catch-all to obtain the string length for anything that is not
1822 a substring of non-constant length, a constant, array or variable. */
1823
1824 static void
1825 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1826 {
1827 gfc_se se;
1828
1829 /* Don't bother if we already know the length is a constant. */
1830 if (*len && INTEGER_CST_P (*len))
1831 return;
1832
1833 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1834 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1835 {
1836 /* This is easy. */
1837 gfc_conv_const_charlen (e->ts.u.cl);
1838 *len = e->ts.u.cl->backend_decl;
1839 }
1840 else
1841 {
1842 /* Otherwise, be brutal even if inefficient. */
1843 gfc_init_se (&se, NULL);
1844
1845 /* No function call, in case of side effects. */
1846 se.no_function_call = 1;
1847 if (e->rank == 0)
1848 gfc_conv_expr (&se, e);
1849 else
1850 gfc_conv_expr_descriptor (&se, e);
1851
1852 /* Fix the value. */
1853 *len = gfc_evaluate_now (se.string_length, &se.pre);
1854
1855 gfc_add_block_to_block (block, &se.pre);
1856 gfc_add_block_to_block (block, &se.post);
1857
1858 store_backend_decl (&e->ts.u.cl, *len, true);
1859 }
1860 }
1861
1862
1863 /* Figure out the string length of a variable reference expression.
1864 Used by get_array_ctor_strlen. */
1865
1866 static void
1867 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1868 {
1869 gfc_ref *ref;
1870 gfc_typespec *ts;
1871 mpz_t char_len;
1872
1873 /* Don't bother if we already know the length is a constant. */
1874 if (*len && INTEGER_CST_P (*len))
1875 return;
1876
1877 ts = &expr->symtree->n.sym->ts;
1878 for (ref = expr->ref; ref; ref = ref->next)
1879 {
1880 switch (ref->type)
1881 {
1882 case REF_ARRAY:
1883 /* Array references don't change the string length. */
1884 break;
1885
1886 case REF_COMPONENT:
1887 /* Use the length of the component. */
1888 ts = &ref->u.c.component->ts;
1889 break;
1890
1891 case REF_SUBSTRING:
1892 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1893 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1894 {
1895 /* Note that this might evaluate expr. */
1896 get_array_ctor_all_strlen (block, expr, len);
1897 return;
1898 }
1899 mpz_init_set_ui (char_len, 1);
1900 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1901 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1902 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1903 *len = convert (gfc_charlen_type_node, *len);
1904 mpz_clear (char_len);
1905 return;
1906
1907 default:
1908 gcc_unreachable ();
1909 }
1910 }
1911
1912 *len = ts->u.cl->backend_decl;
1913 }
1914
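/* For illustration only: in the REF_SUBSTRING case above the length of a
   constant substring reference is computed with GMP as end - start + 1, so a
   reference such as c(:)(2:5) yields 5 - 2 + 1 = 4, converted to
   gfc_charlen_type_node.  */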
1915
1916 /* Figure out the string length of a character array constructor.
1917 If len is NULL, don't calculate the length; this happens for recursive calls
1918 when a sub-array-constructor is an element but not at the first position,
1919 i.e., when we're not interested in the length.
1920 Returns TRUE if all elements are character constants. */
1921
1922 bool
1923 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1924 {
1925 gfc_constructor *c;
1926 bool is_const;
1927
1928 is_const = TRUE;
1929
1930 if (gfc_constructor_first (base) == NULL)
1931 {
1932 if (len)
1933 *len = build_int_cstu (gfc_charlen_type_node, 0);
1934 return is_const;
1935 }
1936
1937 /* Loop over all constructor elements to find out is_const, but in len we
1938 want to store the length of the first, not the last, element. We can
1939 of course exit the loop as soon as is_const is found to be false. */
1940 for (c = gfc_constructor_first (base);
1941 c && is_const; c = gfc_constructor_next (c))
1942 {
1943 switch (c->expr->expr_type)
1944 {
1945 case EXPR_CONSTANT:
1946 if (len && !(*len && INTEGER_CST_P (*len)))
1947 *len = build_int_cstu (gfc_charlen_type_node,
1948 c->expr->value.character.length);
1949 break;
1950
1951 case EXPR_ARRAY:
1952 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1953 is_const = false;
1954 break;
1955
1956 case EXPR_VARIABLE:
1957 is_const = false;
1958 if (len)
1959 get_array_ctor_var_strlen (block, c->expr, len);
1960 break;
1961
1962 default:
1963 is_const = false;
1964 if (len)
1965 get_array_ctor_all_strlen (block, c->expr, len);
1966 break;
1967 }
1968
1969 /* After the first iteration, we don't want the length modified. */
1970 len = NULL;
1971 }
1972
1973 return is_const;
1974 }
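
/* A sketch of the behaviour above (example is assumed, not from the sources):
   for a constructor such as

       (/ 'abc', 'def', str /)

   *LEN is taken from the first element only (length 3), because LEN is set to
   NULL after the first iteration, while the variable STR makes the function
   return false since not every element is a character constant.  */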
1975
1976 /* Check whether the array constructor BASE consists entirely of constant
1977 elements, and if so return the number of those elements; otherwise
1978 return zero. Note, an empty or NULL array constructor returns zero. */
1979
1980 unsigned HOST_WIDE_INT
1981 gfc_constant_array_constructor_p (gfc_constructor_base base)
1982 {
1983 unsigned HOST_WIDE_INT nelem = 0;
1984
1985 gfc_constructor *c = gfc_constructor_first (base);
1986 while (c)
1987 {
1988 if (c->iterator
1989 || c->expr->rank > 0
1990 || c->expr->expr_type != EXPR_CONSTANT)
1991 return 0;
1992 c = gfc_constructor_next (c);
1993 nelem++;
1994 }
1995 return nelem;
1996 }
1997
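/* Illustrative examples (assumed, not taken from the sources):
     (/ 1, 2, 3 /)      -> 3  (all scalar constants)
     (/ (i, i=1,3) /)   -> 0  (element has an iterator)
     (/ a, 2 /)         -> 0  (A is a variable, not an EXPR_CONSTANT)
     empty constructor  -> 0  */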
1998
1999 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
2000 and the tree type of its elements, TYPE, return a static constant
2001 variable that is compile-time initialized. */
2002
2003 tree
2004 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
2005 {
2006 tree tmptype, init, tmp;
2007 HOST_WIDE_INT nelem;
2008 gfc_constructor *c;
2009 gfc_array_spec as;
2010 gfc_se se;
2011 int i;
2012 vec<constructor_elt, va_gc> *v = NULL;
2013
2014 /* First traverse the constructor list, converting the constants
2015 to tree to build an initializer. */
2016 nelem = 0;
2017 c = gfc_constructor_first (expr->value.constructor);
2018 while (c)
2019 {
2020 gfc_init_se (&se, NULL);
2021 gfc_conv_constant (&se, c->expr);
2022 if (c->expr->ts.type != BT_CHARACTER)
2023 se.expr = fold_convert (type, se.expr);
2024 else if (POINTER_TYPE_P (type))
2025 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
2026 se.expr);
2027 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
2028 se.expr);
2029 c = gfc_constructor_next (c);
2030 nelem++;
2031 }
2032
2033 /* Next determine the tree type for the array. We use the gfortran
2034 front-end's gfc_get_nodesc_array_type in order to create a suitable
2035 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
2036
2037 memset (&as, 0, sizeof (gfc_array_spec));
2038
2039 as.rank = expr->rank;
2040 as.type = AS_EXPLICIT;
2041 if (!expr->shape)
2042 {
2043 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2044 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
2045 NULL, nelem - 1);
2046 }
2047 else
2048 for (i = 0; i < expr->rank; i++)
2049 {
2050 int tmp = (int) mpz_get_si (expr->shape[i]);
2051 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2052 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
2053 NULL, tmp - 1);
2054 }
2055
2056 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
2057
2058 /* The array spec AS is not needed anymore. */
2059 for (i = 0; i < as.rank + as.corank; i++)
2060 {
2061 gfc_free_expr (as.lower[i]);
2062 gfc_free_expr (as.upper[i]);
2063 }
2064
2065 init = build_constructor (tmptype, v);
2066
2067 TREE_CONSTANT (init) = 1;
2068 TREE_STATIC (init) = 1;
2069
2070 tmp = build_decl (input_location, VAR_DECL, create_tmp_var_name ("A"),
2071 tmptype);
2072 DECL_ARTIFICIAL (tmp) = 1;
2073 DECL_IGNORED_P (tmp) = 1;
2074 TREE_STATIC (tmp) = 1;
2075 TREE_CONSTANT (tmp) = 1;
2076 TREE_READONLY (tmp) = 1;
2077 DECL_INITIAL (tmp) = init;
2078 pushdecl (tmp);
2079
2080 return tmp;
2081 }
2082
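/* Sketch of the result (illustrative): for a constructor such as
   (/ 10, 20, 30 /) of default integers the function returns a static,
   read-only, artificial VAR_DECL (named via create_tmp_var_name ("A")) whose
   type is a descriptorless array with bounds 0:2 and whose DECL_INITIAL is
   the CONSTRUCTOR { 10, 20, 30 }, so no runtime initialization is needed.  */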
2083
2084 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
2085 This mostly initializes the scalarizer state info structure with the
2086 appropriate values to directly use the array created by the function
2087 gfc_build_constant_array_constructor. */
2088
2089 static void
2090 trans_constant_array_constructor (gfc_ss * ss, tree type)
2091 {
2092 gfc_array_info *info;
2093 tree tmp;
2094 int i;
2095
2096 tmp = gfc_build_constant_array_constructor (ss->info->expr, type);
2097
2098 info = &ss->info->data.array;
2099
2100 info->descriptor = tmp;
2101 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
2102 info->offset = gfc_index_zero_node;
2103
2104 for (i = 0; i < ss->dimen; i++)
2105 {
2106 info->delta[i] = gfc_index_zero_node;
2107 info->start[i] = gfc_index_zero_node;
2108 info->end[i] = gfc_index_zero_node;
2109 info->stride[i] = gfc_index_one_node;
2110 }
2111 }
2112
2113
2114 static int
2115 get_rank (gfc_loopinfo *loop)
2116 {
2117 int rank;
2118
2119 rank = 0;
2120 for (; loop; loop = loop->parent)
2121 rank += loop->dimen;
2122
2123 return rank;
2124 }
2125
2126
2127 /* Helper routine of gfc_trans_array_constructor to determine if the
2128 bounds of the loop specified by LOOP are constant and simple enough
2129 to use with trans_constant_array_constructor. Returns the
2130 iteration count of the loop if suitable, and NULL_TREE otherwise. */
2131
2132 static tree
2133 constant_array_constructor_loop_size (gfc_loopinfo * l)
2134 {
2135 gfc_loopinfo *loop;
2136 tree size = gfc_index_one_node;
2137 tree tmp;
2138 int i, total_dim;
2139
2140 total_dim = get_rank (l);
2141
2142 for (loop = l; loop; loop = loop->parent)
2143 {
2144 for (i = 0; i < loop->dimen; i++)
2145 {
2146 /* If the bounds aren't constant, return NULL_TREE. */
2147 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
2148 return NULL_TREE;
2149 if (!integer_zerop (loop->from[i]))
2150 {
2151 /* Only allow nonzero "from" in one-dimensional arrays. */
2152 if (total_dim != 1)
2153 return NULL_TREE;
2154 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2155 gfc_array_index_type,
2156 loop->to[i], loop->from[i]);
2157 }
2158 else
2159 tmp = loop->to[i];
2160 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2161 gfc_array_index_type, tmp, gfc_index_one_node);
2162 size = fold_build2_loc (input_location, MULT_EXPR,
2163 gfc_array_index_type, size, tmp);
2164 }
2165 }
2166
2167 return size;
2168 }
2169
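/* Worked example (illustrative): for constant loop bounds 0..2 and 0..1 the
   returned iteration count is (2 + 1) * (1 + 1) = 6.  A nonzero lower bound
   is accepted only for one-dimensional loops, e.g. bounds 5..9 give
   9 - 5 + 1 = 5; any non-constant bound makes the function return
   NULL_TREE.  */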
2170
2171 static tree *
2172 get_loop_upper_bound_for_array (gfc_ss *array, int array_dim)
2173 {
2174 gfc_ss *ss;
2175 int n;
2176
2177 gcc_assert (array->nested_ss == NULL);
2178
2179 for (ss = array; ss; ss = ss->parent)
2180 for (n = 0; n < ss->loop->dimen; n++)
2181 if (array_dim == get_array_ref_dim_for_loop_dim (ss, n))
2182 return &(ss->loop->to[n]);
2183
2184 gcc_unreachable ();
2185 }
2186
2187
2188 static gfc_loopinfo *
2189 outermost_loop (gfc_loopinfo * loop)
2190 {
2191 while (loop->parent != NULL)
2192 loop = loop->parent;
2193
2194 return loop;
2195 }
2196
2197
2198 /* Array constructors are handled by constructing a temporary, then using that
2199 within the scalarization loop. This is not optimal, but seems by far the
2200 simplest method. */
2201
2202 static void
2203 trans_array_constructor (gfc_ss * ss, locus * where)
2204 {
2205 gfc_constructor_base c;
2206 tree offset;
2207 tree offsetvar;
2208 tree desc;
2209 tree type;
2210 tree tmp;
2211 tree *loop_ubound0;
2212 bool dynamic;
2213 bool old_first_len, old_typespec_chararray_ctor;
2214 tree old_first_len_val;
2215 gfc_loopinfo *loop, *outer_loop;
2216 gfc_ss_info *ss_info;
2217 gfc_expr *expr;
2218 gfc_ss *s;
2219
2220 /* Save the old values for nested checking. */
2221 old_first_len = first_len;
2222 old_first_len_val = first_len_val;
2223 old_typespec_chararray_ctor = typespec_chararray_ctor;
2224
2225 loop = ss->loop;
2226 outer_loop = outermost_loop (loop);
2227 ss_info = ss->info;
2228 expr = ss_info->expr;
2229
2230 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
2231 typespec was given for the array constructor. */
2232 typespec_chararray_ctor = (expr->ts.u.cl
2233 && expr->ts.u.cl->length_from_typespec);
2234
2235 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2236 && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
2237 {
2238 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
2239 first_len = true;
2240 }
2241
2242 gcc_assert (ss->dimen == ss->loop->dimen);
2243
2244 c = expr->value.constructor;
2245 if (expr->ts.type == BT_CHARACTER)
2246 {
2247 bool const_string;
2248 bool force_new_cl = false;
2249
2250 /* get_array_ctor_strlen walks the elements of the constructor; if a
2251 typespec was given, we already know the string length and want the one
2252 specified there. */
2253 if (typespec_chararray_ctor && expr->ts.u.cl->length
2254 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
2255 {
2256 gfc_se length_se;
2257
2258 const_string = false;
2259 gfc_init_se (&length_se, NULL);
2260 gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
2261 gfc_charlen_type_node);
2262 ss_info->string_length = length_se.expr;
2263 gfc_add_block_to_block (&outer_loop->pre, &length_se.pre);
2264 gfc_add_block_to_block (&outer_loop->post, &length_se.post);
2265 }
2266 else
2267 {
2268 const_string = get_array_ctor_strlen (&outer_loop->pre, c,
2269 &ss_info->string_length);
2270 force_new_cl = true;
2271 }
2272
2273 /* Complex character array constructors should have been taken care of
2274 and not end up here. */
2275 gcc_assert (ss_info->string_length);
2276
2277 store_backend_decl (&expr->ts.u.cl, ss_info->string_length, force_new_cl);
2278
2279 type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
2280 if (const_string)
2281 type = build_pointer_type (type);
2282 }
2283 else
2284 type = gfc_typenode_for_spec (&expr->ts);
2285
2286 /* See if the constructor determines the loop bounds. */
2287 dynamic = false;
2288
2289 loop_ubound0 = get_loop_upper_bound_for_array (ss, 0);
2290
2291 if (expr->shape && get_rank (loop) > 1 && *loop_ubound0 == NULL_TREE)
2292 {
2293 /* We have a multidimensional parameter. */
2294 for (s = ss; s; s = s->parent)
2295 {
2296 int n;
2297 for (n = 0; n < s->loop->dimen; n++)
2298 {
2299 s->loop->from[n] = gfc_index_zero_node;
2300 s->loop->to[n] = gfc_conv_mpz_to_tree (expr->shape[s->dim[n]],
2301 gfc_index_integer_kind);
2302 s->loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2303 gfc_array_index_type,
2304 s->loop->to[n],
2305 gfc_index_one_node);
2306 }
2307 }
2308 }
2309
2310 if (*loop_ubound0 == NULL_TREE)
2311 {
2312 mpz_t size;
2313
2314 /* We should have a 1-dimensional, zero-based loop. */
2315 gcc_assert (loop->parent == NULL && loop->nested == NULL);
2316 gcc_assert (loop->dimen == 1);
2317 gcc_assert (integer_zerop (loop->from[0]));
2318
2319 /* Split the constructor size into a static part and a dynamic part.
2320 Allocate the static size up-front and record whether the dynamic
2321 size might be nonzero. */
2322 mpz_init (size);
2323 dynamic = gfc_get_array_constructor_size (&size, c);
2324 mpz_sub_ui (size, size, 1);
2325 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2326 mpz_clear (size);
2327 }
2328
2329 /* Special case constant array constructors. */
2330 if (!dynamic)
2331 {
2332 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2333 if (nelem > 0)
2334 {
2335 tree size = constant_array_constructor_loop_size (loop);
2336 if (size && compare_tree_int (size, nelem) == 0)
2337 {
2338 trans_constant_array_constructor (ss, type);
2339 goto finish;
2340 }
2341 }
2342 }
2343
2344 gfc_trans_create_temp_array (&outer_loop->pre, &outer_loop->post, ss, type,
2345 NULL_TREE, dynamic, true, false, where);
2346
2347 desc = ss_info->data.array.descriptor;
2348 offset = gfc_index_zero_node;
2349 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2350 TREE_NO_WARNING (offsetvar) = 1;
2351 TREE_USED (offsetvar) = 0;
2352 gfc_trans_array_constructor_value (&outer_loop->pre, type, desc, c,
2353 &offset, &offsetvar, dynamic);
2354
2355 /* If the array grows dynamically, the upper bound of the loop variable
2356 is determined by the array's final upper bound. */
2357 if (dynamic)
2358 {
2359 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2360 gfc_array_index_type,
2361 offsetvar, gfc_index_one_node);
2362 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2363 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2364 if (*loop_ubound0 && TREE_CODE (*loop_ubound0) == VAR_DECL)
2365 gfc_add_modify (&outer_loop->pre, *loop_ubound0, tmp);
2366 else
2367 *loop_ubound0 = tmp;
2368 }
2369
2370 if (TREE_USED (offsetvar))
2371 pushdecl (offsetvar);
2372 else
2373 gcc_assert (INTEGER_CST_P (offset));
2374
2375 #if 0
2376 /* Disable bound checking for now because it's probably broken. */
2377 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2378 {
2379 gcc_unreachable ();
2380 }
2381 #endif
2382
2383 finish:
2384 /* Restore old values of globals. */
2385 first_len = old_first_len;
2386 first_len_val = old_first_len_val;
2387 typespec_chararray_ctor = old_typespec_chararray_ctor;
2388 }
2389
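/* Informal illustration of the two main paths above: a fully constant
   constructor such as (/ 1, 2, 3 /) whose loop size matches the element count
   is turned into a static array by trans_constant_array_constructor, whereas
   something like (/ (f(i), i = 1, n) /) has DYNAMIC set, so a growable
   temporary is filled by gfc_trans_array_constructor_value and the loop upper
   bound is taken from the final value of OFFSETVAR.  */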
2390
2391 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2392 called after evaluating all of INFO's vector dimensions. Go through
2393 each such vector dimension and see if we can now fill in any missing
2394 loop bounds. */
2395
2396 static void
2397 set_vector_loop_bounds (gfc_ss * ss)
2398 {
2399 gfc_loopinfo *loop, *outer_loop;
2400 gfc_array_info *info;
2401 gfc_se se;
2402 tree tmp;
2403 tree desc;
2404 tree zero;
2405 int n;
2406 int dim;
2407
2408 outer_loop = outermost_loop (ss->loop);
2409
2410 info = &ss->info->data.array;
2411
2412 for (; ss; ss = ss->parent)
2413 {
2414 loop = ss->loop;
2415
2416 for (n = 0; n < loop->dimen; n++)
2417 {
2418 dim = ss->dim[n];
2419 if (info->ref->u.ar.dimen_type[dim] != DIMEN_VECTOR
2420 || loop->to[n] != NULL)
2421 continue;
2422
2423 /* Loop variable N indexes vector dimension DIM, and we don't
2424 yet know the upper bound of loop variable N. Set it to the
2425 difference between the vector's upper and lower bounds. */
2426 gcc_assert (loop->from[n] == gfc_index_zero_node);
2427 gcc_assert (info->subscript[dim]
2428 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2429
2430 gfc_init_se (&se, NULL);
2431 desc = info->subscript[dim]->info->data.array.descriptor;
2432 zero = gfc_rank_cst[0];
2433 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2434 gfc_array_index_type,
2435 gfc_conv_descriptor_ubound_get (desc, zero),
2436 gfc_conv_descriptor_lbound_get (desc, zero));
2437 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2438 loop->to[n] = tmp;
2439 }
2440 }
2441 }
2442
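/* For illustration: given a vector subscript a(v) with v = (/ 3, 1, 2 /), the
   corresponding zero-based loop runs from 0 to ubound(v) - lbound(v), i.e. to
   2 here, which is exactly the difference of descriptor bounds computed
   above.  */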
2443
2444 /* Tells whether a scalar argument to an elemental procedure is saved out
2445 of a scalarization loop as a value or as a reference. */
2446
2447 bool
2448 gfc_scalar_elemental_arg_saved_as_reference (gfc_ss_info * ss_info)
2449 {
2450 if (ss_info->type != GFC_SS_REFERENCE)
2451 return false;
2452
2453 /* If the actual argument can be absent (in other words, it can
2454 be a NULL reference), don't try to evaluate it; pass instead
2455 the reference directly. */
2456 if (ss_info->can_be_null_ref)
2457 return true;
2458
2459 /* If the expression is of polymorphic type, its actual size is not known,
2460 so we avoid copying it anywhere. */
2461 if (ss_info->data.scalar.dummy_arg
2462 && ss_info->data.scalar.dummy_arg->ts.type == BT_CLASS
2463 && ss_info->expr->ts.type == BT_CLASS)
2464 return true;
2465
2466 /* If the expression is a data reference of aggregate type,
2467 avoid a copy by saving a reference to the content. */
2468 if (ss_info->expr->expr_type == EXPR_VARIABLE
2469 && (ss_info->expr->ts.type == BT_DERIVED
2470 || ss_info->expr->ts.type == BT_CLASS))
2471 return true;
2472
2473 /* Otherwise the expression is evaluated to a temporary variable before the
2474 scalarization loop. */
2475 return false;
2476 }
2477
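/* Examples (assumed for illustration): for an elemental call such as
   elem_sub (x + 1.0) the scalar actual argument is evaluated into a temporary
   before the loop and a reference to that temporary is passed, whereas a
   derived-type or CLASS variable argument, or one that may be an absent
   dummy, is passed by reference directly, without copying.  */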
2478
2479 /* Add the pre and post chains for all the scalar expressions in a SS chain
2480 to loop. This is called after the loop parameters have been calculated,
2481 but before the actual scalarizing loops. */
2482
2483 static void
2484 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2485 locus * where)
2486 {
2487 gfc_loopinfo *nested_loop, *outer_loop;
2488 gfc_se se;
2489 gfc_ss_info *ss_info;
2490 gfc_array_info *info;
2491 gfc_expr *expr;
2492 int n;
2493
2494 /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
2495 arguments could get evaluated multiple times. */
2496 if (ss->is_alloc_lhs)
2497 return;
2498
2499 outer_loop = outermost_loop (loop);
2500
2501 /* TODO: This can generate bad code if there are ordering dependencies,
2502 e.g., a callee allocated function and an unknown size constructor. */
2503 gcc_assert (ss != NULL);
2504
2505 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2506 {
2507 gcc_assert (ss);
2508
2509 /* Cross loop arrays are handled from within the most nested loop. */
2510 if (ss->nested_ss != NULL)
2511 continue;
2512
2513 ss_info = ss->info;
2514 expr = ss_info->expr;
2515 info = &ss_info->data.array;
2516
2517 switch (ss_info->type)
2518 {
2519 case GFC_SS_SCALAR:
2520 /* Scalar expression. Evaluate this now. This includes elemental
2521 dimension indices, but not array section bounds. */
2522 gfc_init_se (&se, NULL);
2523 gfc_conv_expr (&se, expr);
2524 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2525
2526 if (expr->ts.type != BT_CHARACTER
2527 && !gfc_is_alloc_class_scalar_function (expr))
2528 {
2529 /* Move the evaluation of scalar expressions outside the
2530 scalarization loop, except for WHERE assignments. */
2531 if (subscript)
2532 se.expr = convert(gfc_array_index_type, se.expr);
2533 if (!ss_info->where)
2534 se.expr = gfc_evaluate_now (se.expr, &outer_loop->pre);
2535 gfc_add_block_to_block (&outer_loop->pre, &se.post);
2536 }
2537 else
2538 gfc_add_block_to_block (&outer_loop->post, &se.post);
2539
2540 ss_info->data.scalar.value = se.expr;
2541 ss_info->string_length = se.string_length;
2542 break;
2543
2544 case GFC_SS_REFERENCE:
2545 /* Scalar argument to elemental procedure. */
2546 gfc_init_se (&se, NULL);
2547 if (gfc_scalar_elemental_arg_saved_as_reference (ss_info))
2548 gfc_conv_expr_reference (&se, expr);
2549 else
2550 {
2551 /* Evaluate the argument outside the loop and pass
2552 a reference to the value. */
2553 gfc_conv_expr (&se, expr);
2554 }
2555
2556 /* Ensure that a pointer to the string is stored. */
2557 if (expr->ts.type == BT_CHARACTER)
2558 gfc_conv_string_parameter (&se);
2559
2560 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2561 gfc_add_block_to_block (&outer_loop->post, &se.post);
2562 if (gfc_is_class_scalar_expr (expr))
2563 /* This is necessary because the dynamic type will always be
2564 larger than the declared type. In consequence, assigning
2565 the value to a temporary could segfault.
2566 OOP-TODO: see if this is generally correct or whether the value
2567 has to be written to an allocated temporary, whose address
2568 is passed via ss_info. */
2569 ss_info->data.scalar.value = se.expr;
2570 else
2571 ss_info->data.scalar.value = gfc_evaluate_now (se.expr,
2572 &outer_loop->pre);
2573
2574 ss_info->string_length = se.string_length;
2575 break;
2576
2577 case GFC_SS_SECTION:
2578 /* Add the expressions for scalar and vector subscripts. */
2579 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2580 if (info->subscript[n])
2581 gfc_add_loop_ss_code (loop, info->subscript[n], true, where);
2582
2583 set_vector_loop_bounds (ss);
2584 break;
2585
2586 case GFC_SS_VECTOR:
2587 /* Get the vector's descriptor and store it in SS. */
2588 gfc_init_se (&se, NULL);
2589 gfc_conv_expr_descriptor (&se, expr);
2590 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2591 gfc_add_block_to_block (&outer_loop->post, &se.post);
2592 info->descriptor = se.expr;
2593 break;
2594
2595 case GFC_SS_INTRINSIC:
2596 gfc_add_intrinsic_ss_code (loop, ss);
2597 break;
2598
2599 case GFC_SS_FUNCTION:
2600 /* Array function return value. We call the function and save its
2601 result in a temporary for use inside the loop. */
2602 gfc_init_se (&se, NULL);
2603 se.loop = loop;
2604 se.ss = ss;
2605 gfc_conv_expr (&se, expr);
2606 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2607 gfc_add_block_to_block (&outer_loop->post, &se.post);
2608 ss_info->string_length = se.string_length;
2609 break;
2610
2611 case GFC_SS_CONSTRUCTOR:
2612 if (expr->ts.type == BT_CHARACTER
2613 && ss_info->string_length == NULL
2614 && expr->ts.u.cl
2615 && expr->ts.u.cl->length
2616 && expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
2617 {
2618 gfc_init_se (&se, NULL);
2619 gfc_conv_expr_type (&se, expr->ts.u.cl->length,
2620 gfc_charlen_type_node);
2621 ss_info->string_length = se.expr;
2622 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2623 gfc_add_block_to_block (&outer_loop->post, &se.post);
2624 }
2625 trans_array_constructor (ss, where);
2626 break;
2627
2628 case GFC_SS_TEMP:
2629 case GFC_SS_COMPONENT:
2630 /* Do nothing. These are handled elsewhere. */
2631 break;
2632
2633 default:
2634 gcc_unreachable ();
2635 }
2636 }
2637
2638 if (!subscript)
2639 for (nested_loop = loop->nested; nested_loop;
2640 nested_loop = nested_loop->next)
2641 gfc_add_loop_ss_code (nested_loop, nested_loop->ss, subscript, where);
2642 }
2643
2644
2645 /* Translate expressions for the descriptor and data pointer of a SS. */
2646 /*GCC ARRAYS*/
2647
2648 static void
2649 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2650 {
2651 gfc_se se;
2652 gfc_ss_info *ss_info;
2653 gfc_array_info *info;
2654 tree tmp;
2655
2656 ss_info = ss->info;
2657 info = &ss_info->data.array;
2658
2659 /* Get the descriptor for the array to be scalarized. */
2660 gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
2661 gfc_init_se (&se, NULL);
2662 se.descriptor_only = 1;
2663 gfc_conv_expr_lhs (&se, ss_info->expr);
2664 gfc_add_block_to_block (block, &se.pre);
2665 info->descriptor = se.expr;
2666 ss_info->string_length = se.string_length;
2667
2668 if (base)
2669 {
2670 /* Also the data pointer. */
2671 tmp = gfc_conv_array_data (se.expr);
2672 /* If this is a variable or address of a variable we use it directly.
2673 Otherwise we must evaluate it now to avoid breaking dependency
2674 analysis by pulling the expressions for elemental array indices
2675 inside the loop. */
2676 if (!(DECL_P (tmp)
2677 || (TREE_CODE (tmp) == ADDR_EXPR
2678 && DECL_P (TREE_OPERAND (tmp, 0)))))
2679 tmp = gfc_evaluate_now (tmp, block);
2680 info->data = tmp;
2681
2682 tmp = gfc_conv_array_offset (se.expr);
2683 info->offset = gfc_evaluate_now (tmp, block);
2684
2685 /* Make absolutely sure that the saved_offset is indeed saved
2686 so that the variable is still accessible after the loops
2687 are translated. */
2688 info->saved_offset = info->offset;
2689 }
2690 }
2691
2692
2693 /* Initialize a gfc_loopinfo structure. */
2694
2695 void
2696 gfc_init_loopinfo (gfc_loopinfo * loop)
2697 {
2698 int n;
2699
2700 memset (loop, 0, sizeof (gfc_loopinfo));
2701 gfc_init_block (&loop->pre);
2702 gfc_init_block (&loop->post);
2703
2704 /* Initially scalarize in order and default to no loop reversal. */
2705 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2706 {
2707 loop->order[n] = n;
2708 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2709 }
2710
2711 loop->ss = gfc_ss_terminator;
2712 }
2713
2714
2715 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2716 chain. */
2717
2718 void
2719 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2720 {
2721 se->loop = loop;
2722 }
2723
2724
2725 /* Return an expression for the data pointer of an array. */
2726
2727 tree
2728 gfc_conv_array_data (tree descriptor)
2729 {
2730 tree type;
2731
2732 type = TREE_TYPE (descriptor);
2733 if (GFC_ARRAY_TYPE_P (type))
2734 {
2735 if (TREE_CODE (type) == POINTER_TYPE)
2736 return descriptor;
2737 else
2738 {
2739 /* Descriptorless arrays. */
2740 return gfc_build_addr_expr (NULL_TREE, descriptor);
2741 }
2742 }
2743 else
2744 return gfc_conv_descriptor_data_get (descriptor);
2745 }
2746
2747
2748 /* Return an expression for the base offset of an array. */
2749
2750 tree
2751 gfc_conv_array_offset (tree descriptor)
2752 {
2753 tree type;
2754
2755 type = TREE_TYPE (descriptor);
2756 if (GFC_ARRAY_TYPE_P (type))
2757 return GFC_TYPE_ARRAY_OFFSET (type);
2758 else
2759 return gfc_conv_descriptor_offset_get (descriptor);
2760 }
2761
2762
2763 /* Get an expression for the array stride. */
2764
2765 tree
2766 gfc_conv_array_stride (tree descriptor, int dim)
2767 {
2768 tree tmp;
2769 tree type;
2770
2771 type = TREE_TYPE (descriptor);
2772
2773 /* For descriptorless arrays use the stride stored in the array type. */
2774 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2775 if (tmp != NULL_TREE)
2776 return tmp;
2777
2778 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2779 return tmp;
2780 }
2781
2782
2783 /* Like gfc_conv_array_stride, but for the lower bound. */
2784
2785 tree
2786 gfc_conv_array_lbound (tree descriptor, int dim)
2787 {
2788 tree tmp;
2789 tree type;
2790
2791 type = TREE_TYPE (descriptor);
2792
2793 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2794 if (tmp != NULL_TREE)
2795 return tmp;
2796
2797 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2798 return tmp;
2799 }
2800
2801
2802 /* Like gfc_conv_array_stride, but for the upper bound. */
2803
2804 tree
2805 gfc_conv_array_ubound (tree descriptor, int dim)
2806 {
2807 tree tmp;
2808 tree type;
2809
2810 type = TREE_TYPE (descriptor);
2811
2812 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2813 if (tmp != NULL_TREE)
2814 return tmp;
2815
2816 /* This should only ever happen when passing an assumed shape array
2817 as an actual parameter. The value will never be used. */
2818 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2819 return gfc_index_zero_node;
2820
2821 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2822 return tmp;
2823 }
2824
2825
2826 /* Generate code to perform an array index bound check. */
2827
2828 static tree
2829 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2830 locus * where, bool check_upper)
2831 {
2832 tree fault;
2833 tree tmp_lo, tmp_up;
2834 tree descriptor;
2835 char *msg;
2836 const char * name = NULL;
2837
2838 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2839 return index;
2840
2841 descriptor = ss->info->data.array.descriptor;
2842
2843 index = gfc_evaluate_now (index, &se->pre);
2844
2845 /* We find a name for the error message. */
2846 name = ss->info->expr->symtree->n.sym->name;
2847 gcc_assert (name != NULL);
2848
2849 if (TREE_CODE (descriptor) == VAR_DECL)
2850 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2851
2852 /* If upper bound is present, include both bounds in the error message. */
2853 if (check_upper)
2854 {
2855 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2856 tmp_up = gfc_conv_array_ubound (descriptor, n);
2857
2858 if (name)
2859 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
2860 "outside of expected range (%%ld:%%ld)", n+1, name);
2861 else
2862 msg = xasprintf ("Index '%%ld' of dimension %d "
2863 "outside of expected range (%%ld:%%ld)", n+1);
2864
2865 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2866 index, tmp_lo);
2867 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2868 fold_convert (long_integer_type_node, index),
2869 fold_convert (long_integer_type_node, tmp_lo),
2870 fold_convert (long_integer_type_node, tmp_up));
2871 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2872 index, tmp_up);
2873 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2874 fold_convert (long_integer_type_node, index),
2875 fold_convert (long_integer_type_node, tmp_lo),
2876 fold_convert (long_integer_type_node, tmp_up));
2877 free (msg);
2878 }
2879 else
2880 {
2881 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2882
2883 if (name)
2884 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
2885 "below lower bound of %%ld", n+1, name);
2886 else
2887 msg = xasprintf ("Index '%%ld' of dimension %d "
2888 "below lower bound of %%ld", n+1);
2889
2890 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2891 index, tmp_lo);
2892 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2893 fold_convert (long_integer_type_node, index),
2894 fold_convert (long_integer_type_node, tmp_lo));
2895 free (msg);
2896 }
2897
2898 return index;
2899 }
2900
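/* Example of the generated diagnostic (values are illustrative, the format
   comes from the xasprintf strings above): with bounds checking enabled an
   out-of-range index produces a runtime error such as
   "Index '5' of dimension 1 of array 'a' outside of expected range (1:4)".  */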
2901
2902 /* Return the offset for an index. Performs bound checking for elemental
2903 dimensions. Single element references are processed separately.
2904 DIM is the array dimension, I is the loop dimension. */
2905
2906 static tree
2907 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2908 gfc_array_ref * ar, tree stride)
2909 {
2910 gfc_array_info *info;
2911 tree index;
2912 tree desc;
2913 tree data;
2914
2915 info = &ss->info->data.array;
2916
2917 /* Get the index into the array for this dimension. */
2918 if (ar)
2919 {
2920 gcc_assert (ar->type != AR_ELEMENT);
2921 switch (ar->dimen_type[dim])
2922 {
2923 case DIMEN_THIS_IMAGE:
2924 gcc_unreachable ();
2925 break;
2926 case DIMEN_ELEMENT:
2927 /* Elemental dimension. */
2928 gcc_assert (info->subscript[dim]
2929 && info->subscript[dim]->info->type == GFC_SS_SCALAR);
2930 /* We've already translated this value outside the loop. */
2931 index = info->subscript[dim]->info->data.scalar.value;
2932
2933 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2934 ar->as->type != AS_ASSUMED_SIZE
2935 || dim < ar->dimen - 1);
2936 break;
2937
2938 case DIMEN_VECTOR:
2939 gcc_assert (info && se->loop);
2940 gcc_assert (info->subscript[dim]
2941 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2942 desc = info->subscript[dim]->info->data.array.descriptor;
2943
2944 /* Get a zero-based index into the vector. */
2945 index = fold_build2_loc (input_location, MINUS_EXPR,
2946 gfc_array_index_type,
2947 se->loop->loopvar[i], se->loop->from[i]);
2948
2949 /* Multiply the index by the stride. */
2950 index = fold_build2_loc (input_location, MULT_EXPR,
2951 gfc_array_index_type,
2952 index, gfc_conv_array_stride (desc, 0));
2953
2954 /* Read the vector to get an index into info->descriptor. */
2955 data = build_fold_indirect_ref_loc (input_location,
2956 gfc_conv_array_data (desc));
2957 index = gfc_build_array_ref (data, index, NULL);
2958 index = gfc_evaluate_now (index, &se->pre);
2959 index = fold_convert (gfc_array_index_type, index);
2960
2961 /* Do any bounds checking on the final info->descriptor index. */
2962 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2963 ar->as->type != AS_ASSUMED_SIZE
2964 || dim < ar->dimen - 1);
2965 break;
2966
2967 case DIMEN_RANGE:
2968 /* Scalarized dimension. */
2969 gcc_assert (info && se->loop);
2970
2971 /* Multiply the loop variable by the stride and delta. */
2972 index = se->loop->loopvar[i];
2973 if (!integer_onep (info->stride[dim]))
2974 index = fold_build2_loc (input_location, MULT_EXPR,
2975 gfc_array_index_type, index,
2976 info->stride[dim]);
2977 if (!integer_zerop (info->delta[dim]))
2978 index = fold_build2_loc (input_location, PLUS_EXPR,
2979 gfc_array_index_type, index,
2980 info->delta[dim]);
2981 break;
2982
2983 default:
2984 gcc_unreachable ();
2985 }
2986 }
2987 else
2988 {
2989 /* Temporary array or derived type component. */
2990 gcc_assert (se->loop);
2991 index = se->loop->loopvar[se->loop->order[i]];
2992
2993 /* Pointer functions can have stride[0] different from unity.
2994 Use the stride returned by the function call and stored in
2995 the descriptor for the temporary. */
2996 if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
2997 && se->ss->info->expr
2998 && se->ss->info->expr->symtree
2999 && se->ss->info->expr->symtree->n.sym->result
3000 && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
3001 stride = gfc_conv_descriptor_stride_get (info->descriptor,
3002 gfc_rank_cst[dim]);
3003
3004 if (info->delta[dim] && !integer_zerop (info->delta[dim]))
3005 index = fold_build2_loc (input_location, PLUS_EXPR,
3006 gfc_array_index_type, index, info->delta[dim]);
3007 }
3008
3009 /* Multiply by the stride. */
3010 if (!integer_onep (stride))
3011 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3012 index, stride);
3013
3014 return index;
3015 }
3016
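/* Informal summary of the DIMEN_RANGE case above: the index of a scalarized
   section element is

       (loopvar * info->stride[dim] + info->delta[dim]) * stride

   so for a section such as a(lo:hi:2) consecutive loop iterations advance the
   element index by two array strides.  */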
3017
3018 /* Build a scalarized array reference using the vptr 'size'. */
3019
3020 static bool
3021 build_class_array_ref (gfc_se *se, tree base, tree index)
3022 {
3023 tree type;
3024 tree size;
3025 tree offset;
3026 tree decl;
3027 tree tmp;
3028 gfc_expr *expr = se->ss->info->expr;
3029 gfc_ref *ref;
3030 gfc_ref *class_ref;
3031 gfc_typespec *ts;
3032
3033 if (expr == NULL
3034 || (expr->ts.type != BT_CLASS
3035 && !gfc_is_alloc_class_array_function (expr)))
3036 return false;
3037
3038 if (expr->symtree && expr->symtree->n.sym->ts.type == BT_CLASS)
3039 ts = &expr->symtree->n.sym->ts;
3040 else
3041 ts = NULL;
3042 class_ref = NULL;
3043
3044 for (ref = expr->ref; ref; ref = ref->next)
3045 {
3046 if (ref->type == REF_COMPONENT
3047 && ref->u.c.component->ts.type == BT_CLASS
3048 && ref->next && ref->next->type == REF_COMPONENT
3049 && strcmp (ref->next->u.c.component->name, "_data") == 0
3050 && ref->next->next
3051 && ref->next->next->type == REF_ARRAY
3052 && ref->next->next->u.ar.type != AR_ELEMENT)
3053 {
3054 ts = &ref->u.c.component->ts;
3055 class_ref = ref;
3056 break;
3057 }
3058 }
3059
3060 if (ts == NULL)
3061 return false;
3062
3063 if (class_ref == NULL && expr->symtree->n.sym->attr.function
3064 && expr->symtree->n.sym == expr->symtree->n.sym->result)
3065 {
3066 gcc_assert (expr->symtree->n.sym->backend_decl == current_function_decl);
3067 decl = gfc_get_fake_result_decl (expr->symtree->n.sym, 0);
3068 }
3069 else if (gfc_is_alloc_class_array_function (expr))
3070 {
3071 size = NULL_TREE;
3072 decl = NULL_TREE;
3073 for (tmp = base; tmp; tmp = TREE_OPERAND (tmp, 0))
3074 {
3075 tree type;
3076 type = TREE_TYPE (tmp);
3077 while (type)
3078 {
3079 if (GFC_CLASS_TYPE_P (type))
3080 decl = tmp;
3081 if (type != TYPE_CANONICAL (type))
3082 type = TYPE_CANONICAL (type);
3083 else
3084 type = NULL_TREE;
3085 }
3086 if (TREE_CODE (tmp) == VAR_DECL)
3087 break;
3088 }
3089
3090 if (decl == NULL_TREE)
3091 return false;
3092 }
3093 else if (class_ref == NULL)
3094 {
3095 decl = expr->symtree->n.sym->backend_decl;
3096 /* For class arrays the tree containing the class is stored in
3097 GFC_DECL_SAVED_DESCRIPTOR of the sym's backend_decl.
3098 For all others it's sym's backend_decl directly. */
3099 if (DECL_LANG_SPECIFIC (decl) && GFC_DECL_SAVED_DESCRIPTOR (decl))
3100 decl = GFC_DECL_SAVED_DESCRIPTOR (decl);
3101 }
3102 else
3103 {
3104 /* Remove everything after the last class reference, convert the
3105 expression and then recover its tail end once more. */
3106 gfc_se tmpse;
3107 ref = class_ref->next;
3108 class_ref->next = NULL;
3109 gfc_init_se (&tmpse, NULL);
3110 gfc_conv_expr (&tmpse, expr);
3111 decl = tmpse.expr;
3112 class_ref->next = ref;
3113 }
3114
3115 if (POINTER_TYPE_P (TREE_TYPE (decl)))
3116 decl = build_fold_indirect_ref_loc (input_location, decl);
3117
3118 if (!GFC_CLASS_TYPE_P (TREE_TYPE (decl)))
3119 return false;
3120
3121 size = gfc_class_vtab_size_get (decl);
3122
3123 /* Build the address of the element. */
3124 type = TREE_TYPE (TREE_TYPE (base));
3125 size = fold_convert (TREE_TYPE (index), size);
3126 offset = fold_build2_loc (input_location, MULT_EXPR,
3127 gfc_array_index_type,
3128 index, size);
3129 tmp = gfc_build_addr_expr (pvoid_type_node, base);
3130 tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
3131 tmp = fold_convert (build_pointer_type (type), tmp);
3132
3133 /* Return the element in the se expression. */
3134 se->expr = build_fold_indirect_ref_loc (input_location, tmp);
3135 return true;
3136 }
3137
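/* Sketch of the address computation above (informal): because the dynamic
   type of a CLASS element may be larger than its declared type, the element
   address is formed roughly as

       (char *) &base + index * <size taken from the vtab>

   i.e. using the size stored via the vptr rather than the declared element
   size.  */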
3138
3139 /* Build a scalarized reference to an array. */
3140
3141 static void
3142 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
3143 {
3144 gfc_array_info *info;
3145 tree decl = NULL_TREE;
3146 tree index;
3147 tree tmp;
3148 gfc_ss *ss;
3149 gfc_expr *expr;
3150 int n;
3151
3152 ss = se->ss;
3153 expr = ss->info->expr;
3154 info = &ss->info->data.array;
3155 if (ar)
3156 n = se->loop->order[0];
3157 else
3158 n = 0;
3159
3160 index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
3161 /* Add the offset for this dimension to the stored offset for all other
3162 dimensions. */
3163 if (info->offset && !integer_zerop (info->offset))
3164 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3165 index, info->offset);
3166
3167 if (expr && (is_subref_array (expr)
3168 || (expr->ts.deferred && expr->expr_type == EXPR_VARIABLE)))
3169 decl = expr->symtree->n.sym->backend_decl;
3170
3171 tmp = build_fold_indirect_ref_loc (input_location, info->data);
3172
3173 /* Use the vptr 'size' field to access the element of a class
3174 array. */
3175 if (build_class_array_ref (se, tmp, index))
3176 return;
3177
3178 se->expr = gfc_build_array_ref (tmp, index, decl);
3179 }
3180
3181
3182 /* Translate access of temporary array. */
3183
3184 void
3185 gfc_conv_tmp_array_ref (gfc_se * se)
3186 {
3187 se->string_length = se->ss->info->string_length;
3188 gfc_conv_scalarized_array_ref (se, NULL);
3189 gfc_advance_se_ss_chain (se);
3190 }
3191
3192 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
3193
3194 static void
3195 add_to_offset (tree *cst_offset, tree *offset, tree t)
3196 {
3197 if (TREE_CODE (t) == INTEGER_CST)
3198 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
3199 else
3200 {
3201 if (!integer_zerop (*offset))
3202 *offset = fold_build2_loc (input_location, PLUS_EXPR,
3203 gfc_array_index_type, *offset, t);
3204 else
3205 *offset = t;
3206 }
3207 }
3208
3209
3210 static tree
3211 build_array_ref (tree desc, tree offset, tree decl, tree vptr)
3212 {
3213 tree tmp;
3214 tree type;
3215 tree cdecl;
3216 bool classarray = false;
3217
3218 /* For class arrays the class declaration is stored in the saved
3219 descriptor. */
3220 if (INDIRECT_REF_P (desc)
3221 && DECL_LANG_SPECIFIC (TREE_OPERAND (desc, 0))
3222 && GFC_DECL_SAVED_DESCRIPTOR (TREE_OPERAND (desc, 0)))
3223 cdecl = gfc_class_data_get (GFC_DECL_SAVED_DESCRIPTOR (
3224 TREE_OPERAND (desc, 0)));
3225 else
3226 cdecl = desc;
3227
3228 /* Class container types do not always have the GFC_CLASS_TYPE_P flag
3229 set, but the canonical type does. */
3230 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (cdecl))
3231 && TREE_CODE (cdecl) == COMPONENT_REF)
3232 {
3233 type = TREE_TYPE (TREE_OPERAND (cdecl, 0));
3234 if (TYPE_CANONICAL (type)
3235 && GFC_CLASS_TYPE_P (TYPE_CANONICAL (type)))
3236 {
3237 type = TREE_TYPE (desc);
3238 classarray = true;
3239 }
3240 }
3241 else
3242 type = NULL;
3243
3244 /* Class array references need special treatment because the assigned
3245 type size needs to be used to point to the element. */
3246 if (classarray)
3247 {
3248 type = gfc_get_element_type (type);
3249 tmp = TREE_OPERAND (cdecl, 0);
3250 tmp = gfc_get_class_array_ref (offset, tmp, NULL_TREE);
3251 tmp = fold_convert (build_pointer_type (type), tmp);
3252 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3253 return tmp;
3254 }
3255
3256 tmp = gfc_conv_array_data (desc);
3257 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3258 tmp = gfc_build_array_ref (tmp, offset, decl, vptr);
3259 return tmp;
3260 }
3261
3262
3263 /* Build an array reference. se->expr already holds the array descriptor.
3264 This should be either a variable, indirect variable reference or component
3265 reference. For arrays which do not have a descriptor, se->expr will be
3266 the data pointer.
3267 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
3268
3269 void
3270 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_expr *expr,
3271 locus * where)
3272 {
3273 int n;
3274 tree offset, cst_offset;
3275 tree tmp;
3276 tree stride;
3277 gfc_se indexse;
3278 gfc_se tmpse;
3279 gfc_symbol * sym = expr->symtree->n.sym;
3280 char *var_name = NULL;
3281
3282 if (ar->dimen == 0)
3283 {
3284 gcc_assert (ar->codimen);
3285
3286 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
3287 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
3288 else
3289 {
3290 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
3291 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
3292 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
3293
3294 /* Use the actual tree type and not the wrapped coarray. */
3295 if (!se->want_pointer)
3296 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
3297 se->expr);
3298 }
3299
3300 return;
3301 }
3302
3303 /* Handle scalarized references separately. */
3304 if (ar->type != AR_ELEMENT)
3305 {
3306 gfc_conv_scalarized_array_ref (se, ar);
3307 gfc_advance_se_ss_chain (se);
3308 return;
3309 }
3310
3311 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3312 {
3313 size_t len;
3314 gfc_ref *ref;
3315
3316 len = strlen (sym->name) + 1;
3317 for (ref = expr->ref; ref; ref = ref->next)
3318 {
3319 if (ref->type == REF_ARRAY && &ref->u.ar == ar)
3320 break;
3321 if (ref->type == REF_COMPONENT)
3322 len += 1 + strlen (ref->u.c.component->name);
3323 }
3324
3325 var_name = XALLOCAVEC (char, len);
3326 strcpy (var_name, sym->name);
3327
3328 for (ref = expr->ref; ref; ref = ref->next)
3329 {
3330 if (ref->type == REF_ARRAY && &ref->u.ar == ar)
3331 break;
3332 if (ref->type == REF_COMPONENT)
3333 {
3334 strcat (var_name, "%%");
3335 strcat (var_name, ref->u.c.component->name);
3336 }
3337 }
3338 }
3339
3340 cst_offset = offset = gfc_index_zero_node;
3341 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
3342
3343 /* Calculate the offsets from all the dimensions. Make sure to associate
3344 the final offset so that we form a chain of loop invariant summands. */
3345 for (n = ar->dimen - 1; n >= 0; n--)
3346 {
3347 /* Calculate the index for this dimension. */
3348 gfc_init_se (&indexse, se);
3349 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
3350 gfc_add_block_to_block (&se->pre, &indexse.pre);
3351
3352 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3353 {
3354 /* Check array bounds. */
3355 tree cond;
3356 char *msg;
3357
3358 /* Evaluate the indexse.expr only once. */
3359 indexse.expr = save_expr (indexse.expr);
3360
3361 /* Lower bound. */
3362 tmp = gfc_conv_array_lbound (se->expr, n);
3363 if (sym->attr.temporary)
3364 {
3365 gfc_init_se (&tmpse, se);
3366 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
3367 gfc_array_index_type);
3368 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3369 tmp = tmpse.expr;
3370 }
3371
3372 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
3373 indexse.expr, tmp);
3374 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
3375 "below lower bound of %%ld", n+1, var_name);
3376 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3377 fold_convert (long_integer_type_node,
3378 indexse.expr),
3379 fold_convert (long_integer_type_node, tmp));
3380 free (msg);
3381
3382 /* Upper bound, but not for the last dimension of assumed-size
3383 arrays. */
3384 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
3385 {
3386 tmp = gfc_conv_array_ubound (se->expr, n);
3387 if (sym->attr.temporary)
3388 {
3389 gfc_init_se (&tmpse, se);
3390 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
3391 gfc_array_index_type);
3392 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3393 tmp = tmpse.expr;
3394 }
3395
3396 cond = fold_build2_loc (input_location, GT_EXPR,
3397 boolean_type_node, indexse.expr, tmp);
3398 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
3399 "above upper bound of %%ld", n+1, var_name);
3400 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3401 fold_convert (long_integer_type_node,
3402 indexse.expr),
3403 fold_convert (long_integer_type_node, tmp));
3404 free (msg);
3405 }
3406 }
3407
3408 /* Multiply the index by the stride. */
3409 stride = gfc_conv_array_stride (se->expr, n);
3410 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3411 indexse.expr, stride);
3412
3413 /* And add it to the total. */
3414 add_to_offset (&cst_offset, &offset, tmp);
3415 }
3416
3417 if (!integer_zerop (cst_offset))
3418 offset = fold_build2_loc (input_location, PLUS_EXPR,
3419 gfc_array_index_type, offset, cst_offset);
3420
3421 se->expr = build_array_ref (se->expr, offset, sym->ts.type == BT_CLASS ?
3422 NULL_TREE : sym->backend_decl, se->class_vptr);
3423 }
3424
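/* Worked example (values assumed for illustration): for an element reference
   a(i,j) of an array declared a(10,10), the loop above accumulates

       offset = base_offset + i * stride[0] + j * stride[1]

   e.g. base_offset = -11, stride[0] = 1, stride[1] = 10 for unit lower
   bounds, with any constant summands folded into CST_OFFSET and added once at
   the end.  */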
3425
3426 /* Add the offset corresponding to the array's ARRAY_DIM dimension and the
3427 loop's LOOP_DIM dimension (if any) to the array's offset. */
3428
3429 static void
3430 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
3431 gfc_array_ref *ar, int array_dim, int loop_dim)
3432 {
3433 gfc_se se;
3434 gfc_array_info *info;
3435 tree stride, index;
3436
3437 info = &ss->info->data.array;
3438
3439 gfc_init_se (&se, NULL);
3440 se.loop = loop;
3441 se.expr = info->descriptor;
3442 stride = gfc_conv_array_stride (info->descriptor, array_dim);
3443 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
3444 gfc_add_block_to_block (pblock, &se.pre);
3445
3446 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
3447 gfc_array_index_type,
3448 info->offset, index);
3449 info->offset = gfc_evaluate_now (info->offset, pblock);
3450 }
3451
3452
3453 /* Generate the code to be executed immediately before entering a
3454 scalarization loop. */
3455
3456 static void
3457 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
3458 stmtblock_t * pblock)
3459 {
3460 tree stride;
3461 gfc_ss_info *ss_info;
3462 gfc_array_info *info;
3463 gfc_ss_type ss_type;
3464 gfc_ss *ss, *pss;
3465 gfc_loopinfo *ploop;
3466 gfc_array_ref *ar;
3467 int i;
3468
3469 /* This code will be executed before entering the scalarization loop
3470 for this dimension. */
3471 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3472 {
3473 ss_info = ss->info;
3474
3475 if ((ss_info->useflags & flag) == 0)
3476 continue;
3477
3478 ss_type = ss_info->type;
3479 if (ss_type != GFC_SS_SECTION
3480 && ss_type != GFC_SS_FUNCTION
3481 && ss_type != GFC_SS_CONSTRUCTOR
3482 && ss_type != GFC_SS_COMPONENT)
3483 continue;
3484
3485 info = &ss_info->data.array;
3486
3487 gcc_assert (dim < ss->dimen);
3488 gcc_assert (ss->dimen == loop->dimen);
3489
3490 if (info->ref)
3491 ar = &info->ref->u.ar;
3492 else
3493 ar = NULL;
3494
3495 if (dim == loop->dimen - 1 && loop->parent != NULL)
3496 {
3497 /* If we are in the outermost dimension of this loop, the previous
3498 dimension shall be in the parent loop. */
3499 gcc_assert (ss->parent != NULL);
3500
3501 pss = ss->parent;
3502 ploop = loop->parent;
3503
3504 /* ss and ss->parent are about the same array. */
3505 gcc_assert (ss_info == pss->info);
3506 }
3507 else
3508 {
3509 ploop = loop;
3510 pss = ss;
3511 }
3512
3513 if (dim == loop->dimen - 1)
3514 i = 0;
3515 else
3516 i = dim + 1;
3517
3518 /* For the time being, there is no loop reordering. */
3519 gcc_assert (i == ploop->order[i]);
3520 i = ploop->order[i];
3521
3522 if (dim == loop->dimen - 1 && loop->parent == NULL)
3523 {
3524 stride = gfc_conv_array_stride (info->descriptor,
3525 innermost_ss (ss)->dim[i]);
3526
3527 /* Calculate the stride of the innermost loop. Hopefully this will
3528 allow the backend optimizers to do their stuff more effectively.
3529 */
3530 info->stride0 = gfc_evaluate_now (stride, pblock);
3531
3532 /* For the outermost loop calculate the offset due to any
3533 elemental dimensions. It will have been initialized with the
3534 base offset of the array. */
3535 if (info->ref)
3536 {
3537 for (i = 0; i < ar->dimen; i++)
3538 {
3539 if (ar->dimen_type[i] != DIMEN_ELEMENT)
3540 continue;
3541
3542 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
3543 }
3544 }
3545 }
3546 else
3547 /* Add the offset for the previous loop dimension. */
3548 add_array_offset (pblock, ploop, ss, ar, pss->dim[i], i);
3549
3550 /* Remember this offset for the second loop. */
3551 if (dim == loop->temp_dim - 1 && loop->parent == NULL)
3552 info->saved_offset = info->offset;
3553 }
3554 }
3555
3556
3557 /* Start a scalarized expression. Creates a scope and declares loop
3558 variables. */
3559
3560 void
3561 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
3562 {
3563 int dim;
3564 int n;
3565 int flags;
3566
3567 gcc_assert (!loop->array_parameter);
3568
3569 for (dim = loop->dimen - 1; dim >= 0; dim--)
3570 {
3571 n = loop->order[dim];
3572
3573 gfc_start_block (&loop->code[n]);
3574
3575 /* Create the loop variable. */
3576 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
3577
3578 if (dim < loop->temp_dim)
3579 flags = 3;
3580 else
3581 flags = 1;
3582 /* Calculate values that will be constant within this loop. */
3583 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
3584 }
3585 gfc_start_block (pbody);
3586 }
3587
3588
3589 /* Generates the actual loop code for a scalarization loop. */
3590
3591 void
3592 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
3593 stmtblock_t * pbody)
3594 {
3595 stmtblock_t block;
3596 tree cond;
3597 tree tmp;
3598 tree loopbody;
3599 tree exit_label;
3600 tree stmt;
3601 tree init;
3602 tree incr;
3603
3604 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS
3605 | OMPWS_SCALARIZER_BODY))
3606 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
3607 && n == loop->dimen - 1)
3608 {
3609 /* We create an OMP_FOR construct for the outermost scalarized loop. */
3610 init = make_tree_vec (1);
3611 cond = make_tree_vec (1);
3612 incr = make_tree_vec (1);
3613
3614 /* Cycle statement is implemented with a goto. Exit statement must not
3615 be present for this loop. */
3616 exit_label = gfc_build_label_decl (NULL_TREE);
3617 TREE_USED (exit_label) = 1;
3618
3619 /* Label for cycle statements (if needed). */
3620 tmp = build1_v (LABEL_EXPR, exit_label);
3621 gfc_add_expr_to_block (pbody, tmp);
3622
3623 stmt = make_node (OMP_FOR);
3624
3625 TREE_TYPE (stmt) = void_type_node;
3626 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
3627
3628 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
3629 OMP_CLAUSE_SCHEDULE);
3630 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
3631 = OMP_CLAUSE_SCHEDULE_STATIC;
3632 if (ompws_flags & OMPWS_NOWAIT)
3633 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3634 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3635
3636 /* Initialize the loopvar. */
3637 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3638 loop->from[n]);
3639 OMP_FOR_INIT (stmt) = init;
3640 /* The exit condition. */
3641 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3642 boolean_type_node,
3643 loop->loopvar[n], loop->to[n]);
3644 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3645 OMP_FOR_COND (stmt) = cond;
3646 /* Increment the loopvar. */
3647 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3648 loop->loopvar[n], gfc_index_one_node);
3649 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3650 void_type_node, loop->loopvar[n], tmp);
3651 OMP_FOR_INCR (stmt) = incr;
3652
3653 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3654 gfc_add_expr_to_block (&loop->code[n], stmt);
3655 }
3656 else
3657 {
3658 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3659 && (loop->temp_ss == NULL);
3660
3661 loopbody = gfc_finish_block (pbody);
3662
3663 if (reverse_loop)
3664 std::swap (loop->from[n], loop->to[n]);
3665
3666 /* Initialize the loopvar. */
3667 if (loop->loopvar[n] != loop->from[n])
3668 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3669
3670 exit_label = gfc_build_label_decl (NULL_TREE);
3671
3672 /* Generate the loop body. */
3673 gfc_init_block (&block);
3674
3675 /* The exit condition. */
3676 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3677 boolean_type_node, loop->loopvar[n], loop->to[n]);
3678 tmp = build1_v (GOTO_EXPR, exit_label);
3679 TREE_USED (exit_label) = 1;
3680 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3681 gfc_add_expr_to_block (&block, tmp);
3682
3683 /* The main body. */
3684 gfc_add_expr_to_block (&block, loopbody);
3685
3686 /* Increment the loopvar. */
3687 tmp = fold_build2_loc (input_location,
3688 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3689 gfc_array_index_type, loop->loopvar[n],
3690 gfc_index_one_node);
3691
3692 gfc_add_modify (&block, loop->loopvar[n], tmp);
3693
3694 /* Build the loop. */
3695 tmp = gfc_finish_block (&block);
3696 tmp = build1_v (LOOP_EXPR, tmp);
3697 gfc_add_expr_to_block (&loop->code[n], tmp);
3698
3699 /* Add the exit label. */
3700 tmp = build1_v (LABEL_EXPR, exit_label);
3701 gfc_add_expr_to_block (&loop->code[n], tmp);
3702 }
3703
3704 }
3705
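/* Shape of the generated non-OpenMP loop above, shown as pseudo code for
   illustration:

       S = from;
       loop:
         if (S > to) goto exit;       (or S < to for a reversed loop)
         <loop body>;
         S = S + 1;                   (or S - 1 for a reversed loop)
         goto loop;
       exit:

   In the workshare case the same bounds are emitted as an OMP_FOR with static
   scheduling instead.  */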
3706
3707 /* Finishes and generates the loops for a scalarized expression. */
3708
3709 void
3710 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3711 {
3712 int dim;
3713 int n;
3714 gfc_ss *ss;
3715 stmtblock_t *pblock;
3716 tree tmp;
3717
3718 pblock = body;
3719 /* Generate the loops. */
3720 for (dim = 0; dim < loop->dimen; dim++)
3721 {
3722 n = loop->order[dim];
3723 gfc_trans_scalarized_loop_end (loop, n, pblock);
3724 loop->loopvar[n] = NULL_TREE;
3725 pblock = &loop->code[n];
3726 }
3727
3728 tmp = gfc_finish_block (pblock);
3729 gfc_add_expr_to_block (&loop->pre, tmp);
3730
3731 /* Clear all the used flags. */
3732 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3733 if (ss->parent == NULL)
3734 ss->info->useflags = 0;
3735 }
3736
3737
3738 /* Finish the main body of a scalarized expression, and start the secondary
3739 copying body. */
3740
3741 void
3742 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3743 {
3744 int dim;
3745 int n;
3746 stmtblock_t *pblock;
3747 gfc_ss *ss;
3748
3749 pblock = body;
3750 /* We finish as many loops as are used by the temporary. */
3751 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3752 {
3753 n = loop->order[dim];
3754 gfc_trans_scalarized_loop_end (loop, n, pblock);
3755 loop->loopvar[n] = NULL_TREE;
3756 pblock = &loop->code[n];
3757 }
3758
3759 /* We don't want to finish the outermost loop entirely. */
3760 n = loop->order[loop->temp_dim - 1];
3761 gfc_trans_scalarized_loop_end (loop, n, pblock);
3762
3763 /* Restore the initial offsets. */
3764 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3765 {
3766 gfc_ss_type ss_type;
3767 gfc_ss_info *ss_info;
3768
3769 ss_info = ss->info;
3770
3771 if ((ss_info->useflags & 2) == 0)
3772 continue;
3773
3774 ss_type = ss_info->type;
3775 if (ss_type != GFC_SS_SECTION
3776 && ss_type != GFC_SS_FUNCTION
3777 && ss_type != GFC_SS_CONSTRUCTOR
3778 && ss_type != GFC_SS_COMPONENT)
3779 continue;
3780
3781 ss_info->data.array.offset = ss_info->data.array.saved_offset;
3782 }
3783
3784 /* Restart all the inner loops we just finished. */
3785 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3786 {
3787 n = loop->order[dim];
3788
3789 gfc_start_block (&loop->code[n]);
3790
3791 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3792
3793 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3794 }
3795
3796 /* Start a block for the secondary copying code. */
3797 gfc_start_block (body);
3798 }
3799
3800
3801 /* Precalculate (either lower or upper) bound of an array section.
3802 BLOCK: Block in which the (pre)calculation code will go.
3803 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3804 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3805 DESC: Array descriptor from which the bound will be picked if unspecified
3806 (either lower or upper bound according to LBOUND). */
3807
3808 static void
3809 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3810 tree desc, int dim, bool lbound, bool deferred)
3811 {
3812 gfc_se se;
3813 gfc_expr * input_val = values[dim];
3814 tree *output = &bounds[dim];
3815
3816
3817 if (input_val)
3818 {
3819 /* Specified section bound. */
3820 gfc_init_se (&se, NULL);
3821 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3822 gfc_add_block_to_block (block, &se.pre);
3823 *output = se.expr;
3824 }
3825 else if (deferred && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
3826 {
3827 /* The gfc_conv_array_lbound () routine returns a constant zero for
3828 deferred-length arrays, which wreaks havoc in the scalarizer when
3829 copying to a (newly allocated) one-based array. Instead, read the
3830 actual bound from the descriptor, consistently for both bounds. */
3831 *output = lbound ? gfc_conv_descriptor_lbound_get (desc,
3832 gfc_rank_cst[dim]):
3833 gfc_conv_descriptor_ubound_get (desc,
3834 gfc_rank_cst[dim]);
3835 }
3836 else
3837 {
3838 /* No specific bound specified so use the bound of the array. */
3839 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3840 gfc_conv_array_ubound (desc, dim);
3841 }
3842 *output = gfc_evaluate_now (*output, block);
3843 }
3844
3845
3846 /* Calculate the start, end and stride of an array section dimension. */
3847
3848 static void
3849 gfc_conv_section_startstride (stmtblock_t * block, gfc_ss * ss, int dim)
3850 {
3851 gfc_expr *stride = NULL;
3852 tree desc;
3853 gfc_se se;
3854 gfc_array_info *info;
3855 gfc_array_ref *ar;
3856
3857 gcc_assert (ss->info->type == GFC_SS_SECTION);
3858
3859 info = &ss->info->data.array;
3860 ar = &info->ref->u.ar;
3861
3862 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3863 {
3864 /* We use a zero-based index to access the vector. */
3865 info->start[dim] = gfc_index_zero_node;
3866 info->end[dim] = NULL;
3867 info->stride[dim] = gfc_index_one_node;
3868 return;
3869 }
3870
3871 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3872 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3873 desc = info->descriptor;
3874 stride = ar->stride[dim];
3875
3876
3877 /* Calculate the start of the range. For vector subscripts this will
3878 be the range of the vector. */
3879 evaluate_bound (block, info->start, ar->start, desc, dim, true,
3880 ar->as->type == AS_DEFERRED);
3881
3882 /* Similarly calculate the end. Although this is not used in the
3883 scalarizer, it is needed when checking bounds and where the end
3884 is an expression with side-effects. */
3885 evaluate_bound (block, info->end, ar->end, desc, dim, false,
3886 ar->as->type == AS_DEFERRED);
3887
3888
3889 /* Calculate the stride. */
3890 if (stride == NULL)
3891 info->stride[dim] = gfc_index_one_node;
3892 else
3893 {
3894 gfc_init_se (&se, NULL);
3895 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3896 gfc_add_block_to_block (block, &se.pre);
3897 info->stride[dim] = gfc_evaluate_now (se.expr, block);
3898 }
3899 }
3900
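/* A short sketch of what the routine above records for one dimension of a
   section a(l:u:s); the names are placeholders for the gfc_array_info
   fields, and eval () stands for the scalar translation done via gfc_se:

     start  = l ? eval (l) : lbound (a, dim);   // via evaluate_bound
     end    = u ? eval (u) : ubound (a, dim);   // used for bounds checking only
     stride = s ? eval (s) : 1;

   Vector subscripts instead get start = 0, end = NULL and stride = 1, since
   the vector itself is indexed with a zero-based counter.  */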
3901
3902 /* Calculates the range start and stride for an SS chain. Also gets the
3903 descriptor and data pointer. The range of vector subscripts is the size
3904 of the vector. Array bounds are also checked. */
3905
3906 void
3907 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3908 {
3909 int n;
3910 tree tmp;
3911 gfc_ss *ss;
3912 tree desc;
3913
3914 gfc_loopinfo * const outer_loop = outermost_loop (loop);
3915
3916 loop->dimen = 0;
3917 /* Determine the rank of the loop. */
3918 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3919 {
3920 switch (ss->info->type)
3921 {
3922 case GFC_SS_SECTION:
3923 case GFC_SS_CONSTRUCTOR:
3924 case GFC_SS_FUNCTION:
3925 case GFC_SS_COMPONENT:
3926 loop->dimen = ss->dimen;
3927 goto done;
3928
3929 /* As usual, lbound and ubound are exceptions! */
3930 case GFC_SS_INTRINSIC:
3931 switch (ss->info->expr->value.function.isym->id)
3932 {
3933 case GFC_ISYM_LBOUND:
3934 case GFC_ISYM_UBOUND:
3935 case GFC_ISYM_LCOBOUND:
3936 case GFC_ISYM_UCOBOUND:
3937 case GFC_ISYM_THIS_IMAGE:
3938 loop->dimen = ss->dimen;
3939 goto done;
3940
3941 default:
3942 break;
3943 }
3944
3945 default:
3946 break;
3947 }
3948 }
3949
3950 /* We should have determined the rank of the expression by now. If
3951 not, that's bad news. */
3952 gcc_unreachable ();
3953
3954 done:
3955 /* Loop over all the SS in the chain. */
3956 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3957 {
3958 gfc_ss_info *ss_info;
3959 gfc_array_info *info;
3960 gfc_expr *expr;
3961
3962 ss_info = ss->info;
3963 expr = ss_info->expr;
3964 info = &ss_info->data.array;
3965
3966 if (expr && expr->shape && !info->shape)
3967 info->shape = expr->shape;
3968
3969 switch (ss_info->type)
3970 {
3971 case GFC_SS_SECTION:
3972 /* Get the descriptor for the array. If the array spans several nested
3973 loops, the descriptor was already obtained in the outermost loop. */
3974 if (ss->parent == NULL)
3975 gfc_conv_ss_descriptor (&outer_loop->pre, ss,
3976 !loop->array_parameter);
3977
3978 for (n = 0; n < ss->dimen; n++)
3979 gfc_conv_section_startstride (&outer_loop->pre, ss, ss->dim[n]);
3980 break;
3981
3982 case GFC_SS_INTRINSIC:
3983 switch (expr->value.function.isym->id)
3984 {
3985 /* Fall through to supply start and stride. */
3986 case GFC_ISYM_LBOUND:
3987 case GFC_ISYM_UBOUND:
3988 {
3989 gfc_expr *arg;
3990
3991 /* This is the variant without DIM=... */
3992 gcc_assert (expr->value.function.actual->next->expr == NULL);
3993
3994 arg = expr->value.function.actual->expr;
3995 if (arg->rank == -1)
3996 {
3997 gfc_se se;
3998 tree rank, tmp;
3999
4000 /* The rank (hence the return value's shape) is unknown, so we
4001 have to retrieve it at run time. */
4002 gfc_init_se (&se, NULL);
4003 se.descriptor_only = 1;
4004 gfc_conv_expr (&se, arg);
4005 /* This is a bare variable, so there is no preliminary
4006 or cleanup code. */
4007 gcc_assert (se.pre.head == NULL_TREE
4008 && se.post.head == NULL_TREE);
4009 rank = gfc_conv_descriptor_rank (se.expr);
4010 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4011 gfc_array_index_type,
4012 fold_convert (gfc_array_index_type,
4013 rank),
4014 gfc_index_one_node);
4015 info->end[0] = gfc_evaluate_now (tmp, &outer_loop->pre);
4016 info->start[0] = gfc_index_zero_node;
4017 info->stride[0] = gfc_index_one_node;
4018 continue;
4019 }
4020 /* Otherwise fall through to GFC_SS_FUNCTION. */
4021 }
4022 case GFC_ISYM_LCOBOUND:
4023 case GFC_ISYM_UCOBOUND:
4024 case GFC_ISYM_THIS_IMAGE:
4025 break;
4026
4027 default:
4028 continue;
4029 }
4030
4031 case GFC_SS_CONSTRUCTOR:
4032 case GFC_SS_FUNCTION:
4033 for (n = 0; n < ss->dimen; n++)
4034 {
4035 int dim = ss->dim[n];
4036
4037 info->start[dim] = gfc_index_zero_node;
4038 info->end[dim] = gfc_index_zero_node;
4039 info->stride[dim] = gfc_index_one_node;
4040 }
4041 break;
4042
4043 default:
4044 break;
4045 }
4046 }
4047
4048 /* The rest is just runtime bound checking. */
4049 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
4050 {
4051 stmtblock_t block;
4052 tree lbound, ubound;
4053 tree end;
4054 tree size[GFC_MAX_DIMENSIONS];
4055 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
4056 gfc_array_info *info;
4057 char *msg;
4058 int dim;
4059
4060 gfc_start_block (&block);
4061
4062 for (n = 0; n < loop->dimen; n++)
4063 size[n] = NULL_TREE;
4064
4065 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4066 {
4067 stmtblock_t inner;
4068 gfc_ss_info *ss_info;
4069 gfc_expr *expr;
4070 locus *expr_loc;
4071 const char *expr_name;
4072
4073 ss_info = ss->info;
4074 if (ss_info->type != GFC_SS_SECTION)
4075 continue;
4076
4077 /* Catch allocatable lhs in f2003. */
4078 if (flag_realloc_lhs && ss->is_alloc_lhs)
4079 continue;
4080
4081 expr = ss_info->expr;
4082 expr_loc = &expr->where;
4083 expr_name = expr->symtree->name;
4084
4085 gfc_start_block (&inner);
4086
4087 /* TODO: range checking for mapped dimensions. */
4088 info = &ss_info->data.array;
4089
4090 /* This code only checks ranges. Elemental and vector
4091 dimensions are checked later. */
4092 for (n = 0; n < loop->dimen; n++)
4093 {
4094 bool check_upper;
4095
4096 dim = ss->dim[n];
4097 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
4098 continue;
4099
4100 if (dim == info->ref->u.ar.dimen - 1
4101 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
4102 check_upper = false;
4103 else
4104 check_upper = true;
4105
4106 /* Zero stride is not allowed. */
4107 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
4108 info->stride[dim], gfc_index_zero_node);
4109 msg = xasprintf ("Zero stride is not allowed, for dimension %d "
4110 "of array '%s'", dim + 1, expr_name);
4111 gfc_trans_runtime_check (true, false, tmp, &inner,
4112 expr_loc, msg);
4113 free (msg);
4114
4115 desc = info->descriptor;
4116
4117 /* This is the run-time equivalent of resolve.c's
4118 check_dimension(). The logic is more readable there
4119 than it is here, with all the trees. */
4120 lbound = gfc_conv_array_lbound (desc, dim);
4121 end = info->end[dim];
4122 if (check_upper)
4123 ubound = gfc_conv_array_ubound (desc, dim);
4124 else
4125 ubound = NULL;
4126
4127 /* non_zerosized is true when the selected range is not
4128 empty. */
4129 stride_pos = fold_build2_loc (input_location, GT_EXPR,
4130 boolean_type_node, info->stride[dim],
4131 gfc_index_zero_node);
4132 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
4133 info->start[dim], end);
4134 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4135 boolean_type_node, stride_pos, tmp);
4136
4137 stride_neg = fold_build2_loc (input_location, LT_EXPR,
4138 boolean_type_node,
4139 info->stride[dim], gfc_index_zero_node);
4140 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4141 info->start[dim], end);
4142 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4143 boolean_type_node,
4144 stride_neg, tmp);
4145 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4146 boolean_type_node,
4147 stride_pos, stride_neg);
4148
4149 /* Check the start of the range against the lower and upper
4150 bounds of the array, if the range is not empty.
4151 If upper bound is present, include both bounds in the
4152 error message. */
4153 if (check_upper)
4154 {
4155 tmp = fold_build2_loc (input_location, LT_EXPR,
4156 boolean_type_node,
4157 info->start[dim], lbound);
4158 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4159 boolean_type_node,
4160 non_zerosized, tmp);
4161 tmp2 = fold_build2_loc (input_location, GT_EXPR,
4162 boolean_type_node,
4163 info->start[dim], ubound);
4164 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4165 boolean_type_node,
4166 non_zerosized, tmp2);
4167 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4168 "outside of expected range (%%ld:%%ld)",
4169 dim + 1, expr_name);
4170 gfc_trans_runtime_check (true, false, tmp, &inner,
4171 expr_loc, msg,
4172 fold_convert (long_integer_type_node, info->start[dim]),
4173 fold_convert (long_integer_type_node, lbound),
4174 fold_convert (long_integer_type_node, ubound));
4175 gfc_trans_runtime_check (true, false, tmp2, &inner,
4176 expr_loc, msg,
4177 fold_convert (long_integer_type_node, info->start[dim]),
4178 fold_convert (long_integer_type_node, lbound),
4179 fold_convert (long_integer_type_node, ubound));
4180 free (msg);
4181 }
4182 else
4183 {
4184 tmp = fold_build2_loc (input_location, LT_EXPR,
4185 boolean_type_node,
4186 info->start[dim], lbound);
4187 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4188 boolean_type_node, non_zerosized, tmp);
4189 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4190 "below lower bound of %%ld",
4191 dim + 1, expr_name);
4192 gfc_trans_runtime_check (true, false, tmp, &inner,
4193 expr_loc, msg,
4194 fold_convert (long_integer_type_node, info->start[dim]),
4195 fold_convert (long_integer_type_node, lbound));
4196 free (msg);
4197 }
4198
4199 /* Compute the last element of the range, which is not
4200 necessarily "end" (think 0:5:3, which doesn't contain 5)
4201 and check it against both lower and upper bounds. */
4202
4203 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4204 gfc_array_index_type, end,
4205 info->start[dim]);
4206 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
4207 gfc_array_index_type, tmp,
4208 info->stride[dim]);
4209 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4210 gfc_array_index_type, end, tmp);
4211 tmp2 = fold_build2_loc (input_location, LT_EXPR,
4212 boolean_type_node, tmp, lbound);
4213 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4214 boolean_type_node, non_zerosized, tmp2);
4215 if (check_upper)
4216 {
4217 tmp3 = fold_build2_loc (input_location, GT_EXPR,
4218 boolean_type_node, tmp, ubound);
4219 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4220 boolean_type_node, non_zerosized, tmp3);
4221 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4222 "outside of expected range (%%ld:%%ld)",
4223 dim + 1, expr_name);
4224 gfc_trans_runtime_check (true, false, tmp2, &inner,
4225 expr_loc, msg,
4226 fold_convert (long_integer_type_node, tmp),
4227 fold_convert (long_integer_type_node, ubound),
4228 fold_convert (long_integer_type_node, lbound));
4229 gfc_trans_runtime_check (true, false, tmp3, &inner,
4230 expr_loc, msg,
4231 fold_convert (long_integer_type_node, tmp),
4232 fold_convert (long_integer_type_node, ubound),
4233 fold_convert (long_integer_type_node, lbound));
4234 free (msg);
4235 }
4236 else
4237 {
4238 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4239 "below lower bound of %%ld",
4240 dim + 1, expr_name);
4241 gfc_trans_runtime_check (true, false, tmp2, &inner,
4242 expr_loc, msg,
4243 fold_convert (long_integer_type_node, tmp),
4244 fold_convert (long_integer_type_node, lbound));
4245 free (msg);
4246 }
4247
4248 /* Check that the section sizes match. */
4249 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4250 gfc_array_index_type, end,
4251 info->start[dim]);
4252 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4253 gfc_array_index_type, tmp,
4254 info->stride[dim]);
4255 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4256 gfc_array_index_type,
4257 gfc_index_one_node, tmp);
4258 tmp = fold_build2_loc (input_location, MAX_EXPR,
4259 gfc_array_index_type, tmp,
4260 build_int_cst (gfc_array_index_type, 0));
4261 /* We remember the size of the first section, and check all the
4262 others against this. */
4263 if (size[n])
4264 {
4265 tmp3 = fold_build2_loc (input_location, NE_EXPR,
4266 boolean_type_node, tmp, size[n]);
4267 msg = xasprintf ("Array bound mismatch for dimension %d "
4268 "of array '%s' (%%ld/%%ld)",
4269 dim + 1, expr_name);
4270
4271 gfc_trans_runtime_check (true, false, tmp3, &inner,
4272 expr_loc, msg,
4273 fold_convert (long_integer_type_node, tmp),
4274 fold_convert (long_integer_type_node, size[n]));
4275
4276 free (msg);
4277 }
4278 else
4279 size[n] = gfc_evaluate_now (tmp, &inner);
4280 }
4281
4282 tmp = gfc_finish_block (&inner);
4283
4284 /* For optional arguments, only check bounds if the argument is
4285 present. */
4286 if (expr->symtree->n.sym->attr.optional
4287 || expr->symtree->n.sym->attr.not_always_present)
4288 tmp = build3_v (COND_EXPR,
4289 gfc_conv_expr_present (expr->symtree->n.sym),
4290 tmp, build_empty_stmt (input_location));
4291
4292 gfc_add_expr_to_block (&block, tmp);
4293
4294 }
4295
4296 tmp = gfc_finish_block (&block);
4297 gfc_add_expr_to_block (&outer_loop->pre, tmp);
4298 }
4299
4300 for (loop = loop->nested; loop; loop = loop->next)
4301 gfc_conv_ss_startstride (loop);
4302 }
4303
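/* A condensed sketch of the run-time checks emitted above for one dimension
   of a section l:u:s with array bounds lb:ub (C-like pseudocode in the
   spirit of the generated trees, not the trees themselves; error () stands
   for gfc_trans_runtime_check):

     if (s == 0)
       error ("Zero stride is not allowed ...");
     nonempty = (s > 0 && l <= u) || (s < 0 && l >= u);
     if (nonempty && (l < lb || (check_upper && l > ub)))
       error ("Index ... outside of expected range ...");
     last = u - (u - l) % s;              // last element actually referenced
     if (nonempty && (last < lb || (check_upper && last > ub)))
       error ("Index ... outside of expected range ...");
     extent = max (floor_div (u - l, s) + 1, 0);
     if (extent != extent_of_first_section)
       error ("Array bound mismatch ...");
   */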
4304 /* Return true if both symbols could refer to the same data object. Does
4305 not take account of aliasing due to equivalence statements. */
4306
4307 static int
4308 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
4309 bool lsym_target, bool rsym_pointer, bool rsym_target)
4310 {
4311 /* Aliasing isn't possible if the symbols have different base types. */
4312 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
4313 return 0;
4314
4315 /* Pointers can point to other pointers and target objects. */
4316
4317 if ((lsym_pointer && (rsym_pointer || rsym_target))
4318 || (rsym_pointer && (lsym_pointer || lsym_target)))
4319 return 1;
4320
4321 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
4322 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
4323 checked above. */
4324 if (lsym_target && rsym_target
4325 && ((lsym->attr.dummy && !lsym->attr.contiguous
4326 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
4327 || (rsym->attr.dummy && !rsym->attr.contiguous
4328 && (!rsym->attr.dimension
4329 || rsym->as->type == AS_ASSUMED_SHAPE))))
4330 return 1;
4331
4332 return 0;
4333 }
4334
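/* The predicate above boils down to the following sketch (attribute names
   are placeholders; noncontiguous_dummy () stands for the dummy-argument
   conditions spelled out in the last test):

     could_alias (l, r) =
          same_base_type (l, r)
       && (   (l.pointer && (r.pointer || r.target))   // pointer vs. pointer/target
           || (r.pointer && (l.pointer || l.target))
           || (l.target && r.target                    // argument association
               && (noncontiguous_dummy (l) || noncontiguous_dummy (r))));
   */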
4335
4336 /* Return true if the two SS could be aliased, i.e. both point to the same data
4337 object. */
4338 /* TODO: resolve aliases based on frontend expressions. */
4339
4340 static int
4341 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
4342 {
4343 gfc_ref *lref;
4344 gfc_ref *rref;
4345 gfc_expr *lexpr, *rexpr;
4346 gfc_symbol *lsym;
4347 gfc_symbol *rsym;
4348 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
4349
4350 lexpr = lss->info->expr;
4351 rexpr = rss->info->expr;
4352
4353 lsym = lexpr->symtree->n.sym;
4354 rsym = rexpr->symtree->n.sym;
4355
4356 lsym_pointer = lsym->attr.pointer;
4357 lsym_target = lsym->attr.target;
4358 rsym_pointer = rsym->attr.pointer;
4359 rsym_target = rsym->attr.target;
4360
4361 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
4362 rsym_pointer, rsym_target))
4363 return 1;
4364
4365 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
4366 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
4367 return 0;
4368
4369 /* For derived types we must check all the component types. We can ignore
4370 array references as these will have the same base type as the previous
4371 component ref. */
4372 for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next)
4373 {
4374 if (lref->type != REF_COMPONENT)
4375 continue;
4376
4377 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
4378 lsym_target = lsym_target || lref->u.c.sym->attr.target;
4379
4380 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
4381 rsym_pointer, rsym_target))
4382 return 1;
4383
4384 if ((lsym_pointer && (rsym_pointer || rsym_target))
4385 || (rsym_pointer && (lsym_pointer || lsym_target)))
4386 {
4387 if (gfc_compare_types (&lref->u.c.component->ts,
4388 &rsym->ts))
4389 return 1;
4390 }
4391
4392 for (rref = rexpr->ref; rref != rss->info->data.array.ref;
4393 rref = rref->next)
4394 {
4395 if (rref->type != REF_COMPONENT)
4396 continue;
4397
4398 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4399 rsym_target = rsym_target || rref->u.c.sym->attr.target;
4400
4401 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
4402 lsym_pointer, lsym_target,
4403 rsym_pointer, rsym_target))
4404 return 1;
4405
4406 if ((lsym_pointer && (rsym_pointer || rsym_target))
4407 || (rsym_pointer && (lsym_pointer || lsym_target)))
4408 {
4409 if (gfc_compare_types (&lref->u.c.component->ts,
4410 &rref->u.c.sym->ts))
4411 return 1;
4412 if (gfc_compare_types (&lref->u.c.sym->ts,
4413 &rref->u.c.component->ts))
4414 return 1;
4415 if (gfc_compare_types (&lref->u.c.component->ts,
4416 &rref->u.c.component->ts))
4417 return 1;
4418 }
4419 }
4420 }
4421
4422 lsym_pointer = lsym->attr.pointer;
4423 lsym_target = lsym->attr.target;
4426
4427 for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next)
4428 {
4429 if (rref->type != REF_COMPONENT)
4430 break;
4431
4432 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4433 rsym_target = rsym_target || rref->u.c.sym->attr.target;
4434
4435 if (symbols_could_alias (rref->u.c.sym, lsym,
4436 lsym_pointer, lsym_target,
4437 rsym_pointer, rsym_target))
4438 return 1;
4439
4440 if ((lsym_pointer && (rsym_pointer || rsym_target))
4441 || (rsym_pointer && (lsym_pointer || lsym_target)))
4442 {
4443 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
4444 return 1;
4445 }
4446 }
4447
4448 return 0;
4449 }
4450
4451
4452 /* Resolve array data dependencies. Creates a temporary if required. */
4453 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
4454 dependency.c. */
4455
4456 void
4457 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
4458 gfc_ss * rss)
4459 {
4460 gfc_ss *ss;
4461 gfc_ref *lref;
4462 gfc_ref *rref;
4463 gfc_expr *dest_expr;
4464 gfc_expr *ss_expr;
4465 int nDepend = 0;
4466 int i, j;
4467
4468 loop->temp_ss = NULL;
4469 dest_expr = dest->info->expr;
4470
4471 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
4472 {
4473 ss_expr = ss->info->expr;
4474
4475 if (ss->info->array_outer_dependency)
4476 {
4477 nDepend = 1;
4478 break;
4479 }
4480
4481 if (ss->info->type != GFC_SS_SECTION)
4482 {
4483 if (flag_realloc_lhs
4484 && dest_expr != ss_expr
4485 && gfc_is_reallocatable_lhs (dest_expr)
4486 && ss_expr->rank)
4487 nDepend = gfc_check_dependency (dest_expr, ss_expr, true);
4488
4489 /* Check for cases like c(:)(1:2) = c(2)(2:3) */
4490 if (!nDepend && dest_expr->rank > 0
4491 && dest_expr->ts.type == BT_CHARACTER
4492 && ss_expr->expr_type == EXPR_VARIABLE)
4493
4494 nDepend = gfc_check_dependency (dest_expr, ss_expr, false);
4495
4496 continue;
4497 }
4498
4499 if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym)
4500 {
4501 if (gfc_could_be_alias (dest, ss)
4502 || gfc_are_equivalenced_arrays (dest_expr, ss_expr))
4503 {
4504 nDepend = 1;
4505 break;
4506 }
4507 }
4508 else
4509 {
4510 lref = dest_expr->ref;
4511 rref = ss_expr->ref;
4512
4513 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
4514
4515 if (nDepend == 1)
4516 break;
4517
4518 for (i = 0; i < dest->dimen; i++)
4519 for (j = 0; j < ss->dimen; j++)
4520 if (i != j
4521 && dest->dim[i] == ss->dim[j])
4522 {
4523 /* If we don't access array elements in the same order,
4524 there is a dependency. */
4525 nDepend = 1;
4526 goto temporary;
4527 }
4528 #if 0
4529 /* TODO : loop shifting. */
4530 if (nDepend == 1)
4531 {
4532 /* Mark the dimensions for LOOP SHIFTING */
4533 for (n = 0; n < loop->dimen; n++)
4534 {
4535 int dim = dest->data.info.dim[n];
4536
4537 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
4538 depends[n] = 2;
4539 else if (! gfc_is_same_range (&lref->u.ar,
4540 &rref->u.ar, dim, 0))
4541 depends[n] = 1;
4542 }
4543
4544 /* Put all the dimensions with dependencies in the
4545 innermost loops. */
4546 dim = 0;
4547 for (n = 0; n < loop->dimen; n++)
4548 {
4549 gcc_assert (loop->order[n] == n);
4550 if (depends[n])
4551 loop->order[dim++] = n;
4552 }
4553 for (n = 0; n < loop->dimen; n++)
4554 {
4555 if (! depends[n])
4556 loop->order[dim++] = n;
4557 }
4558
4559 gcc_assert (dim == loop->dimen);
4560 break;
4561 }
4562 #endif
4563 }
4564 }
4565
4566 temporary:
4567
4568 if (nDepend == 1)
4569 {
4570 tree base_type = gfc_typenode_for_spec (&dest_expr->ts);
4571 if (GFC_ARRAY_TYPE_P (base_type)
4572 || GFC_DESCRIPTOR_TYPE_P (base_type))
4573 base_type = gfc_get_element_type (base_type);
4574 loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length,
4575 loop->dimen);
4576 gfc_add_ss_to_loop (loop, loop->temp_ss);
4577 }
4578 else
4579 loop->temp_ss = NULL;
4580 }
4581
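/* In outline, the decision made above is (a rough sketch, not the exact
   order of the tests):

     need_temp = an rhs term has an outer array dependency (function call)
              || (different symbols && (could alias || equivalenced))
              || gfc_dep_resolver () reports an overlap
              || the same array is traversed with its dimensions in a
                 different order on each side.

   When a temporary is needed, a GFC_SS_TEMP entry with the element type of
   the destination is added to the loop.  */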
4582
4583 /* Browse through each array's information from the scalarizer and set the loop
4584 bounds according to the "best" one (per dimension), i.e. the one which
4585 provides the most information (constant bounds, shape, etc.). */
4586
4587 static void
4588 set_loop_bounds (gfc_loopinfo *loop)
4589 {
4590 int n, dim, spec_dim;
4591 gfc_array_info *info;
4592 gfc_array_info *specinfo;
4593 gfc_ss *ss;
4594 tree tmp;
4595 gfc_ss **loopspec;
4596 bool dynamic[GFC_MAX_DIMENSIONS];
4597 mpz_t *cshape;
4598 mpz_t i;
4599 bool nonoptional_arr;
4600
4601 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4602
4603 loopspec = loop->specloop;
4604
4605 mpz_init (i);
4606 for (n = 0; n < loop->dimen; n++)
4607 {
4608 loopspec[n] = NULL;
4609 dynamic[n] = false;
4610
4611 /* If there are both optional and nonoptional array arguments, scalarize
4612 over the nonoptional one; otherwise it does not matter, since all
4613 (optional) arrays then have to be present per F2008, 12.5.2.12p3(6). */
4614
4615 nonoptional_arr = false;
4616
4617 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4618 if (ss->info->type != GFC_SS_SCALAR && ss->info->type != GFC_SS_TEMP
4619 && ss->info->type != GFC_SS_REFERENCE && !ss->info->can_be_null_ref)
4620 {
4621 nonoptional_arr = true;
4622 break;
4623 }
4624
4625 /* We use one SS term, and use that to determine the bounds of the
4626 loop for this dimension. We try to pick the simplest term. */
4627 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4628 {
4629 gfc_ss_type ss_type;
4630
4631 ss_type = ss->info->type;
4632 if (ss_type == GFC_SS_SCALAR
4633 || ss_type == GFC_SS_TEMP
4634 || ss_type == GFC_SS_REFERENCE
4635 || (ss->info->can_be_null_ref && nonoptional_arr))
4636 continue;
4637
4638 info = &ss->info->data.array;
4639 dim = ss->dim[n];
4640
4641 if (loopspec[n] != NULL)
4642 {
4643 specinfo = &loopspec[n]->info->data.array;
4644 spec_dim = loopspec[n]->dim[n];
4645 }
4646 else
4647 {
4648 /* Silence uninitialized warnings. */
4649 specinfo = NULL;
4650 spec_dim = 0;
4651 }
4652
4653 if (info->shape)
4654 {
4655 gcc_assert (info->shape[dim]);
4656 /* The frontend has worked out the size for us. */
4657 if (!loopspec[n]
4658 || !specinfo->shape
4659 || !integer_zerop (specinfo->start[spec_dim]))
4660 /* Prefer zero-based descriptors if possible. */
4661 loopspec[n] = ss;
4662 continue;
4663 }
4664
4665 if (ss_type == GFC_SS_CONSTRUCTOR)
4666 {
4667 gfc_constructor_base base;
4668 /* An unknown size constructor will always be rank one.
4669 Higher rank constructors will either have known shape,
4670 or still be wrapped in a call to reshape. */
4671 gcc_assert (loop->dimen == 1);
4672
4673 /* Always prefer to use the constructor bounds if the size
4674 can be determined at compile time. Prefer not to otherwise,
4675 since the general case involves realloc, and it's better to
4676 avoid that overhead if possible. */
4677 base = ss->info->expr->value.constructor;
4678 dynamic[n] = gfc_get_array_constructor_size (&i, base);
4679 if (!dynamic[n] || !loopspec[n])
4680 loopspec[n] = ss;
4681 continue;
4682 }
4683
4684 /* Avoid using an allocatable lhs in an assignment, since
4685 there might be a reallocation coming. */
4686 if (loopspec[n] && ss->is_alloc_lhs)
4687 continue;
4688
4689 if (!loopspec[n])
4690 loopspec[n] = ss;
4691 /* Criteria for choosing a loop specifier (most important first):
4692 doesn't need realloc
4693 stride of one
4694 known stride
4695 known lower bound
4696 known upper bound
4697 */
4698 else if (loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
4699 loopspec[n] = ss;
4700 else if (integer_onep (info->stride[dim])
4701 && !integer_onep (specinfo->stride[spec_dim]))
4702 loopspec[n] = ss;
4703 else if (INTEGER_CST_P (info->stride[dim])
4704 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
4705 loopspec[n] = ss;
4706 else if (INTEGER_CST_P (info->start[dim])
4707 && !INTEGER_CST_P (specinfo->start[spec_dim])
4708 && integer_onep (info->stride[dim])
4709 == integer_onep (specinfo->stride[spec_dim])
4710 && INTEGER_CST_P (info->stride[dim])
4711 == INTEGER_CST_P (specinfo->stride[spec_dim]))
4712 loopspec[n] = ss;
4713 /* We don't work out the upper bound.
4714 else if (INTEGER_CST_P (info->finish[n])
4715 && ! INTEGER_CST_P (specinfo->finish[n]))
4716 loopspec[n] = ss; */
4717 }
4718
4719 /* We should have found the scalarization loop specifier. If not,
4720 that's bad news. */
4721 gcc_assert (loopspec[n]);
4722
4723 info = &loopspec[n]->info->data.array;
4724 dim = loopspec[n]->dim[n];
4725
4726 /* Set the extents of this range. */
4727 cshape = info->shape;
4728 if (cshape && INTEGER_CST_P (info->start[dim])
4729 && INTEGER_CST_P (info->stride[dim]))
4730 {
4731 loop->from[n] = info->start[dim];
4732 mpz_set (i, cshape[get_array_ref_dim_for_loop_dim (loopspec[n], n)]);
4733 mpz_sub_ui (i, i, 1);
4734 /* To = from + (size - 1) * stride. */
4735 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
4736 if (!integer_onep (info->stride[dim]))
4737 tmp = fold_build2_loc (input_location, MULT_EXPR,
4738 gfc_array_index_type, tmp,
4739 info->stride[dim]);
4740 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
4741 gfc_array_index_type,
4742 loop->from[n], tmp);
4743 }
4744 else
4745 {
4746 loop->from[n] = info->start[dim];
4747 switch (loopspec[n]->info->type)
4748 {
4749 case GFC_SS_CONSTRUCTOR:
4750 /* The upper bound is calculated when we expand the
4751 constructor. */
4752 gcc_assert (loop->to[n] == NULL_TREE);
4753 break;
4754
4755 case GFC_SS_SECTION:
4756 /* Use the end expression if it exists and is not constant,
4757 so that it is only evaluated once. */
4758 loop->to[n] = info->end[dim];
4759 break;
4760
4761 case GFC_SS_FUNCTION:
4762 /* The loop bound will be set when we generate the call. */
4763 gcc_assert (loop->to[n] == NULL_TREE);
4764 break;
4765
4766 case GFC_SS_INTRINSIC:
4767 {
4768 gfc_expr *expr = loopspec[n]->info->expr;
4769
4770 /* The {l,u}bound of an assumed rank. */
4771 gcc_assert ((expr->value.function.isym->id == GFC_ISYM_LBOUND
4772 || expr->value.function.isym->id == GFC_ISYM_UBOUND)
4773 && expr->value.function.actual->next->expr == NULL
4774 && expr->value.function.actual->expr->rank == -1);
4775
4776 loop->to[n] = info->end[dim];
4777 break;
4778 }
4779
4780 default:
4781 gcc_unreachable ();
4782 }
4783 }
4784
4785 /* Transform everything so we have a simple incrementing variable. */
4786 if (integer_onep (info->stride[dim]))
4787 info->delta[dim] = gfc_index_zero_node;
4788 else
4789 {
4790 /* Set the delta for this section. */
4791 info->delta[dim] = gfc_evaluate_now (loop->from[n], &outer_loop->pre);
4792 /* Number of iterations is (end - start + step) / step.
4793 With start = 0, this simplifies to
4794 last = end / step;
4795 for (i = 0; i <= last; i++) {...}; */
4796 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4797 gfc_array_index_type, loop->to[n],
4798 loop->from[n]);
4799 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4800 gfc_array_index_type, tmp, info->stride[dim]);
4801 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4802 tmp, build_int_cst (gfc_array_index_type, -1));
4803 loop->to[n] = gfc_evaluate_now (tmp, &outer_loop->pre);
4804 /* Make the loop variable start at 0. */
4805 loop->from[n] = gfc_index_zero_node;
4806 }
4807 }
4808 mpz_clear (i);
4809
4810 for (loop = loop->nested; loop; loop = loop->next)
4811 set_loop_bounds (loop);
4812 }
4813
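/* A minimal sketch of the bound computation above for the chosen loop
   specifier (placeholder names; floor_div stands for FLOOR_DIV_EXPR):

     from = start;
     to   = compile-time shape known ? from + (extent - 1) * stride
                                     : end;   // or set later (constructor, function)
     if (stride != 1)                 // normalize to a unit-stride loop
       {
         delta = from;                // remembered for index reconstruction
         to    = max (floor_div (to - from, stride), -1);
         from  = 0;
       }
   */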
4814
4815 /* Initialize the scalarization loop. Creates the loop variables. Determines
4816 the range of the loop variables. Creates a temporary if required.
4817 Also generates code for scalar expressions which have been
4818 moved outside the loop. */
4819
4820 void
4821 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
4822 {
4823 gfc_ss *tmp_ss;
4824 tree tmp;
4825
4826 set_loop_bounds (loop);
4827
4828 /* Add all the scalar code that can be taken out of the loops.
4829 This may include calculating the loop bounds, so do it before
4830 allocating the temporary. */
4831 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4832
4833 tmp_ss = loop->temp_ss;
4834 /* If we want a temporary then create it. */
4835 if (tmp_ss != NULL)
4836 {
4837 gfc_ss_info *tmp_ss_info;
4838
4839 tmp_ss_info = tmp_ss->info;
4840 gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
4841 gcc_assert (loop->parent == NULL);
4842
4843 /* Make absolutely sure that this is a complete type. */
4844 if (tmp_ss_info->string_length)
4845 tmp_ss_info->data.temp.type
4846 = gfc_get_character_type_len_for_eltype
4847 (TREE_TYPE (tmp_ss_info->data.temp.type),
4848 tmp_ss_info->string_length);
4849
4850 tmp = tmp_ss_info->data.temp.type;
4851 memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
4852 tmp_ss_info->type = GFC_SS_SECTION;
4853
4854 gcc_assert (tmp_ss->dimen != 0);
4855
4856 gfc_trans_create_temp_array (&loop->pre, &loop->post, tmp_ss, tmp,
4857 NULL_TREE, false, true, false, where);
4858 }
4859
4860 /* For array parameters we don't have loop variables, so don't calculate the
4861 translations. */
4862 if (!loop->array_parameter)
4863 gfc_set_delta (loop);
4864 }
4865
4866
4867 /* Calculates how to transform from loop variables to array indices for each
4868 array: once loop bounds are chosen, sets the difference (DELTA field) between
4869 loop bounds and array reference bounds, for each array info. */
4870
4871 void
4872 gfc_set_delta (gfc_loopinfo *loop)
4873 {
4874 gfc_ss *ss, **loopspec;
4875 gfc_array_info *info;
4876 tree tmp;
4877 int n, dim;
4878
4879 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4880
4881 loopspec = loop->specloop;
4882
4883 /* Calculate the translation from loop variables to array indices. */
4884 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4885 {
4886 gfc_ss_type ss_type;
4887
4888 ss_type = ss->info->type;
4889 if (ss_type != GFC_SS_SECTION
4890 && ss_type != GFC_SS_COMPONENT
4891 && ss_type != GFC_SS_CONSTRUCTOR)
4892 continue;
4893
4894 info = &ss->info->data.array;
4895
4896 for (n = 0; n < ss->dimen; n++)
4897 {
4898 /* If we are specifying the range, the delta is already set. */
4899 if (loopspec[n] != ss)
4900 {
4901 dim = ss->dim[n];
4902
4903 /* Calculate the offset relative to the loop variable.
4904 First multiply by the stride. */
4905 tmp = loop->from[n];
4906 if (!integer_onep (info->stride[dim]))
4907 tmp = fold_build2_loc (input_location, MULT_EXPR,
4908 gfc_array_index_type,
4909 tmp, info->stride[dim]);
4910
4911 /* Then subtract this from our starting value. */
4912 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4913 gfc_array_index_type,
4914 info->start[dim], tmp);
4915
4916 info->delta[dim] = gfc_evaluate_now (tmp, &outer_loop->pre);
4917 }
4918 }
4919 }
4920
4921 for (loop = loop->nested; loop; loop = loop->next)
4922 gfc_set_delta (loop);
4923 }
4924
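/* A small sketch of the delta arithmetic above, assuming the index
   reconstruction scheme used by the scalarizer (placeholder names):

     delta = start - from * stride;
     // so that  loopvar * stride + delta == start  when  loopvar == from,
     // i.e. the loop variable of the chosen specifier maps back onto this
     // array's own index range.
   */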
4925
4926 /* Calculate the size of a given array dimension from the bounds. This
4927 is simply (ubound - lbound + 1) if this expression is positive
4928 or 0 if it is negative (pick either one if it is zero). Optionally
4929 (if or_expr is present) OR the (extent <= 0) condition to it. */
4930
4931 tree
4932 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4933 {
4934 tree res;
4935 tree cond;
4936
4937 /* Calculate (ubound - lbound + 1). */
4938 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4939 ubound, lbound);
4940 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4941 gfc_index_one_node);
4942
4943 /* Check whether the size for this dimension is negative. */
4944 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4945 gfc_index_zero_node);
4946 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4947 gfc_index_zero_node, res);
4948
4949 /* Build OR expression. */
4950 if (or_expr)
4951 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4952 boolean_type_node, *or_expr, cond);
4953
4954 return res;
4955 }
4956
4957
4958 /* For an array descriptor, get the total number of elements. This is just
4959 the product of the extents of the dimensions from FROM_DIM up to (but excluding) TO_DIM. */
4960
4961 static tree
4962 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4963 {
4964 tree res;
4965 int dim;
4966
4967 res = gfc_index_one_node;
4968
4969 for (dim = from_dim; dim < to_dim; ++dim)
4970 {
4971 tree lbound;
4972 tree ubound;
4973 tree extent;
4974
4975 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4976 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4977
4978 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4979 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4980 res, extent);
4981 }
4982
4983 return res;
4984 }
4985
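/* The two helpers above compute, in plain C terms (lb/ub are placeholder
   arrays of bounds, standing in for the descriptor accessors):

     long extent (long lb, long ub)                   // gfc_conv_array_extent_dim
     {
       long res = ub - lb + 1;
       return res > 0 ? res : 0;
     }

     long nelems (const long *lb, const long *ub, int from_dim, int to_dim)
     {                                                // gfc_conv_descriptor_size_1
       long res = 1;
       for (int dim = from_dim; dim < to_dim; dim++)  // to_dim is exclusive
         res *= extent (lb[dim], ub[dim]);
       return res;
     }
   */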
4986
4987 /* Full size of an array. */
4988
4989 tree
4990 gfc_conv_descriptor_size (tree desc, int rank)
4991 {
4992 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4993 }
4994
4995
4996 /* Size of a coarray for all dimensions but the last. */
4997
4998 tree
4999 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
5000 {
5001 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
5002 }
5003
5004
5005 /* Fills in an array descriptor and returns the size of the array in bytes.
5006 The size will be a simple value, i.e. a variable or a constant. Also
5007 calculates the offset of the base. The pointer argument OVERFLOW,
5008 which should be of integer type, is increased in value if overflow
5009 occurs during the size calculation.
5010 {
5011 stride = 1;
5012 offset = 0;
5013 for (n = 0; n < rank; n++)
5014 {
5015 a.lbound[n] = specified_lower_bound;
5016 offset = offset + a.lbound[n] * stride;
5017 size = 1 - lbound;
5018 a.ubound[n] = specified_upper_bound;
5019 a.stride[n] = stride;
5020 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
5021 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
5022 stride = stride * size;
5023 }
5024 for (n = rank; n < rank+corank; n++)
5025 (Set lcobound/ucobound as above.)
5026 element_size = sizeof (array element);
5027 if (!rank)
5028 return element_size
5029 stride = (size_t) stride;
5030 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
5031 stride = stride * element_size;
5032 return (stride);
5033 } */
5034 /*GCC ARRAYS*/
5035
5036 static tree
5037 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
5038 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
5039 stmtblock_t * descriptor_block, tree * overflow,
5040 tree expr3_elem_size, tree *nelems, gfc_expr *expr3,
5041 tree expr3_desc, bool e3_is_array_constr)
5042 {
5043 tree type;
5044 tree tmp;
5045 tree size;
5046 tree offset;
5047 tree stride;
5048 tree element_size;
5049 tree or_expr;
5050 tree thencase;
5051 tree elsecase;
5052 tree cond;
5053 tree var;
5054 stmtblock_t thenblock;
5055 stmtblock_t elseblock;
5056 gfc_expr *ubound;
5057 gfc_se se;
5058 int n;
5059
5060 type = TREE_TYPE (descriptor);
5061
5062 stride = gfc_index_one_node;
5063 offset = gfc_index_zero_node;
5064
5065 /* Set the dtype. */
5066 tmp = gfc_conv_descriptor_dtype (descriptor);
5067 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (type));
5068
5069 or_expr = boolean_false_node;
5070
5071 for (n = 0; n < rank; n++)
5072 {
5073 tree conv_lbound;
5074 tree conv_ubound;
5075
5076 /* We have 3 possibilities for determining the size of the array:
5077 lower == NULL => lbound = 1, ubound = upper[n]
5078 upper[n] == NULL => lbound = 1, ubound = lower[n]
5079 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
5080 ubound = upper[n];
5081
5082 /* Set lower bound. */
5083 gfc_init_se (&se, NULL);
5084 if (expr3_desc != NULL_TREE)
5085 {
5086 if (e3_is_array_constr)
5087 /* The lbound of a constant array [] starts at zero, but when
5088 allocating it, the standard expects the array to start at
5089 one. */
5090 se.expr = gfc_index_one_node;
5091 else
5092 se.expr = gfc_conv_descriptor_lbound_get (expr3_desc,
5093 gfc_rank_cst[n]);
5094 }
5095 else if (lower == NULL)
5096 se.expr = gfc_index_one_node;
5097 else
5098 {
5099 gcc_assert (lower[n]);
5100 if (ubound)
5101 {
5102 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
5103 gfc_add_block_to_block (pblock, &se.pre);
5104 }
5105 else
5106 {
5107 se.expr = gfc_index_one_node;
5108 ubound = lower[n];
5109 }
5110 }
5111 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
5112 gfc_rank_cst[n], se.expr);
5113 conv_lbound = se.expr;
5114
5115 /* Work out the offset for this component. */
5116 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5117 se.expr, stride);
5118 offset = fold_build2_loc (input_location, MINUS_EXPR,
5119 gfc_array_index_type, offset, tmp);
5120
5121 /* Set upper bound. */
5122 gfc_init_se (&se, NULL);
5123 if (expr3_desc != NULL_TREE)
5124 {
5125 if (e3_is_array_constr)
5126 {
5127 /* The lbound of a constant array [] starts at zero, but when
5128 allocating it, the standard expects the array to start at
5129 one. Therefore fix the upper bound to be
5130 (desc.ubound - desc.lbound)+ 1. */
5131 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5132 gfc_array_index_type,
5133 gfc_conv_descriptor_ubound_get (
5134 expr3_desc, gfc_rank_cst[n]),
5135 gfc_conv_descriptor_lbound_get (
5136 expr3_desc, gfc_rank_cst[n]));
5137 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5138 gfc_array_index_type, tmp,
5139 gfc_index_one_node);
5140 se.expr = gfc_evaluate_now (tmp, pblock);
5141 }
5142 else
5143 se.expr = gfc_conv_descriptor_ubound_get (expr3_desc,
5144 gfc_rank_cst[n]);
5145 }
5146 else
5147 {
5148 gcc_assert (ubound);
5149 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
5150 gfc_add_block_to_block (pblock, &se.pre);
5151 if (ubound->expr_type == EXPR_FUNCTION)
5152 se.expr = gfc_evaluate_now (se.expr, pblock);
5153 }
5154 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
5155 gfc_rank_cst[n], se.expr);
5156 conv_ubound = se.expr;
5157
5158 /* Store the stride. */
5159 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
5160 gfc_rank_cst[n], stride);
5161
5162 /* Calculate size and check whether extent is negative. */
5163 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
5164 size = gfc_evaluate_now (size, pblock);
5165
5166 /* Check whether multiplying the stride by the number of
5167 elements in this dimension would overflow. We must also check
5168 whether the current dimension has zero size in order to avoid
5169 division by zero.
5170 */
5171 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
5172 gfc_array_index_type,
5173 fold_convert (gfc_array_index_type,
5174 TYPE_MAX_VALUE (gfc_array_index_type)),
5175 size);
5176 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
5177 boolean_type_node, tmp, stride),
5178 PRED_FORTRAN_OVERFLOW);
5179 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5180 integer_one_node, integer_zero_node);
5181 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5182 boolean_type_node, size,
5183 gfc_index_zero_node),
5184 PRED_FORTRAN_SIZE_ZERO);
5185 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5186 integer_zero_node, tmp);
5187 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5188 *overflow, tmp);
5189 *overflow = gfc_evaluate_now (tmp, pblock);
5190
5191 /* Multiply the stride by the number of elements in this dimension. */
5192 stride = fold_build2_loc (input_location, MULT_EXPR,
5193 gfc_array_index_type, stride, size);
5194 stride = gfc_evaluate_now (stride, pblock);
5195 }
5196
5197 for (n = rank; n < rank + corank; n++)
5198 {
5199 ubound = upper[n];
5200
5201 /* Set lower bound. */
5202 gfc_init_se (&se, NULL);
5203 if (lower == NULL || lower[n] == NULL)
5204 {
5205 gcc_assert (n == rank + corank - 1);
5206 se.expr = gfc_index_one_node;
5207 }
5208 else
5209 {
5210 if (ubound || n == rank + corank - 1)
5211 {
5212 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
5213 gfc_add_block_to_block (pblock, &se.pre);
5214 }
5215 else
5216 {
5217 se.expr = gfc_index_one_node;
5218 ubound = lower[n];
5219 }
5220 }
5221 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
5222 gfc_rank_cst[n], se.expr);
5223
5224 if (n < rank + corank - 1)
5225 {
5226 gfc_init_se (&se, NULL);
5227 gcc_assert (ubound);
5228 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
5229 gfc_add_block_to_block (pblock, &se.pre);
5230 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
5231 gfc_rank_cst[n], se.expr);
5232 }
5233 }
5234
5235 /* The stride is the number of elements in the array, so multiply by the
5236 size of an element to get the total size. Obviously, if there is a
5237 SOURCE expression (expr3) we must use its element size. */
5238 if (expr3_elem_size != NULL_TREE)
5239 tmp = expr3_elem_size;
5240 else if (expr3 != NULL)
5241 {
5242 if (expr3->ts.type == BT_CLASS)
5243 {
5244 gfc_se se_sz;
5245 gfc_expr *sz = gfc_copy_expr (expr3);
5246 gfc_add_vptr_component (sz);
5247 gfc_add_size_component (sz);
5248 gfc_init_se (&se_sz, NULL);
5249 gfc_conv_expr (&se_sz, sz);
5250 gfc_free_expr (sz);
5251 tmp = se_sz.expr;
5252 }
5253 else
5254 {
5255 tmp = gfc_typenode_for_spec (&expr3->ts);
5256 tmp = TYPE_SIZE_UNIT (tmp);
5257 }
5258 }
5259 else
5260 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5261
5262 /* Convert to size_t. */
5263 element_size = fold_convert (size_type_node, tmp);
5264
5265 if (rank == 0)
5266 return element_size;
5267
5268 *nelems = gfc_evaluate_now (stride, pblock);
5269 stride = fold_convert (size_type_node, stride);
5270
5271 /* First check for overflow. Since an array of type character can
5272 have zero element_size, we must check for that before
5273 dividing. */
5274 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
5275 size_type_node,
5276 TYPE_MAX_VALUE (size_type_node), element_size);
5277 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
5278 boolean_type_node, tmp, stride),
5279 PRED_FORTRAN_OVERFLOW);
5280 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5281 integer_one_node, integer_zero_node);
5282 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5283 boolean_type_node, element_size,
5284 build_int_cst (size_type_node, 0)),
5285 PRED_FORTRAN_SIZE_ZERO);
5286 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5287 integer_zero_node, tmp);
5288 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5289 *overflow, tmp);
5290 *overflow = gfc_evaluate_now (tmp, pblock);
5291
5292 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
5293 stride, element_size);
5294
5295 if (poffset != NULL)
5296 {
5297 offset = gfc_evaluate_now (offset, pblock);
5298 *poffset = offset;
5299 }
5300
5301 if (integer_zerop (or_expr))
5302 return size;
5303 if (integer_onep (or_expr))
5304 return build_int_cst (size_type_node, 0);
5305
5306 var = gfc_create_var (TREE_TYPE (size), "size");
5307 gfc_start_block (&thenblock);
5308 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
5309 thencase = gfc_finish_block (&thenblock);
5310
5311 gfc_start_block (&elseblock);
5312 gfc_add_modify (&elseblock, var, size);
5313 elsecase = gfc_finish_block (&elseblock);
5314
5315 tmp = gfc_evaluate_now (or_expr, pblock);
5316 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
5317 gfc_add_expr_to_block (pblock, tmp);
5318
5319 return var;
5320 }
5321
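/* A sketch of the overflow detection used above, with size_t standing in
   for the index type and SIZE_MAX (from <stdint.h>) for its maximum value:

     int mul_would_overflow (size_t stride, size_t count)
     {
       // skip the division when count is zero; otherwise test
       // MAX / count < stride instead of multiplying first
       return count != 0 && SIZE_MAX / count < stride;
     }

     // per dimension:  overflow += mul_would_overflow (stride, extent);
     //                 stride   *= extent;
     // and at the end: overflow += mul_would_overflow (stride, element_size);
   */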
5322
5323 /* Retrieve the last ref from the chain. This routine is specific to
5324 gfc_array_allocate ()'s needs. */
5325
5326 bool
5327 retrieve_last_ref (gfc_ref **ref_in, gfc_ref **prev_ref_in)
5328 {
5329 gfc_ref *ref, *prev_ref;
5330
5331 ref = *ref_in;
5332 /* Prevent warnings for uninitialized variables. */
5333 prev_ref = *prev_ref_in;
5334 while (ref && ref->next != NULL)
5335 {
5336 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
5337 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
5338 prev_ref = ref;
5339 ref = ref->next;
5340 }
5341
5342 if (ref == NULL || ref->type != REF_ARRAY)
5343 return false;
5344
5345 *ref_in = ref;
5346 *prev_ref_in = prev_ref;
5347 return true;
5348 }
5349
5350 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
5351 the work for an ALLOCATE statement. */
5352 /*GCC ARRAYS*/
5353
5354 bool
5355 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
5356 tree errlen, tree label_finish, tree expr3_elem_size,
5357 tree *nelems, gfc_expr *expr3, tree e3_arr_desc,
5358 bool e3_is_array_constr)
5359 {
5360 tree tmp;
5361 tree pointer;
5362 tree offset = NULL_TREE;
5363 tree token = NULL_TREE;
5364 tree size;
5365 tree msg;
5366 tree error = NULL_TREE;
5367 tree overflow; /* Boolean storing whether size calculation overflows. */
5368 tree var_overflow = NULL_TREE;
5369 tree cond;
5370 tree set_descriptor;
5371 stmtblock_t set_descriptor_block;
5372 stmtblock_t elseblock;
5373 gfc_expr **lower;
5374 gfc_expr **upper;
5375 gfc_ref *ref, *prev_ref = NULL;
5376 bool allocatable, coarray, dimension, alloc_w_e3_arr_spec = false;
5377
5378 ref = expr->ref;
5379
5380 /* Find the last reference in the chain. */
5381 if (!retrieve_last_ref (&ref, &prev_ref))
5382 return false;
5383
5384 if (ref->u.ar.type == AR_FULL && expr3 != NULL)
5385 {
5386 /* F08:C633: Array shape from expr3. */
5387 ref = expr3->ref;
5388
5389 /* Find the last reference in the chain. */
5390 if (!retrieve_last_ref (&ref, &prev_ref))
5391 return false;
5392 alloc_w_e3_arr_spec = true;
5393 }
5394
5395 if (!prev_ref)
5396 {
5397 allocatable = expr->symtree->n.sym->attr.allocatable;
5398 coarray = expr->symtree->n.sym->attr.codimension;
5399 dimension = expr->symtree->n.sym->attr.dimension;
5400 }
5401 else
5402 {
5403 allocatable = prev_ref->u.c.component->attr.allocatable;
5404 coarray = prev_ref->u.c.component->attr.codimension;
5405 dimension = prev_ref->u.c.component->attr.dimension;
5406 }
5407
5408 if (!dimension)
5409 gcc_assert (coarray);
5410
5411 /* Figure out the size of the array. */
5412 switch (ref->u.ar.type)
5413 {
5414 case AR_ELEMENT:
5415 if (!coarray)
5416 {
5417 lower = NULL;
5418 upper = ref->u.ar.start;
5419 break;
5420 }
5421 /* Fall through. */
5422
5423 case AR_SECTION:
5424 lower = ref->u.ar.start;
5425 upper = ref->u.ar.end;
5426 break;
5427
5428 case AR_FULL:
5429 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT
5430 || alloc_w_e3_arr_spec);
5431
5432 lower = ref->u.ar.as->lower;
5433 upper = ref->u.ar.as->upper;
5434 break;
5435
5436 default:
5437 gcc_unreachable ();
5438 break;
5439 }
5440
5441 overflow = integer_zero_node;
5442
5443 gfc_init_block (&set_descriptor_block);
5444 size = gfc_array_init_size (se->expr, alloc_w_e3_arr_spec ? expr->rank
5445 : ref->u.ar.as->rank,
5446 ref->u.ar.as->corank, &offset, lower, upper,
5447 &se->pre, &set_descriptor_block, &overflow,
5448 expr3_elem_size, nelems, expr3, e3_arr_desc,
5449 e3_is_array_constr);
5450
5451 if (dimension)
5452 {
5453 var_overflow = gfc_create_var (integer_type_node, "overflow");
5454 gfc_add_modify (&se->pre, var_overflow, overflow);
5455
5456 if (status == NULL_TREE)
5457 {
5458 /* Generate the block of code handling overflow. */
5459 msg = gfc_build_addr_expr (pchar_type_node,
5460 gfc_build_localized_cstring_const
5461 ("Integer overflow when calculating the amount of "
5462 "memory to allocate"));
5463 error = build_call_expr_loc (input_location,
5464 gfor_fndecl_runtime_error, 1, msg);
5465 }
5466 else
5467 {
5468 tree status_type = TREE_TYPE (status);
5469 stmtblock_t set_status_block;
5470
5471 gfc_start_block (&set_status_block);
5472 gfc_add_modify (&set_status_block, status,
5473 build_int_cst (status_type, LIBERROR_ALLOCATION));
5474 error = gfc_finish_block (&set_status_block);
5475 }
5476 }
5477
5478 gfc_start_block (&elseblock);
5479
5480 /* Allocate memory to store the data. */
5481 if (POINTER_TYPE_P (TREE_TYPE (se->expr)))
5482 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
5483
5484 pointer = gfc_conv_descriptor_data_get (se->expr);
5485 STRIP_NOPS (pointer);
5486
5487 if (coarray && flag_coarray == GFC_FCOARRAY_LIB)
5488 token = gfc_build_addr_expr (NULL_TREE,
5489 gfc_conv_descriptor_token (se->expr));
5490
5491 /* The allocatable variant takes the old pointer as first argument. */
5492 if (allocatable)
5493 gfc_allocate_allocatable (&elseblock, pointer, size, token,
5494 status, errmsg, errlen, label_finish, expr);
5495 else
5496 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
5497
5498 if (dimension)
5499 {
5500 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
5501 boolean_type_node, var_overflow, integer_zero_node),
5502 PRED_FORTRAN_OVERFLOW);
5503 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
5504 error, gfc_finish_block (&elseblock));
5505 }
5506 else
5507 tmp = gfc_finish_block (&elseblock);
5508
5509 gfc_add_expr_to_block (&se->pre, tmp);
5510
5511 /* Update the array descriptors. */
5512 if (dimension)
5513 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
5514
5515 set_descriptor = gfc_finish_block (&set_descriptor_block);
5516 if (status != NULL_TREE)
5517 {
5518 cond = fold_build2_loc (input_location, EQ_EXPR,
5519 boolean_type_node, status,
5520 build_int_cst (TREE_TYPE (status), 0));
5521 gfc_add_expr_to_block (&se->pre,
5522 fold_build3_loc (input_location, COND_EXPR, void_type_node,
5523 gfc_likely (cond, PRED_FORTRAN_FAIL_ALLOC),
5524 set_descriptor,
5525 build_empty_stmt (input_location)));
5526 }
5527 else
5528 gfc_add_expr_to_block (&se->pre, set_descriptor);
5529
5530 if ((expr->ts.type == BT_DERIVED)
5531 && expr->ts.u.derived->attr.alloc_comp)
5532 {
5533 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
5534 ref->u.ar.as->rank);
5535 gfc_add_expr_to_block (&se->pre, tmp);
5536 }
5537
5538 return true;
5539 }
5540
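/* The code laid out by gfc_array_allocate is, in outline (a rough sketch;
   the real names differ and the STAT=/ERRMSG= plumbing is omitted):

     size = gfc_array_init_size (...);   // bounds/strides/offset computed here
     if (overflow count != 0)
       report error (or set STAT=);      // instead of attempting the allocation
     else
       allocate data (size);             // allocatable variant or plain malloc
     if (no STAT= given || stat == 0)
       commit the descriptor;            // dtype, bounds, strides and offset
   */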
5541
5542 /* Deallocate an array variable. Also used when an allocated variable goes
5543 out of scope. */
5544 /*GCC ARRAYS*/
5545
5546 tree
5547 gfc_array_deallocate (tree descriptor, tree pstat, tree errmsg, tree errlen,
5548 tree label_finish, gfc_expr* expr)
5549 {
5550 tree var;
5551 tree tmp;
5552 stmtblock_t block;
5553 bool coarray = gfc_is_coarray (expr);
5554
5555 gfc_start_block (&block);
5556
5557 /* Get a pointer to the data. */
5558 var = gfc_conv_descriptor_data_get (descriptor);
5559 STRIP_NOPS (var);
5560
5561 /* Parameter is the address of the data component. */
5562 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, pstat, errmsg,
5563 errlen, label_finish, false, expr, coarray);
5564 gfc_add_expr_to_block (&block, tmp);
5565
5566 /* Zero the data pointer; only for coarrays can an error occur here, and in
5567 that case the allocation status may not have changed. */
5568 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5569 var, build_int_cst (TREE_TYPE (var), 0));
5570 if (pstat != NULL_TREE && coarray && flag_coarray == GFC_FCOARRAY_LIB)
5571 {
5572 tree cond;
5573 tree stat = build_fold_indirect_ref_loc (input_location, pstat);
5574
5575 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5576 stat, build_int_cst (TREE_TYPE (stat), 0));
5577 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5578 cond, tmp, build_empty_stmt (input_location));
5579 }
5580
5581 gfc_add_expr_to_block (&block, tmp);
5582
5583 return gfc_finish_block (&block);
5584 }
5585
5586
5587 /* Create an array constructor from an initialization expression.
5588 We assume the frontend already did any expansions and conversions. */
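/* A minimal illustration of the cases handled below: an explicit
   constructor such as

     integer, parameter :: p(3) = (/ 1, 2, 3 /)

   becomes a tree constructor with one element per value, while a scalar
   initializer such as

     integer :: a(100) = 0

   is expanded into a constructor that repeats the single value for every
   element of the array.  */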
5589
5590 tree
5591 gfc_conv_array_initializer (tree type, gfc_expr * expr)
5592 {
5593 gfc_constructor *c;
5594 tree tmp;
5595 offset_int wtmp;
5596 gfc_se se;
5597 tree index, range;
5598 vec<constructor_elt, va_gc> *v = NULL;
5599
5600 if (expr->expr_type == EXPR_VARIABLE
5601 && expr->symtree->n.sym->attr.flavor == FL_PARAMETER
5602 && expr->symtree->n.sym->value)
5603 expr = expr->symtree->n.sym->value;
5604
5605 switch (expr->expr_type)
5606 {
5607 case EXPR_CONSTANT:
5608 case EXPR_STRUCTURE:
5609 /* A single scalar or derived type value. Create an array with all
5610 elements equal to that value. */
5611 gfc_init_se (&se, NULL);
5612
5613 if (expr->expr_type == EXPR_CONSTANT)
5614 gfc_conv_constant (&se, expr);
5615 else
5616 gfc_conv_structure (&se, expr, 1);
5617
5618 wtmp = wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
5619 /* This will probably eat buckets of memory for large arrays. */
5620 while (wtmp != 0)
5621 {
5622 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
5623 wtmp -= 1;
5624 }
5625 break;
5626
5627 case EXPR_ARRAY:
5628 /* Create a vector of all the elements. */
5629 for (c = gfc_constructor_first (expr->value.constructor);
5630 c; c = gfc_constructor_next (c))
5631 {
5632 if (c->iterator)
5633 {
5634 /* Problems occur when we get something like
5635 integer :: a(lots) = (/(i, i=1, lots)/) */
5636 gfc_fatal_error ("The number of elements in the array "
5637 "constructor at %L requires an increase of "
5638 "the allowed %d upper limit. See "
5639 "%<-fmax-array-constructor%> option",
5640 &expr->where, flag_max_array_constructor);
5641 return NULL_TREE;
5642 }
5643 if (mpz_cmp_si (c->offset, 0) != 0)
5644 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5645 else
5646 index = NULL_TREE;
5647
5648 if (mpz_cmp_si (c->repeat, 1) > 0)
5649 {
5650 tree tmp1, tmp2;
5651 mpz_t maxval;
5652
5653 mpz_init (maxval);
5654 mpz_add (maxval, c->offset, c->repeat);
5655 mpz_sub_ui (maxval, maxval, 1);
5656 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5657 if (mpz_cmp_si (c->offset, 0) != 0)
5658 {
5659 mpz_add_ui (maxval, c->offset, 1);
5660 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5661 }
5662 else
5663 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5664
5665 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
5666 mpz_clear (maxval);
5667 }
5668 else
5669 range = NULL;
5670
5671 gfc_init_se (&se, NULL);
5672 switch (c->expr->expr_type)
5673 {
5674 case EXPR_CONSTANT:
5675 gfc_conv_constant (&se, c->expr);
5676 break;
5677
5678 case EXPR_STRUCTURE:
5679 gfc_conv_structure (&se, c->expr, 1);
5680 break;
5681
5682 default:
5683 /* Catch those occasional beasts that do not simplify
5684 for one reason or another, assuming that if they are
5685 standard-defying the frontend will catch them. */
5686 gfc_conv_expr (&se, c->expr);
5687 break;
5688 }
5689
5690 if (range == NULL_TREE)
5691 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5692 else
5693 {
5694 if (index != NULL_TREE)
5695 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5696 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
5697 }
5698 }
5699 break;
5700
5701 case EXPR_NULL:
5702 return gfc_build_null_descriptor (type);
5703
5704 default:
5705 gcc_unreachable ();
5706 }
5707
5708 /* Create a constructor from the list of elements. */
5709 tmp = build_constructor (type, v);
5710 TREE_CONSTANT (tmp) = 1;
5711 return tmp;
5712 }
5713
5714
5715 /* Generate code to evaluate non-constant coarray cobounds. */
5716
5717 void
5718 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
5719 const gfc_symbol *sym)
5720 {
5721 int dim;
5722 tree ubound;
5723 tree lbound;
5724 gfc_se se;
5725 gfc_array_spec *as;
5726
5727 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
5728
5729 for (dim = as->rank; dim < as->rank + as->corank; dim++)
5730 {
5731 /* Evaluate non-constant array bound expressions. */
5732 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5733 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5734 {
5735 gfc_init_se (&se, NULL);
5736 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5737 gfc_add_block_to_block (pblock, &se.pre);
5738 gfc_add_modify (pblock, lbound, se.expr);
5739 }
5740 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5741 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5742 {
5743 gfc_init_se (&se, NULL);
5744 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5745 gfc_add_block_to_block (pblock, &se.pre);
5746 gfc_add_modify (pblock, ubound, se.expr);
5747 }
5748 }
5749 }
5750
5751
5752 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
5753 returns the size (in elements) of the array. */
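/* Roughly, for an array declared as a(l1:u1, l2:u2, ...) the code built
   here evaluates

     stride1 = 1
     stride2 = stride1 * (u1 - l1 + 1)   ... and so on, clamped to >= 0
     offset  = - (l1 * stride1 + l2 * stride2 + ...)

   so that element a(i1, i2, ...) lives at offset + i1 * stride1
   + i2 * stride2 + ... within the data.  */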
5754
5755 static tree
5756 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
5757 stmtblock_t * pblock)
5758 {
5759 gfc_array_spec *as;
5760 tree size;
5761 tree stride;
5762 tree offset;
5763 tree ubound;
5764 tree lbound;
5765 tree tmp;
5766 gfc_se se;
5767
5768 int dim;
5769
5770 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
5771
5772 size = gfc_index_one_node;
5773 offset = gfc_index_zero_node;
5774 for (dim = 0; dim < as->rank; dim++)
5775 {
5776 /* Evaluate non-constant array bound expressions. */
5777 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5778 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5779 {
5780 gfc_init_se (&se, NULL);
5781 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5782 gfc_add_block_to_block (pblock, &se.pre);
5783 gfc_add_modify (pblock, lbound, se.expr);
5784 }
5785 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5786 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5787 {
5788 gfc_init_se (&se, NULL);
5789 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5790 gfc_add_block_to_block (pblock, &se.pre);
5791 gfc_add_modify (pblock, ubound, se.expr);
5792 }
5793 /* The offset of this dimension. offset = offset - lbound * stride. */
5794 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5795 lbound, size);
5796 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
5797 offset, tmp);
5798
5799 /* The size of this dimension, and the stride of the next. */
5800 if (dim + 1 < as->rank)
5801 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
5802 else
5803 stride = GFC_TYPE_ARRAY_SIZE (type);
5804
5805 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
5806 {
5807 /* Calculate stride = size * (ubound + 1 - lbound). */
5808 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5809 gfc_array_index_type,
5810 gfc_index_one_node, lbound);
5811 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5812 gfc_array_index_type, ubound, tmp);
5813 tmp = fold_build2_loc (input_location, MULT_EXPR,
5814 gfc_array_index_type, size, tmp);
5815 if (stride)
5816 gfc_add_modify (pblock, stride, tmp);
5817 else
5818 stride = gfc_evaluate_now (tmp, pblock);
5819
5820 /* Make sure that negative size arrays are translated
5821 to being zero size. */
5822 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
5823 stride, gfc_index_zero_node);
5824 tmp = fold_build3_loc (input_location, COND_EXPR,
5825 gfc_array_index_type, tmp,
5826 stride, gfc_index_zero_node);
5827 gfc_add_modify (pblock, stride, tmp);
5828 }
5829
5830 size = stride;
5831 }
5832
5833 gfc_trans_array_cobounds (type, pblock, sym);
5834 gfc_trans_vla_type_sizes (sym, pblock);
5835
5836 *poffset = offset;
5837 return size;
5838 }
5839
5840
5841 /* Generate code to initialize/allocate an array variable. */
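/* Roughly, the cases below are: arrays that can live directly on the stack
   only need their character length evaluated; Cray pointees get an offset
   but no storage; with -fstack-arrays the data goes into a stack temporary
   declared via a DECL_EXPR; otherwise it is malloc'd on entry and freed on
   exit.  */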
5842
5843 void
5844 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
5845 gfc_wrapped_block * block)
5846 {
5847 stmtblock_t init;
5848 tree type;
5849 tree tmp = NULL_TREE;
5850 tree size;
5851 tree offset;
5852 tree space;
5853 tree inittree;
5854 bool onstack;
5855
5856 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
5857
5858 /* Do nothing for USEd variables. */
5859 if (sym->attr.use_assoc)
5860 return;
5861
5862 type = TREE_TYPE (decl);
5863 gcc_assert (GFC_ARRAY_TYPE_P (type));
5864 onstack = TREE_CODE (type) != POINTER_TYPE;
5865
5866 gfc_init_block (&init);
5867
5868 /* Evaluate character string length. */
5869 if (sym->ts.type == BT_CHARACTER
5870 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5871 {
5872 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5873
5874 gfc_trans_vla_type_sizes (sym, &init);
5875
5876 /* Emit a DECL_EXPR for this variable, which will cause the
5877 gimplifier to allocate storage, and all that good stuff. */
5878 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
5879 gfc_add_expr_to_block (&init, tmp);
5880 }
5881
5882 if (onstack)
5883 {
5884 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5885 return;
5886 }
5887
5888 type = TREE_TYPE (type);
5889
5890 gcc_assert (!sym->attr.use_assoc);
5891 gcc_assert (!TREE_STATIC (decl));
5892 gcc_assert (!sym->module);
5893
5894 if (sym->ts.type == BT_CHARACTER
5895 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5896 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5897
5898 size = gfc_trans_array_bounds (type, sym, &offset, &init);
5899
5900 /* Don't actually allocate space for Cray Pointees. */
5901 if (sym->attr.cray_pointee)
5902 {
5903 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5904 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5905
5906 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5907 return;
5908 }
5909
5910 if (flag_stack_arrays)
5911 {
5912 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
5913 space = build_decl (sym->declared_at.lb->location,
5914 VAR_DECL, create_tmp_var_name ("A"),
5915 TREE_TYPE (TREE_TYPE (decl)));
5916 gfc_trans_vla_type_sizes (sym, &init);
5917 }
5918 else
5919 {
5920 /* The size is the number of elements in the array, so multiply by the
5921 size of an element to get the total size. */
5922 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5923 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5924 size, fold_convert (gfc_array_index_type, tmp));
5925
5926 /* Allocate memory to hold the data. */
5927 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5928 gfc_add_modify (&init, decl, tmp);
5929
5930 /* Free the temporary. */
5931 tmp = gfc_call_free (decl);
5932 space = NULL_TREE;
5933 }
5934
5935 /* Set offset of the array. */
5936 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5937 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5938
5939 /* Automatic arrays should not have initializers. */
5940 gcc_assert (!sym->value);
5941
5942 inittree = gfc_finish_block (&init);
5943
5944 if (space)
5945 {
5946 tree addr;
5947 pushdecl (space);
5948
5949 /* Don't create a new scope; emit the DECL_EXPR in exactly the scope
5950 where space itself is located. */
5951 gfc_init_block (&init);
5952 tmp = fold_build1_loc (input_location, DECL_EXPR,
5953 TREE_TYPE (space), space);
5954 gfc_add_expr_to_block (&init, tmp);
5955 addr = fold_build1_loc (sym->declared_at.lb->location,
5956 ADDR_EXPR, TREE_TYPE (decl), space);
5957 gfc_add_modify (&init, decl, addr);
5958 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5959 tmp = NULL_TREE;
5960 }
5961 gfc_add_init_cleanup (block, inittree, tmp);
5962 }
5963
5964
5965 /* Generate entry and exit code for g77 calling convention arrays. */
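/* Sketch of what is generated here: for a descriptorless ("g77") dummy the
   entry code just evaluates the character length and the array bounds,
   stores the offset, and, if the local variable is not the PARM_DECL
   itself, points it at the saved descriptor; for OPTIONAL dummies all of
   this is guarded by a presence check.  */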
5966
5967 void
5968 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
5969 {
5970 tree parm;
5971 tree type;
5972 locus loc;
5973 tree offset;
5974 tree tmp;
5975 tree stmt;
5976 stmtblock_t init;
5977
5978 gfc_save_backend_locus (&loc);
5979 gfc_set_backend_locus (&sym->declared_at);
5980
5981 /* Descriptor type. */
5982 parm = sym->backend_decl;
5983 type = TREE_TYPE (parm);
5984 gcc_assert (GFC_ARRAY_TYPE_P (type));
5985
5986 gfc_start_block (&init);
5987
5988 if (sym->ts.type == BT_CHARACTER
5989 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5990 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5991
5992 /* Evaluate the bounds of the array. */
5993 gfc_trans_array_bounds (type, sym, &offset, &init);
5994
5995 /* Set the offset. */
5996 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5997 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5998
5999 /* Set the pointer itself if we aren't using the parameter directly. */
6000 if (TREE_CODE (parm) != PARM_DECL)
6001 {
6002 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
6003 gfc_add_modify (&init, parm, tmp);
6004 }
6005 stmt = gfc_finish_block (&init);
6006
6007 gfc_restore_backend_locus (&loc);
6008
6009 /* Add the initialization code to the start of the function. */
6010
6011 if (sym->attr.optional || sym->attr.not_always_present)
6012 {
6013 tmp = gfc_conv_expr_present (sym);
6014 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
6015 }
6016
6017 gfc_add_init_cleanup (block, stmt, NULL_TREE);
6018 }
6019
6020
6021 /* Modify the descriptor of an array parameter so that it has the
6022 correct lower bound. Also move the upper bound accordingly.
6023 If the array is not packed, it will be copied into a temporary.
6024 For each dimension we set the new lower and upper bounds. Then we copy the
6025 stride and calculate the offset for this dimension. We also work out
6026 what the stride of a packed array would be, and see if the two match.
6027 If the array needs repacking, we set the stride to the values we just
6028 calculated, recalculate the offset and copy the array data.
6029 Code is also added to copy the data back at the end of the function.
6030 */
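/* As a rough example, when a non-contiguous actual argument (say, a strided
   section like b(1:n:2)) has to be repacked for a dummy handled here, the
   entry code calls internal_pack to copy the data into a contiguous
   temporary and the exit code calls internal_unpack to copy it back
   (skipped for INTENT(IN) dummies); the copy-back and free only run if the
   data pointer actually changed, i.e. if a temporary was really created.  */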
6031
6032 void
6033 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
6034 gfc_wrapped_block * block)
6035 {
6036 tree size;
6037 tree type;
6038 tree offset;
6039 locus loc;
6040 stmtblock_t init;
6041 tree stmtInit, stmtCleanup;
6042 tree lbound;
6043 tree ubound;
6044 tree dubound;
6045 tree dlbound;
6046 tree dumdesc;
6047 tree tmp;
6048 tree stride, stride2;
6049 tree stmt_packed;
6050 tree stmt_unpacked;
6051 tree partial;
6052 gfc_se se;
6053 int n;
6054 int checkparm;
6055 int no_repack;
6056 bool optional_arg;
6057 gfc_array_spec *as;
6058 bool is_classarray = IS_CLASS_ARRAY (sym);
6059
6060 /* Do nothing for pointer and allocatable arrays. */
6061 if ((sym->ts.type != BT_CLASS && sym->attr.pointer)
6062 || (sym->ts.type == BT_CLASS && CLASS_DATA (sym)->attr.class_pointer)
6063 || sym->attr.allocatable
6064 || (is_classarray && CLASS_DATA (sym)->attr.allocatable))
6065 return;
6066
6067 if (!is_classarray && sym->attr.dummy && gfc_is_nodesc_array (sym))
6068 {
6069 gfc_trans_g77_array (sym, block);
6070 return;
6071 }
6072
6073 gfc_save_backend_locus (&loc);
6074 gfc_set_backend_locus (&sym->declared_at);
6075
6076 /* Descriptor type. */
6077 type = TREE_TYPE (tmpdesc);
6078 gcc_assert (GFC_ARRAY_TYPE_P (type));
6079 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6080 if (is_classarray)
6081 /* For a class array the dummy array descriptor is in the _class
6082 component. */
6083 dumdesc = gfc_class_data_get (dumdesc);
6084 else
6085 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
6086 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
6087 gfc_start_block (&init);
6088
6089 if (sym->ts.type == BT_CHARACTER
6090 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
6091 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
6092
6093 checkparm = (as->type == AS_EXPLICIT
6094 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
6095
6096 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
6097 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
6098
6099 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
6100 {
6101 /* For non-constant shape arrays we only check if the first dimension
6102 is contiguous. Repacking higher dimensions wouldn't gain us
6103 anything as we still don't know the array stride. */
6104 partial = gfc_create_var (boolean_type_node, "partial");
6105 TREE_USED (partial) = 1;
6106 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
6107 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
6108 gfc_index_one_node);
6109 gfc_add_modify (&init, partial, tmp);
6110 }
6111 else
6112 partial = NULL_TREE;
6113
6114 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
6115 here, however I think it does the right thing. */
6116 if (no_repack)
6117 {
6118 /* Set the first stride. */
6119 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
6120 stride = gfc_evaluate_now (stride, &init);
6121
6122 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
6123 stride, gfc_index_zero_node);
6124 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
6125 tmp, gfc_index_one_node, stride);
6126 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
6127 gfc_add_modify (&init, stride, tmp);
6128
6129 /* Allow the user to disable array repacking. */
6130 stmt_unpacked = NULL_TREE;
6131 }
6132 else
6133 {
6134 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
6135 /* A library call to repack the array if necessary. */
6136 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6137 stmt_unpacked = build_call_expr_loc (input_location,
6138 gfor_fndecl_in_pack, 1, tmp);
6139
6140 stride = gfc_index_one_node;
6141
6142 if (warn_array_temporaries)
6143 gfc_warning (OPT_Warray_temporaries,
6144 "Creating array temporary at %L", &loc);
6145 }
6146
6147 /* This is for the case where the array data is used directly without
6148 calling the repack function. */
6149 if (no_repack || partial != NULL_TREE)
6150 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
6151 else
6152 stmt_packed = NULL_TREE;
6153
6154 /* Assign the data pointer. */
6155 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
6156 {
6157 /* Don't repack unknown shape arrays when the first stride is 1. */
6158 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
6159 partial, stmt_packed, stmt_unpacked);
6160 }
6161 else
6162 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
6163 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
6164
6165 offset = gfc_index_zero_node;
6166 size = gfc_index_one_node;
6167
6168 /* Evaluate the bounds of the array. */
6169 for (n = 0; n < as->rank; n++)
6170 {
6171 if (checkparm || !as->upper[n])
6172 {
6173 /* Get the bounds of the actual parameter. */
6174 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
6175 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
6176 }
6177 else
6178 {
6179 dubound = NULL_TREE;
6180 dlbound = NULL_TREE;
6181 }
6182
6183 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
6184 if (!INTEGER_CST_P (lbound))
6185 {
6186 gfc_init_se (&se, NULL);
6187 gfc_conv_expr_type (&se, as->lower[n],
6188 gfc_array_index_type);
6189 gfc_add_block_to_block (&init, &se.pre);
6190 gfc_add_modify (&init, lbound, se.expr);
6191 }
6192
6193 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
6194 /* Set the desired upper bound. */
6195 if (as->upper[n])
6196 {
6197 /* We know what we want the upper bound to be. */
6198 if (!INTEGER_CST_P (ubound))
6199 {
6200 gfc_init_se (&se, NULL);
6201 gfc_conv_expr_type (&se, as->upper[n],
6202 gfc_array_index_type);
6203 gfc_add_block_to_block (&init, &se.pre);
6204 gfc_add_modify (&init, ubound, se.expr);
6205 }
6206
6207 /* Check the sizes match. */
6208 if (checkparm)
6209 {
6210 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
6211 char * msg;
6212 tree temp;
6213
6214 temp = fold_build2_loc (input_location, MINUS_EXPR,
6215 gfc_array_index_type, ubound, lbound);
6216 temp = fold_build2_loc (input_location, PLUS_EXPR,
6217 gfc_array_index_type,
6218 gfc_index_one_node, temp);
6219 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
6220 gfc_array_index_type, dubound,
6221 dlbound);
6222 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
6223 gfc_array_index_type,
6224 gfc_index_one_node, stride2);
6225 tmp = fold_build2_loc (input_location, NE_EXPR,
6226 gfc_array_index_type, temp, stride2);
6227 msg = xasprintf ("Dimension %d of array '%s' has extent "
6228 "%%ld instead of %%ld", n+1, sym->name);
6229
6230 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
6231 fold_convert (long_integer_type_node, temp),
6232 fold_convert (long_integer_type_node, stride2));
6233
6234 free (msg);
6235 }
6236 }
6237 else
6238 {
6239 /* For assumed-shape arrays move the upper bound by the same amount
6240 as the lower bound. */
6241 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6242 gfc_array_index_type, dubound, dlbound);
6243 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6244 gfc_array_index_type, tmp, lbound);
6245 gfc_add_modify (&init, ubound, tmp);
6246 }
6247 /* The offset of this dimension. offset = offset - lbound * stride. */
6248 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6249 lbound, stride);
6250 offset = fold_build2_loc (input_location, MINUS_EXPR,
6251 gfc_array_index_type, offset, tmp);
6252
6253 /* The size of this dimension, and the stride of the next. */
6254 if (n + 1 < as->rank)
6255 {
6256 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
6257
6258 if (no_repack || partial != NULL_TREE)
6259 stmt_unpacked =
6260 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
6261
6262 /* Figure out the stride if not a known constant. */
6263 if (!INTEGER_CST_P (stride))
6264 {
6265 if (no_repack)
6266 stmt_packed = NULL_TREE;
6267 else
6268 {
6269 /* Calculate stride = size * (ubound + 1 - lbound). */
6270 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6271 gfc_array_index_type,
6272 gfc_index_one_node, lbound);
6273 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6274 gfc_array_index_type, ubound, tmp);
6275 size = fold_build2_loc (input_location, MULT_EXPR,
6276 gfc_array_index_type, size, tmp);
6277 stmt_packed = size;
6278 }
6279
6280 /* Assign the stride. */
6281 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
6282 tmp = fold_build3_loc (input_location, COND_EXPR,
6283 gfc_array_index_type, partial,
6284 stmt_unpacked, stmt_packed);
6285 else
6286 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
6287 gfc_add_modify (&init, stride, tmp);
6288 }
6289 }
6290 else
6291 {
6292 stride = GFC_TYPE_ARRAY_SIZE (type);
6293
6294 if (stride && !INTEGER_CST_P (stride))
6295 {
6296 /* Calculate size = stride * (ubound + 1 - lbound). */
6297 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6298 gfc_array_index_type,
6299 gfc_index_one_node, lbound);
6300 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6301 gfc_array_index_type,
6302 ubound, tmp);
6303 tmp = fold_build2_loc (input_location, MULT_EXPR,
6304 gfc_array_index_type,
6305 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
6306 gfc_add_modify (&init, stride, tmp);
6307 }
6308 }
6309 }
6310
6311 gfc_trans_array_cobounds (type, &init, sym);
6312
6313 /* Set the offset. */
6314 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
6315 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
6316
6317 gfc_trans_vla_type_sizes (sym, &init);
6318
6319 stmtInit = gfc_finish_block (&init);
6320
6321 /* Only do the entry/initialization code if the arg is present. */
6322 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6323 optional_arg = (sym->attr.optional
6324 || (sym->ns->proc_name->attr.entry_master
6325 && sym->attr.dummy));
6326 if (optional_arg)
6327 {
6328 tmp = gfc_conv_expr_present (sym);
6329 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
6330 build_empty_stmt (input_location));
6331 }
6332
6333 /* Cleanup code. */
6334 if (no_repack)
6335 stmtCleanup = NULL_TREE;
6336 else
6337 {
6338 stmtblock_t cleanup;
6339 gfc_start_block (&cleanup);
6340
6341 if (sym->attr.intent != INTENT_IN)
6342 {
6343 /* Copy the data back. */
6344 tmp = build_call_expr_loc (input_location,
6345 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
6346 gfc_add_expr_to_block (&cleanup, tmp);
6347 }
6348
6349 /* Free the temporary. */
6350 tmp = gfc_call_free (tmpdesc);
6351 gfc_add_expr_to_block (&cleanup, tmp);
6352
6353 stmtCleanup = gfc_finish_block (&cleanup);
6354
6355 /* Only do the cleanup if the array was repacked. */
6356 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
6357 tmp = gfc_conv_descriptor_data_get (tmp);
6358 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6359 tmp, tmpdesc);
6360 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6361 build_empty_stmt (input_location));
6362
6363 if (optional_arg)
6364 {
6365 tmp = gfc_conv_expr_present (sym);
6366 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6367 build_empty_stmt (input_location));
6368 }
6369 }
6370
6371 /* We don't need to free any memory allocated by internal_pack as it will
6372 be freed at the end of the function by pop_context. */
6373 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
6374
6375 gfc_restore_backend_locus (&loc);
6376 }
6377
6378
6379 /* Calculate the overall offset, including subreferences. */
6380 static void
6381 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
6382 bool subref, gfc_expr *expr)
6383 {
6384 tree tmp;
6385 tree field;
6386 tree stride;
6387 tree index;
6388 gfc_ref *ref;
6389 gfc_se start;
6390 int n;
6391
6392 /* If offset is NULL and this is not a subreferenced array, there is
6393 nothing to do. */
6394 if (offset == NULL_TREE)
6395 {
6396 if (subref)
6397 offset = gfc_index_zero_node;
6398 else
6399 return;
6400 }
6401
6402 tmp = build_array_ref (desc, offset, NULL, NULL);
6403
6404 /* Offset the data pointer for pointer assignments from arrays with
6405 subreferences; e.g. my_integer => my_type(:)%integer_component. */
6406 if (subref)
6407 {
6408 /* Go past the array reference. */
6409 for (ref = expr->ref; ref; ref = ref->next)
6410 if (ref->type == REF_ARRAY &&
6411 ref->u.ar.type != AR_ELEMENT)
6412 {
6413 ref = ref->next;
6414 break;
6415 }
6416
6417 /* Calculate the offset for each subsequent subreference. */
6418 for (; ref; ref = ref->next)
6419 {
6420 switch (ref->type)
6421 {
6422 case REF_COMPONENT:
6423 field = ref->u.c.component->backend_decl;
6424 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
6425 tmp = fold_build3_loc (input_location, COMPONENT_REF,
6426 TREE_TYPE (field),
6427 tmp, field, NULL_TREE);
6428 break;
6429
6430 case REF_SUBSTRING:
6431 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
6432 gfc_init_se (&start, NULL);
6433 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
6434 gfc_add_block_to_block (block, &start.pre);
6435 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
6436 break;
6437
6438 case REF_ARRAY:
6439 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
6440 && ref->u.ar.type == AR_ELEMENT);
6441
6442 /* TODO - Add bounds checking. */
6443 stride = gfc_index_one_node;
6444 index = gfc_index_zero_node;
6445 for (n = 0; n < ref->u.ar.dimen; n++)
6446 {
6447 tree itmp;
6448 tree jtmp;
6449
6450 /* Update the index. */
6451 gfc_init_se (&start, NULL);
6452 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
6453 itmp = gfc_evaluate_now (start.expr, block);
6454 gfc_init_se (&start, NULL);
6455 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
6456 jtmp = gfc_evaluate_now (start.expr, block);
6457 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6458 gfc_array_index_type, itmp, jtmp);
6459 itmp = fold_build2_loc (input_location, MULT_EXPR,
6460 gfc_array_index_type, itmp, stride);
6461 index = fold_build2_loc (input_location, PLUS_EXPR,
6462 gfc_array_index_type, itmp, index);
6463 index = gfc_evaluate_now (index, block);
6464
6465 /* Update the stride. */
6466 gfc_init_se (&start, NULL);
6467 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
6468 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6469 gfc_array_index_type, start.expr,
6470 jtmp);
6471 itmp = fold_build2_loc (input_location, PLUS_EXPR,
6472 gfc_array_index_type,
6473 gfc_index_one_node, itmp);
6474 stride = fold_build2_loc (input_location, MULT_EXPR,
6475 gfc_array_index_type, stride, itmp);
6476 stride = gfc_evaluate_now (stride, block);
6477 }
6478
6479 /* Apply the index to obtain the array element. */
6480 tmp = gfc_build_array_ref (tmp, index, NULL);
6481 break;
6482
6483 default:
6484 gcc_unreachable ();
6485 break;
6486 }
6487 }
6488 }
6489
6490 /* Set the target data pointer. */
6491 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
6492 gfc_conv_descriptor_data_set (block, parm, offset);
6493 }
6494
6495
6496 /* gfc_conv_expr_descriptor needs the string length of an expression
6497 so that the size of the temporary can be obtained. This is done
6498 by adding up the string lengths of all the elements in the
6499 expression. Functions with non-constant length expressions have their
6500 string lengths mapped onto the actual arguments using the
6501 interface mapping machinery in trans-expr.c. */
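/* For example, for a concatenation  c // d  the length below is simply
   len (c) + len (d), while for a call such as  f (n)  returning
   character(len=2*n) the dummy n inside the length expression is replaced
   by the corresponding actual argument through the interface mapping
   (this example declaration is only illustrative).  */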
6502 static void
6503 get_array_charlen (gfc_expr *expr, gfc_se *se)
6504 {
6505 gfc_interface_mapping mapping;
6506 gfc_formal_arglist *formal;
6507 gfc_actual_arglist *arg;
6508 gfc_se tse;
6509
6510 if (expr->ts.u.cl->length
6511 && gfc_is_constant_expr (expr->ts.u.cl->length))
6512 {
6513 if (!expr->ts.u.cl->backend_decl)
6514 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6515 return;
6516 }
6517
6518 switch (expr->expr_type)
6519 {
6520 case EXPR_OP:
6521 get_array_charlen (expr->value.op.op1, se);
6522
6523 /* For parentheses the expression ts.u.cl is identical. */
6524 if (expr->value.op.op == INTRINSIC_PARENTHESES)
6525 return;
6526
6527 expr->ts.u.cl->backend_decl =
6528 gfc_create_var (gfc_charlen_type_node, "sln");
6529
6530 if (expr->value.op.op2)
6531 {
6532 get_array_charlen (expr->value.op.op2, se);
6533
6534 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
6535
6536 /* Add the string lengths and assign them to the expression
6537 string length backend declaration. */
6538 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6539 fold_build2_loc (input_location, PLUS_EXPR,
6540 gfc_charlen_type_node,
6541 expr->value.op.op1->ts.u.cl->backend_decl,
6542 expr->value.op.op2->ts.u.cl->backend_decl));
6543 }
6544 else
6545 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6546 expr->value.op.op1->ts.u.cl->backend_decl);
6547 break;
6548
6549 case EXPR_FUNCTION:
6550 if (expr->value.function.esym == NULL
6551 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
6552 {
6553 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6554 break;
6555 }
6556
6557 /* Map expressions involving the dummy arguments onto the actual
6558 argument expressions. */
6559 gfc_init_interface_mapping (&mapping);
6560 formal = gfc_sym_get_dummy_args (expr->symtree->n.sym);
6561 arg = expr->value.function.actual;
6562
6563 /* Set se = NULL in the calls to the interface mapping, to suppress any
6564 backend stuff. */
6565 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
6566 {
6567 if (!arg->expr)
6568 continue;
6569 if (formal->sym)
6570 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
6571 }
6572
6573 gfc_init_se (&tse, NULL);
6574
6575 /* Build the expression for the character length and convert it. */
6576 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
6577
6578 gfc_add_block_to_block (&se->pre, &tse.pre);
6579 gfc_add_block_to_block (&se->post, &tse.post);
6580 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
6581 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
6582 gfc_charlen_type_node, tse.expr,
6583 build_int_cst (gfc_charlen_type_node, 0));
6584 expr->ts.u.cl->backend_decl = tse.expr;
6585 gfc_free_interface_mapping (&mapping);
6586 break;
6587
6588 default:
6589 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6590 break;
6591 }
6592 }
6593
6594
6595 /* Helper function to check dimensions. */
6596 static bool
6597 transposed_dims (gfc_ss *ss)
6598 {
6599 int n;
6600
6601 for (n = 0; n < ss->dimen; n++)
6602 if (ss->dim[n] != n)
6603 return true;
6604 return false;
6605 }
6606
6607
6608 /* Convert the last ref of a scalar coarray from an AR_ELEMENT to an
6609 AR_FULL, suitable for the scalarizer. */
6610
6611 static gfc_ss *
6612 walk_coarray (gfc_expr *e)
6613 {
6614 gfc_ss *ss;
6615
6616 gcc_assert (gfc_get_corank (e) > 0);
6617
6618 ss = gfc_walk_expr (e);
6619
6620 /* Fix scalar coarray. */
6621 if (ss == gfc_ss_terminator)
6622 {
6623 gfc_ref *ref;
6624
6625 ref = e->ref;
6626 while (ref)
6627 {
6628 if (ref->type == REF_ARRAY
6629 && ref->u.ar.codimen > 0)
6630 break;
6631
6632 ref = ref->next;
6633 }
6634
6635 gcc_assert (ref != NULL);
6636 if (ref->u.ar.type == AR_ELEMENT)
6637 ref->u.ar.type = AR_SECTION;
6638 ss = gfc_reverse_ss (gfc_walk_array_ref (ss, e, ref));
6639 }
6640
6641 return ss;
6642 }
6643
6644
6645 /* Convert an array for passing as an actual argument. Expressions and
6646 vector subscripts are evaluated and stored in a temporary, which is then
6647 passed. For whole arrays the descriptor is passed. For array sections
6648 a modified copy of the descriptor is passed, but using the original data.
6649
6650 This function is also used for array pointer assignments, and there
6651 are three cases:
6652
6653 - se->want_pointer && !se->direct_byref
6654 EXPR is an actual argument. On exit, se->expr contains a
6655 pointer to the array descriptor.
6656
6657 - !se->want_pointer && !se->direct_byref
6658 EXPR is an actual argument to an intrinsic function or the
6659 left-hand side of a pointer assignment. On exit, se->expr
6660 contains the descriptor for EXPR.
6661
6662 - !se->want_pointer && se->direct_byref
6663 EXPR is the right-hand side of a pointer assignment and
6664 se->expr is the descriptor for the previously-evaluated
6665 left-hand side. The function creates an assignment from
6666 EXPR to se->expr.
6667
6668
6669 The se->force_tmp flag disables the non-copying descriptor optimization
6670 that is used for transpose. It may be used in cases where there is an
6671 alias between the transpose argument and another argument in the same
6672 function call. */
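/* As loose examples of the first two cases: passing a section as an actual
   argument, as in  call sub (a(2:10:2)),  builds a new descriptor that
   shares the original data, while an intrinsic reference or the left-hand
   side of  p => a(:,1)  only needs the descriptor itself.  The third case
   covers the right-hand side of such a pointer assignment.  */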
6673
6674 void
6675 gfc_conv_expr_descriptor (gfc_se *se, gfc_expr *expr)
6676 {
6677 gfc_ss *ss;
6678 gfc_ss_type ss_type;
6679 gfc_ss_info *ss_info;
6680 gfc_loopinfo loop;
6681 gfc_array_info *info;
6682 int need_tmp;
6683 int n;
6684 tree tmp;
6685 tree desc;
6686 stmtblock_t block;
6687 tree start;
6688 tree offset;
6689 int full;
6690 bool subref_array_target = false;
6691 gfc_expr *arg, *ss_expr;
6692
6693 if (se->want_coarray)
6694 ss = walk_coarray (expr);
6695 else
6696 ss = gfc_walk_expr (expr);
6697
6698 gcc_assert (ss != NULL);
6699 gcc_assert (ss != gfc_ss_terminator);
6700
6701 ss_info = ss->info;
6702 ss_type = ss_info->type;
6703 ss_expr = ss_info->expr;
6704
6705 /* Special case: TRANSPOSE which needs no temporary. */
6706 while (expr->expr_type == EXPR_FUNCTION && expr->value.function.isym
6707 && NULL != (arg = gfc_get_noncopying_intrinsic_argument (expr)))
6708 {
6709 /* This is a call to transpose which has already been handled by the
6710 scalarizer, so that we just need to get its argument's descriptor. */
6711 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
6712 expr = expr->value.function.actual->expr;
6713 }
6714
6715 /* Special case things we know we can pass easily. */
6716 switch (expr->expr_type)
6717 {
6718 case EXPR_VARIABLE:
6719 /* If we have a linear array section, we can pass it directly.
6720 Otherwise we need to copy it into a temporary. */
6721
6722 gcc_assert (ss_type == GFC_SS_SECTION);
6723 gcc_assert (ss_expr == expr);
6724 info = &ss_info->data.array;
6725
6726 /* Get the descriptor for the array. */
6727 gfc_conv_ss_descriptor (&se->pre, ss, 0);
6728 desc = info->descriptor;
6729
6730 subref_array_target = se->direct_byref && is_subref_array (expr);
6731 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
6732 && !subref_array_target;
6733
6734 if (se->force_tmp)
6735 need_tmp = 1;
6736
6737 if (need_tmp)
6738 full = 0;
6739 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6740 {
6741 /* Create a new descriptor if the array doesn't have one. */
6742 full = 0;
6743 }
6744 else if (info->ref->u.ar.type == AR_FULL || se->descriptor_only)
6745 full = 1;
6746 else if (se->direct_byref)
6747 full = 0;
6748 else
6749 full = gfc_full_array_ref_p (info->ref, NULL);
6750
6751 if (full && !transposed_dims (ss))
6752 {
6753 if (se->direct_byref && !se->byref_noassign)
6754 {
6755 /* Copy the descriptor for pointer assignments. */
6756 gfc_add_modify (&se->pre, se->expr, desc);
6757
6758 /* Add any offsets from subreferences. */
6759 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
6760 subref_array_target, expr);
6761 }
6762 else if (se->want_pointer)
6763 {
6764 /* We pass full arrays directly. This means that pointers and
6765 allocatable arrays should also work. */
6766 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6767 }
6768 else
6769 {
6770 se->expr = desc;
6771 }
6772
6773 if (expr->ts.type == BT_CHARACTER)
6774 se->string_length = gfc_get_expr_charlen (expr);
6775
6776 gfc_free_ss_chain (ss);
6777 return;
6778 }
6779 break;
6780
6781 case EXPR_FUNCTION:
6782 /* A transformational function return value will be a temporary
6783 array descriptor. We still need to go through the scalarizer
6784 to create the descriptor. Elemental functions are handled as
6785 arbitrary expressions, i.e. copy to a temporary. */
6786
6787 if (se->direct_byref)
6788 {
6789 gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);
6790
6791 /* For pointer assignments pass the descriptor directly. */
6792 if (se->ss == NULL)
6793 se->ss = ss;
6794 else
6795 gcc_assert (se->ss == ss);
6796 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6797 gfc_conv_expr (se, expr);
6798 gfc_free_ss_chain (ss);
6799 return;
6800 }
6801
6802 if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
6803 {
6804 if (ss_expr != expr)
6805 /* Elemental function. */
6806 gcc_assert ((expr->value.function.esym != NULL
6807 && expr->value.function.esym->attr.elemental)
6808 || (expr->value.function.isym != NULL
6809 && expr->value.function.isym->elemental)
6810 || gfc_inline_intrinsic_function_p (expr));
6811 else
6812 gcc_assert (ss_type == GFC_SS_INTRINSIC);
6813
6814 need_tmp = 1;
6815 if (expr->ts.type == BT_CHARACTER
6816 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
6817 get_array_charlen (expr, se);
6818
6819 info = NULL;
6820 }
6821 else
6822 {
6823 /* Transformational function. */
6824 info = &ss_info->data.array;
6825 need_tmp = 0;
6826 }
6827 break;
6828
6829 case EXPR_ARRAY:
6830 /* Constant array constructors don't need a temporary. */
6831 if (ss_type == GFC_SS_CONSTRUCTOR
6832 && expr->ts.type != BT_CHARACTER
6833 && gfc_constant_array_constructor_p (expr->value.constructor))
6834 {
6835 need_tmp = 0;
6836 info = &ss_info->data.array;
6837 }
6838 else
6839 {
6840 need_tmp = 1;
6841 info = NULL;
6842 }
6843 break;
6844
6845 default:
6846 /* Something complicated. Copy it into a temporary. */
6847 need_tmp = 1;
6848 info = NULL;
6849 break;
6850 }
6851
6852 /* If we are creating a temporary, we don't need to bother about aliases
6853 anymore. */
6854 if (need_tmp)
6855 se->force_tmp = 0;
6856
6857 gfc_init_loopinfo (&loop);
6858
6859 /* Associate the SS with the loop. */
6860 gfc_add_ss_to_loop (&loop, ss);
6861
6862 /* Tell the scalarizer not to bother creating loop variables, etc. */
6863 if (!need_tmp)
6864 loop.array_parameter = 1;
6865 else
6866 /* The right-hand side of a pointer assignment mustn't use a temporary. */
6867 gcc_assert (!se->direct_byref);
6868
6869 /* Setup the scalarizing loops and bounds. */
6870 gfc_conv_ss_startstride (&loop);
6871
6872 if (need_tmp)
6873 {
6874 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
6875 get_array_charlen (expr, se);
6876
6877 /* Tell the scalarizer to make a temporary. */
6878 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
6879 ((expr->ts.type == BT_CHARACTER)
6880 ? expr->ts.u.cl->backend_decl
6881 : NULL),
6882 loop.dimen);
6883
6884 se->string_length = loop.temp_ss->info->string_length;
6885 gcc_assert (loop.temp_ss->dimen == loop.dimen);
6886 gfc_add_ss_to_loop (&loop, loop.temp_ss);
6887 }
6888
6889 gfc_conv_loop_setup (&loop, & expr->where);
6890
6891 if (need_tmp)
6892 {
6893 /* Copy into a temporary and pass that. We don't need to copy the data
6894 back because expressions and vector subscripts must be INTENT_IN. */
6895 /* TODO: Optimize passing function return values. */
6896 gfc_se lse;
6897 gfc_se rse;
6898
6899 /* Start the copying loops. */
6900 gfc_mark_ss_chain_used (loop.temp_ss, 1);
6901 gfc_mark_ss_chain_used (ss, 1);
6902 gfc_start_scalarized_body (&loop, &block);
6903
6904 /* Copy each data element. */
6905 gfc_init_se (&lse, NULL);
6906 gfc_copy_loopinfo_to_se (&lse, &loop);
6907 gfc_init_se (&rse, NULL);
6908 gfc_copy_loopinfo_to_se (&rse, &loop);
6909
6910 lse.ss = loop.temp_ss;
6911 rse.ss = ss;
6912
6913 gfc_conv_scalarized_array_ref (&lse, NULL);
6914 if (expr->ts.type == BT_CHARACTER)
6915 {
6916 gfc_conv_expr (&rse, expr);
6917 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
6918 rse.expr = build_fold_indirect_ref_loc (input_location,
6919 rse.expr);
6920 }
6921 else
6922 gfc_conv_expr_val (&rse, expr);
6923
6924 gfc_add_block_to_block (&block, &rse.pre);
6925 gfc_add_block_to_block (&block, &lse.pre);
6926
6927 lse.string_length = rse.string_length;
6928 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts,
6929 expr->expr_type == EXPR_VARIABLE
6930 || expr->expr_type == EXPR_ARRAY, false);
6931 gfc_add_expr_to_block (&block, tmp);
6932
6933 /* Finish the copying loops. */
6934 gfc_trans_scalarizing_loops (&loop, &block);
6935
6936 desc = loop.temp_ss->info->data.array.descriptor;
6937 }
6938 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
6939 {
6940 desc = info->descriptor;
6941 se->string_length = ss_info->string_length;
6942 }
6943 else
6944 {
6945 /* We pass sections without copying to a temporary. Make a new
6946 descriptor and point it at the section we want. The loop variable
6947 limits will be the limits of the section.
6948 A function may decide to repack the array to speed up access, but
6949 we're not bothered about that here. */
6950 int dim, ndim, codim;
6951 tree parm;
6952 tree parmtype;
6953 tree stride;
6954 tree from;
6955 tree to;
6956 tree base;
6957 bool onebased = false, rank_remap;
6958
6959 ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
6960 rank_remap = ss->dimen < ndim;
6961
6962 if (se->want_coarray)
6963 {
6964 gfc_array_ref *ar = &info->ref->u.ar;
6965
6966 codim = gfc_get_corank (expr);
6967 for (n = 0; n < codim - 1; n++)
6968 {
6969 /* Make sure we are not lost somehow. */
6970 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
6971
6972 /* Make sure the call to gfc_conv_section_startstride won't
6973 generate unnecessary code to calculate stride. */
6974 gcc_assert (ar->stride[n + ndim] == NULL);
6975
6976 gfc_conv_section_startstride (&loop.pre, ss, n + ndim);
6977 loop.from[n + loop.dimen] = info->start[n + ndim];
6978 loop.to[n + loop.dimen] = info->end[n + ndim];
6979 }
6980
6981 gcc_assert (n == codim - 1);
6982 evaluate_bound (&loop.pre, info->start, ar->start,
6983 info->descriptor, n + ndim, true,
6984 ar->as->type == AS_DEFERRED);
6985 loop.from[n + loop.dimen] = info->start[n + ndim];
6986 }
6987 else
6988 codim = 0;
6989
6990 /* Set the string_length for a character array. */
6991 if (expr->ts.type == BT_CHARACTER)
6992 se->string_length = gfc_get_expr_charlen (expr);
6993
6994 /* If we have an array section or are assigning, make sure that
6995 the lower bound is 1. References to the full
6996 array should otherwise keep the original bounds. */
6997 if ((!info->ref || info->ref->u.ar.type != AR_FULL) && !se->want_pointer)
6998 for (dim = 0; dim < loop.dimen; dim++)
6999 if (!integer_onep (loop.from[dim]))
7000 {
7001 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7002 gfc_array_index_type, gfc_index_one_node,
7003 loop.from[dim]);
7004 loop.to[dim] = fold_build2_loc (input_location, PLUS_EXPR,
7005 gfc_array_index_type,
7006 loop.to[dim], tmp);
7007 loop.from[dim] = gfc_index_one_node;
7008 }
7009
7010 desc = info->descriptor;
7011 if (se->direct_byref && !se->byref_noassign)
7012 {
7013 /* For pointer assignments we fill in the destination. */
7014 parm = se->expr;
7015 parmtype = TREE_TYPE (parm);
7016 }
7017 else
7018 {
7019 /* Otherwise make a new one. */
7020 parmtype = gfc_get_element_type (TREE_TYPE (desc));
7021 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
7022 loop.from, loop.to, 0,
7023 GFC_ARRAY_UNKNOWN, false);
7024 parm = gfc_create_var (parmtype, "parm");
7025 }
7026
7027 offset = gfc_index_zero_node;
7028
7029 /* The following can be somewhat confusing. We have two
7030 descriptors, a new one and the original array.
7031 {parm, parmtype, dim} refer to the new one.
7032 {desc, type, n, loop} refer to the original, which may be
7033 a descriptorless array.
7034 The bounds of the scalarization are the bounds of the section.
7035 We don't have to worry about numeric overflows when calculating
7036 the offsets because all elements are within the array data. */
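/* In short, the code below fills in the dtype of the new descriptor,
   rewrites the bounds of every selected dimension in terms of the loop
   bounds, multiplies the original strides by the section strides, and
   finally sets the data pointer to the first element of the section and
   picks a suitable offset (a summary only; the details follow).  */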
7037
7038 /* Set the dtype. */
7039 tmp = gfc_conv_descriptor_dtype (parm);
7040 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
7041
7042 /* For assignments to a pointer, set the offset to zero only if it is
7043 not the full array. */
7044 if ((se->direct_byref || se->use_offset)
7045 && ((info->ref && info->ref->u.ar.type != AR_FULL)
7046 || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
7047 base = gfc_index_zero_node;
7048 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7049 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
7050 else
7051 base = NULL_TREE;
7052
7053 for (n = 0; n < ndim; n++)
7054 {
7055 stride = gfc_conv_array_stride (desc, n);
7056
7057 /* Work out the offset. */
7058 if (info->ref
7059 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
7060 {
7061 gcc_assert (info->subscript[n]
7062 && info->subscript[n]->info->type == GFC_SS_SCALAR);
7063 start = info->subscript[n]->info->data.scalar.value;
7064 }
7065 else
7066 {
7067 /* Evaluate and remember the start of the section. */
7068 start = info->start[n];
7069 stride = gfc_evaluate_now (stride, &loop.pre);
7070 }
7071
7072 tmp = gfc_conv_array_lbound (desc, n);
7073 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
7074 start, tmp);
7075 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
7076 tmp, stride);
7077 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
7078 offset, tmp);
7079
7080 if (info->ref
7081 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
7082 {
7083 /* For elemental dimensions, we only need the offset. */
7084 continue;
7085 }
7086
7087 /* Vector subscripts need copying and are handled elsewhere. */
7088 if (info->ref)
7089 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
7090
7091 /* Look for the corresponding scalarizer dimension: dim. */
7092 for (dim = 0; dim < ndim; dim++)
7093 if (ss->dim[dim] == n)
7094 break;
7095
7096 /* Loop exited early: the DIM being looked for has been found. */
7097 gcc_assert (dim < ndim);
7098
7099 /* Set the new lower bound. */
7100 from = loop.from[dim];
7101 to = loop.to[dim];
7102
7103 onebased = integer_onep (from);
7104 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
7105 gfc_rank_cst[dim], from);
7106
7107 /* Set the new upper bound. */
7108 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
7109 gfc_rank_cst[dim], to);
7110
7111 /* Multiply the stride by the section stride to get the
7112 total stride. */
7113 stride = fold_build2_loc (input_location, MULT_EXPR,
7114 gfc_array_index_type,
7115 stride, info->stride[n]);
7116
7117 if (se->direct_byref
7118 && ((info->ref && info->ref->u.ar.type != AR_FULL)
7119 || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
7120 {
7121 base = fold_build2_loc (input_location, MINUS_EXPR,
7122 TREE_TYPE (base), base, stride);
7123 }
7124 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)) || se->use_offset)
7125 {
7126 bool toonebased;
7127 tmp = gfc_conv_array_lbound (desc, n);
7128 toonebased = integer_onep (tmp);
7129 // lb(arr) - from (- start + 1)
7130 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7131 TREE_TYPE (base), tmp, from);
7132 if (onebased && toonebased)
7133 {
7134 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7135 TREE_TYPE (base), tmp, start);
7136 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7137 TREE_TYPE (base), tmp,
7138 gfc_index_one_node);
7139 }
7140 tmp = fold_build2_loc (input_location, MULT_EXPR,
7141 TREE_TYPE (base), tmp,
7142 gfc_conv_array_stride (desc, n));
7143 base = fold_build2_loc (input_location, PLUS_EXPR,
7144 TREE_TYPE (base), tmp, base);
7145 }
7146
7147 /* Store the new stride. */
7148 gfc_conv_descriptor_stride_set (&loop.pre, parm,
7149 gfc_rank_cst[dim], stride);
7150 }
7151
7152 for (n = loop.dimen; n < loop.dimen + codim; n++)
7153 {
7154 from = loop.from[n];
7155 to = loop.to[n];
7156 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
7157 gfc_rank_cst[n], from);
7158 if (n < loop.dimen + codim - 1)
7159 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
7160 gfc_rank_cst[n], to);
7161 }
7162
7163 if (se->data_not_needed)
7164 gfc_conv_descriptor_data_set (&loop.pre, parm,
7165 gfc_index_zero_node);
7166 else
7167 /* Point the data pointer at the 1st element in the section. */
7168 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
7169 subref_array_target, expr);
7170
7171 /* Force the offset to be -1 when the lower bound of the highest
7172 dimension is one and the symbol is present and is not a
7173 pointer/allocatable or associated. */
7174 if (((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7175 && !se->data_not_needed)
7176 || (se->use_offset && base != NULL_TREE))
7177 {
7178 /* Set the offset depending on base. */
7179 tmp = rank_remap && !se->direct_byref ?
7180 fold_build2_loc (input_location, PLUS_EXPR,
7181 gfc_array_index_type, base,
7182 offset)
7183 : base;
7184 gfc_conv_descriptor_offset_set (&loop.pre, parm, tmp);
7185 }
7186 else if (onebased && (!rank_remap || se->use_offset)
7187 && expr->symtree
7188 && !(expr->symtree->n.sym && expr->symtree->n.sym->ts.type == BT_CLASS
7189 && !CLASS_DATA (expr->symtree->n.sym)->attr.class_pointer)
7190 && !expr->symtree->n.sym->attr.allocatable
7191 && !expr->symtree->n.sym->attr.pointer
7192 && !expr->symtree->n.sym->attr.host_assoc
7193 && !expr->symtree->n.sym->attr.use_assoc)
7194 {
7195 /* Set the offset to -1. */
7196 mpz_t minus_one;
7197 mpz_init_set_si (minus_one, -1);
7198 tmp = gfc_conv_mpz_to_tree (minus_one, gfc_index_integer_kind);
7199 gfc_conv_descriptor_offset_set (&loop.pre, parm, tmp);
7200 }
7201 else
7202 {
7203 /* Only the callee knows what the correct offset is, so just set
7204 it to zero here. */
7205 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
7206 }
7207 desc = parm;
7208 }
7209
7210 /* For class arrays add the class tree into the saved descriptor to
7211 enable getting of _vptr and the like. */
7212 if (expr->expr_type == EXPR_VARIABLE && VAR_P (desc)
7213 && IS_CLASS_ARRAY (expr->symtree->n.sym))
7214 {
7215 gfc_allocate_lang_decl (desc);
7216 GFC_DECL_SAVED_DESCRIPTOR (desc) =
7217 DECL_LANG_SPECIFIC (expr->symtree->n.sym->backend_decl) ?
7218 GFC_DECL_SAVED_DESCRIPTOR (expr->symtree->n.sym->backend_decl)
7219 : expr->symtree->n.sym->backend_decl;
7220 }
7221 if (!se->direct_byref || se->byref_noassign)
7222 {
7223 /* Get a pointer to the new descriptor. */
7224 if (se->want_pointer)
7225 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
7226 else
7227 se->expr = desc;
7228 }
7229
7230 gfc_add_block_to_block (&se->pre, &loop.pre);
7231 gfc_add_block_to_block (&se->post, &loop.post);
7232
7233 /* Cleanup the scalarizer. */
7234 gfc_cleanup_loop (&loop);
7235 }
7236
7237 /* Helper function for gfc_conv_array_parameter if the array size needs to be
7238 computed. */
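/* A sketch of the computation: for a rank-1 descriptor

     size = max (ubound - lbound + 1, 0) * element_size

   whereas higher-rank descriptors obtain the element count from the
   library's size0 and descriptorless arrays reuse the size stored in the
   type; in every case the result is the size in bytes.  */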
7239
7240 static void
7241 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
7242 {
7243 tree elem;
7244 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7245 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
7246 else if (expr->rank > 1)
7247 *size = build_call_expr_loc (input_location,
7248 gfor_fndecl_size0, 1,
7249 gfc_build_addr_expr (NULL, desc));
7250 else
7251 {
7252 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
7253 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
7254
7255 *size = fold_build2_loc (input_location, MINUS_EXPR,
7256 gfc_array_index_type, ubound, lbound);
7257 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7258 *size, gfc_index_one_node);
7259 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
7260 *size, gfc_index_zero_node);
7261 }
7262 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
7263 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7264 *size, fold_convert (gfc_array_index_type, elem));
7265 }
7266
7267 /* Convert an array for passing as an actual parameter. */
7268 /* TODO: Optimize passing g77 arrays. */
7269
7270 void
7271 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, bool g77,
7272 const gfc_symbol *fsym, const char *proc_name,
7273 tree *size)
7274 {
7275 tree ptr;
7276 tree desc;
7277 tree tmp = NULL_TREE;
7278 tree stmt;
7279 tree parent = DECL_CONTEXT (current_function_decl);
7280 bool full_array_var;
7281 bool this_array_result;
7282 bool contiguous;
7283 bool no_pack;
7284 bool array_constructor;
7285 bool good_allocatable;
7286 bool ultimate_ptr_comp;
7287 bool ultimate_alloc_comp;
7288 gfc_symbol *sym;
7289 stmtblock_t block;
7290 gfc_ref *ref;
7291
7292 ultimate_ptr_comp = false;
7293 ultimate_alloc_comp = false;
7294
7295 for (ref = expr->ref; ref; ref = ref->next)
7296 {
7297 if (ref->next == NULL)
7298 break;
7299
7300 if (ref->type == REF_COMPONENT)
7301 {
7302 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
7303 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
7304 }
7305 }
7306
7307 full_array_var = false;
7308 contiguous = false;
7309
7310 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
7311 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
7312
7313 sym = full_array_var ? expr->symtree->n.sym : NULL;
7314
7315 /* The symbol should have an array specification. */
7316 gcc_assert (!sym || sym->as || ref->u.ar.as);
7317
7318 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
7319 {
7320 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
7321 expr->ts.u.cl->backend_decl = tmp;
7322 se->string_length = tmp;
7323 }
7324
7325 /* Is this the result of the enclosing procedure? */
7326 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
7327 if (this_array_result
7328 && (sym->backend_decl != current_function_decl)
7329 && (sym->backend_decl != parent))
7330 this_array_result = false;
7331
7332 /* Pass the address of the array if it is not a pointer or assumed-shape array. */
7333 if (full_array_var && g77 && !this_array_result
7334 && sym->ts.type != BT_DERIVED && sym->ts.type != BT_CLASS)
7335 {
7336 tmp = gfc_get_symbol_decl (sym);
7337
7338 if (sym->ts.type == BT_CHARACTER)
7339 se->string_length = sym->ts.u.cl->backend_decl;
7340
7341 if (!sym->attr.pointer
7342 && sym->as
7343 && sym->as->type != AS_ASSUMED_SHAPE
7344 && sym->as->type != AS_DEFERRED
7345 && sym->as->type != AS_ASSUMED_RANK
7346 && !sym->attr.allocatable)
7347 {
7348 /* Some variables are declared directly, others are declared as
7349 pointers and allocated on the heap. */
7350 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
7351 se->expr = tmp;
7352 else
7353 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
7354 if (size)
7355 array_parameter_size (tmp, expr, size);
7356 return;
7357 }
7358
7359 if (sym->attr.allocatable)
7360 {
7361 if (sym->attr.dummy || sym->attr.result)
7362 {
7363 gfc_conv_expr_descriptor (se, expr);
7364 tmp = se->expr;
7365 }
7366 if (size)
7367 array_parameter_size (tmp, expr, size);
7368 se->expr = gfc_conv_array_data (tmp);
7369 return;
7370 }
7371 }
7372
7373 /* A convenient reduction in scope. */
7374 contiguous = g77 && !this_array_result && contiguous;
7375
7376 /* There is no need to pack and unpack the array if it is contiguous
7377 and not a deferred- or assumed-shape array, or if it is simply
7378 contiguous. */
7379 no_pack = ((sym && sym->as
7380 && !sym->attr.pointer
7381 && sym->as->type != AS_DEFERRED
7382 && sym->as->type != AS_ASSUMED_RANK
7383 && sym->as->type != AS_ASSUMED_SHAPE)
7384 ||
7385 (ref && ref->u.ar.as
7386 && ref->u.ar.as->type != AS_DEFERRED
7387 && ref->u.ar.as->type != AS_ASSUMED_RANK
7388 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
7389 ||
7390 gfc_is_simply_contiguous (expr, false, true));
7391
7392 no_pack = contiguous && no_pack;
7393
7394 /* Array constructors are always contiguous and do not need packing. */
7395 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
7396
7397 /* Same is true of contiguous sections from allocatable variables. */
7398 good_allocatable = contiguous
7399 && expr->symtree
7400 && expr->symtree->n.sym->attr.allocatable;
7401
7402 /* Or ultimate allocatable components. */
7403 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
7404
7405 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
7406 {
7407 gfc_conv_expr_descriptor (se, expr);
7408 /* Deallocate the allocatable components of structures that are
7409 not variables. */
7410 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
7411 && expr->ts.u.derived->attr.alloc_comp
7412 && expr->expr_type != EXPR_VARIABLE)
7413 {
7414 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se->expr, expr->rank);
7415
7416 /* The components shall be deallocated before their containing entity. */
7417 gfc_prepend_expr_to_block (&se->post, tmp);
7418 }
7419 if (expr->ts.type == BT_CHARACTER)
7420 se->string_length = expr->ts.u.cl->backend_decl;
7421 if (size)
7422 array_parameter_size (se->expr, expr, size);
7423 se->expr = gfc_conv_array_data (se->expr);
7424 return;
7425 }
7426
7427 if (this_array_result)
7428 {
7429 /* Result of the enclosing function. */
7430 gfc_conv_expr_descriptor (se, expr);
7431 if (size)
7432 array_parameter_size (se->expr, expr, size);
7433 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
7434
7435 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
7436 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
7437 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
7438 se->expr));
7439
7440 return;
7441 }
7442 else
7443 {
7444 /* Every other type of array. */
7445 se->want_pointer = 1;
7446 gfc_conv_expr_descriptor (se, expr);
7447 if (size)
7448 array_parameter_size (build_fold_indirect_ref_loc (input_location,
7449 se->expr),
7450 expr, size);
7451 }
7452
7453 /* Deallocate the allocatable components of structures that are
7454 not variables, for descriptorless arguments.
7455 Arguments with a descriptor are handled in gfc_conv_procedure_call. */
7456 if (g77 && (expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
7457 && expr->ts.u.derived->attr.alloc_comp
7458 && expr->expr_type != EXPR_VARIABLE)
7459 {
7460 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
7461 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
7462
7463 /* The components shall be deallocated before their containing entity. */
7464 gfc_prepend_expr_to_block (&se->post, tmp);
7465 }
7466
7467 if (g77 || (fsym && fsym->attr.contiguous
7468 && !gfc_is_simply_contiguous (expr, false, true)))
7469 {
7470 tree origptr = NULL_TREE;
7471
7472 desc = se->expr;
7473
7474 /* For contiguous arrays, save the original value of the descriptor. */
7475 if (!g77)
7476 {
7477 origptr = gfc_create_var (pvoid_type_node, "origptr");
7478 tmp = build_fold_indirect_ref_loc (input_location, desc);
7479 tmp = gfc_conv_array_data (tmp);
7480 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7481 TREE_TYPE (origptr), origptr,
7482 fold_convert (TREE_TYPE (origptr), tmp));
7483 gfc_add_expr_to_block (&se->pre, tmp);
7484 }
7485
7486 /* Repack the array. */
7487 if (warn_array_temporaries)
7488 {
7489 if (fsym)
7490 gfc_warning (OPT_Warray_temporaries,
7491 "Creating array temporary at %L for argument %qs",
7492 &expr->where, fsym->name);
7493 else
7494 gfc_warning (OPT_Warray_temporaries,
7495 "Creating array temporary at %L", &expr->where);
7496 }
7497
7498 ptr = build_call_expr_loc (input_location,
7499 gfor_fndecl_in_pack, 1, desc);
7500
7501 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7502 {
7503 tmp = gfc_conv_expr_present (sym);
7504 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
7505 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
7506 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
7507 }
7508
7509 ptr = gfc_evaluate_now (ptr, &se->pre);
7510
7511 /* Use the packed data for the actual argument, except for contiguous arrays,
7512 where the descriptor's data component is set. */
7513 if (g77)
7514 se->expr = ptr;
7515 else
7516 {
7517 tmp = build_fold_indirect_ref_loc (input_location, desc);
7518
7519 gfc_ss * ss = gfc_walk_expr (expr);
7520 if (!transposed_dims (ss))
7521 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
7522 else
7523 {
7524 tree old_field, new_field;
7525
7526 /* The original descriptor has transposed dims so we can't reuse
7527 it directly; we have to create a new one. */
7528 tree old_desc = tmp;
7529 tree new_desc = gfc_create_var (TREE_TYPE (old_desc), "arg_desc");
7530
7531 old_field = gfc_conv_descriptor_dtype (old_desc);
7532 new_field = gfc_conv_descriptor_dtype (new_desc);
7533 gfc_add_modify (&se->pre, new_field, old_field);
7534
7535 old_field = gfc_conv_descriptor_offset (old_desc);
7536 new_field = gfc_conv_descriptor_offset (new_desc);
7537 gfc_add_modify (&se->pre, new_field, old_field);
7538
7539 for (int i = 0; i < expr->rank; i++)
7540 {
7541 old_field = gfc_conv_descriptor_dimension (old_desc,
7542 gfc_rank_cst[get_array_ref_dim_for_loop_dim (ss, i)]);
7543 new_field = gfc_conv_descriptor_dimension (new_desc,
7544 gfc_rank_cst[i]);
7545 gfc_add_modify (&se->pre, new_field, old_field);
7546 }
7547
7548 if (flag_coarray == GFC_FCOARRAY_LIB
7549 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (old_desc))
7550 && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (old_desc))
7551 == GFC_ARRAY_ALLOCATABLE)
7552 {
7553 old_field = gfc_conv_descriptor_token (old_desc);
7554 new_field = gfc_conv_descriptor_token (new_desc);
7555 gfc_add_modify (&se->pre, new_field, old_field);
7556 }
7557
7558 gfc_conv_descriptor_data_set (&se->pre, new_desc, ptr);
7559 se->expr = gfc_build_addr_expr (NULL_TREE, new_desc);
7560 }
7561 gfc_free_ss (ss);
7562 }
7563
7564 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
7565 {
7566 char * msg;
7567
7568 if (fsym && proc_name)
7569 msg = xasprintf ("An array temporary was created for argument "
7570 "'%s' of procedure '%s'", fsym->name, proc_name);
7571 else
7572 msg = xasprintf ("An array temporary was created");
7573
7574 tmp = build_fold_indirect_ref_loc (input_location,
7575 desc);
7576 tmp = gfc_conv_array_data (tmp);
7577 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7578 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7579
7580 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7581 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7582 boolean_type_node,
7583 gfc_conv_expr_present (sym), tmp);
7584
7585 gfc_trans_runtime_check (false, true, tmp, &se->pre,
7586 &expr->where, msg);
7587 free (msg);
7588 }
7589
7590 gfc_start_block (&block);
7591
7592 /* Copy the data back. */
7593 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
7594 {
7595 tmp = build_call_expr_loc (input_location,
7596 gfor_fndecl_in_unpack, 2, desc, ptr);
7597 gfc_add_expr_to_block (&block, tmp);
7598 }
7599
7600 /* Free the temporary. */
7601 tmp = gfc_call_free (ptr);
7602 gfc_add_expr_to_block (&block, tmp);
7603
7604 stmt = gfc_finish_block (&block);
7605
7606 gfc_init_block (&block);
7607 /* Only if it was repacked. This code needs to be executed before the
7608 loop cleanup code. */
7609 tmp = build_fold_indirect_ref_loc (input_location,
7610 desc);
7611 tmp = gfc_conv_array_data (tmp);
7612 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7613 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7614
7615 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7616 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7617 boolean_type_node,
7618 gfc_conv_expr_present (sym), tmp);
7619
7620 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
7621
7622 gfc_add_expr_to_block (&block, tmp);
7623 gfc_add_block_to_block (&block, &se->post);
7624
7625 gfc_init_block (&se->post);
7626
7627 /* Reset the descriptor pointer. */
7628 if (!g77)
7629 {
7630 tmp = build_fold_indirect_ref_loc (input_location, desc);
7631 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
7632 }
7633
7634 gfc_add_block_to_block (&se->post, &block);
7635 }
7636 }
7637
7638
7639 /* Generate code to deallocate an array, if it is allocated. */
7640
7641 tree
7642 gfc_trans_dealloc_allocated (tree descriptor, bool coarray, gfc_expr *expr)
7643 {
7644 tree tmp;
7645 tree var;
7646 stmtblock_t block;
7647
7648 gfc_start_block (&block);
7649
7650 var = gfc_conv_descriptor_data_get (descriptor);
7651 STRIP_NOPS (var);
7652
7653 /* Call array_deallocate with an int * present in the second argument.
7654 Although it is ignored here, its presence ensures that arrays that
7655 are already deallocated are ignored. */
7656 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, NULL_TREE,
7657 NULL_TREE, NULL_TREE, NULL_TREE, true,
7658 expr, coarray);
7659 gfc_add_expr_to_block (&block, tmp);
7660
7661 /* Zero the data pointer. */
7662 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7663 var, build_int_cst (TREE_TYPE (var), 0));
7664 gfc_add_expr_to_block (&block, tmp);
7665
7666 return gfc_finish_block (&block);
7667 }
7668
7669
7670 /* This helper function calculates the number of elements in a full array. */
7671
7672 tree
7673 gfc_full_array_size (stmtblock_t *block, tree decl, int rank)
7674 {
7675 tree idx;
7676 tree nelems;
7677 tree tmp;
7678 idx = gfc_rank_cst[rank - 1];
7679 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
7680 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
7681 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
7682 nelems, tmp);
7683 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7684 tmp, gfc_index_one_node);
7685 tmp = gfc_evaluate_now (tmp, block);
7686
7687 nelems = gfc_conv_descriptor_stride_get (decl, idx);
7688 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7689 nelems, tmp);
7690 return gfc_evaluate_now (tmp, block);
7691 }
7692
7693
7694 /* Allocate dest to the same size as src, and copy src -> dest.
7695 If no_malloc is set, only the copy is done. */
7696
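/* Two code paths follow: when the destination is not an array
   descriptor (e.g. an allocatable scalar or a deferred-length string),
   the destination pointer itself is nulled or, unless no_malloc is set,
   pointed at freshly malloc'd memory and filled with memcpy; for
   descriptors, the element count comes from gfc_full_array_size and the
   descriptor data pointers are used instead.  */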
7697 static tree
7698 duplicate_allocatable (tree dest, tree src, tree type, int rank,
7699 bool no_malloc, bool no_memcpy, tree str_sz,
7700 tree add_when_allocated)
7701 {
7702 tree tmp;
7703 tree size;
7704 tree nelems;
7705 tree null_cond;
7706 tree null_data;
7707 stmtblock_t block;
7708
7709 /* If the source is null, set the destination to null. Otherwise,
7710 allocate memory for the destination and copy the data. */
7711 gfc_init_block (&block);
7712
7713 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
7714 {
7715 tmp = null_pointer_node;
7716 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
7717 gfc_add_expr_to_block (&block, tmp);
7718 null_data = gfc_finish_block (&block);
7719
7720 gfc_init_block (&block);
7721 if (str_sz != NULL_TREE)
7722 size = str_sz;
7723 else
7724 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
7725
7726 if (!no_malloc)
7727 {
7728 tmp = gfc_call_malloc (&block, type, size);
7729 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7730 dest, fold_convert (type, tmp));
7731 gfc_add_expr_to_block (&block, tmp);
7732 }
7733
7734 if (!no_memcpy)
7735 {
7736 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7737 tmp = build_call_expr_loc (input_location, tmp, 3, dest, src,
7738 fold_convert (size_type_node, size));
7739 gfc_add_expr_to_block (&block, tmp);
7740 }
7741 }
7742 else
7743 {
7744 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
7745 null_data = gfc_finish_block (&block);
7746
7747 gfc_init_block (&block);
7748 if (rank)
7749 nelems = gfc_full_array_size (&block, src, rank);
7750 else
7751 nelems = gfc_index_one_node;
7752
7753 if (str_sz != NULL_TREE)
7754 tmp = fold_convert (gfc_array_index_type, str_sz);
7755 else
7756 tmp = fold_convert (gfc_array_index_type,
7757 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
7758 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7759 nelems, tmp);
7760 if (!no_malloc)
7761 {
7762 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
7763 tmp = gfc_call_malloc (&block, tmp, size);
7764 gfc_conv_descriptor_data_set (&block, dest, tmp);
7765 }
7766
7767 /* We know the temporary and the value will be the same length,
7768 so we can use memcpy. */
7769 if (!no_memcpy)
7770 {
7771 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7772 tmp = build_call_expr_loc (input_location, tmp, 3,
7773 gfc_conv_descriptor_data_get (dest),
7774 gfc_conv_descriptor_data_get (src),
7775 fold_convert (size_type_node, size));
7776 gfc_add_expr_to_block (&block, tmp);
7777 }
7778 }
7779
7780 gfc_add_expr_to_block (&block, add_when_allocated);
7781 tmp = gfc_finish_block (&block);
7782
7783 /* Null the destination if the source is null; otherwise do
7784 the allocate and copy. */
7785 if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (src)))
7786 null_cond = src;
7787 else
7788 null_cond = gfc_conv_descriptor_data_get (src);
7789
7790 null_cond = convert (pvoid_type_node, null_cond);
7791 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7792 null_cond, null_pointer_node);
7793 return build3_v (COND_EXPR, null_cond, tmp, null_data);
7794 }
7795
7796
7797 /* Allocate dest to the same size as src, and copy data src -> dest. */
7798
7799 tree
7800 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank,
7801 tree add_when_allocated)
7802 {
7803 return duplicate_allocatable (dest, src, type, rank, false, false,
7804 NULL_TREE, add_when_allocated);
7805 }
7806
7807
7808 /* Copy data src -> dest. */
7809
7810 tree
7811 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
7812 {
7813 return duplicate_allocatable (dest, src, type, rank, true, false,
7814 NULL_TREE, NULL_TREE);
7815 }
7816
7817 /* Allocate dest to the same size as src, but don't copy anything. */
7818
7819 tree
7820 gfc_duplicate_allocatable_nocopy (tree dest, tree src, tree type, int rank)
7821 {
7822 return duplicate_allocatable (dest, src, type, rank, false, true,
7823 NULL_TREE, NULL_TREE);
7824 }
7825
7826
7827 /* Recursively traverse an object of derived type, generating code to
7828 deallocate, nullify or copy allocatable components. This is the workhorse
7829 function for the functions named in the enum below. */
7830
7831 enum {DEALLOCATE_ALLOC_COMP = 1, DEALLOCATE_ALLOC_COMP_NO_CAF,
7832 NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP, COPY_ONLY_ALLOC_COMP,
7833 COPY_ALLOC_COMP_CAF};
7834
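/* For example, the traversal below handles derived types such as

       type :: t
         real, allocatable :: x(:)
         character(:), allocatable :: name
       end type t

   where deallocating, nullifying or copying an object of type t must
   visit each allocatable component in turn and recurse into components
   that themselves have allocatable components.  */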
7835 static tree
7836 structure_alloc_comps (gfc_symbol * der_type, tree decl,
7837 tree dest, int rank, int purpose)
7838 {
7839 gfc_component *c;
7840 gfc_loopinfo loop;
7841 stmtblock_t fnblock;
7842 stmtblock_t loopbody;
7843 stmtblock_t tmpblock;
7844 tree decl_type;
7845 tree tmp;
7846 tree comp;
7847 tree dcmp;
7848 tree nelems;
7849 tree index;
7850 tree var;
7851 tree cdecl;
7852 tree ctype;
7853 tree vref, dref;
7854 tree null_cond = NULL_TREE;
7855 tree add_when_allocated;
7856 bool called_dealloc_with_status;
7857
7858 gfc_init_block (&fnblock);
7859
7860 decl_type = TREE_TYPE (decl);
7861
7862 if ((POINTER_TYPE_P (decl_type))
7863 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
7864 {
7865 decl = build_fold_indirect_ref_loc (input_location, decl);
7866 /* Deref dest in sync with decl, but only when it is not NULL. */
7867 if (dest)
7868 dest = build_fold_indirect_ref_loc (input_location, dest);
7869 }
7870
7871 /* Just in case it gets dereferenced. */
7872 decl_type = TREE_TYPE (decl);
7873
7874 /* If this is an array of derived types with allocatable components,
7875 build a loop and recursively call this function. */
7876 if (TREE_CODE (decl_type) == ARRAY_TYPE
7877 || (GFC_DESCRIPTOR_TYPE_P (decl_type) && rank != 0))
7878 {
7879 tmp = gfc_conv_array_data (decl);
7880 var = build_fold_indirect_ref_loc (input_location, tmp);
7881
7882 /* Get the number of elements - 1 and set the counter. */
7883 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
7884 {
7885 /* Use the descriptor for an allocatable array. Since this
7886 is a full array reference, we only need the descriptor
7887 information from dimension = rank. */
7888 tmp = gfc_full_array_size (&fnblock, decl, rank);
7889 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7890 gfc_array_index_type, tmp,
7891 gfc_index_one_node);
7892
7893 null_cond = gfc_conv_descriptor_data_get (decl);
7894 null_cond = fold_build2_loc (input_location, NE_EXPR,
7895 boolean_type_node, null_cond,
7896 build_int_cst (TREE_TYPE (null_cond), 0));
7897 }
7898 else
7899 {
7900 /* Otherwise use the TYPE_DOMAIN information. */
7901 tmp = array_type_nelts (decl_type);
7902 tmp = fold_convert (gfc_array_index_type, tmp);
7903 }
7904
7905 /* Remember that this is, in fact, the no. of elements - 1. */
7906 nelems = gfc_evaluate_now (tmp, &fnblock);
7907 index = gfc_create_var (gfc_array_index_type, "S");
7908
7909 /* Build the body of the loop. */
7910 gfc_init_block (&loopbody);
7911
7912 vref = gfc_build_array_ref (var, index, NULL);
7913
7914 if (purpose == COPY_ALLOC_COMP || purpose == COPY_ONLY_ALLOC_COMP)
7915 {
7916 tmp = build_fold_indirect_ref_loc (input_location,
7917 gfc_conv_array_data (dest));
7918 dref = gfc_build_array_ref (tmp, index, NULL);
7919 tmp = structure_alloc_comps (der_type, vref, dref, rank,
7920 COPY_ALLOC_COMP);
7921 }
7922 else
7923 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
7924
7925 gfc_add_expr_to_block (&loopbody, tmp);
7926
7927 /* Build the loop and return. */
7928 gfc_init_loopinfo (&loop);
7929 loop.dimen = 1;
7930 loop.from[0] = gfc_index_zero_node;
7931 loop.loopvar[0] = index;
7932 loop.to[0] = nelems;
7933 gfc_trans_scalarizing_loops (&loop, &loopbody);
7934 gfc_add_block_to_block (&fnblock, &loop.pre);
7935
7936 tmp = gfc_finish_block (&fnblock);
7937 /* When copying allocatable components, the above implements the
7938 deep copy. Nevertheless, a deep copy is only allowed when the current
7939 component is allocated; the check is generated in
7940 gfc_duplicate_allocatable (), which places the deep copy code inside
7941 the body of its 'if' by taking tmp (the deep copy code) as its
7942 last argument. */
7943 if (purpose == COPY_ALLOC_COMP
7944 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
7945 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank,
7946 tmp);
7947 else if (null_cond != NULL_TREE)
7948 tmp = build3_v (COND_EXPR, null_cond, tmp,
7949 build_empty_stmt (input_location));
7950
7951 return tmp;
7952 }
7953
7954 /* Otherwise, act on the components or recursively call self to
7955 act on a chain of components. */
7956 for (c = der_type->components; c; c = c->next)
7957 {
7958 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
7959 || c->ts.type == BT_CLASS)
7960 && c->ts.u.derived->attr.alloc_comp;
7961 cdecl = c->backend_decl;
7962 ctype = TREE_TYPE (cdecl);
7963
7964 switch (purpose)
7965 {
7966 case DEALLOCATE_ALLOC_COMP:
7967 case DEALLOCATE_ALLOC_COMP_NO_CAF:
7968
7969 /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
7970 (i.e. this function) so generate all the calls and suppress the
7971 recursion from here, if necessary. */
7972 called_dealloc_with_status = false;
7973 gfc_init_block (&tmpblock);
7974
7975 if ((c->ts.type == BT_DERIVED && !c->attr.pointer)
7976 || (c->ts.type == BT_CLASS && !CLASS_DATA (c)->attr.class_pointer))
7977 {
7978 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7979 decl, cdecl, NULL_TREE);
7980
7981 /* The finalizer frees allocatable components. */
7982 called_dealloc_with_status
7983 = gfc_add_comp_finalizer_call (&tmpblock, comp, c,
7984 purpose == DEALLOCATE_ALLOC_COMP);
7985 }
7986 else
7987 comp = NULL_TREE;
7988
7989 if (c->attr.allocatable && !c->attr.proc_pointer
7990 && (c->attr.dimension
7991 || (c->attr.codimension
7992 && purpose != DEALLOCATE_ALLOC_COMP_NO_CAF)))
7993 {
7994 if (comp == NULL_TREE)
7995 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7996 decl, cdecl, NULL_TREE);
7997 tmp = gfc_trans_dealloc_allocated (comp, c->attr.codimension, NULL);
7998 gfc_add_expr_to_block (&tmpblock, tmp);
7999 }
8000 else if (c->attr.allocatable && !c->attr.codimension)
8001 {
8002 /* Allocatable scalar components. */
8003 if (comp == NULL_TREE)
8004 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8005 decl, cdecl, NULL_TREE);
8006
8007 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
8008 c->ts);
8009 gfc_add_expr_to_block (&tmpblock, tmp);
8010 called_dealloc_with_status = true;
8011
8012 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8013 void_type_node, comp,
8014 build_int_cst (TREE_TYPE (comp), 0));
8015 gfc_add_expr_to_block (&tmpblock, tmp);
8016 }
8017 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable
8018 && (!CLASS_DATA (c)->attr.codimension
8019 || purpose != DEALLOCATE_ALLOC_COMP_NO_CAF))
8020 {
8021 /* Allocatable CLASS components. */
8022
8023 /* Add reference to '_data' component. */
8024 tmp = CLASS_DATA (c)->backend_decl;
8025 comp = fold_build3_loc (input_location, COMPONENT_REF,
8026 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
8027
8028 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
8029 tmp = gfc_trans_dealloc_allocated (comp,
8030 CLASS_DATA (c)->attr.codimension, NULL);
8031 else
8032 {
8033 tmp = gfc_deallocate_scalar_with_status (comp, NULL_TREE, true, NULL,
8034 CLASS_DATA (c)->ts);
8035 gfc_add_expr_to_block (&tmpblock, tmp);
8036 called_dealloc_with_status = true;
8037
8038 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8039 void_type_node, comp,
8040 build_int_cst (TREE_TYPE (comp), 0));
8041 }
8042 gfc_add_expr_to_block (&tmpblock, tmp);
8043
8044 /* Finally, reset the vptr to the declared type vtable and, if
8045 necessary, reset the _len field.
8046
8047 First recover the reference to the component and obtain
8048 the vptr. */
8049 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8050 decl, cdecl, NULL_TREE);
8051 tmp = gfc_class_vptr_get (comp);
8052
8053 if (UNLIMITED_POLY (c))
8054 {
8055 /* Both vptr and _len field should be nulled. */
8056 gfc_add_modify (&tmpblock, tmp,
8057 build_int_cst (TREE_TYPE (tmp), 0));
8058 tmp = gfc_class_len_get (comp);
8059 gfc_add_modify (&tmpblock, tmp,
8060 build_int_cst (TREE_TYPE (tmp), 0));
8061 }
8062 else
8063 {
8064 /* Build the vtable address and set the vptr with it. */
8065 tree vtab;
8066 gfc_symbol *vtable;
8067 vtable = gfc_find_derived_vtab (c->ts.u.derived);
8068 vtab = vtable->backend_decl;
8069 if (vtab == NULL_TREE)
8070 vtab = gfc_get_symbol_decl (vtable);
8071 vtab = gfc_build_addr_expr (NULL, vtab);
8072 vtab = fold_convert (TREE_TYPE (tmp), vtab);
8073 gfc_add_modify (&tmpblock, tmp, vtab);
8074 }
8075 }
8076
8077 if (cmp_has_alloc_comps
8078 && !c->attr.pointer && !c->attr.proc_pointer
8079 && !called_dealloc_with_status)
8080 {
8081 /* Do not deallocate the components of ultimate pointer
8082 components, nor call self recursively if a call has already been
8083 made to gfc_trans_dealloc_allocated.  */
8084 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8085 decl, cdecl, NULL_TREE);
8086 rank = c->as ? c->as->rank : 0;
8087 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
8088 rank, purpose);
8089 gfc_add_expr_to_block (&fnblock, tmp);
8090 }
8091
8092 /* Now add the deallocation of this component. */
8093 gfc_add_block_to_block (&fnblock, &tmpblock);
8094 break;
8095
8096 case NULLIFY_ALLOC_COMP:
8097 if (c->attr.pointer || c->attr.proc_pointer)
8098 continue;
8099 else if (c->attr.allocatable
8100 && (c->attr.dimension || c->attr.codimension))
8101 {
8102 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8103 decl, cdecl, NULL_TREE);
8104 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
8105 }
8106 else if (c->attr.allocatable)
8107 {
8108 /* Allocatable scalar components. */
8109 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8110 decl, cdecl, NULL_TREE);
8111 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8112 void_type_node, comp,
8113 build_int_cst (TREE_TYPE (comp), 0));
8114 gfc_add_expr_to_block (&fnblock, tmp);
8115 if (gfc_deferred_strlen (c, &comp))
8116 {
8117 comp = fold_build3_loc (input_location, COMPONENT_REF,
8118 TREE_TYPE (comp),
8119 decl, comp, NULL_TREE);
8120 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8121 TREE_TYPE (comp), comp,
8122 build_int_cst (TREE_TYPE (comp), 0));
8123 gfc_add_expr_to_block (&fnblock, tmp);
8124 }
8125 }
8126 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
8127 {
8128 /* Allocatable CLASS components. */
8129 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8130 decl, cdecl, NULL_TREE);
8131 /* Add reference to '_data' component. */
8132 tmp = CLASS_DATA (c)->backend_decl;
8133 comp = fold_build3_loc (input_location, COMPONENT_REF,
8134 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
8135 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
8136 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
8137 else
8138 {
8139 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8140 void_type_node, comp,
8141 build_int_cst (TREE_TYPE (comp), 0));
8142 gfc_add_expr_to_block (&fnblock, tmp);
8143 }
8144 }
8145 else if (cmp_has_alloc_comps)
8146 {
8147 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8148 decl, cdecl, NULL_TREE);
8149 rank = c->as ? c->as->rank : 0;
8150 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
8151 rank, purpose);
8152 gfc_add_expr_to_block (&fnblock, tmp);
8153 }
8154 break;
8155
8156 case COPY_ALLOC_COMP_CAF:
8157 if (!c->attr.codimension
8158 && (c->ts.type != BT_CLASS || CLASS_DATA (c)->attr.coarray_comp)
8159 && (c->ts.type != BT_DERIVED
8160 || !c->ts.u.derived->attr.coarray_comp))
8161 continue;
8162
8163 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
8164 cdecl, NULL_TREE);
8165 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
8166 cdecl, NULL_TREE);
8167
8168 if (c->attr.codimension)
8169 {
8170 if (c->ts.type == BT_CLASS)
8171 {
8172 comp = gfc_class_data_get (comp);
8173 dcmp = gfc_class_data_get (dcmp);
8174 }
8175 gfc_conv_descriptor_data_set (&fnblock, dcmp,
8176 gfc_conv_descriptor_data_get (comp));
8177 }
8178 else
8179 {
8180 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
8181 rank, purpose);
8182 gfc_add_expr_to_block (&fnblock, tmp);
8183
8184 }
8185 break;
8186
8187 case COPY_ALLOC_COMP:
8188 if (c->attr.pointer)
8189 continue;
8190
8191 /* We need source and destination components. */
8192 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
8193 cdecl, NULL_TREE);
8194 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
8195 cdecl, NULL_TREE);
8196 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
8197
8198 if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
8199 {
8200 tree ftn_tree;
8201 tree size;
8202 tree dst_data;
8203 tree src_data;
8204 tree null_data;
8205
8206 dst_data = gfc_class_data_get (dcmp);
8207 src_data = gfc_class_data_get (comp);
8208 size = fold_convert (size_type_node,
8209 gfc_class_vtab_size_get (comp));
8210
8211 if (CLASS_DATA (c)->attr.dimension)
8212 {
8213 nelems = gfc_conv_descriptor_size (src_data,
8214 CLASS_DATA (c)->as->rank);
8215 size = fold_build2_loc (input_location, MULT_EXPR,
8216 size_type_node, size,
8217 fold_convert (size_type_node,
8218 nelems));
8219 }
8220 else
8221 nelems = build_int_cst (size_type_node, 1);
8222
8223 if (CLASS_DATA (c)->attr.dimension
8224 || CLASS_DATA (c)->attr.codimension)
8225 {
8226 src_data = gfc_conv_descriptor_data_get (src_data);
8227 dst_data = gfc_conv_descriptor_data_get (dst_data);
8228 }
8229
8230 gfc_init_block (&tmpblock);
8231
8232 /* Coarray components have to have the same allocation status and
8233 shape/type-parameter/effective-type on the LHS and RHS of an
8234 intrinsic assignment. Hence, we did not deallocate them - and
8235 do not allocate them here. */
8236 if (!CLASS_DATA (c)->attr.codimension)
8237 {
8238 ftn_tree = builtin_decl_explicit (BUILT_IN_MALLOC);
8239 tmp = build_call_expr_loc (input_location, ftn_tree, 1, size);
8240 gfc_add_modify (&tmpblock, dst_data,
8241 fold_convert (TREE_TYPE (dst_data), tmp));
8242 }
8243
8244 tmp = gfc_copy_class_to_class (comp, dcmp, nelems,
8245 UNLIMITED_POLY (c));
8246 gfc_add_expr_to_block (&tmpblock, tmp);
8247 tmp = gfc_finish_block (&tmpblock);
8248
8249 gfc_init_block (&tmpblock);
8250 gfc_add_modify (&tmpblock, dst_data,
8251 fold_convert (TREE_TYPE (dst_data),
8252 null_pointer_node));
8253 null_data = gfc_finish_block (&tmpblock);
8254
8255 null_cond = fold_build2_loc (input_location, NE_EXPR,
8256 boolean_type_node, src_data,
8257 null_pointer_node);
8258
8259 gfc_add_expr_to_block (&fnblock, build3_v (COND_EXPR, null_cond,
8260 tmp, null_data));
8261 continue;
8262 }
8263
8264 /* To implement guarded deep copy, i.e., deep copy only allocatable
8265 components that are really allocated, the deep copy code has to
8266 be generated first and then added to the if-block in
8267 gfc_duplicate_allocatable (). */
8268 if (cmp_has_alloc_comps
8269 && !c->attr.proc_pointer)
8270 {
8271 rank = c->as ? c->as->rank : 0;
8272 tmp = fold_convert (TREE_TYPE (dcmp), comp);
8273 gfc_add_modify (&fnblock, dcmp, tmp);
8274 add_when_allocated = structure_alloc_comps (c->ts.u.derived,
8275 comp, dcmp,
8276 rank, purpose);
8277 }
8278 else
8279 add_when_allocated = NULL_TREE;
8280
8281 if (gfc_deferred_strlen (c, &tmp))
8282 {
8283 tree len, size;
8284 len = tmp;
8285 tmp = fold_build3_loc (input_location, COMPONENT_REF,
8286 TREE_TYPE (len),
8287 decl, len, NULL_TREE);
8288 len = fold_build3_loc (input_location, COMPONENT_REF,
8289 TREE_TYPE (len),
8290 dest, len, NULL_TREE);
8291 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8292 TREE_TYPE (len), len, tmp);
8293 gfc_add_expr_to_block (&fnblock, tmp);
8294 size = size_of_string_in_bytes (c->ts.kind, len);
8295 /* This component cannot have allocatable components,
8296 so the add_when_allocated argument of duplicate_allocatable ()
8297 is always NULL. */
8298 tmp = duplicate_allocatable (dcmp, comp, ctype, rank,
8299 false, false, size, NULL_TREE);
8300 gfc_add_expr_to_block (&fnblock, tmp);
8301 }
8302 else if (c->attr.allocatable && !c->attr.proc_pointer
8303 && (!(cmp_has_alloc_comps && c->as)
8304 || c->attr.codimension))
8305 {
8306 rank = c->as ? c->as->rank : 0;
8307 if (c->attr.codimension)
8308 tmp = gfc_copy_allocatable_data (dcmp, comp, ctype, rank);
8309 else
8310 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank,
8311 add_when_allocated);
8312 gfc_add_expr_to_block (&fnblock, tmp);
8313 }
8314 else
8315 if (cmp_has_alloc_comps)
8316 gfc_add_expr_to_block (&fnblock, add_when_allocated);
8317
8318 break;
8319
8320 default:
8321 gcc_unreachable ();
8322 break;
8323 }
8324 }
8325
8326 return gfc_finish_block (&fnblock);
8327 }
8328
8329 /* Recursively traverse an object of derived type, generating code to
8330 nullify allocatable components. */
8331
8332 tree
8333 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
8334 {
8335 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8336 NULLIFY_ALLOC_COMP);
8337 }
8338
8339
8340 /* Recursively traverse an object of derived type, generating code to
8341 deallocate allocatable components. */
8342
8343 tree
8344 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
8345 {
8346 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8347 DEALLOCATE_ALLOC_COMP);
8348 }
8349
8350
8351 /* Recursively traverse an object of derived type, generating code to
8352 deallocate allocatable components. But do not deallocate coarrays.
8353 To be used for intrinsic assignment, which may not change the allocation
8354 status of coarrays. */
8355
8356 tree
8357 gfc_deallocate_alloc_comp_no_caf (gfc_symbol * der_type, tree decl, int rank)
8358 {
8359 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8360 DEALLOCATE_ALLOC_COMP_NO_CAF);
8361 }
8362
8363
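/* Recursively traverse an object of derived type, generating code to
   copy the data pointers of its coarray components from decl into dest.
   Used for intrinsic assignment, which must not change the allocation
   status of coarrays.  */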
8364 tree
8365 gfc_reassign_alloc_comp_caf (gfc_symbol *der_type, tree decl, tree dest)
8366 {
8367 return structure_alloc_comps (der_type, decl, dest, 0, COPY_ALLOC_COMP_CAF);
8368 }
8369
8370
8371 /* Recursively traverse an object of derived type, generating code to
8372 copy it and its allocatable components. */
8373
8374 tree
8375 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
8376 {
8377 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
8378 }
8379
8380
8381 /* Recursively traverse an object of derived type, generating code to
8382 copy only its allocatable components. */
8383
8384 tree
8385 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
8386 {
8387 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
8388 }
8389
8390
8391 /* Returns the value of LBOUND for an expression. This could be broken out
8392 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
8393 called by gfc_alloc_allocatable_for_assignment. */
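/* For example (illustrative declarations only): given

       real, allocatable :: a(:)
       real :: b(-3:3)
       a = b          ! on (re)allocation, lbound (a) becomes -3

   the lower bound of the rhs is needed to set the bounds of the
   (re)allocated lhs; this helper obtains it from the descriptor when one
   is available, or from the variable's array type otherwise.  */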
8394 static tree
8395 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
8396 {
8397 tree lbound;
8398 tree ubound;
8399 tree stride;
8400 tree cond, cond1, cond3, cond4;
8401 tree tmp;
8402 gfc_ref *ref;
8403
8404 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
8405 {
8406 tmp = gfc_rank_cst[dim];
8407 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
8408 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
8409 stride = gfc_conv_descriptor_stride_get (desc, tmp);
8410 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
8411 ubound, lbound);
8412 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
8413 stride, gfc_index_zero_node);
8414 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
8415 boolean_type_node, cond3, cond1);
8416 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
8417 stride, gfc_index_zero_node);
8418 if (assumed_size)
8419 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8420 tmp, build_int_cst (gfc_array_index_type,
8421 expr->rank - 1));
8422 else
8423 cond = boolean_false_node;
8424
8425 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
8426 boolean_type_node, cond3, cond4);
8427 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
8428 boolean_type_node, cond, cond1);
8429
8430 return fold_build3_loc (input_location, COND_EXPR,
8431 gfc_array_index_type, cond,
8432 lbound, gfc_index_one_node);
8433 }
8434
8435 if (expr->expr_type == EXPR_FUNCTION)
8436 {
8437 /* A conversion function, so use the argument. */
8438 gcc_assert (expr->value.function.isym
8439 && expr->value.function.isym->conversion);
8440 expr = expr->value.function.actual->expr;
8441 }
8442
8443 if (expr->expr_type == EXPR_VARIABLE)
8444 {
8445 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
8446 for (ref = expr->ref; ref; ref = ref->next)
8447 {
8448 if (ref->type == REF_COMPONENT
8449 && ref->u.c.component->as
8450 && ref->next
8451 && ref->next->u.ar.type == AR_FULL)
8452 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
8453 }
8454 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
8455 }
8456
8457 return gfc_index_one_node;
8458 }
8459
8460
8461 /* Returns true if an expression represents an lhs that can be reallocated
8462 on assignment. */
8463
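/* For instance (illustrative names): with
       real, allocatable :: a(:)
   the full-array assignment  a = ...  has a reallocatable lhs, as does
   x%c = ...  where c is an allocatable array component, whereas a
   section such as  a(2:5) = ...  does not.  */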
8464 bool
8465 gfc_is_reallocatable_lhs (gfc_expr *expr)
8466 {
8467 gfc_ref * ref;
8468
8469 if (!expr->ref)
8470 return false;
8471
8472 /* An allocatable variable. */
8473 if (expr->symtree->n.sym->attr.allocatable
8474 && expr->ref
8475 && expr->ref->type == REF_ARRAY
8476 && expr->ref->u.ar.type == AR_FULL)
8477 return true;
8478
8479 /* All that can be left are allocatable components. */
8480 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
8481 && expr->symtree->n.sym->ts.type != BT_CLASS)
8482 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
8483 return false;
8484
8485 /* Find a component ref followed by an array reference. */
8486 for (ref = expr->ref; ref; ref = ref->next)
8487 if (ref->next
8488 && ref->type == REF_COMPONENT
8489 && ref->next->type == REF_ARRAY
8490 && !ref->next->next)
8491 break;
8492
8493 if (!ref)
8494 return false;
8495
8496 /* Return true if valid reallocatable lhs. */
8497 if (ref->u.c.component->attr.allocatable
8498 && ref->next->u.ar.type == AR_FULL)
8499 return true;
8500
8501 return false;
8502 }
8503
8504
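/* Compute the string length of a character concatenation, e.g. of the
   rhs in  s = a // b  (illustrative names), by adding the lengths of
   the two operands; recurse into nested concatenations and fall back to
   the operands' string-length expressions when no constant length is
   known.  */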
8505 static tree
8506 concat_str_length (gfc_expr* expr)
8507 {
8508 tree type;
8509 tree len1;
8510 tree len2;
8511 gfc_se se;
8512
8513 type = gfc_typenode_for_spec (&expr->value.op.op1->ts);
8514 len1 = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8515 if (len1 == NULL_TREE)
8516 {
8517 if (expr->value.op.op1->expr_type == EXPR_OP)
8518 len1 = concat_str_length (expr->value.op.op1);
8519 else if (expr->value.op.op1->expr_type == EXPR_CONSTANT)
8520 len1 = build_int_cst (gfc_charlen_type_node,
8521 expr->value.op.op1->value.character.length);
8522 else if (expr->value.op.op1->ts.u.cl->length)
8523 {
8524 gfc_init_se (&se, NULL);
8525 gfc_conv_expr (&se, expr->value.op.op1->ts.u.cl->length);
8526 len1 = se.expr;
8527 }
8528 else
8529 {
8530 /* Last resort! */
8531 gfc_init_se (&se, NULL);
8532 se.want_pointer = 1;
8533 se.descriptor_only = 1;
8534 gfc_conv_expr (&se, expr->value.op.op1);
8535 len1 = se.string_length;
8536 }
8537 }
8538
8539 type = gfc_typenode_for_spec (&expr->value.op.op2->ts);
8540 len2 = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8541 if (len2 == NULL_TREE)
8542 {
8543 if (expr->value.op.op2->expr_type == EXPR_OP)
8544 len2 = concat_str_length (expr->value.op.op2);
8545 else if (expr->value.op.op2->expr_type == EXPR_CONSTANT)
8546 len2 = build_int_cst (gfc_charlen_type_node,
8547 expr->value.op.op2->value.character.length);
8548 else if (expr->value.op.op2->ts.u.cl->length)
8549 {
8550 gfc_init_se (&se, NULL);
8551 gfc_conv_expr (&se, expr->value.op.op2->ts.u.cl->length);
8552 len2 = se.expr;
8553 }
8554 else
8555 {
8556 /* Last resort! */
8557 gfc_init_se (&se, NULL);
8558 se.want_pointer = 1;
8559 se.descriptor_only = 1;
8560 gfc_conv_expr (&se, expr->value.op.op2);
8561 len2 = se.string_length;
8562 }
8563 }
8564
8565 gcc_assert (len1 && len2);
8566 len1 = fold_convert (gfc_charlen_type_node, len1);
8567 len2 = fold_convert (gfc_charlen_type_node, len2);
8568
8569 return fold_build2_loc (input_location, PLUS_EXPR,
8570 gfc_charlen_type_node, len1, len2);
8571 }
8572
8573
8574 /* Allocate the lhs of an assignment to an allocatable array, otherwise
8575 reallocate it. */
8576
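/* For example (illustrative code): with

       real, allocatable :: a(:)
       a = [1.0, 2.0, 3.0]   ! allocated with shape [3]
       a = [a, 4.0]          ! reallocated with shape [4]

   the code generated below checks whether the lhs is unallocated or has
   the wrong shape (or, for deferred-length characters, the wrong length)
   and calls malloc or realloc accordingly.  */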
8577 tree
8578 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
8579 gfc_expr *expr1,
8580 gfc_expr *expr2)
8581 {
8582 stmtblock_t realloc_block;
8583 stmtblock_t alloc_block;
8584 stmtblock_t fblock;
8585 gfc_ss *rss;
8586 gfc_ss *lss;
8587 gfc_array_info *linfo;
8588 tree realloc_expr;
8589 tree alloc_expr;
8590 tree size1;
8591 tree size2;
8592 tree array1;
8593 tree cond_null;
8594 tree cond;
8595 tree tmp;
8596 tree tmp2;
8597 tree lbound;
8598 tree ubound;
8599 tree desc;
8600 tree old_desc;
8601 tree desc2;
8602 tree offset;
8603 tree jump_label1;
8604 tree jump_label2;
8605 tree neq_size;
8606 tree lbd;
8607 int n;
8608 int dim;
8609 gfc_array_spec * as;
8610
8611 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
8612 Find the lhs expression in the loop chain and set expr1 and
8613 expr2 accordingly. */
8614 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
8615 {
8616 expr2 = expr1;
8617 /* Find the ss for the lhs. */
8618 lss = loop->ss;
8619 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
8620 if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE)
8621 break;
8622 if (lss == gfc_ss_terminator)
8623 return NULL_TREE;
8624 expr1 = lss->info->expr;
8625 }
8626
8627 /* Bail out if this is not a valid allocate on assignment. */
8628 if (!gfc_is_reallocatable_lhs (expr1)
8629 || (expr2 && !expr2->rank))
8630 return NULL_TREE;
8631
8632 /* Find the ss for the lhs. */
8633 lss = loop->ss;
8634 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
8635 if (lss->info->expr == expr1)
8636 break;
8637
8638 if (lss == gfc_ss_terminator)
8639 return NULL_TREE;
8640
8641 linfo = &lss->info->data.array;
8642
8643 /* Find an ss for the rhs. For operator expressions, we see the
8644 ss's for the operands. Any one of these will do. */
8645 rss = loop->ss;
8646 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
8647 if (rss->info->expr != expr1 && rss != loop->temp_ss)
8648 break;
8649
8650 if (expr2 && rss == gfc_ss_terminator)
8651 return NULL_TREE;
8652
8653 gfc_start_block (&fblock);
8654
8655 /* Since the lhs is allocatable, this must be a descriptor type.
8656 Get the data and array size. */
8657 desc = linfo->descriptor;
8658 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
8659 array1 = gfc_conv_descriptor_data_get (desc);
8660
8661 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
8662 deallocated if expr is an array of different shape or any of the
8663 corresponding length type parameter values of variable and expr
8664 differ." This assures F95 compatibility. */
8665 jump_label1 = gfc_build_label_decl (NULL_TREE);
8666 jump_label2 = gfc_build_label_decl (NULL_TREE);
8667
8668 /* Allocate if data is NULL. */
8669 cond_null = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8670 array1, build_int_cst (TREE_TYPE (array1), 0));
8671
8672 if (expr1->ts.deferred)
8673 cond_null = gfc_evaluate_now (boolean_true_node, &fblock);
8674 else
8675 cond_null = gfc_evaluate_now (cond_null, &fblock);
8676
8677 tmp = build3_v (COND_EXPR, cond_null,
8678 build1_v (GOTO_EXPR, jump_label1),
8679 build_empty_stmt (input_location));
8680 gfc_add_expr_to_block (&fblock, tmp);
8681
8682 /* Get arrayspec if expr is a full array. */
8683 if (expr2 && expr2->expr_type == EXPR_FUNCTION
8684 && expr2->value.function.isym
8685 && expr2->value.function.isym->conversion)
8686 {
8687 /* For conversion functions, take the arg. */
8688 gfc_expr *arg = expr2->value.function.actual->expr;
8689 as = gfc_get_full_arrayspec_from_expr (arg);
8690 }
8691 else if (expr2)
8692 as = gfc_get_full_arrayspec_from_expr (expr2);
8693 else
8694 as = NULL;
8695
8696 /* If the lhs shape is not the same as that of the rhs, jump to setting
8697 the bounds and doing the reallocation... */
8698 for (n = 0; n < expr1->rank; n++)
8699 {
8700 /* Check the shape. */
8701 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8702 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
8703 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8704 gfc_array_index_type,
8705 loop->to[n], loop->from[n]);
8706 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8707 gfc_array_index_type,
8708 tmp, lbound);
8709 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8710 gfc_array_index_type,
8711 tmp, ubound);
8712 cond = fold_build2_loc (input_location, NE_EXPR,
8713 boolean_type_node,
8714 tmp, gfc_index_zero_node);
8715 tmp = build3_v (COND_EXPR, cond,
8716 build1_v (GOTO_EXPR, jump_label1),
8717 build_empty_stmt (input_location));
8718 gfc_add_expr_to_block (&fblock, tmp);
8719 }
8720
8721 /* ...else jump past the (re)alloc code. */
8722 tmp = build1_v (GOTO_EXPR, jump_label2);
8723 gfc_add_expr_to_block (&fblock, tmp);
8724
8725 /* Add the label to start automatic (re)allocation. */
8726 tmp = build1_v (LABEL_EXPR, jump_label1);
8727 gfc_add_expr_to_block (&fblock, tmp);
8728
8729 /* If the lhs has not been allocated, its bounds will not have been
8730 initialized and so its size is set to zero. */
8731 size1 = gfc_create_var (gfc_array_index_type, NULL);
8732 gfc_init_block (&alloc_block);
8733 gfc_add_modify (&alloc_block, size1, gfc_index_zero_node);
8734 gfc_init_block (&realloc_block);
8735 gfc_add_modify (&realloc_block, size1,
8736 gfc_conv_descriptor_size (desc, expr1->rank));
8737 tmp = build3_v (COND_EXPR, cond_null,
8738 gfc_finish_block (&alloc_block),
8739 gfc_finish_block (&realloc_block));
8740 gfc_add_expr_to_block (&fblock, tmp);
8741
8742 /* Get the rhs size and fix it. */
8743 if (expr2)
8744 desc2 = rss->info->data.array.descriptor;
8745 else
8746 desc2 = NULL_TREE;
8747
8748 size2 = gfc_index_one_node;
8749 for (n = 0; n < expr2->rank; n++)
8750 {
8751 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8752 gfc_array_index_type,
8753 loop->to[n], loop->from[n]);
8754 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8755 gfc_array_index_type,
8756 tmp, gfc_index_one_node);
8757 size2 = fold_build2_loc (input_location, MULT_EXPR,
8758 gfc_array_index_type,
8759 tmp, size2);
8760 }
8761 size2 = gfc_evaluate_now (size2, &fblock);
8762
8763 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
8764 size1, size2);
8765
8766 /* If the lhs is deferred length, assume that the element size
8767 changes and force a reallocation. */
8768 if (expr1->ts.deferred)
8769 neq_size = gfc_evaluate_now (boolean_true_node, &fblock);
8770 else
8771 neq_size = gfc_evaluate_now (cond, &fblock);
8772
8773 /* Deallocation of allocatable components will have to occur on
8774 reallocation. Fix the old descriptor now. */
8775 if ((expr1->ts.type == BT_DERIVED)
8776 && expr1->ts.u.derived->attr.alloc_comp)
8777 old_desc = gfc_evaluate_now (desc, &fblock);
8778 else
8779 old_desc = NULL_TREE;
8780
8781 /* Now modify the lhs descriptor and the associated scalarizer
8782 variables. F2003 7.4.1.3: "If variable is or becomes an
8783 unallocated allocatable variable, then it is allocated with each
8784 deferred type parameter equal to the corresponding type parameters
8785 of expr , with the shape of expr , and with each lower bound equal
8786 to the corresponding element of LBOUND(expr)."
8787 Reuse size1 to keep a dimension-by-dimension track of the
8788 stride of the new array. */
8789 size1 = gfc_index_one_node;
8790 offset = gfc_index_zero_node;
8791
8792 for (n = 0; n < expr2->rank; n++)
8793 {
8794 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8795 gfc_array_index_type,
8796 loop->to[n], loop->from[n]);
8797 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8798 gfc_array_index_type,
8799 tmp, gfc_index_one_node);
8800
8801 lbound = gfc_index_one_node;
8802 ubound = tmp;
8803
8804 if (as)
8805 {
8806 lbd = get_std_lbound (expr2, desc2, n,
8807 as->type == AS_ASSUMED_SIZE);
8808 ubound = fold_build2_loc (input_location,
8809 MINUS_EXPR,
8810 gfc_array_index_type,
8811 ubound, lbound);
8812 ubound = fold_build2_loc (input_location,
8813 PLUS_EXPR,
8814 gfc_array_index_type,
8815 ubound, lbd);
8816 lbound = lbd;
8817 }
8818
8819 gfc_conv_descriptor_lbound_set (&fblock, desc,
8820 gfc_rank_cst[n],
8821 lbound);
8822 gfc_conv_descriptor_ubound_set (&fblock, desc,
8823 gfc_rank_cst[n],
8824 ubound);
8825 gfc_conv_descriptor_stride_set (&fblock, desc,
8826 gfc_rank_cst[n],
8827 size1);
8828 lbound = gfc_conv_descriptor_lbound_get (desc,
8829 gfc_rank_cst[n]);
8830 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
8831 gfc_array_index_type,
8832 lbound, size1);
8833 offset = fold_build2_loc (input_location, MINUS_EXPR,
8834 gfc_array_index_type,
8835 offset, tmp2);
8836 size1 = fold_build2_loc (input_location, MULT_EXPR,
8837 gfc_array_index_type,
8838 tmp, size1);
8839 }
8840
8841 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
8842 the array offset is saved and the info.offset is used for a
8843 running offset. Use the saved_offset instead. */
8844 tmp = gfc_conv_descriptor_offset (desc);
8845 gfc_add_modify (&fblock, tmp, offset);
8846 if (linfo->saved_offset
8847 && TREE_CODE (linfo->saved_offset) == VAR_DECL)
8848 gfc_add_modify (&fblock, linfo->saved_offset, tmp);
8849
8850 /* Now set the deltas for the lhs. */
8851 for (n = 0; n < expr1->rank; n++)
8852 {
8853 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8854 dim = lss->dim[n];
8855 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8856 gfc_array_index_type, tmp,
8857 loop->from[dim]);
8858 if (linfo->delta[dim]
8859 && TREE_CODE (linfo->delta[dim]) == VAR_DECL)
8860 gfc_add_modify (&fblock, linfo->delta[dim], tmp);
8861 }
8862
8863 /* Get the new lhs size in bytes. */
8864 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
8865 {
8866 if (expr2->ts.deferred)
8867 {
8868 if (TREE_CODE (expr2->ts.u.cl->backend_decl) == VAR_DECL)
8869 tmp = expr2->ts.u.cl->backend_decl;
8870 else
8871 tmp = rss->info->string_length;
8872 }
8873 else
8874 {
8875 tmp = expr2->ts.u.cl->backend_decl;
8876 if (!tmp && expr2->expr_type == EXPR_OP
8877 && expr2->value.op.op == INTRINSIC_CONCAT)
8878 {
8879 tmp = concat_str_length (expr2);
8880 expr2->ts.u.cl->backend_decl = gfc_evaluate_now (tmp, &fblock);
8881 }
8882 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
8883 }
8884
8885 if (expr1->ts.u.cl->backend_decl
8886 && TREE_CODE (expr1->ts.u.cl->backend_decl) == VAR_DECL)
8887 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
8888 else
8889 gfc_add_modify (&fblock, lss->info->string_length, tmp);
8890 }
8891 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
8892 {
8893 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
8894 tmp = fold_build2_loc (input_location, MULT_EXPR,
8895 gfc_array_index_type, tmp,
8896 expr1->ts.u.cl->backend_decl);
8897 }
8898 else
8899 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
8900 tmp = fold_convert (gfc_array_index_type, tmp);
8901 size2 = fold_build2_loc (input_location, MULT_EXPR,
8902 gfc_array_index_type,
8903 tmp, size2);
8904 size2 = fold_convert (size_type_node, size2);
8905 size2 = fold_build2_loc (input_location, MAX_EXPR, size_type_node,
8906 size2, size_one_node);
8907 size2 = gfc_evaluate_now (size2, &fblock);
8908
8909 /* For deferred character length, the 'size' field of the dtype might
8910 have changed, so set the dtype. */
8911 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
8912 && expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
8913 {
8914 tree type;
8915 tmp = gfc_conv_descriptor_dtype (desc);
8916 if (expr2->ts.u.cl->backend_decl)
8917 type = gfc_typenode_for_spec (&expr2->ts);
8918 else
8919 type = gfc_typenode_for_spec (&expr1->ts);
8920
8921 gfc_add_modify (&fblock, tmp,
8922 gfc_get_dtype_rank_type (expr1->rank, type));
8923 }
8924
8925 /* Realloc expression. Note that the scalarizer uses desc.data
8926 in the array reference - (*desc.data)[<element>]. */
8927 gfc_init_block (&realloc_block);
8928
8929 if ((expr1->ts.type == BT_DERIVED)
8930 && expr1->ts.u.derived->attr.alloc_comp)
8931 {
8932 tmp = gfc_deallocate_alloc_comp_no_caf (expr1->ts.u.derived, old_desc,
8933 expr1->rank);
8934 gfc_add_expr_to_block (&realloc_block, tmp);
8935 }
8936
8937 tmp = build_call_expr_loc (input_location,
8938 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
8939 fold_convert (pvoid_type_node, array1),
8940 size2);
8941 gfc_conv_descriptor_data_set (&realloc_block,
8942 desc, tmp);
8943
8944 if ((expr1->ts.type == BT_DERIVED)
8945 && expr1->ts.u.derived->attr.alloc_comp)
8946 {
8947 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
8948 expr1->rank);
8949 gfc_add_expr_to_block (&realloc_block, tmp);
8950 }
8951
8952 realloc_expr = gfc_finish_block (&realloc_block);
8953
8954 /* Only reallocate if sizes are different. */
8955 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
8956 build_empty_stmt (input_location));
8957 realloc_expr = tmp;
8958
8959
8960 /* Malloc expression. */
8961 gfc_init_block (&alloc_block);
8962 tmp = build_call_expr_loc (input_location,
8963 builtin_decl_explicit (BUILT_IN_MALLOC),
8964 1, size2);
8965 gfc_conv_descriptor_data_set (&alloc_block,
8966 desc, tmp);
8967
8968 /* We already set the dtype in the case of deferred character
8969 length arrays. */
8970 if (!(GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
8971 && expr1->ts.type == BT_CHARACTER && expr1->ts.deferred))
8972 {
8973 tmp = gfc_conv_descriptor_dtype (desc);
8974 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
8975 }
8976
8977 if ((expr1->ts.type == BT_DERIVED)
8978 && expr1->ts.u.derived->attr.alloc_comp)
8979 {
8980 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
8981 expr1->rank);
8982 gfc_add_expr_to_block (&alloc_block, tmp);
8983 }
8984 alloc_expr = gfc_finish_block (&alloc_block);
8985
8986 /* Malloc if not allocated; realloc otherwise. */
8987 tmp = build_int_cst (TREE_TYPE (array1), 0);
8988 cond = fold_build2_loc (input_location, EQ_EXPR,
8989 boolean_type_node,
8990 array1, tmp);
8991 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
8992 gfc_add_expr_to_block (&fblock, tmp);
8993
8994 /* Make sure that the scalarizer data pointer is updated. */
8995 if (linfo->data
8996 && TREE_CODE (linfo->data) == VAR_DECL)
8997 {
8998 tmp = gfc_conv_descriptor_data_get (desc);
8999 gfc_add_modify (&fblock, linfo->data, tmp);
9000 }
9001
9002 /* Add the exit label. */
9003 tmp = build1_v (LABEL_EXPR, jump_label2);
9004 gfc_add_expr_to_block (&fblock, tmp);
9005
9006 return gfc_finish_block (&fblock);
9007 }
9008
9009
9010 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
9011 Do likewise, recursively if necessary, with the allocatable components of
9012 derived types. */
9013
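/* For example ('s' and 'work' are illustrative names): for a local
   variable such as

       subroutine s
         real, allocatable :: work(:)
       end subroutine s

   the init block nullifies the data pointer of 'work' on entry and the
   cleanup block frees it, and any allocatable components, on exit.  */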
9014 void
9015 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
9016 {
9017 tree type;
9018 tree tmp;
9019 tree descriptor;
9020 stmtblock_t init;
9021 stmtblock_t cleanup;
9022 locus loc;
9023 int rank;
9024 bool sym_has_alloc_comp, has_finalizer;
9025
9026 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
9027 || sym->ts.type == BT_CLASS)
9028 && sym->ts.u.derived->attr.alloc_comp;
9029 has_finalizer = sym->ts.type == BT_CLASS || sym->ts.type == BT_DERIVED
9030 ? gfc_is_finalizable (sym->ts.u.derived, NULL) : false;
9031
9032 /* Make sure the frontend gets these right. */
9033 gcc_assert (sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp
9034 || has_finalizer);
9035
9036 gfc_save_backend_locus (&loc);
9037 gfc_set_backend_locus (&sym->declared_at);
9038 gfc_init_block (&init);
9039
9040 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
9041 || TREE_CODE (sym->backend_decl) == PARM_DECL);
9042
9043 if (sym->ts.type == BT_CHARACTER
9044 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
9045 {
9046 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
9047 gfc_trans_vla_type_sizes (sym, &init);
9048 }
9049
9050 /* Dummy, use-associated and result variables don't need anything special. */
9051 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
9052 {
9053 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
9054 gfc_restore_backend_locus (&loc);
9055 return;
9056 }
9057
9058 descriptor = sym->backend_decl;
9059
9060 /* Although static, derived types with default initializers and
9061 allocatable components must not be nulled wholesale; instead they
9062 are treated component by component. */
9063 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp && !has_finalizer)
9064 {
9065 /* SAVEd variables are not freed on exit. */
9066 gfc_trans_static_array_pointer (sym);
9067
9068 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
9069 gfc_restore_backend_locus (&loc);
9070 return;
9071 }
9072
9073 /* Get the descriptor type. */
9074 type = TREE_TYPE (sym->backend_decl);
9075
9076 if ((sym_has_alloc_comp || (has_finalizer && sym->ts.type != BT_CLASS))
9077 && !(sym->attr.pointer || sym->attr.allocatable))
9078 {
9079 if (!sym->attr.save
9080 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
9081 {
9082 if (sym->value == NULL
9083 || !gfc_has_default_initializer (sym->ts.u.derived))
9084 {
9085 rank = sym->as ? sym->as->rank : 0;
9086 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
9087 descriptor, rank);
9088 gfc_add_expr_to_block (&init, tmp);
9089 }
9090 else
9091 gfc_init_default_dt (sym, &init, false);
9092 }
9093 }
9094 else if (!GFC_DESCRIPTOR_TYPE_P (type))
9095 {
9096 /* If the backend_decl is not a descriptor, we must have a pointer
9097 to one. */
9098 descriptor = build_fold_indirect_ref_loc (input_location,
9099 sym->backend_decl);
9100 type = TREE_TYPE (descriptor);
9101 }
9102
9103 /* NULLIFY the data pointer, for non-saved allocatables. */
9104 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save && sym->attr.allocatable)
9105 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
9106
9107 gfc_restore_backend_locus (&loc);
9108 gfc_init_block (&cleanup);
9109
9110 /* Allocatable arrays need to be freed when they go out of scope.
9111 The allocatable components of pointers must not be touched. */
9112 if (!sym->attr.allocatable && has_finalizer && sym->ts.type != BT_CLASS
9113 && !sym->attr.pointer && !sym->attr.artificial && !sym->attr.save
9114 && !sym->ns->proc_name->attr.is_main_program)
9115 {
9116 gfc_expr *e;
9117 sym->attr.referenced = 1;
9118 e = gfc_lval_expr_from_sym (sym);
9119 gfc_add_finalizer_call (&cleanup, e);
9120 gfc_free_expr (e);
9121 }
9122 else if ((!sym->attr.allocatable || !has_finalizer)
9123 && sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
9124 && !sym->attr.pointer && !sym->attr.save
9125 && !sym->ns->proc_name->attr.is_main_program)
9126 {
9127 int rank;
9128 rank = sym->as ? sym->as->rank : 0;
9129 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
9130 gfc_add_expr_to_block (&cleanup, tmp);
9131 }
9132
9133 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
9134 && !sym->attr.save && !sym->attr.result
9135 && !sym->ns->proc_name->attr.is_main_program)
9136 {
9137 gfc_expr *e;
9138 e = has_finalizer ? gfc_lval_expr_from_sym (sym) : NULL;
9139 tmp = gfc_trans_dealloc_allocated (sym->backend_decl,
9140 sym->attr.codimension, e);
9141 if (e)
9142 gfc_free_expr (e);
9143 gfc_add_expr_to_block (&cleanup, tmp);
9144 }
9145
9146 gfc_add_init_cleanup (block, gfc_finish_block (&init),
9147 gfc_finish_block (&cleanup));
9148 }
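
/* As a rough illustration of what the init/cleanup pair built above does
   (simplified, not the literal generated code): for a local allocatable
   array such as

       subroutine sub ()
         real, allocatable :: a(:)
         ...
       end subroutine sub

   the init block stores a NULL data pointer in a's descriptor on entry, and
   the cleanup block deallocates a on exit if it is still allocated; for
   derived types with allocatable components the components are nullified
   and freed recursively instead, as selected by the tests above.  */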
9149
9150 /************ Expression Walking Functions ******************/
9151
9152 /* Walk a variable reference.
9153
9154 Possible extension - multiple component subscripts.
9155 x(:,:) = foo%a(:)%b(:)
9156 Transforms to
9157 forall (i=..., j=...)
9158 x(i,j) = foo%a(j)%b(i)
9159 end forall
9160 This adds a fair amount of complexity because you need to deal with more
9161 than one ref. Maybe handle in a similar manner to vector subscripts.
9162 Maybe not worth the effort. */
9163
9164
9165 static gfc_ss *
9166 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
9167 {
9168 gfc_ref *ref;
9169
9170 for (ref = expr->ref; ref; ref = ref->next)
9171 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
9172 break;
9173
9174 return gfc_walk_array_ref (ss, expr, ref);
9175 }
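
/* For instance (illustrative): in foo%a(3)%b(:), the reference a(3) is an
   AR_ELEMENT and is therefore skipped by the loop above; the first
   non-elemental array reference is the section b(:), and that is the ref
   passed on to gfc_walk_array_ref.  If the variable has no such array
   reference at all, it is scalar and the chain is returned unchanged.  */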
9176
9177
9178 gfc_ss *
9179 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
9180 {
9181 gfc_array_ref *ar;
9182 gfc_ss *newss;
9183 int n;
9184
9185 for (; ref; ref = ref->next)
9186 {
9187 if (ref->type == REF_SUBSTRING)
9188 {
9189 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
9190 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
9191 }
9192
9193 /* We're only interested in array sections from now on. */
9194 if (ref->type != REF_ARRAY)
9195 continue;
9196
9197 ar = &ref->u.ar;
9198
9199 switch (ar->type)
9200 {
9201 case AR_ELEMENT:
9202 for (n = ar->dimen - 1; n >= 0; n--)
9203 ss = gfc_get_scalar_ss (ss, ar->start[n]);
9204 break;
9205
9206 case AR_FULL:
9207 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
9208 newss->info->data.array.ref = ref;
9209
9210 /* Make sure a bare array reference is treated the same as array(:,:), so
9211 that we don't need to special-case it all the time. */
9212 ar->dimen = ar->as->rank;
9213 for (n = 0; n < ar->dimen; n++)
9214 {
9215 ar->dimen_type[n] = DIMEN_RANGE;
9216
9217 gcc_assert (ar->start[n] == NULL);
9218 gcc_assert (ar->end[n] == NULL);
9219 gcc_assert (ar->stride[n] == NULL);
9220 }
9221 ss = newss;
9222 break;
9223
9224 case AR_SECTION:
9225 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
9226 newss->info->data.array.ref = ref;
9227
9228 /* We add SS chains for all the subscripts in the section. */
9229 for (n = 0; n < ar->dimen; n++)
9230 {
9231 gfc_ss *indexss;
9232
9233 switch (ar->dimen_type[n])
9234 {
9235 case DIMEN_ELEMENT:
9236 /* Add SS for elemental (scalar) subscripts. */
9237 gcc_assert (ar->start[n]);
9238 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
9239 indexss->loop_chain = gfc_ss_terminator;
9240 newss->info->data.array.subscript[n] = indexss;
9241 break;
9242
9243 case DIMEN_RANGE:
9244 /* We don't add anything for sections; just remember this
9245 dimension for later. */
9246 newss->dim[newss->dimen] = n;
9247 newss->dimen++;
9248 break;
9249
9250 case DIMEN_VECTOR:
9251 /* Create a GFC_SS_VECTOR index in which we can store
9252 the vector's descriptor. */
9253 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
9254 1, GFC_SS_VECTOR);
9255 indexss->loop_chain = gfc_ss_terminator;
9256 newss->info->data.array.subscript[n] = indexss;
9257 newss->dim[newss->dimen] = n;
9258 newss->dimen++;
9259 break;
9260
9261 default:
9262 /* We should know what sort of subscript this is by now. */
9263 gcc_unreachable ();
9264 }
9265 }
9266 /* We should have at least one non-elemental dimension,
9267 unless we are creating a descriptor for a (scalar) coarray. */
9268 gcc_assert (newss->dimen > 0
9269 || newss->info->data.array.ref->u.ar.as->corank > 0);
9270 ss = newss;
9271 break;
9272
9273 default:
9274 /* We should know what sort of section it is by now. */
9275 gcc_unreachable ();
9276 }
9277
9278 }
9279 return ss;
9280 }
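
/* A sketch of what the AR_SECTION case above builds (illustrative): for the
   section a(2, 1:n, v), where v is a rank-one integer array,

     - the subscript 2 becomes a scalar SS stored in subscript[0],
     - the range 1:n is only recorded as a loop dimension in newss->dim[],
     - the vector v gets a GFC_SS_VECTOR SS in subscript[2] and is also
       recorded as a loop dimension,

   so the resulting GFC_SS_SECTION ends up with dimen == 2, the rank of the
   section.  */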
9281
9282
9283 /* Walk an operator expression. If only one operand of a binary expression is
9284 scalar, we must also add the scalar term to the SS chain. */
9285
9286 static gfc_ss *
9287 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
9288 {
9289 gfc_ss *head;
9290 gfc_ss *head2;
9291
9292 head = gfc_walk_subexpr (ss, expr->value.op.op1);
9293 if (expr->value.op.op2 == NULL)
9294 head2 = head;
9295 else
9296 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
9297
9298 /* All operands are scalar. Pass back and let the caller deal with it. */
9299 if (head2 == ss)
9300 return head2;
9301
9302 /* All operands require scalarization. */
9303 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
9304 return head2;
9305
9306 /* One of the operands needs scalarization, the other is scalar.
9307 Create a gfc_ss for the scalar expression. */
9308 if (head == ss)
9309 {
9310 /* First operand is scalar. We build the chain in reverse order, so
9311 add the scalar SS after the second operand. */
9312 head = head2;
9313 while (head && head->next != ss)
9314 head = head->next;
9315 /* Check we haven't somehow broken the chain. */
9316 gcc_assert (head);
9317 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
9318 }
9319 else /* head2 == head */
9320 {
9321 gcc_assert (head2 == head);
9322 /* Second operand is scalar. */
9323 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
9324 }
9325
9326 return head2;
9327 }
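
/* Illustration: for b + 1.0 with an array b, walking b extends the chain but
   walking the constant does not, so the code above links an extra scalar SS
   for 1.0 into the chain; the scalarizer later substitutes the precomputed
   scalar value inside the loop body.  For b + c with two array operands no
   extra SS is needed, and for a purely scalar expression the chain is
   passed back untouched.  */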
9328
9329
9330 /* Reverse an SS chain. */
9331
9332 gfc_ss *
9333 gfc_reverse_ss (gfc_ss * ss)
9334 {
9335 gfc_ss *next;
9336 gfc_ss *head;
9337
9338 gcc_assert (ss != NULL);
9339
9340 head = gfc_ss_terminator;
9341 while (ss != gfc_ss_terminator)
9342 {
9343 next = ss->next;
9344 /* Check we didn't somehow break the chain. */
9345 gcc_assert (next != NULL);
9346 ss->next = head;
9347 head = ss;
9348 ss = next;
9349 }
9350
9351 return head;
9352 }
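
/* Example (illustrative): walking b + c produces the chain
   c -> b -> gfc_ss_terminator, because each term is prepended as it is
   walked; gfc_walk_expr uses this function to restore source order,
   b -> c -> gfc_ss_terminator, before the chain is handed to the loop
   setup code.  */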
9353
9354
9355 /* Given an expression referring to a procedure, return the symbol of its
9356 interface. We can't get the procedure symbol directly as we have to handle
9357 the case of (deferred) type-bound procedures. */
9358
9359 gfc_symbol *
9360 gfc_get_proc_ifc_for_expr (gfc_expr *procedure_ref)
9361 {
9362 gfc_symbol *sym;
9363 gfc_ref *ref;
9364
9365 if (procedure_ref == NULL)
9366 return NULL;
9367
9368 /* Normal procedure case. */
9369 if (procedure_ref->expr_type == EXPR_FUNCTION
9370 && procedure_ref->value.function.esym)
9371 sym = procedure_ref->value.function.esym;
9372 else
9373 sym = procedure_ref->symtree->n.sym;
9374
9375 /* Type-bound procedure case. */
9376 for (ref = procedure_ref->ref; ref; ref = ref->next)
9377 {
9378 if (ref->type == REF_COMPONENT
9379 && ref->u.c.component->attr.proc_pointer)
9380 sym = ref->u.c.component->ts.interface;
9381 else
9382 sym = NULL;
9383 }
9384
9385 return sym;
9386 }
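
/* Examples (illustrative): for an ordinary call f(x) this returns the
   resolved symbol f (esym if available, otherwise the symtree symbol); for
   obj%pp(x), where pp is a procedure-pointer component, the loop above
   returns the interface symbol stored in pp's typespec.  A trailing
   reference that is not a procedure-pointer component resets the result to
   NULL.  */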
9387
9388
9389 /* Walk the arguments of an elemental function.
9390 PROC_IFC is used to check whether an argument is permitted to be absent. If
9391 it is NULL, we don't do the check and the argument is assumed to be present.
9392 */
9393
9394 gfc_ss *
9395 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
9396 gfc_symbol *proc_ifc, gfc_ss_type type)
9397 {
9398 gfc_formal_arglist *dummy_arg;
9399 int scalar;
9400 gfc_ss *head;
9401 gfc_ss *tail;
9402 gfc_ss *newss;
9403
9404 head = gfc_ss_terminator;
9405 tail = NULL;
9406
9407 if (proc_ifc)
9408 dummy_arg = gfc_sym_get_dummy_args (proc_ifc);
9409 else
9410 dummy_arg = NULL;
9411
9412 scalar = 1;
9413 for (; arg; arg = arg->next)
9414 {
9415 if (!arg->expr || arg->expr->expr_type == EXPR_NULL)
9416 goto loop_continue;
9417
9418 newss = gfc_walk_subexpr (head, arg->expr);
9419 if (newss == head)
9420 {
9421 /* Scalar argument. */
9422 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
9423 newss = gfc_get_scalar_ss (head, arg->expr);
9424 newss->info->type = type;
9425 if (dummy_arg)
9426 newss->info->data.scalar.dummy_arg = dummy_arg->sym;
9427 }
9428 else
9429 scalar = 0;
9430
9431 if (dummy_arg != NULL
9432 && dummy_arg->sym->attr.optional
9433 && arg->expr->expr_type == EXPR_VARIABLE
9434 && (gfc_expr_attr (arg->expr).optional
9435 || gfc_expr_attr (arg->expr).allocatable
9436 || gfc_expr_attr (arg->expr).pointer))
9437 newss->info->can_be_null_ref = true;
9438
9439 head = newss;
9440 if (!tail)
9441 {
9442 tail = head;
9443 while (tail->next != gfc_ss_terminator)
9444 tail = tail->next;
9445 }
9446
9447 loop_continue:
9448 if (dummy_arg != NULL)
9449 dummy_arg = dummy_arg->next;
9450 }
9451
9452 if (scalar)
9453 {
9454 /* If all the arguments are scalar we don't need the argument SS. */
9455 gfc_free_ss_chain (head);
9456 /* Pass it back. */
9457 return ss;
9458 }
9459
9460 /* Add it onto the existing chain. */
9461 tail->next = ss;
9462 return head;
9463 }
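
/* Illustration: for an elemental reference f(a, s) with an array actual a
   and a scalar actual s, walking a yields an array SS, while s gets a
   scalar SS whose type is forced to TYPE (GFC_SS_REFERENCE when called from
   gfc_walk_function_expr) so that it can be passed by reference inside the
   scalarization loop.  can_be_null_ref is set when an optional dummy
   receives an optional, allocatable or pointer variable, since the actual
   argument may then be absent at run time.  */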
9464
9465
9466 /* Walk a function call. Scalar functions are passed back, and taken out of
9467 scalarization loops. For elemental functions we walk their arguments.
9468 The result of functions returning arrays is stored in a temporary outside
9469 the loop, so that the function is only called once. Hence we do not need
9470 to walk their arguments. */
9471
9472 static gfc_ss *
9473 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
9474 {
9475 gfc_intrinsic_sym *isym;
9476 gfc_symbol *sym;
9477 gfc_component *comp = NULL;
9478
9479 isym = expr->value.function.isym;
9480
9481 /* Handle intrinsic functions separately. */
9482 if (isym)
9483 return gfc_walk_intrinsic_function (ss, expr, isym);
9484
9485 sym = expr->value.function.esym;
9486 if (!sym)
9487 sym = expr->symtree->n.sym;
9488
9489 if (gfc_is_alloc_class_array_function (expr))
9490 return gfc_get_array_ss (ss, expr,
9491 CLASS_DATA (expr->value.function.esym->result)->as->rank,
9492 GFC_SS_FUNCTION);
9493
9494 /* A function that returns arrays. */
9495 comp = gfc_get_proc_ptr_comp (expr);
9496 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
9497 || (comp && comp->attr.dimension))
9498 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
9499
9500 /* Walk the parameters of an elemental function. For now we always pass
9501 by reference. */
9502 if (sym->attr.elemental || (comp && comp->attr.elemental))
9503 {
9504 gfc_ss *old_ss = ss;
9505
9506 ss = gfc_walk_elemental_function_args (old_ss,
9507 expr->value.function.actual,
9508 gfc_get_proc_ifc_for_expr (expr),
9509 GFC_SS_REFERENCE);
9510 if (ss != old_ss
9511 && (comp
9512 || sym->attr.proc_pointer
9513 || sym->attr.if_source != IFSRC_DECL
9514 || sym->attr.array_outer_dependency))
9515 ss->info->array_outer_dependency = 1;
9516 }
9517
9518 /* Scalar functions are OK as these are evaluated outside the scalarization
9519 loop. Pass back and let the caller deal with it. */
9520 return ss;
9521 }
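
/* Summary of the dispatch above (illustrative): an intrinsic such as SUM(a)
   is handed to gfc_walk_intrinsic_function; an array-valued user function
   gets a single GFC_SS_FUNCTION entry, so it is evaluated only once into a
   temporary; an elemental function has its actual arguments walked with
   GFC_SS_REFERENCE; and a scalar function leaves the chain untouched.  */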
9522
9523
9524 /* An array temporary is constructed for array constructors. */
9525
9526 static gfc_ss *
9527 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
9528 {
9529 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
9530 }
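
/* E.g. (illustrative) the constructor (/ (i*i, i = 1, n) /) becomes a single
   GFC_SS_CONSTRUCTOR entry of rank expr->rank; the constructor itself is
   expanded into the temporary later, when the scalarization loop is set
   up.  */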
9531
9532
9533 /* Walk an expression. Add walked expressions to the head of the SS chain.
9534 A wholly scalar expression will not be added. */
9535
9536 gfc_ss *
9537 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
9538 {
9539 gfc_ss *head;
9540
9541 switch (expr->expr_type)
9542 {
9543 case EXPR_VARIABLE:
9544 head = gfc_walk_variable_expr (ss, expr);
9545 return head;
9546
9547 case EXPR_OP:
9548 head = gfc_walk_op_expr (ss, expr);
9549 return head;
9550
9551 case EXPR_FUNCTION:
9552 head = gfc_walk_function_expr (ss, expr);
9553 return head;
9554
9555 case EXPR_CONSTANT:
9556 case EXPR_NULL:
9557 case EXPR_STRUCTURE:
9558 /* Pass back and let the caller deal with it. */
9559 break;
9560
9561 case EXPR_ARRAY:
9562 head = gfc_walk_array_constructor (ss, expr);
9563 return head;
9564
9565 case EXPR_SUBSTRING:
9566 /* Pass back and let the caller deal with it. */
9567 break;
9568
9569 default:
9570 gfc_internal_error ("bad expression type during walk (%d)",
9571 expr->expr_type);
9572 }
9573 return ss;
9574 }
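
/* Once gfc_walk_expr (below) has produced an SS chain, a caller typically
   drives the scalarizer roughly as follows.  This is a condensed,
   illustrative outline (pre/post block bookkeeping, temporaries and error
   handling omitted), not a verbatim excerpt from any particular caller:

       gfc_se se;
       gfc_loopinfo loop;
       stmtblock_t body;
       gfc_ss *ss = gfc_walk_expr (expr);

       if (ss == gfc_ss_terminator)
         {
           gfc_init_se (&se, NULL);
           gfc_conv_expr (&se, expr);
         }
       else
         {
           gfc_init_loopinfo (&loop);
           gfc_add_ss_to_loop (&loop, ss);
           gfc_conv_ss_startstride (&loop);
           gfc_conv_loop_setup (&loop, &expr->where);
           gfc_mark_ss_chain_used (ss, 1);
           gfc_start_scalarized_body (&loop, &body);

           gfc_init_se (&se, NULL);
           gfc_copy_loopinfo_to_se (&se, &loop);
           se.ss = ss;
           gfc_conv_expr (&se, expr);

           gfc_trans_scalarizing_loops (&loop, &body);
           gfc_cleanup_loop (&loop);
         }

   In the scalar branch the value is available directly in se.expr; in the
   array branch se.expr holds the per-element value inside the generated
   loop body.  */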
9575
9576
9577 /* Entry point for expression walking.
9578 A return value equal to the passed chain means this is
9579 a scalar expression. It is up to the caller to take whatever action is
9580 necessary to translate these. */
9581
9582 gfc_ss *
9583 gfc_walk_expr (gfc_expr * expr)
9584 {
9585 gfc_ss *res;
9586
9587 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
9588 return gfc_reverse_ss (res);
9589 }