1 /* Array translation routines
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011, 2012
4 Free Software Foundation, Inc.
5 Contributed by Paul Brook <paul@nowt.org>
6 and Steven Bosscher <s.bosscher@student.tudelft.nl>
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 /* trans-array.c -- Various array-related code, including scalarization,
25 allocation, initialization and other support routines. */
26
27 /* How the scalarizer works.
28 In gfortran, array expressions use the same core routines as scalar
29 expressions.
30 First, a Scalarization State (SS) chain is built. This is done by walking
31 the expression tree, and building a linear list of the terms in the
32 expression. As the tree is walked, scalar subexpressions are translated.
33
34 The scalarization parameters are stored in a gfc_loopinfo structure.
35 First the start and stride of each term is calculated by
36 gfc_conv_ss_startstride. During this process the expressions for the array
37 descriptors and data pointers are also translated.
38
39 If the expression is an assignment, we must then resolve any dependencies.
40 In Fortran all the rhs values of an assignment must be evaluated before
41 any assignments take place. This can require a temporary array to store the
42 values. We also require a temporary when we are passing array expressions
43 or vector subscripts as procedure parameters.
44
45 Array sections are passed without copying to a temporary. These use the
46 scalarizer to determine the shape of the section. The flag
47 loop->array_parameter tells the scalarizer that the actual values and loop
48 variables will not be required.
49
50 The function gfc_conv_loop_setup generates the scalarization setup code.
51 It determines the range of the scalarizing loop variables. If a temporary
52 is required, this is created and initialized. Code for scalar expressions
53 taken outside the loop is also generated at this time. Next the offset and
54 scaling required to translate from loop variables to array indices for each
55 term is calculated.
56
57 A call to gfc_start_scalarized_body marks the start of the scalarized
58 expression. This creates a scope and declares the loop variables. Before
59 calling this, gfc_mark_ss_chain_used must be used to indicate which terms
60 will be used inside this loop.
61
62 The scalar gfc_conv_* functions are then used to build the main body of the
63 scalarization loop. Scalarization loop variables and precalculated scalar
64 values are automatically substituted. Note that gfc_advance_se_ss_chain
65 must be used, rather than changing the se->ss directly.
66
67 For assignment expressions requiring a temporary, two sub loops are
68 generated. The first stores the result of the expression in the temporary,
69 the second copies it to the result. A call to
70 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
71 the start of the copying loop. The temporary may be less than full rank.
72
73 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
74 loops. The loops are added to the pre chain of the loopinfo. The post
75 chain may still contain cleanup code.
76
77 After the loop code has been added into its parent scope gfc_cleanup_loop
78 is called to free all the SS allocated by the scalarizer. */
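/* As a rough illustration (not taken from any actual dump), an assignment
   such as

       a(:) = b(:) + 1

   is walked into an SS chain for A and B, the loop bounds are taken from the
   section, and the generated code is conceptually

       for (S = 0; S <= ubound - lbound; S++)
         a.data[a.offset + S * a.stride] = b.data[b.offset + S * b.stride] + 1;

   A temporary (and the second, copying loop) is only created when a
   dependency between the lhs and rhs requires it.  */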
79
80 #include "config.h"
81 #include "system.h"
82 #include "coretypes.h"
83 #include "tree.h"
84 #include "gimple.h" /* For create_tmp_var_name. */
85 #include "diagnostic-core.h" /* For internal_error/fatal_error. */
86 #include "flags.h"
87 #include "gfortran.h"
88 #include "constructor.h"
89 #include "trans.h"
90 #include "trans-stmt.h"
91 #include "trans-types.h"
92 #include "trans-array.h"
93 #include "trans-const.h"
94 #include "dependency.h"
95
96 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
97
98 /* The contents of this structure aren't actually used, just the address. */
99 static gfc_ss gfc_ss_terminator_var;
100 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
101
102
103 static tree
104 gfc_array_dataptr_type (tree desc)
105 {
106 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
107 }
108
109
110 /* Build expressions to access the members of an array descriptor.
111 It's surprisingly easy to mess up here, so never access
112 an array descriptor by "brute force", always use these
113 functions. This also avoids problems if we change the format
114 of an array descriptor.
115
116 To understand these magic numbers, look at the comments
117 before gfc_build_array_type() in trans-types.c.
118
119 The code within these defines should be the only code which knows the format
120 of an array descriptor.
121
122 Any code just needing to obtain the bounds of an array should use
123 gfc_conv_array_* rather than the following functions, as these will return
124 known constant values, and work with arrays which do not have descriptors.
125
126 Don't forget to #undef these! */
127
128 #define DATA_FIELD 0
129 #define OFFSET_FIELD 1
130 #define DTYPE_FIELD 2
131 #define DIMENSION_FIELD 3
132 #define CAF_TOKEN_FIELD 4
133
134 #define STRIDE_SUBFIELD 0
135 #define LBOUND_SUBFIELD 1
136 #define UBOUND_SUBFIELD 2
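/* For orientation only: the indices above select fields of a descriptor laid
   out roughly as follows (illustrative field names; the real type is built in
   trans-types.c):

     data       pointer to the first element              (DATA_FIELD)
     offset     index offset applied to element accesses  (OFFSET_FIELD)
     dtype      packed rank/type/element-size information (DTYPE_FIELD)
     dim[rank]  per-dimension stride, lbound and ubound   (DIMENSION_FIELD
                                                           and *_SUBFIELD)
     caf_token  coarray token, present for -fcoarray=lib  (CAF_TOKEN_FIELD)  */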
137
138 /* This provides READ-ONLY access to the data field. The field itself
139 doesn't have the proper type. */
140
141 tree
142 gfc_conv_descriptor_data_get (tree desc)
143 {
144 tree field, type, t;
145
146 type = TREE_TYPE (desc);
147 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
148
149 field = TYPE_FIELDS (type);
150 gcc_assert (DATA_FIELD == 0);
151
152 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
153 field, NULL_TREE);
154 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
155
156 return t;
157 }
158
159 /* This provides WRITE access to the data field.
160
161 The field itself does not have the proper pointer type, so VALUE is
162 converted to the field's type before being stored. */
166
167 void
168 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
169 {
170 tree field, type, t;
171
172 type = TREE_TYPE (desc);
173 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
174
175 field = TYPE_FIELDS (type);
176 gcc_assert (DATA_FIELD == 0);
177
178 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
179 field, NULL_TREE);
180 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
181 }
182
183
184 /* This provides the address of the data field. It should only be
185 used by array allocation, which passes the address on to the runtime. */
186
187 tree
188 gfc_conv_descriptor_data_addr (tree desc)
189 {
190 tree field, type, t;
191
192 type = TREE_TYPE (desc);
193 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
194
195 field = TYPE_FIELDS (type);
196 gcc_assert (DATA_FIELD == 0);
197
198 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
199 field, NULL_TREE);
200 return gfc_build_addr_expr (NULL_TREE, t);
201 }
202
203 static tree
204 gfc_conv_descriptor_offset (tree desc)
205 {
206 tree type;
207 tree field;
208
209 type = TREE_TYPE (desc);
210 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
211
212 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
213 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
214
215 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
216 desc, field, NULL_TREE);
217 }
218
219 tree
220 gfc_conv_descriptor_offset_get (tree desc)
221 {
222 return gfc_conv_descriptor_offset (desc);
223 }
224
225 void
226 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
227 tree value)
228 {
229 tree t = gfc_conv_descriptor_offset (desc);
230 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
231 }
232
233
234 tree
235 gfc_conv_descriptor_dtype (tree desc)
236 {
237 tree field;
238 tree type;
239
240 type = TREE_TYPE (desc);
241 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
242
243 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
244 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
245
246 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
247 desc, field, NULL_TREE);
248 }
249
250
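/* Return the rank of the array described by DESC: mask the dtype field with
   GFC_DTYPE_RANK_MASK and convert the result to a default-kind integer. */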
251 tree
252 gfc_conv_descriptor_rank (tree desc)
253 {
254 tree tmp;
255 tree dtype;
256
257 dtype = gfc_conv_descriptor_dtype (desc);
258 tmp = build_int_cst (TREE_TYPE (dtype), GFC_DTYPE_RANK_MASK);
259 tmp = fold_build2_loc (input_location, BIT_AND_EXPR, TREE_TYPE (dtype),
260 dtype, tmp);
261 return fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp);
262 }
263
264
265 tree
266 gfc_get_descriptor_dimension (tree desc)
267 {
268 tree type, field;
269
270 type = TREE_TYPE (desc);
271 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
272
273 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
274 gcc_assert (field != NULL_TREE
275 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
276 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
277
278 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
279 desc, field, NULL_TREE);
280 }
281
282
283 static tree
284 gfc_conv_descriptor_dimension (tree desc, tree dim)
285 {
286 tree tmp;
287
288 tmp = gfc_get_descriptor_dimension (desc);
289
290 return gfc_build_array_ref (tmp, dim, NULL);
291 }
292
293
294 tree
295 gfc_conv_descriptor_token (tree desc)
296 {
297 tree type;
298 tree field;
299
300 type = TREE_TYPE (desc);
301 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
302 gcc_assert (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE);
303 gcc_assert (gfc_option.coarray == GFC_FCOARRAY_LIB);
304 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
305 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == prvoid_type_node);
306
307 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
308 desc, field, NULL_TREE);
309 }
310
311
312 static tree
313 gfc_conv_descriptor_stride (tree desc, tree dim)
314 {
315 tree tmp;
316 tree field;
317
318 tmp = gfc_conv_descriptor_dimension (desc, dim);
319 field = TYPE_FIELDS (TREE_TYPE (tmp));
320 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
321 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
322
323 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
324 tmp, field, NULL_TREE);
325 return tmp;
326 }
327
328 tree
329 gfc_conv_descriptor_stride_get (tree desc, tree dim)
330 {
331 tree type = TREE_TYPE (desc);
332 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
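  /* For array kinds known to be contiguous, the stride of the leading
     dimension is always one, so there is no need to load it from the
     descriptor. */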
333 if (integer_zerop (dim)
334 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
335 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
336 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_RANK_CONT
337 || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
338 return gfc_index_one_node;
339
340 return gfc_conv_descriptor_stride (desc, dim);
341 }
342
343 void
344 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
345 tree dim, tree value)
346 {
347 tree t = gfc_conv_descriptor_stride (desc, dim);
348 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
349 }
350
351 static tree
352 gfc_conv_descriptor_lbound (tree desc, tree dim)
353 {
354 tree tmp;
355 tree field;
356
357 tmp = gfc_conv_descriptor_dimension (desc, dim);
358 field = TYPE_FIELDS (TREE_TYPE (tmp));
359 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
360 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
361
362 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
363 tmp, field, NULL_TREE);
364 return tmp;
365 }
366
367 tree
368 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
369 {
370 return gfc_conv_descriptor_lbound (desc, dim);
371 }
372
373 void
374 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
375 tree dim, tree value)
376 {
377 tree t = gfc_conv_descriptor_lbound (desc, dim);
378 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
379 }
380
381 static tree
382 gfc_conv_descriptor_ubound (tree desc, tree dim)
383 {
384 tree tmp;
385 tree field;
386
387 tmp = gfc_conv_descriptor_dimension (desc, dim);
388 field = TYPE_FIELDS (TREE_TYPE (tmp));
389 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
390 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
391
392 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
393 tmp, field, NULL_TREE);
394 return tmp;
395 }
396
397 tree
398 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
399 {
400 return gfc_conv_descriptor_ubound (desc, dim);
401 }
402
403 void
404 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
405 tree dim, tree value)
406 {
407 tree t = gfc_conv_descriptor_ubound (desc, dim);
408 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
409 }
410
411 /* Build a null array descriptor constructor. */
412
413 tree
414 gfc_build_null_descriptor (tree type)
415 {
416 tree field;
417 tree tmp;
418
419 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
420 gcc_assert (DATA_FIELD == 0);
421 field = TYPE_FIELDS (type);
422
423 /* Set a NULL data pointer. */
424 tmp = build_constructor_single (type, field, null_pointer_node);
425 TREE_CONSTANT (tmp) = 1;
426 /* All other fields are ignored. */
427
428 return tmp;
429 }
430
431
432 /* Modify a descriptor such that the lbound of a given dimension is the value
433 specified. This also updates ubound and offset accordingly. */
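/* A worked example (numbers purely for illustration): an element reference is
   computed as data[offset + sum (index_i * stride_i)], so shifting the lbound
   of a dimension with stride 10 from 1 to 5 gives diff = 4; the ubound grows
   by 4 and the offset shrinks by 4 * 10, leaving the address of every
   existing element unchanged. */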
434
435 void
436 gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
437 int dim, tree new_lbound)
438 {
439 tree offs, ubound, lbound, stride;
440 tree diff, offs_diff;
441
442 new_lbound = fold_convert (gfc_array_index_type, new_lbound);
443
444 offs = gfc_conv_descriptor_offset_get (desc);
445 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
446 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
447 stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);
448
449 /* Get difference (new - old) by which to shift stuff. */
450 diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
451 new_lbound, lbound);
452
453 /* Shift ubound and offset accordingly. This has to be done before
454 updating the lbound, as they depend on the lbound expression! */
455 ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
456 ubound, diff);
457 gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
458 offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
459 diff, stride);
460 offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
461 offs, offs_diff);
462 gfc_conv_descriptor_offset_set (block, desc, offs);
463
464 /* Finally set lbound to value we want. */
465 gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
466 }
467
468
469 /* Cleanup those #defines. */
470
471 #undef DATA_FIELD
472 #undef OFFSET_FIELD
473 #undef DTYPE_FIELD
474 #undef DIMENSION_FIELD
475 #undef CAF_TOKEN_FIELD
476 #undef STRIDE_SUBFIELD
477 #undef LBOUND_SUBFIELD
478 #undef UBOUND_SUBFIELD
479
480
481 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
482 flags & 1 = Main loop body.
483 flags & 2 = temp copy loop. */
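/* For example, gfc_mark_ss_chain_used (ss, 1) marks a chain as used in the
   main loop body only; it is called before gfc_start_scalarized_body, as in
   gfc_trans_array_constructor_subarray below. */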
484
485 void
486 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
487 {
488 for (; ss != gfc_ss_terminator; ss = ss->next)
489 ss->info->useflags = flags;
490 }
491
492
493 /* Free a gfc_ss chain. */
494
495 void
496 gfc_free_ss_chain (gfc_ss * ss)
497 {
498 gfc_ss *next;
499
500 while (ss != gfc_ss_terminator)
501 {
502 gcc_assert (ss != NULL);
503 next = ss->next;
504 gfc_free_ss (ss);
505 ss = next;
506 }
507 }
508
509
510 static void
511 free_ss_info (gfc_ss_info *ss_info)
512 {
513 ss_info->refcount--;
514 if (ss_info->refcount > 0)
515 return;
516
517 gcc_assert (ss_info->refcount == 0);
518 free (ss_info);
519 }
520
521
522 /* Free a SS. */
523
524 void
525 gfc_free_ss (gfc_ss * ss)
526 {
527 gfc_ss_info *ss_info;
528 int n;
529
530 ss_info = ss->info;
531
532 switch (ss_info->type)
533 {
534 case GFC_SS_SECTION:
535 for (n = 0; n < ss->dimen; n++)
536 {
537 if (ss_info->data.array.subscript[ss->dim[n]])
538 gfc_free_ss_chain (ss_info->data.array.subscript[ss->dim[n]]);
539 }
540 break;
541
542 default:
543 break;
544 }
545
546 free_ss_info (ss_info);
547 free (ss);
548 }
549
550
551 /* Creates and initializes an array type gfc_ss struct. */
552
553 gfc_ss *
554 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
555 {
556 gfc_ss *ss;
557 gfc_ss_info *ss_info;
558 int i;
559
560 ss_info = gfc_get_ss_info ();
561 ss_info->refcount++;
562 ss_info->type = type;
563 ss_info->expr = expr;
564
565 ss = gfc_get_ss ();
566 ss->info = ss_info;
567 ss->next = next;
568 ss->dimen = dimen;
569 for (i = 0; i < ss->dimen; i++)
570 ss->dim[i] = i;
571
572 return ss;
573 }
574
575
576 /* Creates and initializes a temporary type gfc_ss struct. */
577
578 gfc_ss *
579 gfc_get_temp_ss (tree type, tree string_length, int dimen)
580 {
581 gfc_ss *ss;
582 gfc_ss_info *ss_info;
583 int i;
584
585 ss_info = gfc_get_ss_info ();
586 ss_info->refcount++;
587 ss_info->type = GFC_SS_TEMP;
588 ss_info->string_length = string_length;
589 ss_info->data.temp.type = type;
590
591 ss = gfc_get_ss ();
592 ss->info = ss_info;
593 ss->next = gfc_ss_terminator;
594 ss->dimen = dimen;
595 for (i = 0; i < ss->dimen; i++)
596 ss->dim[i] = i;
597
598 return ss;
599 }
600
601
602 /* Creates and initializes a scalar type gfc_ss struct. */
603
604 gfc_ss *
605 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
606 {
607 gfc_ss *ss;
608 gfc_ss_info *ss_info;
609
610 ss_info = gfc_get_ss_info ();
611 ss_info->refcount++;
612 ss_info->type = GFC_SS_SCALAR;
613 ss_info->expr = expr;
614
615 ss = gfc_get_ss ();
616 ss->info = ss_info;
617 ss->next = next;
618
619 return ss;
620 }
621
622
623 /* Free all the SS associated with a loop. */
624
625 void
626 gfc_cleanup_loop (gfc_loopinfo * loop)
627 {
628 gfc_loopinfo *loop_next, **ploop;
629 gfc_ss *ss;
630 gfc_ss *next;
631
632 ss = loop->ss;
633 while (ss != gfc_ss_terminator)
634 {
635 gcc_assert (ss != NULL);
636 next = ss->loop_chain;
637 gfc_free_ss (ss);
638 ss = next;
639 }
640
641 /* Remove reference to self in the parent loop. */
642 if (loop->parent)
643 for (ploop = &loop->parent->nested; *ploop; ploop = &(*ploop)->next)
644 if (*ploop == loop)
645 {
646 *ploop = loop->next;
647 break;
648 }
649
650 /* Free non-freed nested loops. */
651 for (loop = loop->nested; loop; loop = loop_next)
652 {
653 loop_next = loop->next;
654 gfc_cleanup_loop (loop);
655 free (loop);
656 }
657 }
658
659
660 static void
661 set_ss_loop (gfc_ss *ss, gfc_loopinfo *loop)
662 {
663 int n;
664
665 for (; ss != gfc_ss_terminator; ss = ss->next)
666 {
667 ss->loop = loop;
668
669 if (ss->info->type == GFC_SS_SCALAR
670 || ss->info->type == GFC_SS_REFERENCE
671 || ss->info->type == GFC_SS_TEMP)
672 continue;
673
674 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
675 if (ss->info->data.array.subscript[n] != NULL)
676 set_ss_loop (ss->info->data.array.subscript[n], loop);
677 }
678 }
679
680
681 /* Associate a SS chain with a loop. */
682
683 void
684 gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
685 {
686 gfc_ss *ss;
687 gfc_loopinfo *nested_loop;
688
689 if (head == gfc_ss_terminator)
690 return;
691
692 set_ss_loop (head, loop);
693
694 ss = head;
695 for (; ss && ss != gfc_ss_terminator; ss = ss->next)
696 {
697 if (ss->nested_ss)
698 {
699 nested_loop = ss->nested_ss->loop;
700
701 /* More than one ss can belong to the same loop. Hence, we add the
702 loop to the chain only if it is different from the previously
703 added one, to avoid duplicate nested loops. */
704 if (nested_loop != loop->nested)
705 {
706 gcc_assert (nested_loop->parent == NULL);
707 nested_loop->parent = loop;
708
709 gcc_assert (nested_loop->next == NULL);
710 nested_loop->next = loop->nested;
711 loop->nested = nested_loop;
712 }
713 else
714 gcc_assert (nested_loop->parent == loop);
715 }
716
717 if (ss->next == gfc_ss_terminator)
718 ss->loop_chain = loop->ss;
719 else
720 ss->loop_chain = ss->next;
721 }
722 gcc_assert (ss == gfc_ss_terminator);
723 loop->ss = head;
724 }
725
726
727 /* Generate an initializer for a static pointer or allocatable array. */
728
729 void
730 gfc_trans_static_array_pointer (gfc_symbol * sym)
731 {
732 tree type;
733
734 gcc_assert (TREE_STATIC (sym->backend_decl));
735 /* Just zero the data member. */
736 type = TREE_TYPE (sym->backend_decl);
737 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
738 }
739
740
741 /* If the bounds of SE's loop have not yet been set, see if they can be
742 determined from array spec AS, which is the array spec of a called
743 function. MAPPING maps the callee's dummy arguments to the values
744 that the caller is passing. Add any initialization and finalization
745 code to SE. */
746
747 void
748 gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
749 gfc_se * se, gfc_array_spec * as)
750 {
751 int n, dim, total_dim;
752 gfc_se tmpse;
753 gfc_ss *ss;
754 tree lower;
755 tree upper;
756 tree tmp;
757
758 total_dim = 0;
759
760 if (!as || as->type != AS_EXPLICIT)
761 return;
762
763 for (ss = se->ss; ss; ss = ss->parent)
764 {
765 total_dim += ss->loop->dimen;
766 for (n = 0; n < ss->loop->dimen; n++)
767 {
768 /* The bound is known, nothing to do. */
769 if (ss->loop->to[n] != NULL_TREE)
770 continue;
771
772 dim = ss->dim[n];
773 gcc_assert (dim < as->rank);
774 gcc_assert (ss->loop->dimen <= as->rank);
775
776 /* Evaluate the lower bound. */
777 gfc_init_se (&tmpse, NULL);
778 gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
779 gfc_add_block_to_block (&se->pre, &tmpse.pre);
780 gfc_add_block_to_block (&se->post, &tmpse.post);
781 lower = fold_convert (gfc_array_index_type, tmpse.expr);
782
783 /* ...and the upper bound. */
784 gfc_init_se (&tmpse, NULL);
785 gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
786 gfc_add_block_to_block (&se->pre, &tmpse.pre);
787 gfc_add_block_to_block (&se->post, &tmpse.post);
788 upper = fold_convert (gfc_array_index_type, tmpse.expr);
789
790 /* Set the upper bound of the loop to UPPER - LOWER. */
791 tmp = fold_build2_loc (input_location, MINUS_EXPR,
792 gfc_array_index_type, upper, lower);
793 tmp = gfc_evaluate_now (tmp, &se->pre);
794 ss->loop->to[n] = tmp;
795 }
796 }
797
798 gcc_assert (total_dim == as->rank);
799 }
800
801
802 /* Generate code to allocate an array temporary, or create a variable to
803 hold the data. If size is NULL, zero the descriptor so that the
804 callee will allocate the array. If DEALLOC is true, also generate code to
805 free the array afterwards.
806
807 If INITIAL is not NULL, it is packed using internal_pack and the result used
808 as data instead of allocating a fresh, uninitialized area of memory.
809
810 Initialization code is added to PRE and finalization code to POST.
811 DYNAMIC is true if the caller may want to extend the array later
812 using realloc. This prevents us from putting the array on the stack. */
813
814 static void
815 gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
816 gfc_array_info * info, tree size, tree nelem,
817 tree initial, bool dynamic, bool dealloc)
818 {
819 tree tmp;
820 tree desc;
821 bool onstack;
822
823 desc = info->descriptor;
824 info->offset = gfc_index_zero_node;
825 if (size == NULL_TREE || integer_zerop (size))
826 {
827 /* A callee allocated array. */
828 gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
829 onstack = FALSE;
830 }
831 else
832 {
833 /* Allocate the temporary. */
834 onstack = !dynamic && initial == NULL_TREE
835 && (gfc_option.flag_stack_arrays
836 || gfc_can_put_var_on_stack (size));
837
838 if (onstack)
839 {
840 /* Make a temporary variable to hold the data. */
841 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
842 nelem, gfc_index_one_node);
843 tmp = gfc_evaluate_now (tmp, pre);
844 tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
845 tmp);
846 tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
847 tmp);
848 tmp = gfc_create_var (tmp, "A");
849 /* If we're here only because of -fstack-arrays we have to
850 emit a DECL_EXPR to make the gimplifier emit alloca calls. */
851 if (!gfc_can_put_var_on_stack (size))
852 gfc_add_expr_to_block (pre,
853 fold_build1_loc (input_location,
854 DECL_EXPR, TREE_TYPE (tmp),
855 tmp));
856 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
857 gfc_conv_descriptor_data_set (pre, desc, tmp);
858 }
859 else
860 {
861 /* Allocate memory to hold the data or call internal_pack. */
862 if (initial == NULL_TREE)
863 {
864 tmp = gfc_call_malloc (pre, NULL, size);
865 tmp = gfc_evaluate_now (tmp, pre);
866 }
867 else
868 {
869 tree packed;
870 tree source_data;
871 tree was_packed;
872 stmtblock_t do_copying;
873
874 tmp = TREE_TYPE (initial); /* Pointer to descriptor. */
875 gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
876 tmp = TREE_TYPE (tmp); /* The descriptor itself. */
877 tmp = gfc_get_element_type (tmp);
878 gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
879 packed = gfc_create_var (build_pointer_type (tmp), "data");
880
881 tmp = build_call_expr_loc (input_location,
882 gfor_fndecl_in_pack, 1, initial);
883 tmp = fold_convert (TREE_TYPE (packed), tmp);
884 gfc_add_modify (pre, packed, tmp);
885
886 tmp = build_fold_indirect_ref_loc (input_location,
887 initial);
888 source_data = gfc_conv_descriptor_data_get (tmp);
889
890 /* internal_pack may return source->data without any allocation
891 or copying if it is already packed. If that's the case, we
892 need to allocate and copy manually. */
893
894 gfc_start_block (&do_copying);
895 tmp = gfc_call_malloc (&do_copying, NULL, size);
896 tmp = fold_convert (TREE_TYPE (packed), tmp);
897 gfc_add_modify (&do_copying, packed, tmp);
898 tmp = gfc_build_memcpy_call (packed, source_data, size);
899 gfc_add_expr_to_block (&do_copying, tmp);
900
901 was_packed = fold_build2_loc (input_location, EQ_EXPR,
902 boolean_type_node, packed,
903 source_data);
904 tmp = gfc_finish_block (&do_copying);
905 tmp = build3_v (COND_EXPR, was_packed, tmp,
906 build_empty_stmt (input_location));
907 gfc_add_expr_to_block (pre, tmp);
908
909 tmp = fold_convert (pvoid_type_node, packed);
910 }
911
912 gfc_conv_descriptor_data_set (pre, desc, tmp);
913 }
914 }
915 info->data = gfc_conv_descriptor_data_get (desc);
916
917 /* The offset is zero because we create temporaries with a zero
918 lower bound. */
919 gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);
920
921 if (dealloc && !onstack)
922 {
923 /* Free the temporary. */
924 tmp = gfc_conv_descriptor_data_get (desc);
925 tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
926 gfc_add_expr_to_block (post, tmp);
927 }
928 }
929
930
931 /* Get the scalarizer array dimension corresponding to actual array dimension
932 given by ARRAY_DIM.
933
934 For example, if SS represents the array ref a(1,:,:,1), it is a
935 bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
936 and 1 for ARRAY_DIM=2.
937 If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
938 scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
939 ARRAY_DIM=3.
940 If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
941 array. If called on the inner ss, the result would be respectively 0,1,2 for
942 ARRAY_DIM=0,1,2. If called on the outer ss, the result would be 0,1
943 for ARRAY_DIM=1,2. */
944
945 static int
946 get_scalarizer_dim_for_array_dim (gfc_ss *ss, int array_dim)
947 {
948 int array_ref_dim;
949 int n;
950
951 array_ref_dim = 0;
952
953 for (; ss; ss = ss->parent)
954 for (n = 0; n < ss->dimen; n++)
955 if (ss->dim[n] < array_dim)
956 array_ref_dim++;
957
958 return array_ref_dim;
959 }
960
961
962 static gfc_ss *
963 innermost_ss (gfc_ss *ss)
964 {
965 while (ss->nested_ss != NULL)
966 ss = ss->nested_ss;
967
968 return ss;
969 }
970
971
972
973 /* Get the array reference dimension corresponding to the given loop dimension.
974 It is different from the true array dimension given by the dim array in
975 the case of a partial array reference (e.g. a(:,:,1,:)).
976 It is different from the loop dimension in the case of a transposed array. */
978
979 static int
980 get_array_ref_dim_for_loop_dim (gfc_ss *ss, int loop_dim)
981 {
982 return get_scalarizer_dim_for_array_dim (innermost_ss (ss),
983 ss->dim[loop_dim]);
984 }
985
986
987 /* Generate code to create and initialize the descriptor for a temporary
988 array. This is used for both temporaries needed by the scalarizer, and
989 functions returning arrays. Adjusts the loop variables to be
990 zero-based, and calculates the loop bounds for callee allocated arrays.
991 Allocate the array unless it's callee allocated (we have a callee
992 allocated array if 'callee_alloc' is true, or if loop->to[n] is
993 NULL_TREE for any n). Also fills in the descriptor, data and offset
994 fields of info if known. Returns the size of the array, or NULL for a
995 callee allocated array.
996
997 'eltype' == NULL signals that the temporary should be a class object.
998 The 'initial' expression is used to obtain the size of the dynamic
999 type; otherwise the allocation and initialisation proceed as for any
1000 other expression.
1001
1002 PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
1003 gfc_trans_allocate_array_storage. */
1004
1005 tree
1006 gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post, gfc_ss * ss,
1007 tree eltype, tree initial, bool dynamic,
1008 bool dealloc, bool callee_alloc, locus * where)
1009 {
1010 gfc_loopinfo *loop;
1011 gfc_ss *s;
1012 gfc_array_info *info;
1013 tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
1014 tree type;
1015 tree desc;
1016 tree tmp;
1017 tree size;
1018 tree nelem;
1019 tree cond;
1020 tree or_expr;
1021 tree class_expr = NULL_TREE;
1022 int n, dim, tmp_dim;
1023 int total_dim = 0;
1024
1025 /* This signals a class array for which we need the size of the
1026 dynamic type. Generate an eltype and then the class expression. */
1027 if (eltype == NULL_TREE && initial)
1028 {
1029 if (POINTER_TYPE_P (TREE_TYPE (initial)))
1030 class_expr = build_fold_indirect_ref_loc (input_location, initial);
1031 eltype = TREE_TYPE (class_expr);
1032 eltype = gfc_get_element_type (eltype);
1033 /* Obtain the structure (class) expression. */
1034 class_expr = TREE_OPERAND (class_expr, 0);
1035 gcc_assert (class_expr);
1036 }
1037
1038 memset (from, 0, sizeof (from));
1039 memset (to, 0, sizeof (to));
1040
1041 info = &ss->info->data.array;
1042
1043 gcc_assert (ss->dimen > 0);
1044 gcc_assert (ss->loop->dimen == ss->dimen);
1045
1046 if (gfc_option.warn_array_temp && where)
1047 gfc_warning ("Creating array temporary at %L", where);
1048
1049 /* Set the lower bound to zero. */
1050 for (s = ss; s; s = s->parent)
1051 {
1052 loop = s->loop;
1053
1054 total_dim += loop->dimen;
1055 for (n = 0; n < loop->dimen; n++)
1056 {
1057 dim = s->dim[n];
1058
1059 /* Callee allocated arrays may not have a known bound yet. */
1060 if (loop->to[n])
1061 loop->to[n] = gfc_evaluate_now (
1062 fold_build2_loc (input_location, MINUS_EXPR,
1063 gfc_array_index_type,
1064 loop->to[n], loop->from[n]),
1065 pre);
1066 loop->from[n] = gfc_index_zero_node;
1067
1068 /* We have just changed the loop bounds, we must clear the
1069 corresponding specloop, so that delta calculation is not skipped
1070 later in gfc_set_delta. */
1071 loop->specloop[n] = NULL;
1072
1073 /* We are constructing the temporary's descriptor based on the loop
1074 dimensions. As the dimensions may be accessed in arbitrary order
1075 (think of transpose) the size taken from the n'th loop may not map
1076 to the n'th dimension of the array. We need to reconstruct loop
1077 infos in the right order before using it to set the descriptor
1078 bounds. */
1079 tmp_dim = get_scalarizer_dim_for_array_dim (ss, dim);
1080 from[tmp_dim] = loop->from[n];
1081 to[tmp_dim] = loop->to[n];
1082
1083 info->delta[dim] = gfc_index_zero_node;
1084 info->start[dim] = gfc_index_zero_node;
1085 info->end[dim] = gfc_index_zero_node;
1086 info->stride[dim] = gfc_index_one_node;
1087 }
1088 }
1089
1090 /* Initialize the descriptor. */
1091 type =
1092 gfc_get_array_type_bounds (eltype, total_dim, 0, from, to, 1,
1093 GFC_ARRAY_UNKNOWN, true);
1094 desc = gfc_create_var (type, "atmp");
1095 GFC_DECL_PACKED_ARRAY (desc) = 1;
1096
1097 info->descriptor = desc;
1098 size = gfc_index_one_node;
1099
1100 /* Fill in the array dtype. */
1101 tmp = gfc_conv_descriptor_dtype (desc);
1102 gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
1103
1104 /*
1105 Fill in the bounds and stride. This is a packed array, so:
1106
1107 size = 1;
1108 for (n = 0; n < rank; n++)
1109 {
1110 stride[n] = size
1111 delta = ubound[n] + 1 - lbound[n];
1112 size = size * delta;
1113 }
1114 size = size * sizeof(element);
1115 */
1116
1117 or_expr = NULL_TREE;
1118
1119 /* If there is at least one null loop->to[n], it is a callee allocated
1120 array. */
1121 for (n = 0; n < total_dim; n++)
1122 if (to[n] == NULL_TREE)
1123 {
1124 size = NULL_TREE;
1125 break;
1126 }
1127
1128 if (size == NULL_TREE)
1129 for (s = ss; s; s = s->parent)
1130 for (n = 0; n < s->loop->dimen; n++)
1131 {
1132 dim = get_scalarizer_dim_for_array_dim (ss, s->dim[n]);
1133
1134 /* For a callee allocated array express the loop bounds in terms
1135 of the descriptor fields. */
1136 tmp = fold_build2_loc (input_location,
1137 MINUS_EXPR, gfc_array_index_type,
1138 gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
1139 gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
1140 s->loop->to[n] = tmp;
1141 }
1142 else
1143 {
1144 for (n = 0; n < total_dim; n++)
1145 {
1146 /* Store the stride and bound components in the descriptor. */
1147 gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);
1148
1149 gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
1150 gfc_index_zero_node);
1151
1152 gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], to[n]);
1153
1154 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1155 gfc_array_index_type,
1156 to[n], gfc_index_one_node);
1157
1158 /* Check whether the size for this dimension is negative. */
1159 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
1160 tmp, gfc_index_zero_node);
1161 cond = gfc_evaluate_now (cond, pre);
1162
1163 if (n == 0)
1164 or_expr = cond;
1165 else
1166 or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
1167 boolean_type_node, or_expr, cond);
1168
1169 size = fold_build2_loc (input_location, MULT_EXPR,
1170 gfc_array_index_type, size, tmp);
1171 size = gfc_evaluate_now (size, pre);
1172 }
1173 }
1174
1175 /* Get the size of the array. */
1176 if (size && !callee_alloc)
1177 {
1178 tree elemsize;
1179 /* If or_expr is true, then the extent in at least one
1180 dimension is zero and the size is set to zero. */
1181 size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
1182 or_expr, gfc_index_zero_node, size);
1183
1184 nelem = size;
1185 if (class_expr == NULL_TREE)
1186 elemsize = fold_convert (gfc_array_index_type,
1187 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
1188 else
1189 elemsize = gfc_vtable_size_get (class_expr);
1190
1191 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
1192 size, elemsize);
1193 }
1194 else
1195 {
1196 nelem = size;
1197 size = NULL_TREE;
1198 }
1199
1200 gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
1201 dynamic, dealloc);
1202
1203 while (ss->parent)
1204 ss = ss->parent;
1205
1206 if (ss->dimen > ss->loop->temp_dim)
1207 ss->loop->temp_dim = ss->dimen;
1208
1209 return size;
1210 }
1211
1212
1213 /* Return the number of iterations in a loop that starts at START,
1214 ends at END, and has step STEP. */
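/* In other words, the value built below is MAX ((END - START) / STEP + 1, 0)
   using floor division, so an empty range yields zero rather than a negative
   count. */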
1215
1216 static tree
1217 gfc_get_iteration_count (tree start, tree end, tree step)
1218 {
1219 tree tmp;
1220 tree type;
1221
1222 type = TREE_TYPE (step);
1223 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1224 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1225 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1226 build_int_cst (type, 1));
1227 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1228 build_int_cst (type, 0));
1229 return fold_convert (gfc_array_index_type, tmp);
1230 }
1231
1232
1233 /* Extend the data in array DESC by EXTRA elements. */
1234
1235 static void
1236 gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
1237 {
1238 tree arg0, arg1;
1239 tree tmp;
1240 tree size;
1241 tree ubound;
1242
1243 if (integer_zerop (extra))
1244 return;
1245
1246 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);
1247
1248 /* Add EXTRA to the upper bound. */
1249 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1250 ubound, extra);
1251 gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);
1252
1253 /* Get the value of the current data pointer. */
1254 arg0 = gfc_conv_descriptor_data_get (desc);
1255
1256 /* Calculate the new array size. */
1257 size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
1258 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1259 ubound, gfc_index_one_node);
1260 arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
1261 fold_convert (size_type_node, tmp),
1262 fold_convert (size_type_node, size));
1263
1264 /* Call the realloc() function. */
1265 tmp = gfc_call_realloc (pblock, arg0, arg1);
1266 gfc_conv_descriptor_data_set (pblock, desc, tmp);
1267 }
1268
1269
1270 /* Return true if the bounds of iterator I can only be determined
1271 at run time. */
1272
1273 static inline bool
1274 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1275 {
1276 return (i->start->expr_type != EXPR_CONSTANT
1277 || i->end->expr_type != EXPR_CONSTANT
1278 || i->step->expr_type != EXPR_CONSTANT);
1279 }
1280
1281
1282 /* Split the size of constructor element EXPR into the sum of two terms,
1283 one of which can be determined at compile time and one of which must
1284 be calculated at run time. Set *SIZE to the former and return true
1285 if the latter might be nonzero. */
1286
1287 static bool
1288 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1289 {
1290 if (expr->expr_type == EXPR_ARRAY)
1291 return gfc_get_array_constructor_size (size, expr->value.constructor);
1292 else if (expr->rank > 0)
1293 {
1294 /* Calculate everything at run time. */
1295 mpz_set_ui (*size, 0);
1296 return true;
1297 }
1298 else
1299 {
1300 /* A single element. */
1301 mpz_set_ui (*size, 1);
1302 return false;
1303 }
1304 }
1305
1306
1307 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1308 of array constructor BASE. */
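/* As an illustration: for (/ 4, 5, (i, i = 1, n) /) with a non-constant N the
   static part is 2 and the function returns true; with a constant n = 3 the
   full size, 5, is known at compile time and the function returns false. */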
1309
1310 static bool
1311 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1312 {
1313 gfc_constructor *c;
1314 gfc_iterator *i;
1315 mpz_t val;
1316 mpz_t len;
1317 bool dynamic;
1318
1319 mpz_set_ui (*size, 0);
1320 mpz_init (len);
1321 mpz_init (val);
1322
1323 dynamic = false;
1324 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1325 {
1326 i = c->iterator;
1327 if (i && gfc_iterator_has_dynamic_bounds (i))
1328 dynamic = true;
1329 else
1330 {
1331 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1332 if (i)
1333 {
1334 /* Multiply the static part of the element size by the
1335 number of iterations. */
1336 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1337 mpz_fdiv_q (val, val, i->step->value.integer);
1338 mpz_add_ui (val, val, 1);
1339 if (mpz_sgn (val) > 0)
1340 mpz_mul (len, len, val);
1341 else
1342 mpz_set_ui (len, 0);
1343 }
1344 mpz_add (*size, *size, len);
1345 }
1346 }
1347 mpz_clear (len);
1348 mpz_clear (val);
1349 return dynamic;
1350 }
1351
1352
1353 /* Make sure offset is a variable. */
1354
1355 static void
1356 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1357 tree * offsetvar)
1358 {
1359 /* We should have already created the offset variable. We cannot
1360 create it here because we may be in an inner scope. */
1361 gcc_assert (*offsetvar != NULL_TREE);
1362 gfc_add_modify (pblock, *offsetvar, *poffset);
1363 *poffset = *offsetvar;
1364 TREE_USED (*offsetvar) = 1;
1365 }
1366
1367
1368 /* Variables needed for bounds-checking. */
1369 static bool first_len;
1370 static tree first_len_val;
1371 static bool typespec_chararray_ctor;
1372
1373 static void
1374 gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
1375 tree offset, gfc_se * se, gfc_expr * expr)
1376 {
1377 tree tmp;
1378
1379 gfc_conv_expr (se, expr);
1380
1381 /* Store the value. */
1382 tmp = build_fold_indirect_ref_loc (input_location,
1383 gfc_conv_descriptor_data_get (desc));
1384 tmp = gfc_build_array_ref (tmp, offset, NULL);
1385
1386 if (expr->ts.type == BT_CHARACTER)
1387 {
1388 int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
1389 tree esize;
1390
1391 esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
1392 esize = fold_convert (gfc_charlen_type_node, esize);
1393 esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
1394 gfc_charlen_type_node, esize,
1395 build_int_cst (gfc_charlen_type_node,
1396 gfc_character_kinds[i].bit_size / 8));
1397
1398 gfc_conv_string_parameter (se);
1399 if (POINTER_TYPE_P (TREE_TYPE (tmp)))
1400 {
1401 /* The temporary is an array of pointers. */
1402 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1403 gfc_add_modify (&se->pre, tmp, se->expr);
1404 }
1405 else
1406 {
1407 /* The temporary is an array of string values. */
1408 tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
1409 /* We know the temporary and the value will be the same length,
1410 so can use memcpy. */
1411 gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
1412 se->string_length, se->expr, expr->ts.kind);
1413 }
1414 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
1415 {
1416 if (first_len)
1417 {
1418 gfc_add_modify (&se->pre, first_len_val,
1419 se->string_length);
1420 first_len = false;
1421 }
1422 else
1423 {
1424 /* Verify that all constructor elements are of the same
1425 length. */
1426 tree cond = fold_build2_loc (input_location, NE_EXPR,
1427 boolean_type_node, first_len_val,
1428 se->string_length);
1429 gfc_trans_runtime_check
1430 (true, false, cond, &se->pre, &expr->where,
1431 "Different CHARACTER lengths (%ld/%ld) in array constructor",
1432 fold_convert (long_integer_type_node, first_len_val),
1433 fold_convert (long_integer_type_node, se->string_length));
1434 }
1435 }
1436 }
1437 else
1438 {
1439 /* TODO: Should the frontend already have done this conversion? */
1440 se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
1441 gfc_add_modify (&se->pre, tmp, se->expr);
1442 }
1443
1444 gfc_add_block_to_block (pblock, &se->pre);
1445 gfc_add_block_to_block (pblock, &se->post);
1446 }
1447
1448
1449 /* Add the contents of an array to the constructor. DYNAMIC is as for
1450 gfc_trans_array_constructor_value. */
1451
1452 static void
1453 gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
1454 tree type ATTRIBUTE_UNUSED,
1455 tree desc, gfc_expr * expr,
1456 tree * poffset, tree * offsetvar,
1457 bool dynamic)
1458 {
1459 gfc_se se;
1460 gfc_ss *ss;
1461 gfc_loopinfo loop;
1462 stmtblock_t body;
1463 tree tmp;
1464 tree size;
1465 int n;
1466
1467 /* We need this to be a variable so we can increment it. */
1468 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1469
1470 gfc_init_se (&se, NULL);
1471
1472 /* Walk the array expression. */
1473 ss = gfc_walk_expr (expr);
1474 gcc_assert (ss != gfc_ss_terminator);
1475
1476 /* Initialize the scalarizer. */
1477 gfc_init_loopinfo (&loop);
1478 gfc_add_ss_to_loop (&loop, ss);
1479
1480 /* Initialize the loop. */
1481 gfc_conv_ss_startstride (&loop);
1482 gfc_conv_loop_setup (&loop, &expr->where);
1483
1484 /* Make sure the constructed array has room for the new data. */
1485 if (dynamic)
1486 {
1487 /* Set SIZE to the total number of elements in the subarray. */
1488 size = gfc_index_one_node;
1489 for (n = 0; n < loop.dimen; n++)
1490 {
1491 tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
1492 gfc_index_one_node);
1493 size = fold_build2_loc (input_location, MULT_EXPR,
1494 gfc_array_index_type, size, tmp);
1495 }
1496
1497 /* Grow the constructed array by SIZE elements. */
1498 gfc_grow_array (&loop.pre, desc, size);
1499 }
1500
1501 /* Make the loop body. */
1502 gfc_mark_ss_chain_used (ss, 1);
1503 gfc_start_scalarized_body (&loop, &body);
1504 gfc_copy_loopinfo_to_se (&se, &loop);
1505 se.ss = ss;
1506
1507 gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
1508 gcc_assert (se.ss == gfc_ss_terminator);
1509
1510 /* Increment the offset. */
1511 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
1512 *poffset, gfc_index_one_node);
1513 gfc_add_modify (&body, *poffset, tmp);
1514
1515 /* Finish the loop. */
1516 gfc_trans_scalarizing_loops (&loop, &body);
1517 gfc_add_block_to_block (&loop.pre, &loop.post);
1518 tmp = gfc_finish_block (&loop.pre);
1519 gfc_add_expr_to_block (pblock, tmp);
1520
1521 gfc_cleanup_loop (&loop);
1522 }
1523
1524
1525 /* Assign the values to the elements of an array constructor. DYNAMIC
1526 is true if descriptor DESC only contains enough data for the static
1527 size calculated by gfc_get_array_constructor_size. When true, memory
1528 for the dynamic parts must be allocated using realloc. */
1529
1530 static void
1531 gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
1532 tree desc, gfc_constructor_base base,
1533 tree * poffset, tree * offsetvar,
1534 bool dynamic)
1535 {
1536 tree tmp;
1537 tree start = NULL_TREE;
1538 tree end = NULL_TREE;
1539 tree step = NULL_TREE;
1540 stmtblock_t body;
1541 gfc_se se;
1542 mpz_t size;
1543 gfc_constructor *c;
1544
1545 tree shadow_loopvar = NULL_TREE;
1546 gfc_saved_var saved_loopvar;
1547
1548 mpz_init (size);
1549 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1550 {
1551 /* If this is an iterator or an array, the offset must be a variable. */
1552 if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
1553 gfc_put_offset_into_var (pblock, poffset, offsetvar);
1554
1555 /* Shadowing the iterator avoids changing its value and saves us from
1556 keeping track of it. Further, it makes sure that there's always a
1557 backend-decl for the symbol, even if there wasn't one before,
1558 e.g. in the case of an iterator that appears in a specification
1559 expression in an interface mapping. */
1560 if (c->iterator)
1561 {
1562 gfc_symbol *sym;
1563 tree type;
1564
1565 /* Evaluate loop bounds before substituting the loop variable
1566 in case they depend on it. Such a case is invalid, but it is
1567 not more expensive to do the right thing here.
1568 See PR 44354. */
1569 gfc_init_se (&se, NULL);
1570 gfc_conv_expr_val (&se, c->iterator->start);
1571 gfc_add_block_to_block (pblock, &se.pre);
1572 start = gfc_evaluate_now (se.expr, pblock);
1573
1574 gfc_init_se (&se, NULL);
1575 gfc_conv_expr_val (&se, c->iterator->end);
1576 gfc_add_block_to_block (pblock, &se.pre);
1577 end = gfc_evaluate_now (se.expr, pblock);
1578
1579 gfc_init_se (&se, NULL);
1580 gfc_conv_expr_val (&se, c->iterator->step);
1581 gfc_add_block_to_block (pblock, &se.pre);
1582 step = gfc_evaluate_now (se.expr, pblock);
1583
1584 sym = c->iterator->var->symtree->n.sym;
1585 type = gfc_typenode_for_spec (&sym->ts);
1586
1587 shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
1588 gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
1589 }
1590
1591 gfc_start_block (&body);
1592
1593 if (c->expr->expr_type == EXPR_ARRAY)
1594 {
1595 /* Array constructors can be nested. */
1596 gfc_trans_array_constructor_value (&body, type, desc,
1597 c->expr->value.constructor,
1598 poffset, offsetvar, dynamic);
1599 }
1600 else if (c->expr->rank > 0)
1601 {
1602 gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
1603 poffset, offsetvar, dynamic);
1604 }
1605 else
1606 {
1607 /* This code really upsets the gimplifier so don't bother for now. */
1608 gfc_constructor *p;
1609 HOST_WIDE_INT n;
1610 HOST_WIDE_INT size;
1611
1612 p = c;
1613 n = 0;
1614 while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
1615 {
1616 p = gfc_constructor_next (p);
1617 n++;
1618 }
1619 if (n < 4)
1620 {
1621 /* Scalar values. */
1622 gfc_init_se (&se, NULL);
1623 gfc_trans_array_ctor_element (&body, desc, *poffset,
1624 &se, c->expr);
1625
1626 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1627 gfc_array_index_type,
1628 *poffset, gfc_index_one_node);
1629 }
1630 else
1631 {
1632 /* Collect multiple scalar constants into a constructor. */
1633 VEC(constructor_elt,gc) *v = NULL;
1634 tree init;
1635 tree bound;
1636 tree tmptype;
1637 HOST_WIDE_INT idx = 0;
1638
1639 p = c;
1640 /* Count the number of consecutive scalar constants. */
1641 while (p && !(p->iterator
1642 || p->expr->expr_type != EXPR_CONSTANT))
1643 {
1644 gfc_init_se (&se, NULL);
1645 gfc_conv_constant (&se, p->expr);
1646
1647 if (c->expr->ts.type != BT_CHARACTER)
1648 se.expr = fold_convert (type, se.expr);
1649 /* For constant character array constructors we build
1650 an array of pointers. */
1651 else if (POINTER_TYPE_P (type))
1652 se.expr = gfc_build_addr_expr
1653 (gfc_get_pchar_type (p->expr->ts.kind),
1654 se.expr);
1655
1656 CONSTRUCTOR_APPEND_ELT (v,
1657 build_int_cst (gfc_array_index_type,
1658 idx++),
1659 se.expr);
1660 c = p;
1661 p = gfc_constructor_next (p);
1662 }
1663
1664 bound = size_int (n - 1);
1665 /* Create an array type to hold them. */
1666 tmptype = build_range_type (gfc_array_index_type,
1667 gfc_index_zero_node, bound);
1668 tmptype = build_array_type (type, tmptype);
1669
1670 init = build_constructor (tmptype, v);
1671 TREE_CONSTANT (init) = 1;
1672 TREE_STATIC (init) = 1;
1673 /* Create a static variable to hold the data. */
1674 tmp = gfc_create_var (tmptype, "data");
1675 TREE_STATIC (tmp) = 1;
1676 TREE_CONSTANT (tmp) = 1;
1677 TREE_READONLY (tmp) = 1;
1678 DECL_INITIAL (tmp) = init;
1679 init = tmp;
1680
1681 /* Use BUILTIN_MEMCPY to assign the values. */
1682 tmp = gfc_conv_descriptor_data_get (desc);
1683 tmp = build_fold_indirect_ref_loc (input_location,
1684 tmp);
1685 tmp = gfc_build_array_ref (tmp, *poffset, NULL);
1686 tmp = gfc_build_addr_expr (NULL_TREE, tmp);
1687 init = gfc_build_addr_expr (NULL_TREE, init);
1688
1689 size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
1690 bound = build_int_cst (size_type_node, n * size);
1691 tmp = build_call_expr_loc (input_location,
1692 builtin_decl_explicit (BUILT_IN_MEMCPY),
1693 3, tmp, init, bound);
1694 gfc_add_expr_to_block (&body, tmp);
1695
1696 *poffset = fold_build2_loc (input_location, PLUS_EXPR,
1697 gfc_array_index_type, *poffset,
1698 build_int_cst (gfc_array_index_type, n));
1699 }
1700 if (!INTEGER_CST_P (*poffset))
1701 {
1702 gfc_add_modify (&body, *offsetvar, *poffset);
1703 *poffset = *offsetvar;
1704 }
1705 }
1706
1707 /* The frontend should already have done any expansions
1708 at compile-time. */
1709 if (!c->iterator)
1710 {
1711 /* Pass the code as is. */
1712 tmp = gfc_finish_block (&body);
1713 gfc_add_expr_to_block (pblock, tmp);
1714 }
1715 else
1716 {
1717 /* Build the implied do-loop. */
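         /* Schematically, the code built below is

              shadow_loopvar = start;
            loop:
              if (step > 0 ? shadow_loopvar > end : shadow_loopvar < end)
                goto exit;
              <store the element(s) for this iteration>;
              shadow_loopvar += step;
              goto loop;
            exit:;  */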
1718 stmtblock_t implied_do_block;
1719 tree cond;
1720 tree exit_label;
1721 tree loopbody;
1722 tree tmp2;
1723
1724 loopbody = gfc_finish_block (&body);
1725
1726 /* Create a new block that holds the implied-do loop. A temporary
1727 loop-variable is used. */
1728 gfc_start_block(&implied_do_block);
1729
1730 /* Initialize the loop. */
1731 gfc_add_modify (&implied_do_block, shadow_loopvar, start);
1732
1733 /* If this array expands dynamically, and the number of iterations
1734 is not constant, we won't have allocated space for the static
1735 part of C->EXPR's size. Do that now. */
1736 if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
1737 {
1738 /* Get the number of iterations. */
1739 tmp = gfc_get_iteration_count (shadow_loopvar, end, step);
1740
1741 /* Get the static part of C->EXPR's size. */
1742 gfc_get_array_constructor_element_size (&size, c->expr);
1743 tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
1744
1745 /* Grow the array by TMP * TMP2 elements. */
1746 tmp = fold_build2_loc (input_location, MULT_EXPR,
1747 gfc_array_index_type, tmp, tmp2);
1748 gfc_grow_array (&implied_do_block, desc, tmp);
1749 }
1750
1751 /* Generate the loop body. */
1752 exit_label = gfc_build_label_decl (NULL_TREE);
1753 gfc_start_block (&body);
1754
1755 /* Generate the exit condition. Depending on the sign of
1756 the step variable we have to generate the correct
1757 comparison. */
1758 tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
1759 step, build_int_cst (TREE_TYPE (step), 0));
1760 cond = fold_build3_loc (input_location, COND_EXPR,
1761 boolean_type_node, tmp,
1762 fold_build2_loc (input_location, GT_EXPR,
1763 boolean_type_node, shadow_loopvar, end),
1764 fold_build2_loc (input_location, LT_EXPR,
1765 boolean_type_node, shadow_loopvar, end));
1766 tmp = build1_v (GOTO_EXPR, exit_label);
1767 TREE_USED (exit_label) = 1;
1768 tmp = build3_v (COND_EXPR, cond, tmp,
1769 build_empty_stmt (input_location));
1770 gfc_add_expr_to_block (&body, tmp);
1771
1772 /* The main loop body. */
1773 gfc_add_expr_to_block (&body, loopbody);
1774
1775 /* Increase loop variable by step. */
1776 tmp = fold_build2_loc (input_location, PLUS_EXPR,
1777 TREE_TYPE (shadow_loopvar), shadow_loopvar,
1778 step);
1779 gfc_add_modify (&body, shadow_loopvar, tmp);
1780
1781 /* Finish the loop. */
1782 tmp = gfc_finish_block (&body);
1783 tmp = build1_v (LOOP_EXPR, tmp);
1784 gfc_add_expr_to_block (&implied_do_block, tmp);
1785
1786 /* Add the exit label. */
1787 tmp = build1_v (LABEL_EXPR, exit_label);
1788 gfc_add_expr_to_block (&implied_do_block, tmp);
1789
1790 /* Finish the implied-do loop. */
1791 tmp = gfc_finish_block(&implied_do_block);
1792 gfc_add_expr_to_block(pblock, tmp);
1793
1794 gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
1795 }
1796 }
1797 mpz_clear (size);
1798 }
1799
1800
1801 /* A catch-all to obtain the string length for anything that is not
1802 a substring of non-constant length, a constant, array or variable. */
1803
1804 static void
1805 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1806 {
1807 gfc_se se;
1808 gfc_ss *ss;
1809
1810 /* Don't bother if we already know the length is a constant. */
1811 if (*len && INTEGER_CST_P (*len))
1812 return;
1813
1814 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1815 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1816 {
1817 /* This is easy. */
1818 gfc_conv_const_charlen (e->ts.u.cl);
1819 *len = e->ts.u.cl->backend_decl;
1820 }
1821 else
1822 {
1823 /* Otherwise, be brutal even if inefficient. */
1824 ss = gfc_walk_expr (e);
1825 gfc_init_se (&se, NULL);
1826
1827 /* No function call, in case of side effects. */
1828 se.no_function_call = 1;
1829 if (ss == gfc_ss_terminator)
1830 gfc_conv_expr (&se, e);
1831 else
1832 gfc_conv_expr_descriptor (&se, e, ss);
1833
1834 /* Fix the value. */
1835 *len = gfc_evaluate_now (se.string_length, &se.pre);
1836
1837 gfc_add_block_to_block (block, &se.pre);
1838 gfc_add_block_to_block (block, &se.post);
1839
1840 e->ts.u.cl->backend_decl = *len;
1841 }
1842 }
1843
1844
1845 /* Figure out the string length of a variable reference expression.
1846 Used by get_array_ctor_strlen. */
1847
1848 static void
1849 get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
1850 {
1851 gfc_ref *ref;
1852 gfc_typespec *ts;
1853 mpz_t char_len;
1854
1855 /* Don't bother if we already know the length is a constant. */
1856 if (*len && INTEGER_CST_P (*len))
1857 return;
1858
1859 ts = &expr->symtree->n.sym->ts;
1860 for (ref = expr->ref; ref; ref = ref->next)
1861 {
1862 switch (ref->type)
1863 {
1864 case REF_ARRAY:
1865 /* Array references don't change the string length. */
1866 break;
1867
1868 case REF_COMPONENT:
1869 /* Use the length of the component. */
1870 ts = &ref->u.c.component->ts;
1871 break;
1872
1873 case REF_SUBSTRING:
1874 if (ref->u.ss.start->expr_type != EXPR_CONSTANT
1875 || ref->u.ss.end->expr_type != EXPR_CONSTANT)
1876 {
1877 /* Note that this might evaluate expr. */
1878 get_array_ctor_all_strlen (block, expr, len);
1879 return;
1880 }
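/* Both substring bounds are constant here, so the length is simply
   end - start + 1; e.g. an illustrative reference str(3:7) gives a
   length of 5. */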
1881 mpz_init_set_ui (char_len, 1);
1882 mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
1883 mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
1884 *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
1885 *len = convert (gfc_charlen_type_node, *len);
1886 mpz_clear (char_len);
1887 return;
1888
1889 default:
1890 gcc_unreachable ();
1891 }
1892 }
1893
1894 *len = ts->u.cl->backend_decl;
1895 }
1896
1897
1898 /* Figure out the string length of a character array constructor.
1899 If len is NULL, don't calculate the length; this happens for recursive calls
1900 when a sub-array-constructor is an element but not at the first position,
1901 i.e. when we're not interested in the length.
1902 Returns TRUE if all elements are character constants. */
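/* For illustration: for the constructor (/ 'abc', 'def' /) all elements
   are character constants, so TRUE is returned and *LEN holds the length
   of the first element, 3. A constructor containing a character variable
   instead makes the result FALSE, and *LEN is then derived from that
   element. */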
1903
1904 bool
1905 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1906 {
1907 gfc_constructor *c;
1908 bool is_const;
1909
1910 is_const = TRUE;
1911
1912 if (gfc_constructor_first (base) == NULL)
1913 {
1914 if (len)
1915 *len = build_int_cstu (gfc_charlen_type_node, 0);
1916 return is_const;
1917 }
1918
1919 /* Loop over all constructor elements to find out is_const, but in len we
1920 want to store the length of the first, not the last, element. We can
1921 of course exit the loop as soon as is_const is found to be false. */
1922 for (c = gfc_constructor_first (base);
1923 c && is_const; c = gfc_constructor_next (c))
1924 {
1925 switch (c->expr->expr_type)
1926 {
1927 case EXPR_CONSTANT:
1928 if (len && !(*len && INTEGER_CST_P (*len)))
1929 *len = build_int_cstu (gfc_charlen_type_node,
1930 c->expr->value.character.length);
1931 break;
1932
1933 case EXPR_ARRAY:
1934 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1935 is_const = false;
1936 break;
1937
1938 case EXPR_VARIABLE:
1939 is_const = false;
1940 if (len)
1941 get_array_ctor_var_strlen (block, c->expr, len);
1942 break;
1943
1944 default:
1945 is_const = false;
1946 if (len)
1947 get_array_ctor_all_strlen (block, c->expr, len);
1948 break;
1949 }
1950
1951 /* After the first iteration, we don't want the length modified. */
1952 len = NULL;
1953 }
1954
1955 return is_const;
1956 }
1957
1958 /* Check whether the array constructor BASE consists entirely of constant
1959 elements; if so, return the number of those elements, otherwise
1960 return zero. Note that an empty or NULL array constructor returns zero. */
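/* For example (illustrative): (/ 1, 2, 3 /) yields 3, whereas a
   constructor containing an iterator, an array-valued element or a
   non-constant element, such as (/ (k, k=1,3) /) or (/ 1, i, 3 /),
   yields 0. */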
1961
1962 unsigned HOST_WIDE_INT
1963 gfc_constant_array_constructor_p (gfc_constructor_base base)
1964 {
1965 unsigned HOST_WIDE_INT nelem = 0;
1966
1967 gfc_constructor *c = gfc_constructor_first (base);
1968 while (c)
1969 {
1970 if (c->iterator
1971 || c->expr->rank > 0
1972 || c->expr->expr_type != EXPR_CONSTANT)
1973 return 0;
1974 c = gfc_constructor_next (c);
1975 nelem++;
1976 }
1977 return nelem;
1978 }
1979
1980
1981 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
1982 and the tree type of its elements, TYPE, return a static constant
1983 variable that is compile-time initialized. */
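/* As an illustrative sketch: for (/ 1, 2, 3 /) this produces a static,
   read-only variable of a GFC_ARRAY_TYPE_P type whose DECL_INITIAL is the
   constructor {1, 2, 3}, so no run-time initialization is needed. */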
1984
1985 tree
1986 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
1987 {
1988 tree tmptype, init, tmp;
1989 HOST_WIDE_INT nelem;
1990 gfc_constructor *c;
1991 gfc_array_spec as;
1992 gfc_se se;
1993 int i;
1994 VEC(constructor_elt,gc) *v = NULL;
1995
1996 /* First traverse the constructor list, converting the constants
1997 to tree to build an initializer. */
1998 nelem = 0;
1999 c = gfc_constructor_first (expr->value.constructor);
2000 while (c)
2001 {
2002 gfc_init_se (&se, NULL);
2003 gfc_conv_constant (&se, c->expr);
2004 if (c->expr->ts.type != BT_CHARACTER)
2005 se.expr = fold_convert (type, se.expr);
2006 else if (POINTER_TYPE_P (type))
2007 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
2008 se.expr);
2009 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
2010 se.expr);
2011 c = gfc_constructor_next (c);
2012 nelem++;
2013 }
2014
2015 /* Next determine the tree type for the array. We use the gfortran
2016 front-end's gfc_get_nodesc_array_type in order to create a suitable
2017 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
2018
2019 memset (&as, 0, sizeof (gfc_array_spec));
2020
2021 as.rank = expr->rank;
2022 as.type = AS_EXPLICIT;
2023 if (!expr->shape)
2024 {
2025 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2026 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
2027 NULL, nelem - 1);
2028 }
2029 else
2030 for (i = 0; i < expr->rank; i++)
2031 {
2032 int tmp = (int) mpz_get_si (expr->shape[i]);
2033 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2034 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
2035 NULL, tmp - 1);
2036 }
2037
2038 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
2039
2040 /* The array spec AS is not needed anymore. */
2041 for (i = 0; i < as.rank + as.corank; i++)
2042 {
2043 gfc_free_expr (as.lower[i]);
2044 gfc_free_expr (as.upper[i]);
2045 }
2046
2047 init = build_constructor (tmptype, v);
2048
2049 TREE_CONSTANT (init) = 1;
2050 TREE_STATIC (init) = 1;
2051
2052 tmp = gfc_create_var (tmptype, "A");
2053 TREE_STATIC (tmp) = 1;
2054 TREE_CONSTANT (tmp) = 1;
2055 TREE_READONLY (tmp) = 1;
2056 DECL_INITIAL (tmp) = init;
2057
2058 return tmp;
2059 }
2060
2061
2062 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
2063 This mostly initializes the scalarizer state info structure with the
2064 appropriate values to directly use the array created by the function
2065 gfc_build_constant_array_constructor. */
2066
2067 static void
2068 trans_constant_array_constructor (gfc_ss * ss, tree type)
2069 {
2070 gfc_array_info *info;
2071 tree tmp;
2072 int i;
2073
2074 tmp = gfc_build_constant_array_constructor (ss->info->expr, type);
2075
2076 info = &ss->info->data.array;
2077
2078 info->descriptor = tmp;
2079 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
2080 info->offset = gfc_index_zero_node;
2081
2082 for (i = 0; i < ss->dimen; i++)
2083 {
2084 info->delta[i] = gfc_index_zero_node;
2085 info->start[i] = gfc_index_zero_node;
2086 info->end[i] = gfc_index_zero_node;
2087 info->stride[i] = gfc_index_one_node;
2088 }
2089 }
2090
2091
2092 static int
2093 get_rank (gfc_loopinfo *loop)
2094 {
2095 int rank;
2096
2097 rank = 0;
2098 for (; loop; loop = loop->parent)
2099 rank += loop->dimen;
2100
2101 return rank;
2102 }
2103
2104
2105 /* Helper routine of gfc_trans_array_constructor to determine if the
2106 bounds of the loop specified by LOOP are constant and simple enough
2107 to use with trans_constant_array_constructor. Returns the
2108 iteration count of the loop if suitable, and NULL_TREE otherwise. */
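/* For example (illustrative): a two-dimensional loop with bounds 0..2 and
   0..1 gives an iteration count of 3 * 2 = 6; any non-constant bound, or a
   nonzero lower bound in a multi-dimensional loop, makes this function
   return NULL_TREE. */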
2109
2110 static tree
2111 constant_array_constructor_loop_size (gfc_loopinfo * l)
2112 {
2113 gfc_loopinfo *loop;
2114 tree size = gfc_index_one_node;
2115 tree tmp;
2116 int i, total_dim;
2117
2118 total_dim = get_rank (l);
2119
2120 for (loop = l; loop; loop = loop->parent)
2121 {
2122 for (i = 0; i < loop->dimen; i++)
2123 {
2124 /* If the bounds aren't constant, return NULL_TREE. */
2125 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
2126 return NULL_TREE;
2127 if (!integer_zerop (loop->from[i]))
2128 {
2129 /* Only allow nonzero "from" in one-dimensional arrays. */
2130 if (total_dim != 1)
2131 return NULL_TREE;
2132 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2133 gfc_array_index_type,
2134 loop->to[i], loop->from[i]);
2135 }
2136 else
2137 tmp = loop->to[i];
2138 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2139 gfc_array_index_type, tmp, gfc_index_one_node);
2140 size = fold_build2_loc (input_location, MULT_EXPR,
2141 gfc_array_index_type, size, tmp);
2142 }
2143 }
2144
2145 return size;
2146 }
2147
2148
2149 static tree *
2150 get_loop_upper_bound_for_array (gfc_ss *array, int array_dim)
2151 {
2152 gfc_ss *ss;
2153 int n;
2154
2155 gcc_assert (array->nested_ss == NULL);
2156
2157 for (ss = array; ss; ss = ss->parent)
2158 for (n = 0; n < ss->loop->dimen; n++)
2159 if (array_dim == get_array_ref_dim_for_loop_dim (ss, n))
2160 return &(ss->loop->to[n]);
2161
2162 gcc_unreachable ();
2163 }
2164
2165
2166 static gfc_loopinfo *
2167 outermost_loop (gfc_loopinfo * loop)
2168 {
2169 while (loop->parent != NULL)
2170 loop = loop->parent;
2171
2172 return loop;
2173 }
2174
2175
2176 /* Array constructors are handled by constructing a temporary, then using that
2177 within the scalarization loop. This is not optimal, but seems by far the
2178 simplest method. */
2179
2180 static void
2181 trans_array_constructor (gfc_ss * ss, locus * where)
2182 {
2183 gfc_constructor_base c;
2184 tree offset;
2185 tree offsetvar;
2186 tree desc;
2187 tree type;
2188 tree tmp;
2189 tree *loop_ubound0;
2190 bool dynamic;
2191 bool old_first_len, old_typespec_chararray_ctor;
2192 tree old_first_len_val;
2193 gfc_loopinfo *loop, *outer_loop;
2194 gfc_ss_info *ss_info;
2195 gfc_expr *expr;
2196 gfc_ss *s;
2197
2198 /* Save the old values for nested checking. */
2199 old_first_len = first_len;
2200 old_first_len_val = first_len_val;
2201 old_typespec_chararray_ctor = typespec_chararray_ctor;
2202
2203 loop = ss->loop;
2204 outer_loop = outermost_loop (loop);
2205 ss_info = ss->info;
2206 expr = ss_info->expr;
2207
2208 /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
2209 typespec was given for the array constructor. */
2210 typespec_chararray_ctor = (expr->ts.u.cl
2211 && expr->ts.u.cl->length_from_typespec);
2212
2213 if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2214 && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
2215 {
2216 first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
2217 first_len = true;
2218 }
2219
2220 gcc_assert (ss->dimen == ss->loop->dimen);
2221
2222 c = expr->value.constructor;
2223 if (expr->ts.type == BT_CHARACTER)
2224 {
2225 bool const_string;
2226
2227 /* get_array_ctor_strlen walks the elements of the constructor. If a
2228 typespec was given, we already know the string length and want the one
2229 specified there. */
2230 if (typespec_chararray_ctor && expr->ts.u.cl->length
2231 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
2232 {
2233 gfc_se length_se;
2234
2235 const_string = false;
2236 gfc_init_se (&length_se, NULL);
2237 gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
2238 gfc_charlen_type_node);
2239 ss_info->string_length = length_se.expr;
2240 gfc_add_block_to_block (&outer_loop->pre, &length_se.pre);
2241 gfc_add_block_to_block (&outer_loop->post, &length_se.post);
2242 }
2243 else
2244 const_string = get_array_ctor_strlen (&outer_loop->pre, c,
2245 &ss_info->string_length);
2246
2247 /* Complex character array constructors should have been taken care of
2248 and not end up here. */
2249 gcc_assert (ss_info->string_length);
2250
2251 expr->ts.u.cl->backend_decl = ss_info->string_length;
2252
2253 type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
2254 if (const_string)
2255 type = build_pointer_type (type);
2256 }
2257 else
2258 type = gfc_typenode_for_spec (&expr->ts);
2259
2260 /* See if the constructor determines the loop bounds. */
2261 dynamic = false;
2262
2263 loop_ubound0 = get_loop_upper_bound_for_array (ss, 0);
2264
2265 if (expr->shape && get_rank (loop) > 1 && *loop_ubound0 == NULL_TREE)
2266 {
2267 /* We have a multidimensional parameter. */
2268 for (s = ss; s; s = s->parent)
2269 {
2270 int n;
2271 for (n = 0; n < s->loop->dimen; n++)
2272 {
2273 s->loop->from[n] = gfc_index_zero_node;
2274 s->loop->to[n] = gfc_conv_mpz_to_tree (expr->shape[s->dim[n]],
2275 gfc_index_integer_kind);
2276 s->loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
2277 gfc_array_index_type,
2278 s->loop->to[n],
2279 gfc_index_one_node);
2280 }
2281 }
2282 }
2283
2284 if (*loop_ubound0 == NULL_TREE)
2285 {
2286 mpz_t size;
2287
2288 /* We should have a 1-dimensional, zero-based loop. */
2289 gcc_assert (loop->parent == NULL && loop->nested == NULL);
2290 gcc_assert (loop->dimen == 1);
2291 gcc_assert (integer_zerop (loop->from[0]));
2292
2293 /* Split the constructor size into a static part and a dynamic part.
2294 Allocate the static size up-front and record whether the dynamic
2295 size might be nonzero. */
2296 mpz_init (size);
2297 dynamic = gfc_get_array_constructor_size (&size, c);
2298 mpz_sub_ui (size, size, 1);
2299 loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
2300 mpz_clear (size);
2301 }
2302
2303 /* Special case constant array constructors. */
2304 if (!dynamic)
2305 {
2306 unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
2307 if (nelem > 0)
2308 {
2309 tree size = constant_array_constructor_loop_size (loop);
2310 if (size && compare_tree_int (size, nelem) == 0)
2311 {
2312 trans_constant_array_constructor (ss, type);
2313 goto finish;
2314 }
2315 }
2316 }
2317
2318 if (TREE_CODE (*loop_ubound0) == VAR_DECL)
2319 dynamic = true;
2320
2321 gfc_trans_create_temp_array (&outer_loop->pre, &outer_loop->post, ss, type,
2322 NULL_TREE, dynamic, true, false, where);
2323
2324 desc = ss_info->data.array.descriptor;
2325 offset = gfc_index_zero_node;
2326 offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
2327 TREE_NO_WARNING (offsetvar) = 1;
2328 TREE_USED (offsetvar) = 0;
2329 gfc_trans_array_constructor_value (&outer_loop->pre, type, desc, c,
2330 &offset, &offsetvar, dynamic);
2331
2332 /* If the array grows dynamically, the upper bound of the loop variable
2333 is determined by the array's final upper bound. */
2334 if (dynamic)
2335 {
2336 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2337 gfc_array_index_type,
2338 offsetvar, gfc_index_one_node);
2339 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2340 gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
2341 if (*loop_ubound0 && TREE_CODE (*loop_ubound0) == VAR_DECL)
2342 gfc_add_modify (&outer_loop->pre, *loop_ubound0, tmp);
2343 else
2344 *loop_ubound0 = tmp;
2345 }
2346
2347 if (TREE_USED (offsetvar))
2348 pushdecl (offsetvar);
2349 else
2350 gcc_assert (INTEGER_CST_P (offset));
2351
2352 #if 0
2353 /* Disable bound checking for now because it's probably broken. */
2354 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
2355 {
2356 gcc_unreachable ();
2357 }
2358 #endif
2359
2360 finish:
2361 /* Restore old values of globals. */
2362 first_len = old_first_len;
2363 first_len_val = old_first_len_val;
2364 typespec_chararray_ctor = old_typespec_chararray_ctor;
2365 }
2366
2367
2368 /* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
2369 called after evaluating all of INFO's vector dimensions. Go through
2370 each such vector dimension and see if we can now fill in any missing
2371 loop bounds. */
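/* For instance (illustrative): for a section a(v) with vector subscript V
   whose loop bound is still unknown, loop->to[n] is set to
   ubound(v) - lbound(v), giving a zero-based loop over the elements of
   the vector. */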
2372
2373 static void
2374 set_vector_loop_bounds (gfc_ss * ss)
2375 {
2376 gfc_loopinfo *loop, *outer_loop;
2377 gfc_array_info *info;
2378 gfc_se se;
2379 tree tmp;
2380 tree desc;
2381 tree zero;
2382 int n;
2383 int dim;
2384
2385 outer_loop = outermost_loop (ss->loop);
2386
2387 info = &ss->info->data.array;
2388
2389 for (; ss; ss = ss->parent)
2390 {
2391 loop = ss->loop;
2392
2393 for (n = 0; n < loop->dimen; n++)
2394 {
2395 dim = ss->dim[n];
2396 if (info->ref->u.ar.dimen_type[dim] != DIMEN_VECTOR
2397 || loop->to[n] != NULL)
2398 continue;
2399
2400 /* Loop variable N indexes vector dimension DIM, and we don't
2401 yet know the upper bound of loop variable N. Set it to the
2402 difference between the vector's upper and lower bounds. */
2403 gcc_assert (loop->from[n] == gfc_index_zero_node);
2404 gcc_assert (info->subscript[dim]
2405 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2406
2407 gfc_init_se (&se, NULL);
2408 desc = info->subscript[dim]->info->data.array.descriptor;
2409 zero = gfc_rank_cst[0];
2410 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2411 gfc_array_index_type,
2412 gfc_conv_descriptor_ubound_get (desc, zero),
2413 gfc_conv_descriptor_lbound_get (desc, zero));
2414 tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
2415 loop->to[n] = tmp;
2416 }
2417 }
2418 }
2419
2420
2421 /* Add the pre and post chains for all the scalar expressions in a SS chain
2422 to loop. This is called after the loop parameters have been calculated,
2423 but before the actual scalarizing loops. */
2424
2425 static void
2426 gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
2427 locus * where)
2428 {
2429 gfc_loopinfo *nested_loop, *outer_loop;
2430 gfc_se se;
2431 gfc_ss_info *ss_info;
2432 gfc_array_info *info;
2433 gfc_expr *expr;
2434 int n;
2435
2436 /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
2437 arguments could get evaluated multiple times. */
2438 if (ss->is_alloc_lhs)
2439 return;
2440
2441 outer_loop = outermost_loop (loop);
2442
2443 /* TODO: This can generate bad code if there are ordering dependencies,
2444 e.g., a callee allocated function and an unknown size constructor. */
2445 gcc_assert (ss != NULL);
2446
2447 for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
2448 {
2449 gcc_assert (ss);
2450
2451 /* Cross loop arrays are handled from within the most nested loop. */
2452 if (ss->nested_ss != NULL)
2453 continue;
2454
2455 ss_info = ss->info;
2456 expr = ss_info->expr;
2457 info = &ss_info->data.array;
2458
2459 switch (ss_info->type)
2460 {
2461 case GFC_SS_SCALAR:
2462 /* Scalar expression. Evaluate this now. This includes elemental
2463 dimension indices, but not array section bounds. */
2464 gfc_init_se (&se, NULL);
2465 gfc_conv_expr (&se, expr);
2466 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2467
2468 if (expr->ts.type != BT_CHARACTER)
2469 {
2470 /* Move the evaluation of scalar expressions outside the
2471 scalarization loop, except for WHERE assignments. */
2472 if (subscript)
2473 se.expr = convert (gfc_array_index_type, se.expr);
2474 if (!ss_info->where)
2475 se.expr = gfc_evaluate_now (se.expr, &outer_loop->pre);
2476 gfc_add_block_to_block (&outer_loop->pre, &se.post);
2477 }
2478 else
2479 gfc_add_block_to_block (&outer_loop->post, &se.post);
2480
2481 ss_info->data.scalar.value = se.expr;
2482 ss_info->string_length = se.string_length;
2483 break;
2484
2485 case GFC_SS_REFERENCE:
2486 /* Scalar argument to elemental procedure. */
2487 gfc_init_se (&se, NULL);
2488 if (ss_info->can_be_null_ref)
2489 {
2490 /* If the actual argument can be absent (in other words, it can
2491 be a NULL reference), don't try to evaluate it; pass instead
2492 the reference directly. */
2493 gfc_conv_expr_reference (&se, expr);
2494 }
2495 else
2496 {
2497 /* Otherwise, evaluate the argument outside the loop and pass
2498 a reference to the value. */
2499 gfc_conv_expr (&se, expr);
2500 }
2501 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2502 gfc_add_block_to_block (&outer_loop->post, &se.post);
2503 if (gfc_is_class_scalar_expr (expr))
2504 /* This is necessary because the dynamic type will always be
2505 larger than the declared type. In consequence, assigning
2506 the value to a temporary could segfault.
2507 OOP-TODO: see if this is generally correct or if the value
2508 has to be written to an allocated temporary, whose address
2509 is passed via ss_info. */
2510 ss_info->data.scalar.value = se.expr;
2511 else
2512 ss_info->data.scalar.value = gfc_evaluate_now (se.expr,
2513 &outer_loop->pre);
2514
2515 ss_info->string_length = se.string_length;
2516 break;
2517
2518 case GFC_SS_SECTION:
2519 /* Add the expressions for scalar and vector subscripts. */
2520 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2521 if (info->subscript[n])
2522 gfc_add_loop_ss_code (loop, info->subscript[n], true, where);
2523
2524 set_vector_loop_bounds (ss);
2525 break;
2526
2527 case GFC_SS_VECTOR:
2528 /* Get the vector's descriptor and store it in SS. */
2529 gfc_init_se (&se, NULL);
2530 gfc_conv_expr_descriptor (&se, expr, gfc_walk_expr (expr));
2531 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2532 gfc_add_block_to_block (&outer_loop->post, &se.post);
2533 info->descriptor = se.expr;
2534 break;
2535
2536 case GFC_SS_INTRINSIC:
2537 gfc_add_intrinsic_ss_code (loop, ss);
2538 break;
2539
2540 case GFC_SS_FUNCTION:
2541 /* Array function return value. We call the function and save its
2542 result in a temporary for use inside the loop. */
2543 gfc_init_se (&se, NULL);
2544 se.loop = loop;
2545 se.ss = ss;
2546 gfc_conv_expr (&se, expr);
2547 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2548 gfc_add_block_to_block (&outer_loop->post, &se.post);
2549 ss_info->string_length = se.string_length;
2550 break;
2551
2552 case GFC_SS_CONSTRUCTOR:
2553 if (expr->ts.type == BT_CHARACTER
2554 && ss_info->string_length == NULL
2555 && expr->ts.u.cl
2556 && expr->ts.u.cl->length)
2557 {
2558 gfc_init_se (&se, NULL);
2559 gfc_conv_expr_type (&se, expr->ts.u.cl->length,
2560 gfc_charlen_type_node);
2561 ss_info->string_length = se.expr;
2562 gfc_add_block_to_block (&outer_loop->pre, &se.pre);
2563 gfc_add_block_to_block (&outer_loop->post, &se.post);
2564 }
2565 trans_array_constructor (ss, where);
2566 break;
2567
2568 case GFC_SS_TEMP:
2569 case GFC_SS_COMPONENT:
2570 /* Do nothing. These are handled elsewhere. */
2571 break;
2572
2573 default:
2574 gcc_unreachable ();
2575 }
2576 }
2577
2578 if (!subscript)
2579 for (nested_loop = loop->nested; nested_loop;
2580 nested_loop = nested_loop->next)
2581 gfc_add_loop_ss_code (nested_loop, nested_loop->ss, subscript, where);
2582 }
2583
2584
2585 /* Translate expressions for the descriptor and data pointer of a SS. */
2586 /*GCC ARRAYS*/
2587
2588 static void
2589 gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
2590 {
2591 gfc_se se;
2592 gfc_ss_info *ss_info;
2593 gfc_array_info *info;
2594 tree tmp;
2595
2596 ss_info = ss->info;
2597 info = &ss_info->data.array;
2598
2599 /* Get the descriptor for the array to be scalarized. */
2600 gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
2601 gfc_init_se (&se, NULL);
2602 se.descriptor_only = 1;
2603 gfc_conv_expr_lhs (&se, ss_info->expr);
2604 gfc_add_block_to_block (block, &se.pre);
2605 info->descriptor = se.expr;
2606 ss_info->string_length = se.string_length;
2607
2608 if (base)
2609 {
2610 /* Also the data pointer. */
2611 tmp = gfc_conv_array_data (se.expr);
2612 /* If this is a variable or address of a variable we use it directly.
2613 Otherwise we must evaluate it now to avoid breaking dependency
2614 analysis by pulling the expressions for elemental array indices
2615 inside the loop. */
2616 if (!(DECL_P (tmp)
2617 || (TREE_CODE (tmp) == ADDR_EXPR
2618 && DECL_P (TREE_OPERAND (tmp, 0)))))
2619 tmp = gfc_evaluate_now (tmp, block);
2620 info->data = tmp;
2621
2622 tmp = gfc_conv_array_offset (se.expr);
2623 info->offset = gfc_evaluate_now (tmp, block);
2624
2625 /* Make absolutely sure that the saved_offset is indeed saved
2626 so that the variable is still accessible after the loops
2627 are translated. */
2628 info->saved_offset = info->offset;
2629 }
2630 }
2631
2632
2633 /* Initialize a gfc_loopinfo structure. */
2634
2635 void
2636 gfc_init_loopinfo (gfc_loopinfo * loop)
2637 {
2638 int n;
2639
2640 memset (loop, 0, sizeof (gfc_loopinfo));
2641 gfc_init_block (&loop->pre);
2642 gfc_init_block (&loop->post);
2643
2644 /* Initially scalarize in order and default to no loop reversal. */
2645 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2646 {
2647 loop->order[n] = n;
2648 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2649 }
2650
2651 loop->ss = gfc_ss_terminator;
2652 }
2653
2654
2655 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2656 chain. */
2657
2658 void
2659 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2660 {
2661 se->loop = loop;
2662 }
2663
2664
2665 /* Return an expression for the data pointer of an array. */
2666
2667 tree
2668 gfc_conv_array_data (tree descriptor)
2669 {
2670 tree type;
2671
2672 type = TREE_TYPE (descriptor);
2673 if (GFC_ARRAY_TYPE_P (type))
2674 {
2675 if (TREE_CODE (type) == POINTER_TYPE)
2676 return descriptor;
2677 else
2678 {
2679 /* Descriptorless arrays. */
2680 return gfc_build_addr_expr (NULL_TREE, descriptor);
2681 }
2682 }
2683 else
2684 return gfc_conv_descriptor_data_get (descriptor);
2685 }
2686
2687
2688 /* Return an expression for the base offset of an array. */
2689
2690 tree
2691 gfc_conv_array_offset (tree descriptor)
2692 {
2693 tree type;
2694
2695 type = TREE_TYPE (descriptor);
2696 if (GFC_ARRAY_TYPE_P (type))
2697 return GFC_TYPE_ARRAY_OFFSET (type);
2698 else
2699 return gfc_conv_descriptor_offset_get (descriptor);
2700 }
2701
2702
2703 /* Get an expression for the array stride. */
2704
2705 tree
2706 gfc_conv_array_stride (tree descriptor, int dim)
2707 {
2708 tree tmp;
2709 tree type;
2710
2711 type = TREE_TYPE (descriptor);
2712
2713 /* For descriptorless arrays the stride is stored in the array type. */
2714 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2715 if (tmp != NULL_TREE)
2716 return tmp;
2717
2718 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2719 return tmp;
2720 }
2721
2722
2723 /* Like gfc_conv_array_stride, but for the lower bound. */
2724
2725 tree
2726 gfc_conv_array_lbound (tree descriptor, int dim)
2727 {
2728 tree tmp;
2729 tree type;
2730
2731 type = TREE_TYPE (descriptor);
2732
2733 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2734 if (tmp != NULL_TREE)
2735 return tmp;
2736
2737 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2738 return tmp;
2739 }
2740
2741
2742 /* Like gfc_conv_array_stride, but for the upper bound. */
2743
2744 tree
2745 gfc_conv_array_ubound (tree descriptor, int dim)
2746 {
2747 tree tmp;
2748 tree type;
2749
2750 type = TREE_TYPE (descriptor);
2751
2752 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2753 if (tmp != NULL_TREE)
2754 return tmp;
2755
2756 /* This should only ever happen when passing an assumed shape array
2757 as an actual parameter. The value will never be used. */
2758 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2759 return gfc_index_zero_node;
2760
2761 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2762 return tmp;
2763 }
2764
2765
2766 /* Generate code to perform an array index bound check. */
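/* Conceptually (an illustrative sketch), the generated code is
     if (index < lbound) runtime_error ("Index ... below lower bound ...");
     if (check_upper && index > ubound) runtime_error (...);
   and the evaluated index is returned unchanged. */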
2767
2768 static tree
2769 trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
2770 locus * where, bool check_upper)
2771 {
2772 tree fault;
2773 tree tmp_lo, tmp_up;
2774 tree descriptor;
2775 char *msg;
2776 const char * name = NULL;
2777
2778 if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
2779 return index;
2780
2781 descriptor = ss->info->data.array.descriptor;
2782
2783 index = gfc_evaluate_now (index, &se->pre);
2784
2785 /* We find a name for the error message. */
2786 name = ss->info->expr->symtree->n.sym->name;
2787 gcc_assert (name != NULL);
2788
2789 if (TREE_CODE (descriptor) == VAR_DECL)
2790 name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
2791
2792 /* If upper bound is present, include both bounds in the error message. */
2793 if (check_upper)
2794 {
2795 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2796 tmp_up = gfc_conv_array_ubound (descriptor, n);
2797
2798 if (name)
2799 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2800 "outside of expected range (%%ld:%%ld)", n+1, name);
2801 else
2802 asprintf (&msg, "Index '%%ld' of dimension %d "
2803 "outside of expected range (%%ld:%%ld)", n+1);
2804
2805 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2806 index, tmp_lo);
2807 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2808 fold_convert (long_integer_type_node, index),
2809 fold_convert (long_integer_type_node, tmp_lo),
2810 fold_convert (long_integer_type_node, tmp_up));
2811 fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
2812 index, tmp_up);
2813 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2814 fold_convert (long_integer_type_node, index),
2815 fold_convert (long_integer_type_node, tmp_lo),
2816 fold_convert (long_integer_type_node, tmp_up));
2817 free (msg);
2818 }
2819 else
2820 {
2821 tmp_lo = gfc_conv_array_lbound (descriptor, n);
2822
2823 if (name)
2824 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
2825 "below lower bound of %%ld", n+1, name);
2826 else
2827 asprintf (&msg, "Index '%%ld' of dimension %d "
2828 "below lower bound of %%ld", n+1);
2829
2830 fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
2831 index, tmp_lo);
2832 gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
2833 fold_convert (long_integer_type_node, index),
2834 fold_convert (long_integer_type_node, tmp_lo));
2835 free (msg);
2836 }
2837
2838 return index;
2839 }
2840
2841
2842 /* Return the offset for an index. Performs bound checking for elemental
2843 dimensions. Single element references are processed separately.
2844 DIM is the array dimension, I is the loop dimension. */
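/* For a scalarized range dimension, for example, the index is
   loopvar * info->stride[dim] + info->delta[dim], which is then multiplied
   by the array stride passed in STRIDE; vector and elemental dimensions
   instead use the precomputed subscript values (see the cases below). */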
2845
2846 static tree
2847 conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
2848 gfc_array_ref * ar, tree stride)
2849 {
2850 gfc_array_info *info;
2851 tree index;
2852 tree desc;
2853 tree data;
2854
2855 info = &ss->info->data.array;
2856
2857 /* Get the index into the array for this dimension. */
2858 if (ar)
2859 {
2860 gcc_assert (ar->type != AR_ELEMENT);
2861 switch (ar->dimen_type[dim])
2862 {
2863 case DIMEN_THIS_IMAGE:
2864 gcc_unreachable ();
2865 break;
2866 case DIMEN_ELEMENT:
2867 /* Elemental dimension. */
2868 gcc_assert (info->subscript[dim]
2869 && info->subscript[dim]->info->type == GFC_SS_SCALAR);
2870 /* We've already translated this value outside the loop. */
2871 index = info->subscript[dim]->info->data.scalar.value;
2872
2873 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2874 ar->as->type != AS_ASSUMED_SIZE
2875 || dim < ar->dimen - 1);
2876 break;
2877
2878 case DIMEN_VECTOR:
2879 gcc_assert (info && se->loop);
2880 gcc_assert (info->subscript[dim]
2881 && info->subscript[dim]->info->type == GFC_SS_VECTOR);
2882 desc = info->subscript[dim]->info->data.array.descriptor;
2883
2884 /* Get a zero-based index into the vector. */
2885 index = fold_build2_loc (input_location, MINUS_EXPR,
2886 gfc_array_index_type,
2887 se->loop->loopvar[i], se->loop->from[i]);
2888
2889 /* Multiply the index by the stride. */
2890 index = fold_build2_loc (input_location, MULT_EXPR,
2891 gfc_array_index_type,
2892 index, gfc_conv_array_stride (desc, 0));
2893
2894 /* Read the vector to get an index into info->descriptor. */
2895 data = build_fold_indirect_ref_loc (input_location,
2896 gfc_conv_array_data (desc));
2897 index = gfc_build_array_ref (data, index, NULL);
2898 index = gfc_evaluate_now (index, &se->pre);
2899 index = fold_convert (gfc_array_index_type, index);
2900
2901 /* Do any bounds checking on the final info->descriptor index. */
2902 index = trans_array_bound_check (se, ss, index, dim, &ar->where,
2903 ar->as->type != AS_ASSUMED_SIZE
2904 || dim < ar->dimen - 1);
2905 break;
2906
2907 case DIMEN_RANGE:
2908 /* Scalarized dimension. */
2909 gcc_assert (info && se->loop);
2910
2911 /* Multiply the loop variable by the stride and delta. */
2912 index = se->loop->loopvar[i];
2913 if (!integer_onep (info->stride[dim]))
2914 index = fold_build2_loc (input_location, MULT_EXPR,
2915 gfc_array_index_type, index,
2916 info->stride[dim]);
2917 if (!integer_zerop (info->delta[dim]))
2918 index = fold_build2_loc (input_location, PLUS_EXPR,
2919 gfc_array_index_type, index,
2920 info->delta[dim]);
2921 break;
2922
2923 default:
2924 gcc_unreachable ();
2925 }
2926 }
2927 else
2928 {
2929 /* Temporary array or derived type component. */
2930 gcc_assert (se->loop);
2931 index = se->loop->loopvar[se->loop->order[i]];
2932
2933 /* Pointer functions can have stride[0] different from unity.
2934 Use the stride returned by the function call and stored in
2935 the descriptor for the temporary. */
2936 if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
2937 && se->ss->info->expr
2938 && se->ss->info->expr->symtree
2939 && se->ss->info->expr->symtree->n.sym->result
2940 && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
2941 stride = gfc_conv_descriptor_stride_get (info->descriptor,
2942 gfc_rank_cst[dim]);
2943
2944 if (!integer_zerop (info->delta[dim]))
2945 index = fold_build2_loc (input_location, PLUS_EXPR,
2946 gfc_array_index_type, index, info->delta[dim]);
2947 }
2948
2949 /* Multiply by the stride. */
2950 if (!integer_onep (stride))
2951 index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
2952 index, stride);
2953
2954 return index;
2955 }
2956
2957
2958 /* Build a scalarized array reference using the vptr 'size'. */
2959
2960 static bool
2961 build_class_array_ref (gfc_se *se, tree base, tree index)
2962 {
2963 tree type;
2964 tree size;
2965 tree offset;
2966 tree decl;
2967 tree tmp;
2968 gfc_expr *expr = se->ss->info->expr;
2969 gfc_ref *ref;
2970 gfc_ref *class_ref;
2971 gfc_typespec *ts;
2972
2973 if (expr == NULL || expr->ts.type != BT_CLASS)
2974 return false;
2975
2976 if (expr->symtree && expr->symtree->n.sym->ts.type == BT_CLASS)
2977 ts = &expr->symtree->n.sym->ts;
2978 else
2979 ts = NULL;
2980 class_ref = NULL;
2981
2982 for (ref = expr->ref; ref; ref = ref->next)
2983 {
2984 if (ref->type == REF_COMPONENT
2985 && ref->u.c.component->ts.type == BT_CLASS
2986 && ref->next && ref->next->type == REF_COMPONENT
2987 && strcmp (ref->next->u.c.component->name, "_data") == 0
2988 && ref->next->next
2989 && ref->next->next->type == REF_ARRAY
2990 && ref->next->next->u.ar.type != AR_ELEMENT)
2991 {
2992 ts = &ref->u.c.component->ts;
2993 class_ref = ref;
2994 break;
2995 }
2996 }
2997
2998 if (ts == NULL)
2999 return false;
3000
3001 if (class_ref == NULL)
3002 decl = expr->symtree->n.sym->backend_decl;
3003 else
3004 {
3005 /* Remove everything after the last class reference, convert the
3006 expression and then recover its tail end once more. */
3007 gfc_se tmpse;
3008 ref = class_ref->next;
3009 class_ref->next = NULL;
3010 gfc_init_se (&tmpse, NULL);
3011 gfc_conv_expr (&tmpse, expr);
3012 decl = tmpse.expr;
3013 class_ref->next = ref;
3014 }
3015
3016 size = gfc_vtable_size_get (decl);
3017
3018 /* Build the address of the element. */
3019 type = TREE_TYPE (TREE_TYPE (base));
3020 size = fold_convert (TREE_TYPE (index), size);
3021 offset = fold_build2_loc (input_location, MULT_EXPR,
3022 gfc_array_index_type,
3023 index, size);
3024 tmp = gfc_build_addr_expr (pvoid_type_node, base);
3025 tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
3026 tmp = fold_convert (build_pointer_type (type), tmp);
3027
3028 /* Return the element in the se expression. */
3029 se->expr = build_fold_indirect_ref_loc (input_location, tmp);
3030 return true;
3031 }
3032
3033
3034 /* Build a scalarized reference to an array. */
3035
3036 static void
3037 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
3038 {
3039 gfc_array_info *info;
3040 tree decl = NULL_TREE;
3041 tree index;
3042 tree tmp;
3043 gfc_ss *ss;
3044 gfc_expr *expr;
3045 int n;
3046
3047 ss = se->ss;
3048 expr = ss->info->expr;
3049 info = &ss->info->data.array;
3050 if (ar)
3051 n = se->loop->order[0];
3052 else
3053 n = 0;
3054
3055 index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
3056 /* Add the offset for this dimension to the stored offset for all other
3057 dimensions. */
3058 if (!integer_zerop (info->offset))
3059 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3060 index, info->offset);
3061
3062 if (expr && is_subref_array (expr))
3063 decl = expr->symtree->n.sym->backend_decl;
3064
3065 tmp = build_fold_indirect_ref_loc (input_location, info->data);
3066
3067 /* Use the vptr 'size' field to access the element of a class
3068 array. */
3069 if (build_class_array_ref (se, tmp, index))
3070 return;
3071
3072 se->expr = gfc_build_array_ref (tmp, index, decl);
3073 }
3074
3075
3076 /* Translate access of temporary array. */
3077
3078 void
3079 gfc_conv_tmp_array_ref (gfc_se * se)
3080 {
3081 se->string_length = se->ss->info->string_length;
3082 gfc_conv_scalarized_array_ref (se, NULL);
3083 gfc_advance_se_ss_chain (se);
3084 }
3085
3086 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
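/* Integer constants are folded into *CST_OFFSET at compile time via
   int_const_binop, while all other terms are chained onto *OFFSET with
   PLUS_EXPRs, so the constant part of the final offset stays in a single
   summand. */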
3087
3088 static void
3089 add_to_offset (tree *cst_offset, tree *offset, tree t)
3090 {
3091 if (TREE_CODE (t) == INTEGER_CST)
3092 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
3093 else
3094 {
3095 if (!integer_zerop (*offset))
3096 *offset = fold_build2_loc (input_location, PLUS_EXPR,
3097 gfc_array_index_type, *offset, t);
3098 else
3099 *offset = t;
3100 }
3101 }
3102
3103
3104 static tree
3105 build_array_ref (tree desc, tree offset, tree decl)
3106 {
3107 tree tmp;
3108
3109 /* Class array references need special treatment because the assigned
3110 type size needs to be used to point to the element. */
3111 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
3112 && TREE_CODE (desc) == COMPONENT_REF
3113 && GFC_CLASS_TYPE_P (TREE_TYPE (TREE_OPERAND (desc, 0))))
3114 {
3115 tree type = gfc_get_element_type (TREE_TYPE (desc));
3116 tmp = TREE_OPERAND (desc, 0);
3117 tmp = gfc_get_class_array_ref (offset, tmp);
3118 tmp = fold_convert (build_pointer_type (type), tmp);
3119 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3120 }
3121 else
3122 {
3123 tmp = gfc_conv_array_data (desc);
3124 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3125 tmp = gfc_build_array_ref (tmp, offset, decl);
3126 }
3127
3128 return tmp;
3129 }
3130
3131
3132
3133 /* Build an array reference. se->expr already holds the array descriptor.
3134 This should be either a variable, indirect variable reference or component
3135 reference. For arrays which do not have a descriptor, se->expr will be
3136 the data pointer.
3137 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]] */
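/* Worked example (illustrative, assuming a contiguous default-bound
   array): for REAL :: a(4,5) the strides are {1, 4} and the base offset is
   -(1*1 + 1*4) = -5, so a(2,3) resolves to base[-5 + 2*1 + 3*4] = base[9],
   the tenth element in column-major order. */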
3138
3139 void
3140 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_symbol * sym,
3141 locus * where)
3142 {
3143 int n;
3144 tree offset, cst_offset;
3145 tree tmp;
3146 tree stride;
3147 gfc_se indexse;
3148 gfc_se tmpse;
3149
3150 if (ar->dimen == 0)
3151 {
3152 gcc_assert (ar->codimen);
3153
3154 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
3155 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
3156 else
3157 {
3158 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
3159 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
3160 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
3161
3162 /* Use the actual tree type and not the wrapped coarray. */
3163 if (!se->want_pointer)
3164 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
3165 se->expr);
3166 }
3167
3168 return;
3169 }
3170
3171 /* Handle scalarized references separately. */
3172 if (ar->type != AR_ELEMENT)
3173 {
3174 gfc_conv_scalarized_array_ref (se, ar);
3175 gfc_advance_se_ss_chain (se);
3176 return;
3177 }
3178
3179 cst_offset = offset = gfc_index_zero_node;
3180 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
3181
3182 /* Calculate the offsets from all the dimensions. Make sure to associate
3183 the final offset so that we form a chain of loop invariant summands. */
3184 for (n = ar->dimen - 1; n >= 0; n--)
3185 {
3186 /* Calculate the index for this dimension. */
3187 gfc_init_se (&indexse, se);
3188 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
3189 gfc_add_block_to_block (&se->pre, &indexse.pre);
3190
3191 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3192 {
3193 /* Check array bounds. */
3194 tree cond;
3195 char *msg;
3196
3197 /* Evaluate the indexse.expr only once. */
3198 indexse.expr = save_expr (indexse.expr);
3199
3200 /* Lower bound. */
3201 tmp = gfc_conv_array_lbound (se->expr, n);
3202 if (sym->attr.temporary)
3203 {
3204 gfc_init_se (&tmpse, se);
3205 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
3206 gfc_array_index_type);
3207 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3208 tmp = tmpse.expr;
3209 }
3210
3211 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
3212 indexse.expr, tmp);
3213 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3214 "below lower bound of %%ld", n+1, sym->name);
3215 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3216 fold_convert (long_integer_type_node,
3217 indexse.expr),
3218 fold_convert (long_integer_type_node, tmp));
3219 free (msg);
3220
3221 /* Upper bound, but not for the last dimension of assumed-size
3222 arrays. */
3223 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
3224 {
3225 tmp = gfc_conv_array_ubound (se->expr, n);
3226 if (sym->attr.temporary)
3227 {
3228 gfc_init_se (&tmpse, se);
3229 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
3230 gfc_array_index_type);
3231 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3232 tmp = tmpse.expr;
3233 }
3234
3235 cond = fold_build2_loc (input_location, GT_EXPR,
3236 boolean_type_node, indexse.expr, tmp);
3237 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3238 "above upper bound of %%ld", n+1, sym->name);
3239 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3240 fold_convert (long_integer_type_node,
3241 indexse.expr),
3242 fold_convert (long_integer_type_node, tmp));
3243 free (msg);
3244 }
3245 }
3246
3247 /* Multiply the index by the stride. */
3248 stride = gfc_conv_array_stride (se->expr, n);
3249 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3250 indexse.expr, stride);
3251
3252 /* And add it to the total. */
3253 add_to_offset (&cst_offset, &offset, tmp);
3254 }
3255
3256 if (!integer_zerop (cst_offset))
3257 offset = fold_build2_loc (input_location, PLUS_EXPR,
3258 gfc_array_index_type, offset, cst_offset);
3259
3260 se->expr = build_array_ref (se->expr, offset, sym->backend_decl);
3261 }
3262
3263
3264 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
3265 LOOP_DIM dimension (if any) to array's offset. */
3266
3267 static void
3268 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
3269 gfc_array_ref *ar, int array_dim, int loop_dim)
3270 {
3271 gfc_se se;
3272 gfc_array_info *info;
3273 tree stride, index;
3274
3275 info = &ss->info->data.array;
3276
3277 gfc_init_se (&se, NULL);
3278 se.loop = loop;
3279 se.expr = info->descriptor;
3280 stride = gfc_conv_array_stride (info->descriptor, array_dim);
3281 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
3282 gfc_add_block_to_block (pblock, &se.pre);
3283
3284 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
3285 gfc_array_index_type,
3286 info->offset, index);
3287 info->offset = gfc_evaluate_now (info->offset, pblock);
3288 }
3289
3290
3291 /* Generate the code to be executed immediately before entering a
3292 scalarization loop. */
3293
3294 static void
3295 gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
3296 stmtblock_t * pblock)
3297 {
3298 tree stride;
3299 gfc_ss_info *ss_info;
3300 gfc_array_info *info;
3301 gfc_ss_type ss_type;
3302 gfc_ss *ss, *pss;
3303 gfc_loopinfo *ploop;
3304 gfc_array_ref *ar;
3305 int i;
3306
3307 /* This code will be executed before entering the scalarization loop
3308 for this dimension. */
3309 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3310 {
3311 ss_info = ss->info;
3312
3313 if ((ss_info->useflags & flag) == 0)
3314 continue;
3315
3316 ss_type = ss_info->type;
3317 if (ss_type != GFC_SS_SECTION
3318 && ss_type != GFC_SS_FUNCTION
3319 && ss_type != GFC_SS_CONSTRUCTOR
3320 && ss_type != GFC_SS_COMPONENT)
3321 continue;
3322
3323 info = &ss_info->data.array;
3324
3325 gcc_assert (dim < ss->dimen);
3326 gcc_assert (ss->dimen == loop->dimen);
3327
3328 if (info->ref)
3329 ar = &info->ref->u.ar;
3330 else
3331 ar = NULL;
3332
3333 if (dim == loop->dimen - 1 && loop->parent != NULL)
3334 {
3335 /* If we are in the outermost dimension of this loop, the previous
3336 dimension shall be in the parent loop. */
3337 gcc_assert (ss->parent != NULL);
3338
3339 pss = ss->parent;
3340 ploop = loop->parent;
3341
3342 /* ss and ss->parent are about the same array. */
3343 gcc_assert (ss_info == pss->info);
3344 }
3345 else
3346 {
3347 ploop = loop;
3348 pss = ss;
3349 }
3350
3351 if (dim == loop->dimen - 1)
3352 i = 0;
3353 else
3354 i = dim + 1;
3355
3356 /* For the time being, there is no loop reordering. */
3357 gcc_assert (i == ploop->order[i]);
3358 i = ploop->order[i];
3359
3360 if (dim == loop->dimen - 1 && loop->parent == NULL)
3361 {
3362 stride = gfc_conv_array_stride (info->descriptor,
3363 innermost_ss (ss)->dim[i]);
3364
3365 /* Calculate the stride of the innermost loop. Hopefully this will
3366 allow the backend optimizers to do their stuff more effectively.
3367 */
3368 info->stride0 = gfc_evaluate_now (stride, pblock);
3369
3370 /* For the outermost loop calculate the offset due to any
3371 elemental dimensions. It will have been initialized with the
3372 base offset of the array. */
3373 if (info->ref)
3374 {
3375 for (i = 0; i < ar->dimen; i++)
3376 {
3377 if (ar->dimen_type[i] != DIMEN_ELEMENT)
3378 continue;
3379
3380 add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
3381 }
3382 }
3383 }
3384 else
3385 /* Add the offset for the previous loop dimension. */
3386 add_array_offset (pblock, ploop, ss, ar, pss->dim[i], i);
3387
3388 /* Remember this offset for the second loop. */
3389 if (dim == loop->temp_dim - 1 && loop->parent == NULL)
3390 info->saved_offset = info->offset;
3391 }
3392 }
3393
3394
3395 /* Start a scalarized expression. Creates a scope and declares loop
3396 variables. */
3397
3398 void
3399 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
3400 {
3401 int dim;
3402 int n;
3403 int flags;
3404
3405 gcc_assert (!loop->array_parameter);
3406
3407 for (dim = loop->dimen - 1; dim >= 0; dim--)
3408 {
3409 n = loop->order[dim];
3410
3411 gfc_start_block (&loop->code[n]);
3412
3413 /* Create the loop variable. */
3414 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
3415
3416 if (dim < loop->temp_dim)
3417 flags = 3;
3418 else
3419 flags = 1;
3420 /* Calculate values that will be constant within this loop. */
3421 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
3422 }
3423 gfc_start_block (pbody);
3424 }
3425
3426
3427 /* Generates the actual loop code for a scalarization loop. */
3428
3429 void
3430 gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
3431 stmtblock_t * pbody)
3432 {
3433 stmtblock_t block;
3434 tree cond;
3435 tree tmp;
3436 tree loopbody;
3437 tree exit_label;
3438 tree stmt;
3439 tree init;
3440 tree incr;
3441
3442 if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
3443 == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
3444 && n == loop->dimen - 1)
3445 {
3446 /* We create an OMP_FOR construct for the outermost scalarized loop. */
3447 init = make_tree_vec (1);
3448 cond = make_tree_vec (1);
3449 incr = make_tree_vec (1);
3450
3451 /* Cycle statement is implemented with a goto. Exit statement must not
3452 be present for this loop. */
3453 exit_label = gfc_build_label_decl (NULL_TREE);
3454 TREE_USED (exit_label) = 1;
3455
3456 /* Label for cycle statements (if needed). */
3457 tmp = build1_v (LABEL_EXPR, exit_label);
3458 gfc_add_expr_to_block (pbody, tmp);
3459
3460 stmt = make_node (OMP_FOR);
3461
3462 TREE_TYPE (stmt) = void_type_node;
3463 OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
3464
3465 OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
3466 OMP_CLAUSE_SCHEDULE);
3467 OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
3468 = OMP_CLAUSE_SCHEDULE_STATIC;
3469 if (ompws_flags & OMPWS_NOWAIT)
3470 OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
3471 = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
3472
3473 /* Initialize the loopvar. */
3474 TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
3475 loop->from[n]);
3476 OMP_FOR_INIT (stmt) = init;
3477 /* The exit condition. */
3478 TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
3479 boolean_type_node,
3480 loop->loopvar[n], loop->to[n]);
3481 SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
3482 OMP_FOR_COND (stmt) = cond;
3483 /* Increment the loopvar. */
3484 tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3485 loop->loopvar[n], gfc_index_one_node);
3486 TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
3487 void_type_node, loop->loopvar[n], tmp);
3488 OMP_FOR_INCR (stmt) = incr;
3489
3490 ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
3491 gfc_add_expr_to_block (&loop->code[n], stmt);
3492 }
3493 else
3494 {
3495 bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
3496 && (loop->temp_ss == NULL);
3497
3498 loopbody = gfc_finish_block (pbody);
3499
3500 if (reverse_loop)
3501 {
3502 tmp = loop->from[n];
3503 loop->from[n] = loop->to[n];
3504 loop->to[n] = tmp;
3505 }
3506
3507 /* Initialize the loopvar. */
3508 if (loop->loopvar[n] != loop->from[n])
3509 gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
3510
3511 exit_label = gfc_build_label_decl (NULL_TREE);
3512
3513 /* Generate the loop body. */
3514 gfc_init_block (&block);
3515
3516 /* The exit condition. */
3517 cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
3518 boolean_type_node, loop->loopvar[n], loop->to[n]);
3519 tmp = build1_v (GOTO_EXPR, exit_label);
3520 TREE_USED (exit_label) = 1;
3521 tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
3522 gfc_add_expr_to_block (&block, tmp);
3523
3524 /* The main body. */
3525 gfc_add_expr_to_block (&block, loopbody);
3526
3527 /* Increment the loopvar. */
3528 tmp = fold_build2_loc (input_location,
3529 reverse_loop ? MINUS_EXPR : PLUS_EXPR,
3530 gfc_array_index_type, loop->loopvar[n],
3531 gfc_index_one_node);
3532
3533 gfc_add_modify (&block, loop->loopvar[n], tmp);
3534
3535 /* Build the loop. */
3536 tmp = gfc_finish_block (&block);
3537 tmp = build1_v (LOOP_EXPR, tmp);
3538 gfc_add_expr_to_block (&loop->code[n], tmp);
3539
3540 /* Add the exit label. */
3541 tmp = build1_v (LABEL_EXPR, exit_label);
3542 gfc_add_expr_to_block (&loop->code[n], tmp);
3543 }
3544
3545 }
3546
3547
3548 /* Finishes and generates the loops for a scalarized expression. */
3549
3550 void
3551 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3552 {
3553 int dim;
3554 int n;
3555 gfc_ss *ss;
3556 stmtblock_t *pblock;
3557 tree tmp;
3558
3559 pblock = body;
3560 /* Generate the loops. */
3561 for (dim = 0; dim < loop->dimen; dim++)
3562 {
3563 n = loop->order[dim];
3564 gfc_trans_scalarized_loop_end (loop, n, pblock);
3565 loop->loopvar[n] = NULL_TREE;
3566 pblock = &loop->code[n];
3567 }
3568
3569 tmp = gfc_finish_block (pblock);
3570 gfc_add_expr_to_block (&loop->pre, tmp);
3571
3572 /* Clear all the used flags. */
3573 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3574 if (ss->parent == NULL)
3575 ss->info->useflags = 0;
3576 }
3577
3578
3579 /* Finish the main body of a scalarized expression, and start the secondary
3580 copying body. */
3581
3582 void
3583 gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
3584 {
3585 int dim;
3586 int n;
3587 stmtblock_t *pblock;
3588 gfc_ss *ss;
3589
3590 pblock = body;
3591 /* We finish as many loops as are used by the temporary. */
3592 for (dim = 0; dim < loop->temp_dim - 1; dim++)
3593 {
3594 n = loop->order[dim];
3595 gfc_trans_scalarized_loop_end (loop, n, pblock);
3596 loop->loopvar[n] = NULL_TREE;
3597 pblock = &loop->code[n];
3598 }
3599
3600 /* We don't want to finish the outermost loop entirely. */
3601 n = loop->order[loop->temp_dim - 1];
3602 gfc_trans_scalarized_loop_end (loop, n, pblock);
3603
3604 /* Restore the initial offsets. */
3605 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3606 {
3607 gfc_ss_type ss_type;
3608 gfc_ss_info *ss_info;
3609
3610 ss_info = ss->info;
3611
3612 if ((ss_info->useflags & 2) == 0)
3613 continue;
3614
3615 ss_type = ss_info->type;
3616 if (ss_type != GFC_SS_SECTION
3617 && ss_type != GFC_SS_FUNCTION
3618 && ss_type != GFC_SS_CONSTRUCTOR
3619 && ss_type != GFC_SS_COMPONENT)
3620 continue;
3621
3622 ss_info->data.array.offset = ss_info->data.array.saved_offset;
3623 }
3624
3625 /* Restart all the inner loops we just finished. */
3626 for (dim = loop->temp_dim - 2; dim >= 0; dim--)
3627 {
3628 n = loop->order[dim];
3629
3630 gfc_start_block (&loop->code[n]);
3631
3632 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
3633
3634 gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
3635 }
3636
3637 /* Start a block for the secondary copying code. */
3638 gfc_start_block (body);
3639 }
3640
3641
3642 /* Precalculate (either lower or upper) bound of an array section.
3643 BLOCK: Block in which the (pre)calculation code will go.
3644 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3645 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3646 DESC: Array descriptor from which the bound will be picked if unspecified
3647 (either lower or upper bound according to LBOUND). */
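/* For example (illustrative): for the section a(:, 2:) the lower bound of
   dimension 0 is unspecified and is therefore read from the array
   descriptor, while the lower bound of dimension 1 is the evaluated
   expression 2. */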
3648
3649 static void
3650 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3651 tree desc, int dim, bool lbound)
3652 {
3653 gfc_se se;
3654 gfc_expr * input_val = values[dim];
3655 tree *output = &bounds[dim];
3656
3657
3658 if (input_val)
3659 {
3660 /* Specified section bound. */
3661 gfc_init_se (&se, NULL);
3662 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3663 gfc_add_block_to_block (block, &se.pre);
3664 *output = se.expr;
3665 }
3666 else
3667 {
3668 /* No bound was specified, so use the bound of the array. */
3669 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3670 gfc_conv_array_ubound (desc, dim);
3671 }
3672 *output = gfc_evaluate_now (*output, block);
3673 }
3674
3675
3676 /* Calculate the start, end and stride of one dimension of an array section. */
3677
3678 static void
3679 gfc_conv_section_startstride (gfc_loopinfo * loop, gfc_ss * ss, int dim)
3680 {
3681 gfc_expr *stride = NULL;
3682 tree desc;
3683 gfc_se se;
3684 gfc_array_info *info;
3685 gfc_array_ref *ar;
3686
3687 gcc_assert (ss->info->type == GFC_SS_SECTION);
3688
3689 info = &ss->info->data.array;
3690 ar = &info->ref->u.ar;
3691
3692 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3693 {
3694 /* We use a zero-based index to access the vector. */
3695 info->start[dim] = gfc_index_zero_node;
3696 info->end[dim] = NULL;
3697 info->stride[dim] = gfc_index_one_node;
3698 return;
3699 }
3700
3701 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3702 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3703 desc = info->descriptor;
3704 stride = ar->stride[dim];
3705
3706 /* Calculate the start of the range. For vector subscripts this will
3707 be the range of the vector. */
3708 evaluate_bound (&loop->pre, info->start, ar->start, desc, dim, true);
3709
3710 /* Similarly calculate the end. Although this is not used in the
3711 scalarizer, it is needed when checking bounds and where the end
3712 is an expression with side-effects. */
3713 evaluate_bound (&loop->pre, info->end, ar->end, desc, dim, false);
3714
3715 /* Calculate the stride. */
3716 if (stride == NULL)
3717 info->stride[dim] = gfc_index_one_node;
3718 else
3719 {
3720 gfc_init_se (&se, NULL);
3721 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3722 gfc_add_block_to_block (&loop->pre, &se.pre);
3723 info->stride[dim] = gfc_evaluate_now (se.expr, &loop->pre);
3724 }
3725 }
3726
3727
3728 /* Calculates the range start and stride for a SS chain. Also gets the
3729 descriptor and data pointer. The range of vector subscripts is the size
3730 of the vector. Array bounds are also checked. */
3731
3732 void
3733 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3734 {
3735 int n;
3736 tree tmp;
3737 gfc_ss *ss;
3738 tree desc;
3739
3740 loop->dimen = 0;
3741 /* Determine the rank of the loop. */
3742 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3743 {
3744 switch (ss->info->type)
3745 {
3746 case GFC_SS_SECTION:
3747 case GFC_SS_CONSTRUCTOR:
3748 case GFC_SS_FUNCTION:
3749 case GFC_SS_COMPONENT:
3750 loop->dimen = ss->dimen;
3751 goto done;
3752
3753 /* As usual, lbound and ubound are exceptions! */
3754 case GFC_SS_INTRINSIC:
3755 switch (ss->info->expr->value.function.isym->id)
3756 {
3757 case GFC_ISYM_LBOUND:
3758 case GFC_ISYM_UBOUND:
3759 case GFC_ISYM_LCOBOUND:
3760 case GFC_ISYM_UCOBOUND:
3761 case GFC_ISYM_THIS_IMAGE:
3762 loop->dimen = ss->dimen;
3763 goto done;
3764
3765 default:
3766 break;
3767 }
3768
3769 default:
3770 break;
3771 }
3772 }
3773
3774 /* We should have determined the rank of the expression by now. If
3775 not, that's bad news. */
3776 gcc_unreachable ();
3777
3778 done:
3779 /* Loop over all the SS in the chain. */
3780 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3781 {
3782 gfc_ss_info *ss_info;
3783 gfc_array_info *info;
3784 gfc_expr *expr;
3785
3786 ss_info = ss->info;
3787 expr = ss_info->expr;
3788 info = &ss_info->data.array;
3789
3790 if (expr && expr->shape && !info->shape)
3791 info->shape = expr->shape;
3792
3793 switch (ss_info->type)
3794 {
3795 case GFC_SS_SECTION:
3796 /* Get the descriptor for the array. If the array is shared across
3797 nested loops, the descriptor was already obtained in the outermost loop. */
3798 if (ss->parent == NULL)
3799 gfc_conv_ss_descriptor (&loop->pre, ss, !loop->array_parameter);
3800
3801 for (n = 0; n < ss->dimen; n++)
3802 gfc_conv_section_startstride (loop, ss, ss->dim[n]);
3803 break;
3804
3805 case GFC_SS_INTRINSIC:
3806 switch (expr->value.function.isym->id)
3807 {
3808 /* Fall through to supply start and stride. */
3809 case GFC_ISYM_LBOUND:
3810 case GFC_ISYM_UBOUND:
3811 case GFC_ISYM_LCOBOUND:
3812 case GFC_ISYM_UCOBOUND:
3813 case GFC_ISYM_THIS_IMAGE:
3814 break;
3815
3816 default:
3817 continue;
3818 }
3819
3820 case GFC_SS_CONSTRUCTOR:
3821 case GFC_SS_FUNCTION:
3822 for (n = 0; n < ss->dimen; n++)
3823 {
3824 int dim = ss->dim[n];
3825
3826 info->start[dim] = gfc_index_zero_node;
3827 info->end[dim] = gfc_index_zero_node;
3828 info->stride[dim] = gfc_index_one_node;
3829 }
3830 break;
3831
3832 default:
3833 break;
3834 }
3835 }
3836
3837 /* The rest is just runtime bound checking. */
3838 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3839 {
3840 stmtblock_t block;
3841 tree lbound, ubound;
3842 tree end;
3843 tree size[GFC_MAX_DIMENSIONS];
3844 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
3845 gfc_array_info *info;
3846 char *msg;
3847 int dim;
3848
3849 gfc_start_block (&block);
3850
3851 for (n = 0; n < loop->dimen; n++)
3852 size[n] = NULL_TREE;
3853
3854 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3855 {
3856 stmtblock_t inner;
3857 gfc_ss_info *ss_info;
3858 gfc_expr *expr;
3859 locus *expr_loc;
3860 const char *expr_name;
3861
3862 ss_info = ss->info;
3863 if (ss_info->type != GFC_SS_SECTION)
3864 continue;
3865
3866 /* Catch allocatable lhs in f2003. */
3867 if (gfc_option.flag_realloc_lhs && ss->is_alloc_lhs)
3868 continue;
3869
3870 expr = ss_info->expr;
3871 expr_loc = &expr->where;
3872 expr_name = expr->symtree->name;
3873
3874 gfc_start_block (&inner);
3875
3876 /* TODO: range checking for mapped dimensions. */
3877 info = &ss_info->data.array;
3878
3879 /* This code only checks ranges. Elemental and vector
3880 dimensions are checked later. */
3881 for (n = 0; n < loop->dimen; n++)
3882 {
3883 bool check_upper;
3884
3885 dim = ss->dim[n];
3886 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
3887 continue;
3888
3889 if (dim == info->ref->u.ar.dimen - 1
3890 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
3891 check_upper = false;
3892 else
3893 check_upper = true;
3894
3895 /* Zero stride is not allowed. */
3896 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
3897 info->stride[dim], gfc_index_zero_node);
3898 asprintf (&msg, "Zero stride is not allowed, for dimension %d "
3899 "of array '%s'", dim + 1, expr_name);
3900 gfc_trans_runtime_check (true, false, tmp, &inner,
3901 expr_loc, msg);
3902 free (msg);
3903
3904 desc = info->descriptor;
3905
3906 /* This is the run-time equivalent of resolve.c's
3907 check_dimension(). The logic is more readable there
3908 than it is here, with all the trees. */
3909 lbound = gfc_conv_array_lbound (desc, dim);
3910 end = info->end[dim];
3911 if (check_upper)
3912 ubound = gfc_conv_array_ubound (desc, dim);
3913 else
3914 ubound = NULL;
3915
3916 /* non_zerosized is true when the selected range is not
3917 empty. */
3918 stride_pos = fold_build2_loc (input_location, GT_EXPR,
3919 boolean_type_node, info->stride[dim],
3920 gfc_index_zero_node);
3921 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
3922 info->start[dim], end);
3923 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3924 boolean_type_node, stride_pos, tmp);
3925
3926 stride_neg = fold_build2_loc (input_location, LT_EXPR,
3927 boolean_type_node,
3928 info->stride[dim], gfc_index_zero_node);
3929 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
3930 info->start[dim], end);
3931 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3932 boolean_type_node,
3933 stride_neg, tmp);
3934 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
3935 boolean_type_node,
3936 stride_pos, stride_neg);
3937
3938 /* Check the start of the range against the lower and upper
3939 bounds of the array, if the range is not empty.
3940 If upper bound is present, include both bounds in the
3941 error message. */
3942 if (check_upper)
3943 {
3944 tmp = fold_build2_loc (input_location, LT_EXPR,
3945 boolean_type_node,
3946 info->start[dim], lbound);
3947 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3948 boolean_type_node,
3949 non_zerosized, tmp);
3950 tmp2 = fold_build2_loc (input_location, GT_EXPR,
3951 boolean_type_node,
3952 info->start[dim], ubound);
3953 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3954 boolean_type_node,
3955 non_zerosized, tmp2);
3956 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3957 "outside of expected range (%%ld:%%ld)",
3958 dim + 1, expr_name);
3959 gfc_trans_runtime_check (true, false, tmp, &inner,
3960 expr_loc, msg,
3961 fold_convert (long_integer_type_node, info->start[dim]),
3962 fold_convert (long_integer_type_node, lbound),
3963 fold_convert (long_integer_type_node, ubound));
3964 gfc_trans_runtime_check (true, false, tmp2, &inner,
3965 expr_loc, msg,
3966 fold_convert (long_integer_type_node, info->start[dim]),
3967 fold_convert (long_integer_type_node, lbound),
3968 fold_convert (long_integer_type_node, ubound));
3969 free (msg);
3970 }
3971 else
3972 {
3973 tmp = fold_build2_loc (input_location, LT_EXPR,
3974 boolean_type_node,
3975 info->start[dim], lbound);
3976 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
3977 boolean_type_node, non_zerosized, tmp);
3978 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
3979 "below lower bound of %%ld",
3980 dim + 1, expr_name);
3981 gfc_trans_runtime_check (true, false, tmp, &inner,
3982 expr_loc, msg,
3983 fold_convert (long_integer_type_node, info->start[dim]),
3984 fold_convert (long_integer_type_node, lbound));
3985 free (msg);
3986 }
3987
3988 /* Compute the last element of the range, which is not
3989 necessarily "end" (think 0:5:3, which doesn't contain 5)
3990 and check it against both lower and upper bounds. */
3991
3992 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3993 gfc_array_index_type, end,
3994 info->start[dim]);
3995 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
3996 gfc_array_index_type, tmp,
3997 info->stride[dim]);
3998 tmp = fold_build2_loc (input_location, MINUS_EXPR,
3999 gfc_array_index_type, end, tmp);
4000 tmp2 = fold_build2_loc (input_location, LT_EXPR,
4001 boolean_type_node, tmp, lbound);
4002 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4003 boolean_type_node, non_zerosized, tmp2);
4004 if (check_upper)
4005 {
4006 tmp3 = fold_build2_loc (input_location, GT_EXPR,
4007 boolean_type_node, tmp, ubound);
4008 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4009 boolean_type_node, non_zerosized, tmp3);
4010 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4011 "outside of expected range (%%ld:%%ld)",
4012 dim + 1, expr_name);
4013 gfc_trans_runtime_check (true, false, tmp2, &inner,
4014 expr_loc, msg,
4015 fold_convert (long_integer_type_node, tmp),
4016 fold_convert (long_integer_type_node, ubound),
4017 fold_convert (long_integer_type_node, lbound));
4018 gfc_trans_runtime_check (true, false, tmp3, &inner,
4019 expr_loc, msg,
4020 fold_convert (long_integer_type_node, tmp),
4021 fold_convert (long_integer_type_node, ubound),
4022 fold_convert (long_integer_type_node, lbound));
4023 free (msg);
4024 }
4025 else
4026 {
4027 asprintf (&msg, "Index '%%ld' of dimension %d of array '%s' "
4028 "below lower bound of %%ld",
4029 dim + 1, expr_name);
4030 gfc_trans_runtime_check (true, false, tmp2, &inner,
4031 expr_loc, msg,
4032 fold_convert (long_integer_type_node, tmp),
4033 fold_convert (long_integer_type_node, lbound));
4034 free (msg);
4035 }
4036
4037 /* Check the section sizes match. */
4038 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4039 gfc_array_index_type, end,
4040 info->start[dim]);
4041 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4042 gfc_array_index_type, tmp,
4043 info->stride[dim]);
4044 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4045 gfc_array_index_type,
4046 gfc_index_one_node, tmp);
4047 tmp = fold_build2_loc (input_location, MAX_EXPR,
4048 gfc_array_index_type, tmp,
4049 build_int_cst (gfc_array_index_type, 0));
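/* E.g. the section 1:10:3 gives (10 - 1) / 3 + 1 = 4 elements, while a
   backwards-empty section such as 5:1:1 gives a negative value that the
   MAX_EXPR above clamps to zero.  */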
4050 /* We remember the size of the first section, and check all the
4051 others against this. */
4052 if (size[n])
4053 {
4054 tmp3 = fold_build2_loc (input_location, NE_EXPR,
4055 boolean_type_node, tmp, size[n]);
4056 asprintf (&msg, "Array bound mismatch for dimension %d "
4057 "of array '%s' (%%ld/%%ld)",
4058 dim + 1, expr_name);
4059
4060 gfc_trans_runtime_check (true, false, tmp3, &inner,
4061 expr_loc, msg,
4062 fold_convert (long_integer_type_node, tmp),
4063 fold_convert (long_integer_type_node, size[n]));
4064
4065 free (msg);
4066 }
4067 else
4068 size[n] = gfc_evaluate_now (tmp, &inner);
4069 }
4070
4071 tmp = gfc_finish_block (&inner);
4072
4073 /* For optional arguments, only check bounds if the argument is
4074 present. */
4075 if (expr->symtree->n.sym->attr.optional
4076 || expr->symtree->n.sym->attr.not_always_present)
4077 tmp = build3_v (COND_EXPR,
4078 gfc_conv_expr_present (expr->symtree->n.sym),
4079 tmp, build_empty_stmt (input_location));
4080
4081 gfc_add_expr_to_block (&block, tmp);
4082
4083 }
4084
4085 tmp = gfc_finish_block (&block);
4086 gfc_add_expr_to_block (&loop->pre, tmp);
4087 }
4088
4089 for (loop = loop->nested; loop; loop = loop->next)
4090 gfc_conv_ss_startstride (loop);
4091 }
4092
4093 /* Return true if both symbols could refer to the same data object. Does
4094 not take account of aliasing due to equivalence statements. */
4095
4096 static int
4097 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
4098 bool lsym_target, bool rsym_pointer, bool rsym_target)
4099 {
4100 /* Aliasing isn't possible if the symbols have different base types. */
4101 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
4102 return 0;
4103
4104 /* Pointers can point to other pointers and target objects. */
4105
4106 if ((lsym_pointer && (rsym_pointer || rsym_target))
4107 || (rsym_pointer && (lsym_pointer || lsym_target)))
4108 return 1;
4109
4110 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
4111 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
4112 checked above. */
4113 if (lsym_target && rsym_target
4114 && ((lsym->attr.dummy && !lsym->attr.contiguous
4115 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
4116 || (rsym->attr.dummy && !rsym->attr.contiguous
4117 && (!rsym->attr.dimension
4118 || rsym->as->type == AS_ASSUMED_SHAPE))))
4119 return 1;
4120
4121 return 0;
4122 }
4123
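/* As an illustration, two assumed-shape TARGET dummies such as
     subroutine s (x, y)
       real, target :: x(:), y(:)
   may be argument-associated with overlapping actual arguments, so the
   dummy/target test above treats them as potential aliases.  */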
4124
4125 /* Return true if the two SS could be aliased, i.e. both point to the same data
4126 object. */
4127 /* TODO: resolve aliases based on frontend expressions. */
4128
4129 static int
4130 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
4131 {
4132 gfc_ref *lref;
4133 gfc_ref *rref;
4134 gfc_expr *lexpr, *rexpr;
4135 gfc_symbol *lsym;
4136 gfc_symbol *rsym;
4137 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
4138
4139 lexpr = lss->info->expr;
4140 rexpr = rss->info->expr;
4141
4142 lsym = lexpr->symtree->n.sym;
4143 rsym = rexpr->symtree->n.sym;
4144
4145 lsym_pointer = lsym->attr.pointer;
4146 lsym_target = lsym->attr.target;
4147 rsym_pointer = rsym->attr.pointer;
4148 rsym_target = rsym->attr.target;
4149
4150 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
4151 rsym_pointer, rsym_target))
4152 return 1;
4153
4154 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
4155 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
4156 return 0;
4157
4158 /* For derived types we must check all the component types. We can ignore
4159 array references as these will have the same base type as the previous
4160 component ref. */
4161 for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next)
4162 {
4163 if (lref->type != REF_COMPONENT)
4164 continue;
4165
4166 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
4167 lsym_target = lsym_target || lref->u.c.sym->attr.target;
4168
4169 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
4170 rsym_pointer, rsym_target))
4171 return 1;
4172
4173 if ((lsym_pointer && (rsym_pointer || rsym_target))
4174 || (rsym_pointer && (lsym_pointer || lsym_target)))
4175 {
4176 if (gfc_compare_types (&lref->u.c.component->ts,
4177 &rsym->ts))
4178 return 1;
4179 }
4180
4181 for (rref = rexpr->ref; rref != rss->info->data.array.ref;
4182 rref = rref->next)
4183 {
4184 if (rref->type != REF_COMPONENT)
4185 continue;
4186
4187 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4188 rsym_target = rsym_target || rref->u.c.sym->attr.target;
4189
4190 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
4191 lsym_pointer, lsym_target,
4192 rsym_pointer, rsym_target))
4193 return 1;
4194
4195 if ((lsym_pointer && (rsym_pointer || rsym_target))
4196 || (rsym_pointer && (lsym_pointer || lsym_target)))
4197 {
4198 if (gfc_compare_types (&lref->u.c.component->ts,
4199 &rref->u.c.sym->ts))
4200 return 1;
4201 if (gfc_compare_types (&lref->u.c.sym->ts,
4202 &rref->u.c.component->ts))
4203 return 1;
4204 if (gfc_compare_types (&lref->u.c.component->ts,
4205 &rref->u.c.component->ts))
4206 return 1;
4207 }
4208 }
4209 }
4210
4211 lsym_pointer = lsym->attr.pointer;
4212 lsym_target = lsym->attr.target;
4215
4216 for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next)
4217 {
4218 if (rref->type != REF_COMPONENT)
4219 break;
4220
4221 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4222 rsym_target = rsym_target || rref->u.c.sym->attr.target;
4223
4224 if (symbols_could_alias (rref->u.c.sym, lsym,
4225 lsym_pointer, lsym_target,
4226 rsym_pointer, rsym_target))
4227 return 1;
4228
4229 if ((lsym_pointer && (rsym_pointer || rsym_target))
4230 || (rsym_pointer && (lsym_pointer || lsym_target)))
4231 {
4232 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
4233 return 1;
4234 }
4235 }
4236
4237 return 0;
4238 }
4239
4240
4241 /* Resolve array data dependencies. Creates a temporary if required. */
4242 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
4243 dependency.c. */
4244
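/* For instance, in
     a(2:n) = a(1:n-1) + 1
   the two sections of A overlap; gfc_dep_resolver reports the dependency and
   either the loop is reversed or a temporary is created, so that all
   right-hand side values are read before any element of A is written.  */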
4245 void
4246 gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
4247 gfc_ss * rss)
4248 {
4249 gfc_ss *ss;
4250 gfc_ref *lref;
4251 gfc_ref *rref;
4252 gfc_expr *dest_expr;
4253 gfc_expr *ss_expr;
4254 int nDepend = 0;
4255 int i, j;
4256
4257 loop->temp_ss = NULL;
4258 dest_expr = dest->info->expr;
4259
4260 for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
4261 {
4262 if (ss->info->type != GFC_SS_SECTION)
4263 continue;
4264
4265 ss_expr = ss->info->expr;
4266
4267 if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym)
4268 {
4269 if (gfc_could_be_alias (dest, ss)
4270 || gfc_are_equivalenced_arrays (dest_expr, ss_expr))
4271 {
4272 nDepend = 1;
4273 break;
4274 }
4275 }
4276 else
4277 {
4278 lref = dest_expr->ref;
4279 rref = ss_expr->ref;
4280
4281 nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
4282
4283 if (nDepend == 1)
4284 break;
4285
4286 for (i = 0; i < dest->dimen; i++)
4287 for (j = 0; j < ss->dimen; j++)
4288 if (i != j
4289 && dest->dim[i] == ss->dim[j])
4290 {
4291 /* If we don't access array elements in the same order,
4292 there is a dependency. */
4293 nDepend = 1;
4294 goto temporary;
4295 }
4296 #if 0
4297 /* TODO : loop shifting. */
4298 if (nDepend == 1)
4299 {
4300 /* Mark the dimensions for LOOP SHIFTING */
4301 for (n = 0; n < loop->dimen; n++)
4302 {
4303 int dim = dest->data.info.dim[n];
4304
4305 if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
4306 depends[n] = 2;
4307 else if (! gfc_is_same_range (&lref->u.ar,
4308 &rref->u.ar, dim, 0))
4309 depends[n] = 1;
4310 }
4311
4312 /* Put all the dimensions with dependencies in the
4313 innermost loops. */
4314 dim = 0;
4315 for (n = 0; n < loop->dimen; n++)
4316 {
4317 gcc_assert (loop->order[n] == n);
4318 if (depends[n])
4319 loop->order[dim++] = n;
4320 }
4321 for (n = 0; n < loop->dimen; n++)
4322 {
4323 if (! depends[n])
4324 loop->order[dim++] = n;
4325 }
4326
4327 gcc_assert (dim == loop->dimen);
4328 break;
4329 }
4330 #endif
4331 }
4332 }
4333
4334 temporary:
4335
4336 if (nDepend == 1)
4337 {
4338 tree base_type = gfc_typenode_for_spec (&dest_expr->ts);
4339 if (GFC_ARRAY_TYPE_P (base_type)
4340 || GFC_DESCRIPTOR_TYPE_P (base_type))
4341 base_type = gfc_get_element_type (base_type);
4342 loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length,
4343 loop->dimen);
4344 gfc_add_ss_to_loop (loop, loop->temp_ss);
4345 }
4346 else
4347 loop->temp_ss = NULL;
4348 }
4349
4350
4351 /* Browse through each array's information from the scalarizer and set the loop
4352 bounds according to the "best" one (per dimension), i.e. the one which
4353 provides the most information (constant bounds, shape, etc.). */
4354
4355 static void
4356 set_loop_bounds (gfc_loopinfo *loop)
4357 {
4358 int n, dim, spec_dim;
4359 gfc_array_info *info;
4360 gfc_array_info *specinfo;
4361 gfc_ss *ss;
4362 tree tmp;
4363 gfc_ss **loopspec;
4364 bool dynamic[GFC_MAX_DIMENSIONS];
4365 mpz_t *cshape;
4366 mpz_t i;
4367 bool nonoptional_arr;
4368
4369 loopspec = loop->specloop;
4370
4371 mpz_init (i);
4372 for (n = 0; n < loop->dimen; n++)
4373 {
4374 loopspec[n] = NULL;
4375 dynamic[n] = false;
4376
4377 /* If there are both optional and nonoptional array arguments, scalarize
4378 over the nonoptional; otherwise, it does not matter as then all
4379 (optional) arrays have to be present per F2008, 12.5.2.12p3(6). */
4380
4381 nonoptional_arr = false;
4382
4383 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4384 if (ss->info->type != GFC_SS_SCALAR && ss->info->type != GFC_SS_TEMP
4385 && ss->info->type != GFC_SS_REFERENCE && !ss->info->can_be_null_ref)
4386 nonoptional_arr = true;
4387
4388 /* We use one SS term, and use that to determine the bounds of the
4389 loop for this dimension. We try to pick the simplest term. */
4390 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4391 {
4392 gfc_ss_type ss_type;
4393
4394 ss_type = ss->info->type;
4395 if (ss_type == GFC_SS_SCALAR
4396 || ss_type == GFC_SS_TEMP
4397 || ss_type == GFC_SS_REFERENCE
4398 || (ss->info->can_be_null_ref && nonoptional_arr))
4399 continue;
4400
4401 info = &ss->info->data.array;
4402 dim = ss->dim[n];
4403
4404 if (loopspec[n] != NULL)
4405 {
4406 specinfo = &loopspec[n]->info->data.array;
4407 spec_dim = loopspec[n]->dim[n];
4408 }
4409 else
4410 {
4411 /* Silence uninitialized warnings. */
4412 specinfo = NULL;
4413 spec_dim = 0;
4414 }
4415
4416 if (info->shape)
4417 {
4418 gcc_assert (info->shape[dim]);
4419 /* The frontend has worked out the size for us. */
4420 if (!loopspec[n]
4421 || !specinfo->shape
4422 || !integer_zerop (specinfo->start[spec_dim]))
4423 /* Prefer zero-based descriptors if possible. */
4424 loopspec[n] = ss;
4425 continue;
4426 }
4427
4428 if (ss_type == GFC_SS_CONSTRUCTOR)
4429 {
4430 gfc_constructor_base base;
4431 /* An unknown size constructor will always be rank one.
4432 Higher rank constructors will either have known shape,
4433 or still be wrapped in a call to reshape. */
4434 gcc_assert (loop->dimen == 1);
4435
4436 /* Always prefer to use the constructor bounds if the size
4437 can be determined at compile time. Prefer not to otherwise,
4438 since the general case involves realloc, and it's better to
4439 avoid that overhead if possible. */
4440 base = ss->info->expr->value.constructor;
4441 dynamic[n] = gfc_get_array_constructor_size (&i, base);
4442 if (!dynamic[n] || !loopspec[n])
4443 loopspec[n] = ss;
4444 continue;
4445 }
4446
4447 /* TODO: Pick the best bound if we have a choice between a
4448 function and something else. */
4449 if (ss_type == GFC_SS_FUNCTION)
4450 {
4451 loopspec[n] = ss;
4452 continue;
4453 }
4454
4455 /* Avoid using an allocatable lhs in an assignment, since
4456 there might be a reallocation coming. */
4457 if (loopspec[n] && ss->is_alloc_lhs)
4458 continue;
4459
4460 if (ss_type != GFC_SS_SECTION)
4461 continue;
4462
4463 if (!loopspec[n])
4464 loopspec[n] = ss;
4465 /* Criteria for choosing a loop specifier (most important first):
4466 doesn't need realloc
4467 stride of one
4468 known stride
4469 known lower bound
4470 known upper bound
4471 */
4472 else if ((loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
4473 || n >= loop->dimen)
4474 loopspec[n] = ss;
4475 else if (integer_onep (info->stride[dim])
4476 && !integer_onep (specinfo->stride[spec_dim]))
4477 loopspec[n] = ss;
4478 else if (INTEGER_CST_P (info->stride[dim])
4479 && !INTEGER_CST_P (specinfo->stride[spec_dim]))
4480 loopspec[n] = ss;
4481 else if (INTEGER_CST_P (info->start[dim])
4482 && !INTEGER_CST_P (specinfo->start[spec_dim]))
4483 loopspec[n] = ss;
4484 /* We don't work out the upper bound.
4485 else if (INTEGER_CST_P (info->finish[n])
4486 && ! INTEGER_CST_P (specinfo->finish[n]))
4487 loopspec[n] = ss; */
4488 }
4489
4490 /* We should have found the scalarization loop specifier. If not,
4491 that's bad news. */
4492 gcc_assert (loopspec[n]);
4493
4494 info = &loopspec[n]->info->data.array;
4495 dim = loopspec[n]->dim[n];
4496
4497 /* Set the extents of this range. */
4498 cshape = info->shape;
4499 if (cshape && INTEGER_CST_P (info->start[dim])
4500 && INTEGER_CST_P (info->stride[dim]))
4501 {
4502 loop->from[n] = info->start[dim];
4503 mpz_set (i, cshape[get_array_ref_dim_for_loop_dim (loopspec[n], n)]);
4504 mpz_sub_ui (i, i, 1);
4505 /* To = from + (size - 1) * stride. */
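/* E.g. a section 2:14:3 whose frontend-computed shape is 5 gives
   to = 2 + (5 - 1) * 3 = 14.  */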
4506 tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
4507 if (!integer_onep (info->stride[dim]))
4508 tmp = fold_build2_loc (input_location, MULT_EXPR,
4509 gfc_array_index_type, tmp,
4510 info->stride[dim]);
4511 loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
4512 gfc_array_index_type,
4513 loop->from[n], tmp);
4514 }
4515 else
4516 {
4517 loop->from[n] = info->start[dim];
4518 switch (loopspec[n]->info->type)
4519 {
4520 case GFC_SS_CONSTRUCTOR:
4521 /* The upper bound is calculated when we expand the
4522 constructor. */
4523 gcc_assert (loop->to[n] == NULL_TREE);
4524 break;
4525
4526 case GFC_SS_SECTION:
4527 /* Use the end expression if it exists and is not constant,
4528 so that it is only evaluated once. */
4529 loop->to[n] = info->end[dim];
4530 break;
4531
4532 case GFC_SS_FUNCTION:
4533 /* The loop bound will be set when we generate the call. */
4534 gcc_assert (loop->to[n] == NULL_TREE);
4535 break;
4536
4537 default:
4538 gcc_unreachable ();
4539 }
4540 }
4541
4542 /* Transform everything so we have a simple incrementing variable. */
4543 if (integer_onep (info->stride[dim]))
4544 info->delta[dim] = gfc_index_zero_node;
4545 else
4546 {
4547 /* Set the delta for this section. */
4548 info->delta[dim] = gfc_evaluate_now (loop->from[n], &loop->pre);
4549 /* Number of iterations is (end - start + step) / step.
4550 With start = 0, this simplifies to
4551 last = end / step;
4552 for (i = 0; i<=last; i++){...}; */
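/* E.g. a loop over the section 2:14:3 becomes 0 <= i <= (14 - 2) / 3 = 4,
   i.e. five iterations, with delta remembering the original start of 2.  */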
4553 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4554 gfc_array_index_type, loop->to[n],
4555 loop->from[n]);
4556 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4557 gfc_array_index_type, tmp, info->stride[dim]);
4558 tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
4559 tmp, build_int_cst (gfc_array_index_type, -1));
4560 loop->to[n] = gfc_evaluate_now (tmp, &loop->pre);
4561 /* Make the loop variable start at 0. */
4562 loop->from[n] = gfc_index_zero_node;
4563 }
4564 }
4565 mpz_clear (i);
4566
4567 for (loop = loop->nested; loop; loop = loop->next)
4568 set_loop_bounds (loop);
4569 }
4570
4571
4572 /* Initialize the scalarization loop. Creates the loop variables. Determines
4573 the range of the loop variables. Creates a temporary if required.
4574 Also generates code for scalar expressions which have been
4575 moved outside the loop. */
4576
4577 void
4578 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
4579 {
4580 gfc_ss *tmp_ss;
4581 tree tmp;
4582
4583 set_loop_bounds (loop);
4584
4585 /* Add all the scalar code that can be taken out of the loops.
4586 This may include calculating the loop bounds, so do it before
4587 allocating the temporary. */
4588 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4589
4590 tmp_ss = loop->temp_ss;
4591 /* If we want a temporary then create it. */
4592 if (tmp_ss != NULL)
4593 {
4594 gfc_ss_info *tmp_ss_info;
4595
4596 tmp_ss_info = tmp_ss->info;
4597 gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
4598 gcc_assert (loop->parent == NULL);
4599
4600 /* Make absolutely sure that this is a complete type. */
4601 if (tmp_ss_info->string_length)
4602 tmp_ss_info->data.temp.type
4603 = gfc_get_character_type_len_for_eltype
4604 (TREE_TYPE (tmp_ss_info->data.temp.type),
4605 tmp_ss_info->string_length);
4606
4607 tmp = tmp_ss_info->data.temp.type;
4608 memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
4609 tmp_ss_info->type = GFC_SS_SECTION;
4610
4611 gcc_assert (tmp_ss->dimen != 0);
4612
4613 gfc_trans_create_temp_array (&loop->pre, &loop->post, tmp_ss, tmp,
4614 NULL_TREE, false, true, false, where);
4615 }
4616
4617 /* For array parameters we don't have loop variables, so don't calculate the
4618 translations. */
4619 if (!loop->array_parameter)
4620 gfc_set_delta (loop);
4621 }
4622
4623
4624 /* Calculates how to transform from loop variables to array indices for each
4625 array: once the loop bounds are chosen, sets the difference (DELTA field)
4626 between the loop bounds and the array reference bounds in each array info. */
4627
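/* For instance, if the loop variable runs from 0 while some other section in
   the chain starts at 3 with stride 2, its delta is 3 - 0*2 = 3, so that the
   generated array index is effectively 2*loopvar + 3, i.e. 3, 5, 7, ...  */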
4628 void
4629 gfc_set_delta (gfc_loopinfo *loop)
4630 {
4631 gfc_ss *ss, **loopspec;
4632 gfc_array_info *info;
4633 tree tmp;
4634 int n, dim;
4635
4636 loopspec = loop->specloop;
4637
4638 /* Calculate the translation from loop variables to array indices. */
4639 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4640 {
4641 gfc_ss_type ss_type;
4642
4643 ss_type = ss->info->type;
4644 if (ss_type != GFC_SS_SECTION
4645 && ss_type != GFC_SS_COMPONENT
4646 && ss_type != GFC_SS_CONSTRUCTOR)
4647 continue;
4648
4649 info = &ss->info->data.array;
4650
4651 for (n = 0; n < ss->dimen; n++)
4652 {
4653 /* If we are specifying the range the delta is already set. */
4654 if (loopspec[n] != ss)
4655 {
4656 dim = ss->dim[n];
4657
4658 /* Calculate the offset relative to the loop variable.
4659 First multiply by the stride. */
4660 tmp = loop->from[n];
4661 if (!integer_onep (info->stride[dim]))
4662 tmp = fold_build2_loc (input_location, MULT_EXPR,
4663 gfc_array_index_type,
4664 tmp, info->stride[dim]);
4665
4666 /* Then subtract this from our starting value. */
4667 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4668 gfc_array_index_type,
4669 info->start[dim], tmp);
4670
4671 info->delta[dim] = gfc_evaluate_now (tmp, &loop->pre);
4672 }
4673 }
4674 }
4675
4676 for (loop = loop->nested; loop; loop = loop->next)
4677 gfc_set_delta (loop);
4678 }
4679
4680
4681 /* Calculate the size of a given array dimension from the bounds. This
4682 is simply (ubound - lbound + 1) if this expression is positive
4683 or 0 if it is negative (pick either one if it is zero). Optionally
4684 (if or_expr is present) OR the (extent <= 0) condition into it. */
4685
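/* For example, lbound = 2 and ubound = 5 give 5 - 2 + 1 = 4 elements, while
   lbound = 5 and ubound = 2 give -2, which is replaced by 0 (and, when
   or_expr is supplied, the "empty extent" condition is ORed into it).  */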
4686 tree
4687 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4688 {
4689 tree res;
4690 tree cond;
4691
4692 /* Calculate (ubound - lbound + 1). */
4693 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4694 ubound, lbound);
4695 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4696 gfc_index_one_node);
4697
4698 /* Check whether the size for this dimension is negative. */
4699 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4700 gfc_index_zero_node);
4701 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4702 gfc_index_zero_node, res);
4703
4704 /* Build OR expression. */
4705 if (or_expr)
4706 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4707 boolean_type_node, *or_expr, cond);
4708
4709 return res;
4710 }
4711
4712
4713 /* For an array descriptor, get the total number of elements. This is just
4714 the product of the extents from dimension from_dim up to (but not including) to_dim. */
4715
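/* E.g. a descriptor for an array with bounds 2:4 and 1:3 gives
   (4 - 2 + 1) * (3 - 1 + 1) = 9 elements.  */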
4716 static tree
4717 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4718 {
4719 tree res;
4720 int dim;
4721
4722 res = gfc_index_one_node;
4723
4724 for (dim = from_dim; dim < to_dim; ++dim)
4725 {
4726 tree lbound;
4727 tree ubound;
4728 tree extent;
4729
4730 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
4731 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
4732
4733 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
4734 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4735 res, extent);
4736 }
4737
4738 return res;
4739 }
4740
4741
4742 /* Full size of an array. */
4743
4744 tree
4745 gfc_conv_descriptor_size (tree desc, int rank)
4746 {
4747 return gfc_conv_descriptor_size_1 (desc, 0, rank);
4748 }
4749
4750
4751 /* Size of a coarray for all dimensions but the last. */
4752
4753 tree
4754 gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
4755 {
4756 return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
4757 }
4758
4759
4760 /* Fills in an array descriptor, and returns the size of the array.
4761 The size will be a simple_val, i.e. a variable or a constant. Also
4762 calculates the offset of the base. The pointer argument overflow,
4763 which should be of integer type, will increase in value if overflow
4764 occurs during the size calculation.
4765 {
4766 stride = 1;
4767 offset = 0;
4768 for (n = 0; n < rank; n++)
4769 {
4770 a.lbound[n] = specified_lower_bound;
4771 offset = offset - a.lbound[n] * stride;
4772 size = 1 - lbound;
4773 a.ubound[n] = specified_upper_bound;
4774 a.stride[n] = stride;
4775 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
4776 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
4777 stride = stride * size;
4778 }
4779 for (n = rank; n < rank+corank; n++)
4780 (Set lcobound/ucobound as above.)
4781 element_size = sizeof (array element);
4782 if (!rank)
4783 return element_size
4784 stride = (size_t) stride;
4785 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
4786 stride = stride * element_size;
4787 return (stride);
4788 } */
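/* As a worked instance of the pseudocode above (illustrative values only):
   for ALLOCATE (a(2:5,3)) the first dimension gives lbound = 2, ubound = 5,
   stride = 1 and offset = -2; the second gives lbound = 1, ubound = 3,
   stride = 4 and offset = -2 - 1*4 = -6; the element count is 4*3 = 12,
   which is finally multiplied by the element size.  */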
4789 /*GCC ARRAYS*/
4790
4791 static tree
4792 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
4793 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
4794 stmtblock_t * descriptor_block, tree * overflow,
4795 tree expr3_elem_size, tree *nelems, gfc_expr *expr3)
4796 {
4797 tree type;
4798 tree tmp;
4799 tree size;
4800 tree offset;
4801 tree stride;
4802 tree element_size;
4803 tree or_expr;
4804 tree thencase;
4805 tree elsecase;
4806 tree cond;
4807 tree var;
4808 stmtblock_t thenblock;
4809 stmtblock_t elseblock;
4810 gfc_expr *ubound;
4811 gfc_se se;
4812 int n;
4813
4814 type = TREE_TYPE (descriptor);
4815
4816 stride = gfc_index_one_node;
4817 offset = gfc_index_zero_node;
4818
4819 /* Set the dtype. */
4820 tmp = gfc_conv_descriptor_dtype (descriptor);
4821 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
4822
4823 or_expr = boolean_false_node;
4824
4825 for (n = 0; n < rank; n++)
4826 {
4827 tree conv_lbound;
4828 tree conv_ubound;
4829
4830 /* We have 3 possibilities for determining the size of the array:
4831 lower == NULL => lbound = 1, ubound = upper[n]
4832 upper[n] == NULL => lbound = 1, ubound = lower[n]
4833 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
4834 ubound = upper[n];
4835
4836 /* Set lower bound. */
4837 gfc_init_se (&se, NULL);
4838 if (lower == NULL)
4839 se.expr = gfc_index_one_node;
4840 else
4841 {
4842 gcc_assert (lower[n]);
4843 if (ubound)
4844 {
4845 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4846 gfc_add_block_to_block (pblock, &se.pre);
4847 }
4848 else
4849 {
4850 se.expr = gfc_index_one_node;
4851 ubound = lower[n];
4852 }
4853 }
4854 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4855 gfc_rank_cst[n], se.expr);
4856 conv_lbound = se.expr;
4857
4858 /* Work out the offset for this component. */
4859 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
4860 se.expr, stride);
4861 offset = fold_build2_loc (input_location, MINUS_EXPR,
4862 gfc_array_index_type, offset, tmp);
4863
4864 /* Set upper bound. */
4865 gfc_init_se (&se, NULL);
4866 gcc_assert (ubound);
4867 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4868 gfc_add_block_to_block (pblock, &se.pre);
4869
4870 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4871 gfc_rank_cst[n], se.expr);
4872 conv_ubound = se.expr;
4873
4874 /* Store the stride. */
4875 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
4876 gfc_rank_cst[n], stride);
4877
4878 /* Calculate size and check whether extent is negative. */
4879 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
4880 size = gfc_evaluate_now (size, pblock);
4881
4882 /* Check whether multiplying the stride by the number of
4883 elements in this dimension would overflow. We must also check
4884 whether the current dimension has zero size in order to avoid
4885 division by zero.
4886 */
4887 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4888 gfc_array_index_type,
4889 fold_convert (gfc_array_index_type,
4890 TYPE_MAX_VALUE (gfc_array_index_type)),
4891 size);
4892 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4893 boolean_type_node, tmp, stride));
4894 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4895 integer_one_node, integer_zero_node);
4896 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4897 boolean_type_node, size,
4898 gfc_index_zero_node));
4899 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4900 integer_zero_node, tmp);
4901 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
4902 *overflow, tmp);
4903 *overflow = gfc_evaluate_now (tmp, pblock);
4904
4905 /* Multiply the stride by the number of elements in this dimension. */
4906 stride = fold_build2_loc (input_location, MULT_EXPR,
4907 gfc_array_index_type, stride, size);
4908 stride = gfc_evaluate_now (stride, pblock);
4909 }
4910
4911 for (n = rank; n < rank + corank; n++)
4912 {
4913 ubound = upper[n];
4914
4915 /* Set lower bound. */
4916 gfc_init_se (&se, NULL);
4917 if (lower == NULL || lower[n] == NULL)
4918 {
4919 gcc_assert (n == rank + corank - 1);
4920 se.expr = gfc_index_one_node;
4921 }
4922 else
4923 {
4924 if (ubound || n == rank + corank - 1)
4925 {
4926 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
4927 gfc_add_block_to_block (pblock, &se.pre);
4928 }
4929 else
4930 {
4931 se.expr = gfc_index_one_node;
4932 ubound = lower[n];
4933 }
4934 }
4935 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
4936 gfc_rank_cst[n], se.expr);
4937
4938 if (n < rank + corank - 1)
4939 {
4940 gfc_init_se (&se, NULL);
4941 gcc_assert (ubound);
4942 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
4943 gfc_add_block_to_block (pblock, &se.pre);
4944 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
4945 gfc_rank_cst[n], se.expr);
4946 }
4947 }
4948
4949 /* The stride is the number of elements in the array, so multiply by the
4950 size of an element to get the total size. Obviously, if there is a
4951 SOURCE expression (expr3) we must use its element size. */
4952 if (expr3_elem_size != NULL_TREE)
4953 tmp = expr3_elem_size;
4954 else if (expr3 != NULL)
4955 {
4956 if (expr3->ts.type == BT_CLASS)
4957 {
4958 gfc_se se_sz;
4959 gfc_expr *sz = gfc_copy_expr (expr3);
4960 gfc_add_vptr_component (sz);
4961 gfc_add_size_component (sz);
4962 gfc_init_se (&se_sz, NULL);
4963 gfc_conv_expr (&se_sz, sz);
4964 gfc_free_expr (sz);
4965 tmp = se_sz.expr;
4966 }
4967 else
4968 {
4969 tmp = gfc_typenode_for_spec (&expr3->ts);
4970 tmp = TYPE_SIZE_UNIT (tmp);
4971 }
4972 }
4973 else
4974 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
4975
4976 /* Convert to size_t. */
4977 element_size = fold_convert (size_type_node, tmp);
4978
4979 if (rank == 0)
4980 return element_size;
4981
4982 *nelems = gfc_evaluate_now (stride, pblock);
4983 stride = fold_convert (size_type_node, stride);
4984
4985 /* First check for overflow. Since an array of type character can
4986 have zero element_size, we must check for that before
4987 dividing. */
4988 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
4989 size_type_node,
4990 TYPE_MAX_VALUE (size_type_node), element_size);
4991 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
4992 boolean_type_node, tmp, stride));
4993 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4994 integer_one_node, integer_zero_node);
4995 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
4996 boolean_type_node, element_size,
4997 build_int_cst (size_type_node, 0)));
4998 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
4999 integer_zero_node, tmp);
5000 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5001 *overflow, tmp);
5002 *overflow = gfc_evaluate_now (tmp, pblock);
5003
5004 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
5005 stride, element_size);
5006
5007 if (poffset != NULL)
5008 {
5009 offset = gfc_evaluate_now (offset, pblock);
5010 *poffset = offset;
5011 }
5012
5013 if (integer_zerop (or_expr))
5014 return size;
5015 if (integer_onep (or_expr))
5016 return build_int_cst (size_type_node, 0);
5017
5018 var = gfc_create_var (TREE_TYPE (size), "size");
5019 gfc_start_block (&thenblock);
5020 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
5021 thencase = gfc_finish_block (&thenblock);
5022
5023 gfc_start_block (&elseblock);
5024 gfc_add_modify (&elseblock, var, size);
5025 elsecase = gfc_finish_block (&elseblock);
5026
5027 tmp = gfc_evaluate_now (or_expr, pblock);
5028 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
5029 gfc_add_expr_to_block (pblock, tmp);
5030
5031 return var;
5032 }
5033
5034
5035 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
5036 the work for an ALLOCATE statement. */
5037 /*GCC ARRAYS*/
5038
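/* In outline, the code generated below first computes the size while
   tracking overflow, then either reports the overflow (or sets STAT) or
   calls the allocator, and finally fills in the descriptor, guarded by
   STAT == 0 when a STAT= variable was given.  */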
5039 bool
5040 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
5041 tree errlen, tree label_finish, tree expr3_elem_size,
5042 tree *nelems, gfc_expr *expr3)
5043 {
5044 tree tmp;
5045 tree pointer;
5046 tree offset = NULL_TREE;
5047 tree token = NULL_TREE;
5048 tree size;
5049 tree msg;
5050 tree error = NULL_TREE;
5051 tree overflow; /* Boolean storing whether size calculation overflows. */
5052 tree var_overflow = NULL_TREE;
5053 tree cond;
5054 tree set_descriptor;
5055 stmtblock_t set_descriptor_block;
5056 stmtblock_t elseblock;
5057 gfc_expr **lower;
5058 gfc_expr **upper;
5059 gfc_ref *ref, *prev_ref = NULL;
5060 bool allocatable, coarray, dimension;
5061
5062 ref = expr->ref;
5063
5064 /* Find the last reference in the chain. */
5065 while (ref && ref->next != NULL)
5066 {
5067 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
5068 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
5069 prev_ref = ref;
5070 ref = ref->next;
5071 }
5072
5073 if (ref == NULL || ref->type != REF_ARRAY)
5074 return false;
5075
5076 if (!prev_ref)
5077 {
5078 allocatable = expr->symtree->n.sym->attr.allocatable;
5079 coarray = expr->symtree->n.sym->attr.codimension;
5080 dimension = expr->symtree->n.sym->attr.dimension;
5081 }
5082 else
5083 {
5084 allocatable = prev_ref->u.c.component->attr.allocatable;
5085 coarray = prev_ref->u.c.component->attr.codimension;
5086 dimension = prev_ref->u.c.component->attr.dimension;
5087 }
5088
5089 if (!dimension)
5090 gcc_assert (coarray);
5091
5092 /* Figure out the size of the array. */
5093 switch (ref->u.ar.type)
5094 {
5095 case AR_ELEMENT:
5096 if (!coarray)
5097 {
5098 lower = NULL;
5099 upper = ref->u.ar.start;
5100 break;
5101 }
5102 /* Fall through. */
5103
5104 case AR_SECTION:
5105 lower = ref->u.ar.start;
5106 upper = ref->u.ar.end;
5107 break;
5108
5109 case AR_FULL:
5110 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
5111
5112 lower = ref->u.ar.as->lower;
5113 upper = ref->u.ar.as->upper;
5114 break;
5115
5116 default:
5117 gcc_unreachable ();
5118 break;
5119 }
5120
5121 overflow = integer_zero_node;
5122
5123 gfc_init_block (&set_descriptor_block);
5124 size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
5125 ref->u.ar.as->corank, &offset, lower, upper,
5126 &se->pre, &set_descriptor_block, &overflow,
5127 expr3_elem_size, nelems, expr3);
5128
5129 if (dimension)
5130 {
5131
5132 var_overflow = gfc_create_var (integer_type_node, "overflow");
5133 gfc_add_modify (&se->pre, var_overflow, overflow);
5134
5135 /* Generate the block of code handling overflow. */
5136 msg = gfc_build_addr_expr (pchar_type_node,
5137 gfc_build_localized_cstring_const
5138 ("Integer overflow when calculating the amount of "
5139 "memory to allocate"));
5140 error = build_call_expr_loc (input_location, gfor_fndecl_runtime_error,
5141 1, msg);
5142 }
5143
5144 if (status != NULL_TREE)
5145 {
5146 tree status_type = TREE_TYPE (status);
5147 stmtblock_t set_status_block;
5148
5149 gfc_start_block (&set_status_block);
5150 gfc_add_modify (&set_status_block, status,
5151 build_int_cst (status_type, LIBERROR_ALLOCATION));
5152 error = gfc_finish_block (&set_status_block);
5153 }
5154
5155 gfc_start_block (&elseblock);
5156
5157 /* Allocate memory to store the data. */
5158 if (POINTER_TYPE_P (TREE_TYPE (se->expr)))
5159 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
5160
5161 pointer = gfc_conv_descriptor_data_get (se->expr);
5162 STRIP_NOPS (pointer);
5163
5164 if (coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
5165 token = gfc_build_addr_expr (NULL_TREE,
5166 gfc_conv_descriptor_token (se->expr));
5167
5168 /* The allocatable variant takes the old pointer as first argument. */
5169 if (allocatable)
5170 gfc_allocate_allocatable (&elseblock, pointer, size, token,
5171 status, errmsg, errlen, label_finish, expr);
5172 else
5173 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
5174
5175 if (dimension)
5176 {
5177 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
5178 boolean_type_node, var_overflow, integer_zero_node));
5179 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
5180 error, gfc_finish_block (&elseblock));
5181 }
5182 else
5183 tmp = gfc_finish_block (&elseblock);
5184
5185 gfc_add_expr_to_block (&se->pre, tmp);
5186
5187 if (expr->ts.type == BT_CLASS)
5188 {
5189 tmp = build_int_cst (unsigned_char_type_node, 0);
5190 /* With class objects, it is best to play safe and null the
5191 memory because we cannot know if dynamic types have allocatable
5192 components or not. */
5193 tmp = build_call_expr_loc (input_location,
5194 builtin_decl_explicit (BUILT_IN_MEMSET),
5195 3, pointer, tmp, size);
5196 gfc_add_expr_to_block (&se->pre, tmp);
5197 }
5198
5199 /* Update the array descriptors. */
5200 if (dimension)
5201 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
5202
5203 set_descriptor = gfc_finish_block (&set_descriptor_block);
5204 if (status != NULL_TREE)
5205 {
5206 cond = fold_build2_loc (input_location, EQ_EXPR,
5207 boolean_type_node, status,
5208 build_int_cst (TREE_TYPE (status), 0));
5209 gfc_add_expr_to_block (&se->pre,
5210 fold_build3_loc (input_location, COND_EXPR, void_type_node,
5211 gfc_likely (cond), set_descriptor,
5212 build_empty_stmt (input_location)));
5213 }
5214 else
5215 gfc_add_expr_to_block (&se->pre, set_descriptor);
5216
5217 if ((expr->ts.type == BT_DERIVED)
5218 && expr->ts.u.derived->attr.alloc_comp)
5219 {
5220 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
5221 ref->u.ar.as->rank);
5222 gfc_add_expr_to_block (&se->pre, tmp);
5223 }
5224
5225 return true;
5226 }
5227
5228
5229 /* Deallocate an array variable. Also used when an allocated variable goes
5230 out of scope. */
5231 /*GCC ARRAYS*/
5232
5233 tree
5234 gfc_array_deallocate (tree descriptor, tree pstat, tree errmsg, tree errlen,
5235 tree label_finish, gfc_expr* expr)
5236 {
5237 tree var;
5238 tree tmp;
5239 stmtblock_t block;
5240 bool coarray = gfc_is_coarray (expr);
5241
5242 gfc_start_block (&block);
5243
5244 /* Get a pointer to the data. */
5245 var = gfc_conv_descriptor_data_get (descriptor);
5246 STRIP_NOPS (var);
5247
5248 /* Parameter is the address of the data component. */
5249 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, pstat, errmsg,
5250 errlen, label_finish, false, expr, coarray);
5251 gfc_add_expr_to_block (&block, tmp);
5252
5253 /* Zero the data pointer; only for coarrays can an error occur, and in
5254 that case the allocation status must not be changed. */
5255 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5256 var, build_int_cst (TREE_TYPE (var), 0));
5257 if (pstat != NULL_TREE && coarray && gfc_option.coarray == GFC_FCOARRAY_LIB)
5258 {
5259 tree cond;
5260 tree stat = build_fold_indirect_ref_loc (input_location, pstat);
5261
5262 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5263 stat, build_int_cst (TREE_TYPE (stat), 0));
5264 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5265 cond, tmp, build_empty_stmt (input_location));
5266 }
5267
5268 gfc_add_expr_to_block (&block, tmp);
5269
5270 return gfc_finish_block (&block);
5271 }
5272
5273
5274 /* Create an array constructor from an initialization expression.
5275 We assume the frontend already did any expansions and conversions. */
5276
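/* For example, a declaration such as
     integer :: a(3) = 5
   reaches the EXPR_CONSTANT case below and yields a constructor with every
   element set to 5, while an explicit array constructor is handled element
   by element in the EXPR_ARRAY case.  */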
5277 tree
5278 gfc_conv_array_initializer (tree type, gfc_expr * expr)
5279 {
5280 gfc_constructor *c;
5281 tree tmp;
5282 gfc_se se;
5283 HOST_WIDE_INT hi;
5284 unsigned HOST_WIDE_INT lo;
5285 tree index, range;
5286 VEC(constructor_elt,gc) *v = NULL;
5287
5288 if (expr->expr_type == EXPR_VARIABLE
5289 && expr->symtree->n.sym->attr.flavor == FL_PARAMETER
5290 && expr->symtree->n.sym->value)
5291 expr = expr->symtree->n.sym->value;
5292
5293 switch (expr->expr_type)
5294 {
5295 case EXPR_CONSTANT:
5296 case EXPR_STRUCTURE:
5297 /* A single scalar or derived type value. Create an array with all
5298 elements equal to that value. */
5299 gfc_init_se (&se, NULL);
5300
5301 if (expr->expr_type == EXPR_CONSTANT)
5302 gfc_conv_constant (&se, expr);
5303 else
5304 gfc_conv_structure (&se, expr, 1);
5305
5306 tmp = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5307 gcc_assert (tmp && INTEGER_CST_P (tmp));
5308 hi = TREE_INT_CST_HIGH (tmp);
5309 lo = TREE_INT_CST_LOW (tmp);
5310 lo++;
5311 if (lo == 0)
5312 hi++;
5313 /* This will probably eat buckets of memory for large arrays. */
5314 while (hi != 0 || lo != 0)
5315 {
5316 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
5317 if (lo == 0)
5318 hi--;
5319 lo--;
5320 }
5321 break;
5322
5323 case EXPR_ARRAY:
5324 /* Create a vector of all the elements. */
5325 for (c = gfc_constructor_first (expr->value.constructor);
5326 c; c = gfc_constructor_next (c))
5327 {
5328 if (c->iterator)
5329 {
5330 /* Problems occur when we get something like
5331 integer :: a(lots) = (/(i, i=1, lots)/) */
5332 gfc_fatal_error ("The number of elements in the array constructor "
5333 "at %L requires an increase of the allowed %d "
5334 "upper limit. See -fmax-array-constructor "
5335 "option", &expr->where,
5336 gfc_option.flag_max_array_constructor);
5337 return NULL_TREE;
5338 }
5339 if (mpz_cmp_si (c->offset, 0) != 0)
5340 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5341 else
5342 index = NULL_TREE;
5343
5344 if (mpz_cmp_si (c->repeat, 1) > 0)
5345 {
5346 tree tmp1, tmp2;
5347 mpz_t maxval;
5348
5349 mpz_init (maxval);
5350 mpz_add (maxval, c->offset, c->repeat);
5351 mpz_sub_ui (maxval, maxval, 1);
5352 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5353 if (mpz_cmp_si (c->offset, 0) != 0)
5354 {
5355 mpz_add_ui (maxval, c->offset, 1);
5356 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5357 }
5358 else
5359 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5360
5361 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
5362 mpz_clear (maxval);
5363 }
5364 else
5365 range = NULL;
5366
5367 gfc_init_se (&se, NULL);
5368 switch (c->expr->expr_type)
5369 {
5370 case EXPR_CONSTANT:
5371 gfc_conv_constant (&se, c->expr);
5372 break;
5373
5374 case EXPR_STRUCTURE:
5375 gfc_conv_structure (&se, c->expr, 1);
5376 break;
5377
5378 default:
5379 /* Catch those occasional beasts that do not simplify
5380 for one reason or another, assuming that if they are
5381 standard-defying the frontend will catch them. */
5382 gfc_conv_expr (&se, c->expr);
5383 break;
5384 }
5385
5386 if (range == NULL_TREE)
5387 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5388 else
5389 {
5390 if (index != NULL_TREE)
5391 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5392 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
5393 }
5394 }
5395 break;
5396
5397 case EXPR_NULL:
5398 return gfc_build_null_descriptor (type);
5399
5400 default:
5401 gcc_unreachable ();
5402 }
5403
5404 /* Create a constructor from the list of elements. */
5405 tmp = build_constructor (type, v);
5406 TREE_CONSTANT (tmp) = 1;
5407 return tmp;
5408 }
5409
5410
5411 /* Generate code to evaluate non-constant coarray cobounds. */
5412
5413 void
5414 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
5415 const gfc_symbol *sym)
5416 {
5417 int dim;
5418 tree ubound;
5419 tree lbound;
5420 gfc_se se;
5421 gfc_array_spec *as;
5422
5423 as = sym->as;
5424
5425 for (dim = as->rank; dim < as->rank + as->corank; dim++)
5426 {
5427 /* Evaluate non-constant array bound expressions. */
5428 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5429 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5430 {
5431 gfc_init_se (&se, NULL);
5432 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5433 gfc_add_block_to_block (pblock, &se.pre);
5434 gfc_add_modify (pblock, lbound, se.expr);
5435 }
5436 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5437 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5438 {
5439 gfc_init_se (&se, NULL);
5440 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5441 gfc_add_block_to_block (pblock, &se.pre);
5442 gfc_add_modify (pblock, ubound, se.expr);
5443 }
5444 }
5445 }
5446
5447
5448 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
5449 returns the size (in elements) of the array. */
5450
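/* As a small worked case (constant bounds used purely for illustration):
   for  real :: a(2:4, 1:3)  the loop below produces offset = -2 after the
   first dimension and offset = -2 - 1*3 = -5 after the second, a second
   stride of 4 + 1 - 2 = 3 and a total size of 3*3 = 9 elements, so that
   element a(i,j) is found at index offset + i + 3*j from the data pointer.  */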
5451 static tree
5452 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
5453 stmtblock_t * pblock)
5454 {
5455 gfc_array_spec *as;
5456 tree size;
5457 tree stride;
5458 tree offset;
5459 tree ubound;
5460 tree lbound;
5461 tree tmp;
5462 gfc_se se;
5463
5464 int dim;
5465
5466 as = sym->as;
5467
5468 size = gfc_index_one_node;
5469 offset = gfc_index_zero_node;
5470 for (dim = 0; dim < as->rank; dim++)
5471 {
5472 /* Evaluate non-constant array bound expressions. */
5473 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5474 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5475 {
5476 gfc_init_se (&se, NULL);
5477 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5478 gfc_add_block_to_block (pblock, &se.pre);
5479 gfc_add_modify (pblock, lbound, se.expr);
5480 }
5481 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5482 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5483 {
5484 gfc_init_se (&se, NULL);
5485 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5486 gfc_add_block_to_block (pblock, &se.pre);
5487 gfc_add_modify (pblock, ubound, se.expr);
5488 }
5489 /* The offset of this dimension. offset = offset - lbound * stride. */
5490 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5491 lbound, size);
5492 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
5493 offset, tmp);
5494
5495 /* The size of this dimension, and the stride of the next. */
5496 if (dim + 1 < as->rank)
5497 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
5498 else
5499 stride = GFC_TYPE_ARRAY_SIZE (type);
5500
5501 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
5502 {
5503 /* Calculate stride = size * (ubound + 1 - lbound). */
5504 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5505 gfc_array_index_type,
5506 gfc_index_one_node, lbound);
5507 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5508 gfc_array_index_type, ubound, tmp);
5509 tmp = fold_build2_loc (input_location, MULT_EXPR,
5510 gfc_array_index_type, size, tmp);
5511 if (stride)
5512 gfc_add_modify (pblock, stride, tmp);
5513 else
5514 stride = gfc_evaluate_now (tmp, pblock);
5515
5516 /* Make sure that negative size arrays are translated
5517 to being zero size. */
5518 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
5519 stride, gfc_index_zero_node);
5520 tmp = fold_build3_loc (input_location, COND_EXPR,
5521 gfc_array_index_type, tmp,
5522 stride, gfc_index_zero_node);
5523 gfc_add_modify (pblock, stride, tmp);
5524 }
5525
5526 size = stride;
5527 }
5528
5529 gfc_trans_array_cobounds (type, pblock, sym);
5530 gfc_trans_vla_type_sizes (sym, pblock);
5531
5532 *poffset = offset;
5533 return size;
5534 }
5535
5536
5537 /* Generate code to initialize/allocate an array variable. */
5538
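/* A typical case is an automatic array such as
     subroutine sub (n)
       integer :: n
       real :: work(n)
   whose storage is obtained here via malloc (or on the stack when
   -fstack-arrays is in effect) and released again when it goes out of
   scope.  */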
5539 void
5540 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
5541 gfc_wrapped_block * block)
5542 {
5543 stmtblock_t init;
5544 tree type;
5545 tree tmp = NULL_TREE;
5546 tree size;
5547 tree offset;
5548 tree space;
5549 tree inittree;
5550 bool onstack;
5551
5552 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
5553
5554 /* Do nothing for USEd variables. */
5555 if (sym->attr.use_assoc)
5556 return;
5557
5558 type = TREE_TYPE (decl);
5559 gcc_assert (GFC_ARRAY_TYPE_P (type));
5560 onstack = TREE_CODE (type) != POINTER_TYPE;
5561
5562 gfc_init_block (&init);
5563
5564 /* Evaluate character string length. */
5565 if (sym->ts.type == BT_CHARACTER
5566 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5567 {
5568 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5569
5570 gfc_trans_vla_type_sizes (sym, &init);
5571
5572 /* Emit a DECL_EXPR for this variable, which will cause the
5573 gimplifier to allocate storage, and all that good stuff. */
5574 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
5575 gfc_add_expr_to_block (&init, tmp);
5576 }
5577
5578 if (onstack)
5579 {
5580 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5581 return;
5582 }
5583
5584 type = TREE_TYPE (type);
5585
5586 gcc_assert (!sym->attr.use_assoc);
5587 gcc_assert (!TREE_STATIC (decl));
5588 gcc_assert (!sym->module);
5589
5590 if (sym->ts.type == BT_CHARACTER
5591 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5592 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5593
5594 size = gfc_trans_array_bounds (type, sym, &offset, &init);
5595
5596 /* Don't actually allocate space for Cray Pointees. */
5597 if (sym->attr.cray_pointee)
5598 {
5599 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5600 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5601
5602 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5603 return;
5604 }
5605
5606 if (gfc_option.flag_stack_arrays)
5607 {
5608 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
5609 space = build_decl (sym->declared_at.lb->location,
5610 VAR_DECL, create_tmp_var_name ("A"),
5611 TREE_TYPE (TREE_TYPE (decl)));
5612 gfc_trans_vla_type_sizes (sym, &init);
5613 }
5614 else
5615 {
5616 /* The size is the number of elements in the array, so multiply by the
5617 size of an element to get the total size. */
5618 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5619 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5620 size, fold_convert (gfc_array_index_type, tmp));
5621
5622 /* Allocate memory to hold the data. */
5623 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5624 gfc_add_modify (&init, decl, tmp);
5625
5626 /* Free the temporary. */
5627 tmp = gfc_call_free (convert (pvoid_type_node, decl));
5628 space = NULL_TREE;
5629 }
5630
5631 /* Set offset of the array. */
5632 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5633 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5634
5635 /* Automatic arrays should not have initializers. */
5636 gcc_assert (!sym->value);
5637
5638 inittree = gfc_finish_block (&init);
5639
5640 if (space)
5641 {
5642 tree addr;
5643 pushdecl (space);
5644
5645 /* Don't create a new scope; emit the DECL_EXPR in exactly the scope
5646 where space is also located. */
5647 gfc_init_block (&init);
5648 tmp = fold_build1_loc (input_location, DECL_EXPR,
5649 TREE_TYPE (space), space);
5650 gfc_add_expr_to_block (&init, tmp);
5651 addr = fold_build1_loc (sym->declared_at.lb->location,
5652 ADDR_EXPR, TREE_TYPE (decl), space);
5653 gfc_add_modify (&init, decl, addr);
5654 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5655 tmp = NULL_TREE;
5656 }
5657 gfc_add_init_cleanup (block, inittree, tmp);
5658 }
5659
5660
5661 /* Generate entry and exit code for g77 calling convention arrays. */
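/* Illustrative sketch (not part of the original source; hypothetical names):
   an explicit-shape or assumed-size dummy such as

       subroutine s (a, n)
         integer :: n
         real :: a(n)        ! or a(*)
       end subroutine s

   is received as a bare data pointer rather than a descriptor, so the
   entry code generated below recomputes the bounds and offset from the
   declared array specification.  */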
5662
5663 void
5664 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
5665 {
5666 tree parm;
5667 tree type;
5668 locus loc;
5669 tree offset;
5670 tree tmp;
5671 tree stmt;
5672 stmtblock_t init;
5673
5674 gfc_save_backend_locus (&loc);
5675 gfc_set_backend_locus (&sym->declared_at);
5676
5677 /* Descriptor type. */
5678 parm = sym->backend_decl;
5679 type = TREE_TYPE (parm);
5680 gcc_assert (GFC_ARRAY_TYPE_P (type));
5681
5682 gfc_start_block (&init);
5683
5684 if (sym->ts.type == BT_CHARACTER
5685 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5686 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5687
5688 /* Evaluate the bounds of the array. */
5689 gfc_trans_array_bounds (type, sym, &offset, &init);
5690
5691 /* Set the offset. */
5692 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5693 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5694
5695 /* Set the pointer itself if we aren't using the parameter directly. */
5696 if (TREE_CODE (parm) != PARM_DECL)
5697 {
5698 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
5699 gfc_add_modify (&init, parm, tmp);
5700 }
5701 stmt = gfc_finish_block (&init);
5702
5703 gfc_restore_backend_locus (&loc);
5704
5705 /* Add the initialization code to the start of the function. */
5706
5707 if (sym->attr.optional || sym->attr.not_always_present)
5708 {
5709 tmp = gfc_conv_expr_present (sym);
5710 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
5711 }
5712
5713 gfc_add_init_cleanup (block, stmt, NULL_TREE);
5714 }
5715
5716
5717 /* Modify the descriptor of an array parameter so that it has the
5718 correct lower bound. Also move the upper bound accordingly.
5719 If the array is not packed, it will be copied into a temporary.
5720 For each dimension we set the new lower and upper bounds. Then we copy the
5721 stride and calculate the offset for this dimension. We also work out
5722 what the stride of a packed array would be, and see if the two match.
5723 If the array needs repacking, we set the stride to the values we just
5724 calculated, recalculate the offset and copy the array data.
5725 Code is also added to copy the data back at the end of the function.
5726 */
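/* Illustrative sketch (not part of the original source; hypothetical names):
   for an assumed-shape dummy

       subroutine s (a)
         real :: a(:)
       end subroutine s

   an actual argument such as the strided section b(1:10:2) is not unit
   stride, so the entry code may call the library pack routine
   (gfor_fndecl_in_pack) to copy it to contiguous storage; the cleanup code
   then frees the temporary and, unless the dummy is INTENT(IN), copies the
   data back first.  */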
5727
5728 void
5729 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
5730 gfc_wrapped_block * block)
5731 {
5732 tree size;
5733 tree type;
5734 tree offset;
5735 locus loc;
5736 stmtblock_t init;
5737 tree stmtInit, stmtCleanup;
5738 tree lbound;
5739 tree ubound;
5740 tree dubound;
5741 tree dlbound;
5742 tree dumdesc;
5743 tree tmp;
5744 tree stride, stride2;
5745 tree stmt_packed;
5746 tree stmt_unpacked;
5747 tree partial;
5748 gfc_se se;
5749 int n;
5750 int checkparm;
5751 int no_repack;
5752 bool optional_arg;
5753
5754 /* Do nothing for pointer and allocatable arrays. */
5755 if (sym->attr.pointer || sym->attr.allocatable)
5756 return;
5757
5758 if (sym->attr.dummy && gfc_is_nodesc_array (sym))
5759 {
5760 gfc_trans_g77_array (sym, block);
5761 return;
5762 }
5763
5764 gfc_save_backend_locus (&loc);
5765 gfc_set_backend_locus (&sym->declared_at);
5766
5767 /* Descriptor type. */
5768 type = TREE_TYPE (tmpdesc);
5769 gcc_assert (GFC_ARRAY_TYPE_P (type));
5770 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5771 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
5772 gfc_start_block (&init);
5773
5774 if (sym->ts.type == BT_CHARACTER
5775 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
5776 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5777
5778 checkparm = (sym->as->type == AS_EXPLICIT
5779 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
5780
5781 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
5782 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
5783
5784 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
5785 {
5786 /* For non-constant shape arrays we only check if the first dimension
5787 is contiguous. Repacking higher dimensions wouldn't gain us
5788 anything as we still don't know the array stride. */
5789 partial = gfc_create_var (boolean_type_node, "partial");
5790 TREE_USED (partial) = 1;
5791 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5792 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
5793 gfc_index_one_node);
5794 gfc_add_modify (&init, partial, tmp);
5795 }
5796 else
5797 partial = NULL_TREE;
5798
5799 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
5800 here; however, I think it does the right thing. */
5801 if (no_repack)
5802 {
5803 /* Set the first stride. */
5804 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
5805 stride = gfc_evaluate_now (stride, &init);
5806
5807 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5808 stride, gfc_index_zero_node);
5809 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
5810 tmp, gfc_index_one_node, stride);
5811 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
5812 gfc_add_modify (&init, stride, tmp);
5813
5814 /* Allow the user to disable array repacking. */
5815 stmt_unpacked = NULL_TREE;
5816 }
5817 else
5818 {
5819 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
5820 /* A library call to repack the array if necessary. */
5821 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
5822 stmt_unpacked = build_call_expr_loc (input_location,
5823 gfor_fndecl_in_pack, 1, tmp);
5824
5825 stride = gfc_index_one_node;
5826
5827 if (gfc_option.warn_array_temp)
5828 gfc_warning ("Creating array temporary at %L", &loc);
5829 }
5830
5831 /* This is for the case where the array data is used directly without
5832 calling the repack function. */
5833 if (no_repack || partial != NULL_TREE)
5834 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
5835 else
5836 stmt_packed = NULL_TREE;
5837
5838 /* Assign the data pointer. */
5839 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5840 {
5841 /* Don't repack unknown shape arrays when the first stride is 1. */
5842 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
5843 partial, stmt_packed, stmt_unpacked);
5844 }
5845 else
5846 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
5847 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
5848
5849 offset = gfc_index_zero_node;
5850 size = gfc_index_one_node;
5851
5852 /* Evaluate the bounds of the array. */
5853 for (n = 0; n < sym->as->rank; n++)
5854 {
5855 if (checkparm || !sym->as->upper[n])
5856 {
5857 /* Get the bounds of the actual parameter. */
5858 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
5859 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
5860 }
5861 else
5862 {
5863 dubound = NULL_TREE;
5864 dlbound = NULL_TREE;
5865 }
5866
5867 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
5868 if (!INTEGER_CST_P (lbound))
5869 {
5870 gfc_init_se (&se, NULL);
5871 gfc_conv_expr_type (&se, sym->as->lower[n],
5872 gfc_array_index_type);
5873 gfc_add_block_to_block (&init, &se.pre);
5874 gfc_add_modify (&init, lbound, se.expr);
5875 }
5876
5877 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
5878 /* Set the desired upper bound. */
5879 if (sym->as->upper[n])
5880 {
5881 /* We know what we want the upper bound to be. */
5882 if (!INTEGER_CST_P (ubound))
5883 {
5884 gfc_init_se (&se, NULL);
5885 gfc_conv_expr_type (&se, sym->as->upper[n],
5886 gfc_array_index_type);
5887 gfc_add_block_to_block (&init, &se.pre);
5888 gfc_add_modify (&init, ubound, se.expr);
5889 }
5890
5891 /* Check the sizes match. */
5892 if (checkparm)
5893 {
5894 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
5895 char * msg;
5896 tree temp;
5897
5898 temp = fold_build2_loc (input_location, MINUS_EXPR,
5899 gfc_array_index_type, ubound, lbound);
5900 temp = fold_build2_loc (input_location, PLUS_EXPR,
5901 gfc_array_index_type,
5902 gfc_index_one_node, temp);
5903 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
5904 gfc_array_index_type, dubound,
5905 dlbound);
5906 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
5907 gfc_array_index_type,
5908 gfc_index_one_node, stride2);
5909 tmp = fold_build2_loc (input_location, NE_EXPR,
5910 gfc_array_index_type, temp, stride2);
5911 asprintf (&msg, "Dimension %d of array '%s' has extent "
5912 "%%ld instead of %%ld", n+1, sym->name);
5913
5914 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
5915 fold_convert (long_integer_type_node, temp),
5916 fold_convert (long_integer_type_node, stride2));
5917
5918 free (msg);
5919 }
5920 }
5921 else
5922 {
5923 /* For assumed shape arrays move the upper bound by the same amount
5924 as the lower bound. */
5925 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5926 gfc_array_index_type, dubound, dlbound);
5927 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5928 gfc_array_index_type, tmp, lbound);
5929 gfc_add_modify (&init, ubound, tmp);
5930 }
5931 /* The offset of this dimension. offset = offset - lbound * stride. */
5932 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5933 lbound, stride);
5934 offset = fold_build2_loc (input_location, MINUS_EXPR,
5935 gfc_array_index_type, offset, tmp);
5936
5937 /* The size of this dimension, and the stride of the next. */
5938 if (n + 1 < sym->as->rank)
5939 {
5940 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
5941
5942 if (no_repack || partial != NULL_TREE)
5943 stmt_unpacked =
5944 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
5945
5946 /* Figure out the stride if not a known constant. */
5947 if (!INTEGER_CST_P (stride))
5948 {
5949 if (no_repack)
5950 stmt_packed = NULL_TREE;
5951 else
5952 {
5953 /* Calculate stride = size * (ubound + 1 - lbound). */
5954 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5955 gfc_array_index_type,
5956 gfc_index_one_node, lbound);
5957 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5958 gfc_array_index_type, ubound, tmp);
5959 size = fold_build2_loc (input_location, MULT_EXPR,
5960 gfc_array_index_type, size, tmp);
5961 stmt_packed = size;
5962 }
5963
5964 /* Assign the stride. */
5965 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
5966 tmp = fold_build3_loc (input_location, COND_EXPR,
5967 gfc_array_index_type, partial,
5968 stmt_unpacked, stmt_packed);
5969 else
5970 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
5971 gfc_add_modify (&init, stride, tmp);
5972 }
5973 }
5974 else
5975 {
5976 stride = GFC_TYPE_ARRAY_SIZE (type);
5977
5978 if (stride && !INTEGER_CST_P (stride))
5979 {
5980 /* Calculate size = stride * (ubound + 1 - lbound). */
5981 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5982 gfc_array_index_type,
5983 gfc_index_one_node, lbound);
5984 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5985 gfc_array_index_type,
5986 ubound, tmp);
5987 tmp = fold_build2_loc (input_location, MULT_EXPR,
5988 gfc_array_index_type,
5989 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
5990 gfc_add_modify (&init, stride, tmp);
5991 }
5992 }
5993 }
5994
5995 gfc_trans_array_cobounds (type, &init, sym);
5996
5997 /* Set the offset. */
5998 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5999 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
6000
6001 gfc_trans_vla_type_sizes (sym, &init);
6002
6003 stmtInit = gfc_finish_block (&init);
6004
6005 /* Only do the entry/initialization code if the arg is present. */
6006 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6007 optional_arg = (sym->attr.optional
6008 || (sym->ns->proc_name->attr.entry_master
6009 && sym->attr.dummy));
6010 if (optional_arg)
6011 {
6012 tmp = gfc_conv_expr_present (sym);
6013 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
6014 build_empty_stmt (input_location));
6015 }
6016
6017 /* Cleanup code. */
6018 if (no_repack)
6019 stmtCleanup = NULL_TREE;
6020 else
6021 {
6022 stmtblock_t cleanup;
6023 gfc_start_block (&cleanup);
6024
6025 if (sym->attr.intent != INTENT_IN)
6026 {
6027 /* Copy the data back. */
6028 tmp = build_call_expr_loc (input_location,
6029 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
6030 gfc_add_expr_to_block (&cleanup, tmp);
6031 }
6032
6033 /* Free the temporary. */
6034 tmp = gfc_call_free (tmpdesc);
6035 gfc_add_expr_to_block (&cleanup, tmp);
6036
6037 stmtCleanup = gfc_finish_block (&cleanup);
6038
6039 /* Only do the cleanup if the array was repacked. */
6040 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
6041 tmp = gfc_conv_descriptor_data_get (tmp);
6042 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6043 tmp, tmpdesc);
6044 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6045 build_empty_stmt (input_location));
6046
6047 if (optional_arg)
6048 {
6049 tmp = gfc_conv_expr_present (sym);
6050 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6051 build_empty_stmt (input_location));
6052 }
6053 }
6054
6055 /* We don't need to free any memory allocated by internal_pack as it will
6056 be freed at the end of the function by pop_context. */
6057 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
6058
6059 gfc_restore_backend_locus (&loc);
6060 }
6061
6062
6063 /* Calculate the overall offset, including subreferences. */
6064 static void
6065 gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
6066 bool subref, gfc_expr *expr)
6067 {
6068 tree tmp;
6069 tree field;
6070 tree stride;
6071 tree index;
6072 gfc_ref *ref;
6073 gfc_se start;
6074 int n;
6075
6076 /* If offset is NULL and this is not a subreferenced array, there is
6077 nothing to do. */
6078 if (offset == NULL_TREE)
6079 {
6080 if (subref)
6081 offset = gfc_index_zero_node;
6082 else
6083 return;
6084 }
6085
6086 tmp = build_array_ref (desc, offset, NULL);
6087
6088 /* Offset the data pointer for pointer assignments from arrays with
6089 subreferences; e.g. my_integer => my_type(:)%integer_component. */
6090 if (subref)
6091 {
6092 /* Go past the array reference. */
6093 for (ref = expr->ref; ref; ref = ref->next)
6094 if (ref->type == REF_ARRAY &&
6095 ref->u.ar.type != AR_ELEMENT)
6096 {
6097 ref = ref->next;
6098 break;
6099 }
6100
6101 /* Calculate the offset for each subsequent subreference. */
6102 for (; ref; ref = ref->next)
6103 {
6104 switch (ref->type)
6105 {
6106 case REF_COMPONENT:
6107 field = ref->u.c.component->backend_decl;
6108 gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
6109 tmp = fold_build3_loc (input_location, COMPONENT_REF,
6110 TREE_TYPE (field),
6111 tmp, field, NULL_TREE);
6112 break;
6113
6114 case REF_SUBSTRING:
6115 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
6116 gfc_init_se (&start, NULL);
6117 gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
6118 gfc_add_block_to_block (block, &start.pre);
6119 tmp = gfc_build_array_ref (tmp, start.expr, NULL);
6120 break;
6121
6122 case REF_ARRAY:
6123 gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
6124 && ref->u.ar.type == AR_ELEMENT);
6125
6126 /* TODO - Add bounds checking. */
6127 stride = gfc_index_one_node;
6128 index = gfc_index_zero_node;
6129 for (n = 0; n < ref->u.ar.dimen; n++)
6130 {
6131 tree itmp;
6132 tree jtmp;
6133
6134 /* Update the index. */
6135 gfc_init_se (&start, NULL);
6136 gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
6137 itmp = gfc_evaluate_now (start.expr, block);
6138 gfc_init_se (&start, NULL);
6139 gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
6140 jtmp = gfc_evaluate_now (start.expr, block);
6141 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6142 gfc_array_index_type, itmp, jtmp);
6143 itmp = fold_build2_loc (input_location, MULT_EXPR,
6144 gfc_array_index_type, itmp, stride);
6145 index = fold_build2_loc (input_location, PLUS_EXPR,
6146 gfc_array_index_type, itmp, index);
6147 index = gfc_evaluate_now (index, block);
6148
6149 /* Update the stride. */
6150 gfc_init_se (&start, NULL);
6151 gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
6152 itmp = fold_build2_loc (input_location, MINUS_EXPR,
6153 gfc_array_index_type, start.expr,
6154 jtmp);
6155 itmp = fold_build2_loc (input_location, PLUS_EXPR,
6156 gfc_array_index_type,
6157 gfc_index_one_node, itmp);
6158 stride = fold_build2_loc (input_location, MULT_EXPR,
6159 gfc_array_index_type, stride, itmp);
6160 stride = gfc_evaluate_now (stride, block);
6161 }
6162
6163 /* Apply the index to obtain the array element. */
6164 tmp = gfc_build_array_ref (tmp, index, NULL);
6165 break;
6166
6167 default:
6168 gcc_unreachable ();
6169 break;
6170 }
6171 }
6172 }
6173
6174 /* Set the target data pointer. */
6175 offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
6176 gfc_conv_descriptor_data_set (block, parm, offset);
6177 }
6178
6179
6180 /* gfc_conv_expr_descriptor needs the string length of an expression
6181 so that the size of the temporary can be obtained. This is done
6182 by adding up the string lengths of all the elements in the
6183 expression. Functions with non-constant length expressions have their
6184 string lengths mapped onto the actual arguments using the
6185 interface mapping machinery in trans-expr.c. */
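/* Illustrative sketch (not part of the original source; hypothetical names):
   for a character array expression passed as an actual argument, e.g.

       character(len=n) :: a(10), b(10)
       call sub (a // b)

   the concatenation case below sets the expression's length to the sum of
   the operands' lengths, while a call to a character function whose result
   length depends on its dummy arguments goes through the interface mapping
   path.  */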
6186 static void
6187 get_array_charlen (gfc_expr *expr, gfc_se *se)
6188 {
6189 gfc_interface_mapping mapping;
6190 gfc_formal_arglist *formal;
6191 gfc_actual_arglist *arg;
6192 gfc_se tse;
6193
6194 if (expr->ts.u.cl->length
6195 && gfc_is_constant_expr (expr->ts.u.cl->length))
6196 {
6197 if (!expr->ts.u.cl->backend_decl)
6198 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6199 return;
6200 }
6201
6202 switch (expr->expr_type)
6203 {
6204 case EXPR_OP:
6205 get_array_charlen (expr->value.op.op1, se);
6206
6207 /* For parentheses the expression ts.u.cl is identical. */
6208 if (expr->value.op.op == INTRINSIC_PARENTHESES)
6209 return;
6210
6211 expr->ts.u.cl->backend_decl =
6212 gfc_create_var (gfc_charlen_type_node, "sln");
6213
6214 if (expr->value.op.op2)
6215 {
6216 get_array_charlen (expr->value.op.op2, se);
6217
6218 gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
6219
6220 /* Add the string lengths and assign them to the expression
6221 string length backend declaration. */
6222 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6223 fold_build2_loc (input_location, PLUS_EXPR,
6224 gfc_charlen_type_node,
6225 expr->value.op.op1->ts.u.cl->backend_decl,
6226 expr->value.op.op2->ts.u.cl->backend_decl));
6227 }
6228 else
6229 gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
6230 expr->value.op.op1->ts.u.cl->backend_decl);
6231 break;
6232
6233 case EXPR_FUNCTION:
6234 if (expr->value.function.esym == NULL
6235 || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
6236 {
6237 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6238 break;
6239 }
6240
6241 /* Map expressions involving the dummy arguments onto the actual
6242 argument expressions. */
6243 gfc_init_interface_mapping (&mapping);
6244 formal = expr->symtree->n.sym->formal;
6245 arg = expr->value.function.actual;
6246
6247 /* Set se = NULL in the calls to the interface mapping, to suppress any
6248 backend stuff. */
6249 for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
6250 {
6251 if (!arg->expr)
6252 continue;
6253 if (formal->sym)
6254 gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
6255 }
6256
6257 gfc_init_se (&tse, NULL);
6258
6259 /* Build the expression for the character length and convert it. */
6260 gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
6261
6262 gfc_add_block_to_block (&se->pre, &tse.pre);
6263 gfc_add_block_to_block (&se->post, &tse.post);
6264 tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
6265 tse.expr = fold_build2_loc (input_location, MAX_EXPR,
6266 gfc_charlen_type_node, tse.expr,
6267 build_int_cst (gfc_charlen_type_node, 0));
6268 expr->ts.u.cl->backend_decl = tse.expr;
6269 gfc_free_interface_mapping (&mapping);
6270 break;
6271
6272 default:
6273 gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
6274 break;
6275 }
6276 }
6277
6278
6279 /* Helper function to check whether the scalarizer dimensions are transposed. */
6280 static bool
6281 transposed_dims (gfc_ss *ss)
6282 {
6283 int n;
6284
6285 for (n = 0; n < ss->dimen; n++)
6286 if (ss->dim[n] != n)
6287 return true;
6288 return false;
6289 }
6290
6291 /* Convert an array for passing as an actual argument. Expressions and
6292 vector subscripts are evaluated and stored in a temporary, which is then
6293 passed. For whole arrays the descriptor is passed. For array sections
6294 a modified copy of the descriptor is passed, but using the original data.
6295
6296 This function is also used for array pointer assignments, and there
6297 are three cases:
6298
6299 - se->want_pointer && !se->direct_byref
6300 EXPR is an actual argument. On exit, se->expr contains a
6301 pointer to the array descriptor.
6302
6303 - !se->want_pointer && !se->direct_byref
6304 EXPR is an actual argument to an intrinsic function or the
6305 left-hand side of a pointer assignment. On exit, se->expr
6306 contains the descriptor for EXPR.
6307
6308 - !se->want_pointer && se->direct_byref
6309 EXPR is the right-hand side of a pointer assignment and
6310 se->expr is the descriptor for the previously-evaluated
6311 left-hand side. The function creates an assignment from
6312 EXPR to se->expr.
6313
6314
6315 The se->force_tmp flag disables the non-copying descriptor optimization
6316 that is used for transpose. It may be used in cases where there is an
6317 alias between the transpose argument and another argument in the same
6318 function call. */
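/* Illustrative sketch (not part of the original source; hypothetical names)
   for the first and third cases above:

       call sub (a(1:10:2))    ! actual argument; se->expr ends up pointing
                               ! to a descriptor for the section
       p => a(2:)              ! pointer assignment; se->expr already holds
                               ! the descriptor of p and is filled in      */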
6319
6320 void
6321 gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
6322 {
6323 gfc_ss_type ss_type;
6324 gfc_ss_info *ss_info;
6325 gfc_loopinfo loop;
6326 gfc_array_info *info;
6327 int need_tmp;
6328 int n;
6329 tree tmp;
6330 tree desc;
6331 stmtblock_t block;
6332 tree start;
6333 tree offset;
6334 int full;
6335 bool subref_array_target = false;
6336 gfc_expr *arg, *ss_expr;
6337
6338 gcc_assert (ss != NULL);
6339 gcc_assert (ss != gfc_ss_terminator);
6340
6341 ss_info = ss->info;
6342 ss_type = ss_info->type;
6343 ss_expr = ss_info->expr;
6344
6345 /* Special case things we know we can pass easily. */
6346 switch (expr->expr_type)
6347 {
6348 case EXPR_VARIABLE:
6349 /* If we have a linear array section, we can pass it directly.
6350 Otherwise we need to copy it into a temporary. */
6351
6352 gcc_assert (ss_type == GFC_SS_SECTION);
6353 gcc_assert (ss_expr == expr);
6354 info = &ss_info->data.array;
6355
6356 /* Get the descriptor for the array. */
6357 gfc_conv_ss_descriptor (&se->pre, ss, 0);
6358 desc = info->descriptor;
6359
6360 subref_array_target = se->direct_byref && is_subref_array (expr);
6361 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
6362 && !subref_array_target;
6363
6364 if (se->force_tmp)
6365 need_tmp = 1;
6366
6367 if (need_tmp)
6368 full = 0;
6369 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6370 {
6371 /* Create a new descriptor if the array doesn't have one. */
6372 full = 0;
6373 }
6374 else if (info->ref->u.ar.type == AR_FULL)
6375 full = 1;
6376 else if (se->direct_byref)
6377 full = 0;
6378 else
6379 full = gfc_full_array_ref_p (info->ref, NULL);
6380
6381 if (full && !transposed_dims (ss))
6382 {
6383 if (se->direct_byref && !se->byref_noassign)
6384 {
6385 /* Copy the descriptor for pointer assignments. */
6386 gfc_add_modify (&se->pre, se->expr, desc);
6387
6388 /* Add any offsets from subreferences. */
6389 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
6390 subref_array_target, expr);
6391 }
6392 else if (se->want_pointer)
6393 {
6394 /* We pass full arrays directly. This means that pointers and
6395 allocatable arrays should also work. */
6396 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6397 }
6398 else
6399 {
6400 se->expr = desc;
6401 }
6402
6403 if (expr->ts.type == BT_CHARACTER)
6404 se->string_length = gfc_get_expr_charlen (expr);
6405
6406 return;
6407 }
6408 break;
6409
6410 case EXPR_FUNCTION:
6411
6412 /* We don't need to copy data in some cases. */
6413 arg = gfc_get_noncopying_intrinsic_argument (expr);
6414 if (arg)
6415 {
6416 /* This is a call to transpose... */
6417 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
6418 /* ... which has already been handled by the scalarizer, so
6419 that we just need to get its argument's descriptor. */
6420 gfc_conv_expr_descriptor (se, expr->value.function.actual->expr, ss);
6421 return;
6422 }
6423
6424 /* A transformational function return value will be a temporary
6425 array descriptor. We still need to go through the scalarizer
6426 to create the descriptor. Elemental functions are handled as
6427 arbitrary expressions, i.e. copy to a temporary. */
6428
6429 if (se->direct_byref)
6430 {
6431 gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);
6432
6433 /* For pointer assignments pass the descriptor directly. */
6434 if (se->ss == NULL)
6435 se->ss = ss;
6436 else
6437 gcc_assert (se->ss == ss);
6438 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6439 gfc_conv_expr (se, expr);
6440 return;
6441 }
6442
6443 if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
6444 {
6445 if (ss_expr != expr)
6446 /* Elemental function. */
6447 gcc_assert ((expr->value.function.esym != NULL
6448 && expr->value.function.esym->attr.elemental)
6449 || (expr->value.function.isym != NULL
6450 && expr->value.function.isym->elemental)
6451 || gfc_inline_intrinsic_function_p (expr));
6452 else
6453 gcc_assert (ss_type == GFC_SS_INTRINSIC);
6454
6455 need_tmp = 1;
6456 if (expr->ts.type == BT_CHARACTER
6457 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
6458 get_array_charlen (expr, se);
6459
6460 info = NULL;
6461 }
6462 else
6463 {
6464 /* Transformational function. */
6465 info = &ss_info->data.array;
6466 need_tmp = 0;
6467 }
6468 break;
6469
6470 case EXPR_ARRAY:
6471 /* Constant array constructors don't need a temporary. */
6472 if (ss_type == GFC_SS_CONSTRUCTOR
6473 && expr->ts.type != BT_CHARACTER
6474 && gfc_constant_array_constructor_p (expr->value.constructor))
6475 {
6476 need_tmp = 0;
6477 info = &ss_info->data.array;
6478 }
6479 else
6480 {
6481 need_tmp = 1;
6482 info = NULL;
6483 }
6484 break;
6485
6486 default:
6487 /* Something complicated. Copy it into a temporary. */
6488 need_tmp = 1;
6489 info = NULL;
6490 break;
6491 }
6492
6493 /* If we are creating a temporary, we don't need to bother about aliases
6494 anymore. */
6495 if (need_tmp)
6496 se->force_tmp = 0;
6497
6498 gfc_init_loopinfo (&loop);
6499
6500 /* Associate the SS with the loop. */
6501 gfc_add_ss_to_loop (&loop, ss);
6502
6503 /* Tell the scalarizer not to bother creating loop variables, etc. */
6504 if (!need_tmp)
6505 loop.array_parameter = 1;
6506 else
6507 /* The right-hand side of a pointer assignment mustn't use a temporary. */
6508 gcc_assert (!se->direct_byref);
6509
6510 /* Setup the scalarizing loops and bounds. */
6511 gfc_conv_ss_startstride (&loop);
6512
6513 if (need_tmp)
6514 {
6515 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
6516 get_array_charlen (expr, se);
6517
6518 /* Tell the scalarizer to make a temporary. */
6519 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
6520 ((expr->ts.type == BT_CHARACTER)
6521 ? expr->ts.u.cl->backend_decl
6522 : NULL),
6523 loop.dimen);
6524
6525 se->string_length = loop.temp_ss->info->string_length;
6526 gcc_assert (loop.temp_ss->dimen == loop.dimen);
6527 gfc_add_ss_to_loop (&loop, loop.temp_ss);
6528 }
6529
6530 gfc_conv_loop_setup (&loop, & expr->where);
6531
6532 if (need_tmp)
6533 {
6534 /* Copy into a temporary and pass that. We don't need to copy the data
6535 back because expressions and vector subscripts must be INTENT_IN. */
6536 /* TODO: Optimize passing function return values. */
6537 gfc_se lse;
6538 gfc_se rse;
6539
6540 /* Start the copying loops. */
6541 gfc_mark_ss_chain_used (loop.temp_ss, 1);
6542 gfc_mark_ss_chain_used (ss, 1);
6543 gfc_start_scalarized_body (&loop, &block);
6544
6545 /* Copy each data element. */
6546 gfc_init_se (&lse, NULL);
6547 gfc_copy_loopinfo_to_se (&lse, &loop);
6548 gfc_init_se (&rse, NULL);
6549 gfc_copy_loopinfo_to_se (&rse, &loop);
6550
6551 lse.ss = loop.temp_ss;
6552 rse.ss = ss;
6553
6554 gfc_conv_scalarized_array_ref (&lse, NULL);
6555 if (expr->ts.type == BT_CHARACTER)
6556 {
6557 gfc_conv_expr (&rse, expr);
6558 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
6559 rse.expr = build_fold_indirect_ref_loc (input_location,
6560 rse.expr);
6561 }
6562 else
6563 gfc_conv_expr_val (&rse, expr);
6564
6565 gfc_add_block_to_block (&block, &rse.pre);
6566 gfc_add_block_to_block (&block, &lse.pre);
6567
6568 lse.string_length = rse.string_length;
6569 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
6570 expr->expr_type == EXPR_VARIABLE
6571 || expr->expr_type == EXPR_ARRAY, true);
6572 gfc_add_expr_to_block (&block, tmp);
6573
6574 /* Finish the copying loops. */
6575 gfc_trans_scalarizing_loops (&loop, &block);
6576
6577 desc = loop.temp_ss->info->data.array.descriptor;
6578 }
6579 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
6580 {
6581 desc = info->descriptor;
6582 se->string_length = ss_info->string_length;
6583 }
6584 else
6585 {
6586 /* We pass sections without copying to a temporary. Make a new
6587 descriptor and point it at the section we want. The loop variable
6588 limits will be the limits of the section.
6589 A function may decide to repack the array to speed up access, but
6590 we're not bothered about that here. */
6591 int dim, ndim, codim;
6592 tree parm;
6593 tree parmtype;
6594 tree stride;
6595 tree from;
6596 tree to;
6597 tree base;
6598
6599 ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
6600
6601 if (se->want_coarray)
6602 {
6603 gfc_array_ref *ar = &info->ref->u.ar;
6604
6605 codim = gfc_get_corank (expr);
6606 for (n = 0; n < codim - 1; n++)
6607 {
6608 /* Make sure we are not lost somehow. */
6609 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
6610
6611 /* Make sure the call to gfc_conv_section_startstride won't
6612 generate unnecessary code to calculate stride. */
6613 gcc_assert (ar->stride[n + ndim] == NULL);
6614
6615 gfc_conv_section_startstride (&loop, ss, n + ndim);
6616 loop.from[n + loop.dimen] = info->start[n + ndim];
6617 loop.to[n + loop.dimen] = info->end[n + ndim];
6618 }
6619
6620 gcc_assert (n == codim - 1);
6621 evaluate_bound (&loop.pre, info->start, ar->start,
6622 info->descriptor, n + ndim, true);
6623 loop.from[n + loop.dimen] = info->start[n + ndim];
6624 }
6625 else
6626 codim = 0;
6627
6628 /* Set the string_length for a character array. */
6629 if (expr->ts.type == BT_CHARACTER)
6630 se->string_length = gfc_get_expr_charlen (expr);
6631
6632 desc = info->descriptor;
6633 if (se->direct_byref && !se->byref_noassign)
6634 {
6635 /* For pointer assignments we fill in the destination. */
6636 parm = se->expr;
6637 parmtype = TREE_TYPE (parm);
6638 }
6639 else
6640 {
6641 /* Otherwise make a new one. */
6642 parmtype = gfc_get_element_type (TREE_TYPE (desc));
6643 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
6644 loop.from, loop.to, 0,
6645 GFC_ARRAY_UNKNOWN, false);
6646 parm = gfc_create_var (parmtype, "parm");
6647 }
6648
6649 offset = gfc_index_zero_node;
6650
6651 /* The following can be somewhat confusing. We have two
6652 descriptors, a new one and the original array.
6653 {parm, parmtype, dim} refer to the new one.
6654 {desc, type, n, loop} refer to the original, which may be
6655 a descriptorless array.
6656 The bounds of the scalarization are the bounds of the section.
6657 We don't have to worry about numeric overflows when calculating
6658 the offsets because all elements are within the array data. */
6659
6660 /* Set the dtype. */
6661 tmp = gfc_conv_descriptor_dtype (parm);
6662 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
6663
6664 /* For pointer assignments, only set the offset to zero here when the
6665 reference is not to the full array. */
6666 if (se->direct_byref
6667 && info->ref && info->ref->u.ar.type != AR_FULL)
6668 base = gfc_index_zero_node;
6669 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6670 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
6671 else
6672 base = NULL_TREE;
6673
6674 for (n = 0; n < ndim; n++)
6675 {
6676 stride = gfc_conv_array_stride (desc, n);
6677
6678 /* Work out the offset. */
6679 if (info->ref
6680 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6681 {
6682 gcc_assert (info->subscript[n]
6683 && info->subscript[n]->info->type == GFC_SS_SCALAR);
6684 start = info->subscript[n]->info->data.scalar.value;
6685 }
6686 else
6687 {
6688 /* Evaluate and remember the start of the section. */
6689 start = info->start[n];
6690 stride = gfc_evaluate_now (stride, &loop.pre);
6691 }
6692
6693 tmp = gfc_conv_array_lbound (desc, n);
6694 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
6695 start, tmp);
6696 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
6697 tmp, stride);
6698 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
6699 offset, tmp);
6700
6701 if (info->ref
6702 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
6703 {
6704 /* For elemental dimensions, we only need the offset. */
6705 continue;
6706 }
6707
6708 /* Vector subscripts need copying and are handled elsewhere. */
6709 if (info->ref)
6710 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
6711
6712 /* Look for the corresponding scalarizer dimension: dim. */
6713 for (dim = 0; dim < ndim; dim++)
6714 if (ss->dim[dim] == n)
6715 break;
6716
6717 /* Loop exited early: the DIM being looked for has been found. */
6718 gcc_assert (dim < ndim);
6719
6720 /* Set the new lower bound. */
6721 from = loop.from[dim];
6722 to = loop.to[dim];
6723
6724 /* If we have an array section or are assigning, make sure that
6725 the lower bound is 1. References to the full
6726 array should otherwise keep the original bounds. */
6727 if ((!info->ref
6728 || info->ref->u.ar.type != AR_FULL)
6729 && !integer_onep (from))
6730 {
6731 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6732 gfc_array_index_type, gfc_index_one_node,
6733 from);
6734 to = fold_build2_loc (input_location, PLUS_EXPR,
6735 gfc_array_index_type, to, tmp);
6736 from = gfc_index_one_node;
6737 }
6738 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6739 gfc_rank_cst[dim], from);
6740
6741 /* Set the new upper bound. */
6742 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6743 gfc_rank_cst[dim], to);
6744
6745 /* Multiply the stride by the section stride to get the
6746 total stride. */
6747 stride = fold_build2_loc (input_location, MULT_EXPR,
6748 gfc_array_index_type,
6749 stride, info->stride[n]);
6750
6751 if (se->direct_byref
6752 && info->ref
6753 && info->ref->u.ar.type != AR_FULL)
6754 {
6755 base = fold_build2_loc (input_location, MINUS_EXPR,
6756 TREE_TYPE (base), base, stride);
6757 }
6758 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6759 {
6760 tmp = gfc_conv_array_lbound (desc, n);
6761 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6762 TREE_TYPE (base), tmp, loop.from[dim]);
6763 tmp = fold_build2_loc (input_location, MULT_EXPR,
6764 TREE_TYPE (base), tmp,
6765 gfc_conv_array_stride (desc, n));
6766 base = fold_build2_loc (input_location, PLUS_EXPR,
6767 TREE_TYPE (base), tmp, base);
6768 }
6769
6770 /* Store the new stride. */
6771 gfc_conv_descriptor_stride_set (&loop.pre, parm,
6772 gfc_rank_cst[dim], stride);
6773 }
6774
6775 for (n = loop.dimen; n < loop.dimen + codim; n++)
6776 {
6777 from = loop.from[n];
6778 to = loop.to[n];
6779 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
6780 gfc_rank_cst[n], from);
6781 if (n < loop.dimen + codim - 1)
6782 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
6783 gfc_rank_cst[n], to);
6784 }
6785
6786 if (se->data_not_needed)
6787 gfc_conv_descriptor_data_set (&loop.pre, parm,
6788 gfc_index_zero_node);
6789 else
6790 /* Point the data pointer at the 1st element in the section. */
6791 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
6792 subref_array_target, expr);
6793
6794 if ((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6795 && !se->data_not_needed)
6796 {
6797 /* Set the offset. */
6798 gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
6799 }
6800 else
6801 {
6802 /* Only the callee knows what the correct offset is, so just set
6803 it to zero here. */
6804 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
6805 }
6806 desc = parm;
6807 }
6808
6809 if (!se->direct_byref || se->byref_noassign)
6810 {
6811 /* Get a pointer to the new descriptor. */
6812 if (se->want_pointer)
6813 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6814 else
6815 se->expr = desc;
6816 }
6817
6818 gfc_add_block_to_block (&se->pre, &loop.pre);
6819 gfc_add_block_to_block (&se->post, &loop.post);
6820
6821 /* Cleanup the scalarizer. */
6822 gfc_cleanup_loop (&loop);
6823 }
6824
6825 /* Helper function for gfc_conv_array_parameter if array size needs to be
6826 computed. */
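/* For a rank-1 descriptor the code below computes, in effect,

       size = max (ubound - lbound + 1, 0) * sizeof (element)

   while higher-rank descriptors go through the library size0 routine and
   descriptorless arrays reuse the size stored in the array type.  */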
6827
6828 static void
6829 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
6830 {
6831 tree elem;
6832 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6833 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
6834 else if (expr->rank > 1)
6835 *size = build_call_expr_loc (input_location,
6836 gfor_fndecl_size0, 1,
6837 gfc_build_addr_expr (NULL, desc));
6838 else
6839 {
6840 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
6841 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
6842
6843 *size = fold_build2_loc (input_location, MINUS_EXPR,
6844 gfc_array_index_type, ubound, lbound);
6845 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
6846 *size, gfc_index_one_node);
6847 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
6848 *size, gfc_index_zero_node);
6849 }
6850 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
6851 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6852 *size, fold_convert (gfc_array_index_type, elem));
6853 }
6854
6855 /* Convert an array for passing as an actual parameter. */
6856 /* TODO: Optimize passing g77 arrays. */
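/* Note (added for clarity): G77 here means the callee expects a bare data
   pointer instead of a descriptor, so contiguous actuals can be passed
   directly while non-contiguous ones are packed through
   gfor_fndecl_in_pack further down.  */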
6857
6858 void
6859 gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, gfc_ss * ss, bool g77,
6860 const gfc_symbol *fsym, const char *proc_name,
6861 tree *size)
6862 {
6863 tree ptr;
6864 tree desc;
6865 tree tmp = NULL_TREE;
6866 tree stmt;
6867 tree parent = DECL_CONTEXT (current_function_decl);
6868 bool full_array_var;
6869 bool this_array_result;
6870 bool contiguous;
6871 bool no_pack;
6872 bool array_constructor;
6873 bool good_allocatable;
6874 bool ultimate_ptr_comp;
6875 bool ultimate_alloc_comp;
6876 gfc_symbol *sym;
6877 stmtblock_t block;
6878 gfc_ref *ref;
6879
6880 ultimate_ptr_comp = false;
6881 ultimate_alloc_comp = false;
6882
6883 for (ref = expr->ref; ref; ref = ref->next)
6884 {
6885 if (ref->next == NULL)
6886 break;
6887
6888 if (ref->type == REF_COMPONENT)
6889 {
6890 ultimate_ptr_comp = ref->u.c.component->attr.pointer;
6891 ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
6892 }
6893 }
6894
6895 full_array_var = false;
6896 contiguous = false;
6897
6898 if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
6899 full_array_var = gfc_full_array_ref_p (ref, &contiguous);
6900
6901 sym = full_array_var ? expr->symtree->n.sym : NULL;
6902
6903 /* The symbol should have an array specification. */
6904 gcc_assert (!sym || sym->as || ref->u.ar.as);
6905
6906 if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
6907 {
6908 get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
6909 expr->ts.u.cl->backend_decl = tmp;
6910 se->string_length = tmp;
6911 }
6912
6913 /* Is this the result of the enclosing procedure? */
6914 this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
6915 if (this_array_result
6916 && (sym->backend_decl != current_function_decl)
6917 && (sym->backend_decl != parent))
6918 this_array_result = false;
6919
6920 /* Pass the address of the array if it is not a pointer or assumed-shape array. */
6921 if (full_array_var && g77 && !this_array_result)
6922 {
6923 tmp = gfc_get_symbol_decl (sym);
6924
6925 if (sym->ts.type == BT_CHARACTER)
6926 se->string_length = sym->ts.u.cl->backend_decl;
6927
6928 if (sym->ts.type == BT_DERIVED || sym->ts.type == BT_CLASS)
6929 {
6930 gfc_conv_expr_descriptor (se, expr, ss);
6931 se->expr = gfc_conv_array_data (se->expr);
6932 return;
6933 }
6934
6935 if (!sym->attr.pointer
6936 && sym->as
6937 && sym->as->type != AS_ASSUMED_SHAPE
6938 && sym->as->type != AS_ASSUMED_RANK
6939 && !sym->attr.allocatable)
6940 {
6941 /* Some variables are declared directly, others are declared as
6942 pointers and allocated on the heap. */
6943 if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
6944 se->expr = tmp;
6945 else
6946 se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
6947 if (size)
6948 array_parameter_size (tmp, expr, size);
6949 return;
6950 }
6951
6952 if (sym->attr.allocatable)
6953 {
6954 if (sym->attr.dummy || sym->attr.result)
6955 {
6956 gfc_conv_expr_descriptor (se, expr, ss);
6957 tmp = se->expr;
6958 }
6959 if (size)
6960 array_parameter_size (tmp, expr, size);
6961 se->expr = gfc_conv_array_data (tmp);
6962 return;
6963 }
6964 }
6965
6966 /* A convenient reduction in scope. */
6967 contiguous = g77 && !this_array_result && contiguous;
6968
6969 /* There is no need to pack and unpack the array if it is contiguous
6970 and not a deferred- or assumed-shape array, or if it is simply
6971 contiguous. */
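/* For example (hypothetical code): passing a whole explicit-shape array or
   a contiguous allocatable needs no pack/unpack, whereas an assumed-shape
   dummy whose contiguity is unknown at compile time may have to be packed
   below.  */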
6972 no_pack = ((sym && sym->as
6973 && !sym->attr.pointer
6974 && sym->as->type != AS_DEFERRED
6975 && sym->as->type != AS_ASSUMED_RANK
6976 && sym->as->type != AS_ASSUMED_SHAPE)
6977 ||
6978 (ref && ref->u.ar.as
6979 && ref->u.ar.as->type != AS_DEFERRED
6980 && ref->u.ar.as->type != AS_ASSUMED_RANK
6981 && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
6982 ||
6983 gfc_is_simply_contiguous (expr, false));
6984
6985 no_pack = contiguous && no_pack;
6986
6987 /* Array constructors are always contiguous and do not need packing. */
6988 array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
6989
6990 /* Same is true of contiguous sections from allocatable variables. */
6991 good_allocatable = contiguous
6992 && expr->symtree
6993 && expr->symtree->n.sym->attr.allocatable;
6994
6995 /* Or ultimate allocatable components. */
6996 ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
6997
6998 if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
6999 {
7000 gfc_conv_expr_descriptor (se, expr, ss);
7001 if (expr->ts.type == BT_CHARACTER)
7002 se->string_length = expr->ts.u.cl->backend_decl;
7003 if (size)
7004 array_parameter_size (se->expr, expr, size);
7005 se->expr = gfc_conv_array_data (se->expr);
7006 return;
7007 }
7008
7009 if (this_array_result)
7010 {
7011 /* Result of the enclosing function. */
7012 gfc_conv_expr_descriptor (se, expr, ss);
7013 if (size)
7014 array_parameter_size (se->expr, expr, size);
7015 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
7016
7017 if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
7018 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
7019 se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
7020 se->expr));
7021
7022 return;
7023 }
7024 else
7025 {
7026 /* Every other type of array. */
7027 se->want_pointer = 1;
7028 gfc_conv_expr_descriptor (se, expr, ss);
7029 if (size)
7030 array_parameter_size (build_fold_indirect_ref_loc (input_location,
7031 se->expr),
7032 expr, size);
7033 }
7034
7035 /* Deallocate the allocatable components of structures that are
7036 not variables. */
7037 if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
7038 && expr->ts.u.derived->attr.alloc_comp
7039 && expr->expr_type != EXPR_VARIABLE)
7040 {
7041 tmp = build_fold_indirect_ref_loc (input_location, se->expr);
7042 tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
7043
7044 /* The components shall be deallocated before their containing entity. */
7045 gfc_prepend_expr_to_block (&se->post, tmp);
7046 }
7047
7048 if (g77 || (fsym && fsym->attr.contiguous
7049 && !gfc_is_simply_contiguous (expr, false)))
7050 {
7051 tree origptr = NULL_TREE;
7052
7053 desc = se->expr;
7054
7055 /* For contiguous arrays, save the original value of the descriptor. */
7056 if (!g77)
7057 {
7058 origptr = gfc_create_var (pvoid_type_node, "origptr");
7059 tmp = build_fold_indirect_ref_loc (input_location, desc);
7060 tmp = gfc_conv_array_data (tmp);
7061 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7062 TREE_TYPE (origptr), origptr,
7063 fold_convert (TREE_TYPE (origptr), tmp));
7064 gfc_add_expr_to_block (&se->pre, tmp);
7065 }
7066
7067 /* Repack the array. */
7068 if (gfc_option.warn_array_temp)
7069 {
7070 if (fsym)
7071 gfc_warning ("Creating array temporary at %L for argument '%s'",
7072 &expr->where, fsym->name);
7073 else
7074 gfc_warning ("Creating array temporary at %L", &expr->where);
7075 }
7076
7077 ptr = build_call_expr_loc (input_location,
7078 gfor_fndecl_in_pack, 1, desc);
7079
7080 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7081 {
7082 tmp = gfc_conv_expr_present (sym);
7083 ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
7084 tmp, fold_convert (TREE_TYPE (se->expr), ptr),
7085 fold_convert (TREE_TYPE (se->expr), null_pointer_node));
7086 }
7087
7088 ptr = gfc_evaluate_now (ptr, &se->pre);
7089
7090 /* Use the packed data for the actual argument, except for contiguous arrays,
7091 where the descriptor's data component is set. */
7092 if (g77)
7093 se->expr = ptr;
7094 else
7095 {
7096 tmp = build_fold_indirect_ref_loc (input_location, desc);
7097 gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
7098 }
7099
7100 if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
7101 {
7102 char * msg;
7103
7104 if (fsym && proc_name)
7105 asprintf (&msg, "An array temporary was created for argument "
7106 "'%s' of procedure '%s'", fsym->name, proc_name);
7107 else
7108 asprintf (&msg, "An array temporary was created");
7109
7110 tmp = build_fold_indirect_ref_loc (input_location,
7111 desc);
7112 tmp = gfc_conv_array_data (tmp);
7113 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7114 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7115
7116 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7117 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7118 boolean_type_node,
7119 gfc_conv_expr_present (sym), tmp);
7120
7121 gfc_trans_runtime_check (false, true, tmp, &se->pre,
7122 &expr->where, msg);
7123 free (msg);
7124 }
7125
7126 gfc_start_block (&block);
7127
7128 /* Copy the data back. */
7129 if (fsym == NULL || fsym->attr.intent != INTENT_IN)
7130 {
7131 tmp = build_call_expr_loc (input_location,
7132 gfor_fndecl_in_unpack, 2, desc, ptr);
7133 gfc_add_expr_to_block (&block, tmp);
7134 }
7135
7136 /* Free the temporary. */
7137 tmp = gfc_call_free (convert (pvoid_type_node, ptr));
7138 gfc_add_expr_to_block (&block, tmp);
7139
7140 stmt = gfc_finish_block (&block);
7141
7142 gfc_init_block (&block);
7143 /* Only if it was repacked. This code needs to be executed before the
7144 loop cleanup code. */
7145 tmp = build_fold_indirect_ref_loc (input_location,
7146 desc);
7147 tmp = gfc_conv_array_data (tmp);
7148 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7149 fold_convert (TREE_TYPE (tmp), ptr), tmp);
7150
7151 if (fsym && fsym->attr.optional && sym && sym->attr.optional)
7152 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7153 boolean_type_node,
7154 gfc_conv_expr_present (sym), tmp);
7155
7156 tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
7157
7158 gfc_add_expr_to_block (&block, tmp);
7159 gfc_add_block_to_block (&block, &se->post);
7160
7161 gfc_init_block (&se->post);
7162
7163 /* Reset the descriptor pointer. */
7164 if (!g77)
7165 {
7166 tmp = build_fold_indirect_ref_loc (input_location, desc);
7167 gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
7168 }
7169
7170 gfc_add_block_to_block (&se->post, &block);
7171 }
7172 }
7173
7174
7175 /* Generate code to deallocate an array, if it is allocated. */
7176
7177 tree
7178 gfc_trans_dealloc_allocated (tree descriptor, bool coarray)
7179 {
7180 tree tmp;
7181 tree var;
7182 stmtblock_t block;
7183
7184 gfc_start_block (&block);
7185
7186 var = gfc_conv_descriptor_data_get (descriptor);
7187 STRIP_NOPS (var);
7188
7189 /* Call array_deallocate with an int * present in the second argument.
7190 Although it is ignored here, its presence ensures that arrays that
7191 are already deallocated are ignored. */
7192 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, NULL_TREE,
7193 NULL_TREE, NULL_TREE, NULL_TREE, true,
7194 NULL, coarray);
7195 gfc_add_expr_to_block (&block, tmp);
7196
7197 /* Zero the data pointer. */
7198 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7199 var, build_int_cst (TREE_TYPE (var), 0));
7200 gfc_add_expr_to_block (&block, tmp);
7201
7202 return gfc_finish_block (&block);
7203 }
7204
7205
7206 /* This helper function calculates the number of elements in a full array. */
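/* In effect it returns stride(rank) * (ubound(rank) - lbound(rank) + 1),
   i.e. the total element count, taken from the descriptor's last
   dimension.  */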
7207
7208 static tree
7209 get_full_array_size (stmtblock_t *block, tree decl, int rank)
7210 {
7211 tree idx;
7212 tree nelems;
7213 tree tmp;
7214 idx = gfc_rank_cst[rank - 1];
7215 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
7216 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
7217 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
7218 nelems, tmp);
7219 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7220 tmp, gfc_index_one_node);
7221 tmp = gfc_evaluate_now (tmp, block);
7222
7223 nelems = gfc_conv_descriptor_stride_get (decl, idx);
7224 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7225 nelems, tmp);
7226 return gfc_evaluate_now (tmp, block);
7227 }
7228
7229
7230 /* Allocate dest to the same size as src, and copy src -> dest.
7231 If no_malloc is set, only the copy is done. */
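/* The generated code is guarded so that an unallocated (null) source simply
   nullifies the destination instead of allocating and copying; see the
   COND_EXPR built at the end of this function.  */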
7232
7233 static tree
7234 duplicate_allocatable (tree dest, tree src, tree type, int rank,
7235 bool no_malloc)
7236 {
7237 tree tmp;
7238 tree size;
7239 tree nelems;
7240 tree null_cond;
7241 tree null_data;
7242 stmtblock_t block;
7243
7244 /* If the source is null, set the destination to null. Then,
7245 allocate memory to the destination. */
7246 gfc_init_block (&block);
7247
7248 if (rank == 0)
7249 {
7250 tmp = null_pointer_node;
7251 tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
7252 gfc_add_expr_to_block (&block, tmp);
7253 null_data = gfc_finish_block (&block);
7254
7255 gfc_init_block (&block);
7256 size = TYPE_SIZE_UNIT (TREE_TYPE (type));
7257 if (!no_malloc)
7258 {
7259 tmp = gfc_call_malloc (&block, type, size);
7260 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7261 dest, fold_convert (type, tmp));
7262 gfc_add_expr_to_block (&block, tmp);
7263 }
7264
7265 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7266 tmp = build_call_expr_loc (input_location, tmp, 3,
7267 dest, src, size);
7268 }
7269 else
7270 {
7271 gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
7272 null_data = gfc_finish_block (&block);
7273
7274 gfc_init_block (&block);
7275 nelems = get_full_array_size (&block, src, rank);
7276 tmp = fold_convert (gfc_array_index_type,
7277 TYPE_SIZE_UNIT (gfc_get_element_type (type)));
7278 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7279 nelems, tmp);
7280 if (!no_malloc)
7281 {
7282 tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
7283 tmp = gfc_call_malloc (&block, tmp, size);
7284 gfc_conv_descriptor_data_set (&block, dest, tmp);
7285 }
7286
7287 /* We know the temporary and the value will be the same length,
7288 so we can use memcpy. */
7289 tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
7290 tmp = build_call_expr_loc (input_location,
7291 tmp, 3, gfc_conv_descriptor_data_get (dest),
7292 gfc_conv_descriptor_data_get (src), size);
7293 }
7294
7295 gfc_add_expr_to_block (&block, tmp);
7296 tmp = gfc_finish_block (&block);
7297
7298 /* Null the destination if the source is null; otherwise do
7299 the allocate and copy. */
7300 if (rank == 0)
7301 null_cond = src;
7302 else
7303 null_cond = gfc_conv_descriptor_data_get (src);
7304
7305 null_cond = convert (pvoid_type_node, null_cond);
7306 null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
7307 null_cond, null_pointer_node);
7308 return build3_v (COND_EXPR, null_cond, tmp, null_data);
7309 }
7310
7311
7312 /* Allocate dest to the same size as src, and copy data src -> dest. */
7313
7314 tree
7315 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
7316 {
7317 return duplicate_allocatable (dest, src, type, rank, false);
7318 }
7319
7320
7321 /* Copy data src -> dest. */
7322
7323 tree
7324 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
7325 {
7326 return duplicate_allocatable (dest, src, type, rank, true);
7327 }
7328
7329
7330 /* Recursively traverse an object of derived type, generating code to
7331 deallocate, nullify or copy allocatable components. This is the workhorse
7332 function for the functions named in this enum. */
7333
7334 enum {DEALLOCATE_ALLOC_COMP = 1, NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP,
7335 COPY_ONLY_ALLOC_COMP};
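/* Illustrative sketch (not part of the original source; hypothetical names):
   a derived type such as

       type t
         real, allocatable :: x(:)
       end type t

   has the alloc_comp attribute set, and objects of type t (or arrays of
   them) are walked by this function to deallocate, nullify or deep-copy
   the component x, as requested by PURPOSE.  */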
7336
7337 static tree
7338 structure_alloc_comps (gfc_symbol * der_type, tree decl,
7339 tree dest, int rank, int purpose)
7340 {
7341 gfc_component *c;
7342 gfc_loopinfo loop;
7343 stmtblock_t fnblock;
7344 stmtblock_t loopbody;
7345 stmtblock_t tmpblock;
7346 tree decl_type;
7347 tree tmp;
7348 tree comp;
7349 tree dcmp;
7350 tree nelems;
7351 tree index;
7352 tree var;
7353 tree cdecl;
7354 tree ctype;
7355 tree vref, dref;
7356 tree null_cond = NULL_TREE;
7357 bool called_dealloc_with_status;
7358
7359 gfc_init_block (&fnblock);
7360
7361 decl_type = TREE_TYPE (decl);
7362
7363 if ((POINTER_TYPE_P (decl_type) && rank != 0)
7364 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
7365 decl = build_fold_indirect_ref_loc (input_location, decl);
7366
7367 /* Just in case it gets dereferenced. */
7368 decl_type = TREE_TYPE (decl);
7369
7370 /* If this is an array of derived types with allocatable components,
7371 build a loop and recursively call this function. */
7372 if (TREE_CODE (decl_type) == ARRAY_TYPE
7373 || (GFC_DESCRIPTOR_TYPE_P (decl_type) && rank != 0))
7374 {
7375 tmp = gfc_conv_array_data (decl);
7376 var = build_fold_indirect_ref_loc (input_location,
7377 tmp);
7378
7379 /* Get the number of elements - 1 and set the counter. */
7380 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
7381 {
7382 /* Use the descriptor for an allocatable array. Since this
7383 is a full array reference, we only need the descriptor
7384 information from dimension = rank. */
7385 tmp = get_full_array_size (&fnblock, decl, rank);
7386 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7387 gfc_array_index_type, tmp,
7388 gfc_index_one_node);
7389
7390 null_cond = gfc_conv_descriptor_data_get (decl);
7391 null_cond = fold_build2_loc (input_location, NE_EXPR,
7392 boolean_type_node, null_cond,
7393 build_int_cst (TREE_TYPE (null_cond), 0));
7394 }
7395 else
7396 {
7397 /* Otherwise use the TYPE_DOMAIN information. */
7398 tmp = array_type_nelts (decl_type);
7399 tmp = fold_convert (gfc_array_index_type, tmp);
7400 }
7401
7402 /* Remember that this is, in fact, the no. of elements - 1. */
7403 nelems = gfc_evaluate_now (tmp, &fnblock);
7404 index = gfc_create_var (gfc_array_index_type, "S");
7405
7406 /* Build the body of the loop. */
7407 gfc_init_block (&loopbody);
7408
7409 vref = gfc_build_array_ref (var, index, NULL);
7410
7411 if (purpose == COPY_ALLOC_COMP)
7412 {
7413 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
7414 {
7415 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
7416 gfc_add_expr_to_block (&fnblock, tmp);
7417 }
7418 tmp = build_fold_indirect_ref_loc (input_location,
7419 gfc_conv_array_data (dest));
7420 dref = gfc_build_array_ref (tmp, index, NULL);
7421 tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
7422 }
7423 else if (purpose == COPY_ONLY_ALLOC_COMP)
7424 {
7425 tmp = build_fold_indirect_ref_loc (input_location,
7426 gfc_conv_array_data (dest));
7427 dref = gfc_build_array_ref (tmp, index, NULL);
7428 tmp = structure_alloc_comps (der_type, vref, dref, rank,
7429 COPY_ALLOC_COMP);
7430 }
7431 else
7432 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
7433
7434 gfc_add_expr_to_block (&loopbody, tmp);
7435
7436 /* Build the loop and return. */
7437 gfc_init_loopinfo (&loop);
7438 loop.dimen = 1;
7439 loop.from[0] = gfc_index_zero_node;
7440 loop.loopvar[0] = index;
7441 loop.to[0] = nelems;
7442 gfc_trans_scalarizing_loops (&loop, &loopbody);
7443 gfc_add_block_to_block (&fnblock, &loop.pre);
7444
7445 tmp = gfc_finish_block (&fnblock);
7446 if (null_cond != NULL_TREE)
7447 tmp = build3_v (COND_EXPR, null_cond, tmp,
7448 build_empty_stmt (input_location));
7449
7450 return tmp;
7451 }
7452
7453 /* Otherwise, act on the components or recursively call self to
7454 act on a chain of components. */
7455 for (c = der_type->components; c; c = c->next)
7456 {
7457 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
7458 || c->ts.type == BT_CLASS)
7459 && c->ts.u.derived->attr.alloc_comp;
7460 cdecl = c->backend_decl;
7461 ctype = TREE_TYPE (cdecl);
7462
7463 switch (purpose)
7464 {
7465 case DEALLOCATE_ALLOC_COMP:
7466
7467 /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
7468 (i.e. this function), so generate all the calls and suppress the
7469 recursion from here, if necessary. */
7470 called_dealloc_with_status = false;
7471 gfc_init_block (&tmpblock);
7472
7473 if (c->attr.allocatable
7474 && (c->attr.dimension || c->attr.codimension))
7475 {
7476 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7477 decl, cdecl, NULL_TREE);
7478 tmp = gfc_trans_dealloc_allocated (comp, c->attr.codimension);
7479 gfc_add_expr_to_block (&tmpblock, tmp);
7480 }
7481 else if (c->attr.allocatable)
7482 {
7483 /* Allocatable scalar components. */
7484 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7485 decl, cdecl, NULL_TREE);
7486
7487 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
7488 c->ts);
7489 gfc_add_expr_to_block (&tmpblock, tmp);
7490 called_dealloc_with_status = true;
7491
7492 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7493 void_type_node, comp,
7494 build_int_cst (TREE_TYPE (comp), 0));
7495 gfc_add_expr_to_block (&tmpblock, tmp);
7496 }
7497 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7498 {
7499 /* Allocatable CLASS components. */
7500 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7501 decl, cdecl, NULL_TREE);
7502
7503 /* Add reference to '_data' component. */
7504 tmp = CLASS_DATA (c)->backend_decl;
7505 comp = fold_build3_loc (input_location, COMPONENT_REF,
7506 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7507
7508 if (GFC_DESCRIPTOR_TYPE_P(TREE_TYPE (comp)))
7509 tmp = gfc_trans_dealloc_allocated (comp,
7510 CLASS_DATA (c)->attr.codimension);
7511 else
7512 {
7513 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
7514 CLASS_DATA (c)->ts);
7515 gfc_add_expr_to_block (&tmpblock, tmp);
7516 called_dealloc_with_status = true;
7517
7518 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7519 void_type_node, comp,
7520 build_int_cst (TREE_TYPE (comp), 0));
7521 }
7522 gfc_add_expr_to_block (&tmpblock, tmp);
7523 }
7524
7525 if (cmp_has_alloc_comps
7526 && !c->attr.pointer
7527 && !called_dealloc_with_status)
7528 {
7529 /* Do not deallocate the components of ultimate pointer components,
7530 and do not recurse into self if gfc_deallocate_scalar_with_status
7531 has already been called above, since that call deals with the components.  */
7532 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7533 decl, cdecl, NULL_TREE);
7534 rank = c->as ? c->as->rank : 0;
7535 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
7536 rank, purpose);
7537 gfc_add_expr_to_block (&fnblock, tmp);
7538 }
7539
7540 /* Now add the deallocation of this component. */
7541 gfc_add_block_to_block (&fnblock, &tmpblock);
7542 break;
7543
7544 case NULLIFY_ALLOC_COMP:
7545 if (c->attr.pointer)
7546 continue;
7547 else if (c->attr.allocatable
7548 && (c->attr.dimension || c->attr.codimension))
7549 {
7550 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7551 decl, cdecl, NULL_TREE);
7552 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
7553 }
7554 else if (c->attr.allocatable)
7555 {
7556 /* Allocatable scalar components. */
7557 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7558 decl, cdecl, NULL_TREE);
7559 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7560 void_type_node, comp,
7561 build_int_cst (TREE_TYPE (comp), 0));
7562 gfc_add_expr_to_block (&fnblock, tmp);
7563 }
7564 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7565 {
7566 /* Allocatable CLASS components. */
7567 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7568 decl, cdecl, NULL_TREE);
7569 /* Add reference to '_data' component. */
7570 tmp = CLASS_DATA (c)->backend_decl;
7571 comp = fold_build3_loc (input_location, COMPONENT_REF,
7572 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
7573 if (GFC_DESCRIPTOR_TYPE_P(TREE_TYPE (comp)))
7574 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
7575 else
7576 {
7577 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
7578 void_type_node, comp,
7579 build_int_cst (TREE_TYPE (comp), 0));
7580 gfc_add_expr_to_block (&fnblock, tmp);
7581 }
7582 }
7583 else if (cmp_has_alloc_comps)
7584 {
7585 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
7586 decl, cdecl, NULL_TREE);
7587 rank = c->as ? c->as->rank : 0;
7588 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
7589 rank, purpose);
7590 gfc_add_expr_to_block (&fnblock, tmp);
7591 }
7592 break;
7593
7594 case COPY_ALLOC_COMP:
7595 if (c->attr.pointer)
7596 continue;
7597
7598 /* We need source and destination components. */
7599 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
7600 cdecl, NULL_TREE);
7601 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
7602 cdecl, NULL_TREE);
7603 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
7604
7605 if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
7606 {
7607 tree ftn_tree;
7608 tree size;
7609 tree dst_data;
7610 tree src_data;
7611 tree null_data;
7612
7613 dst_data = gfc_class_data_get (dcmp);
7614 src_data = gfc_class_data_get (comp);
7615 size = fold_convert (size_type_node, gfc_vtable_size_get (comp));
7616
7617 if (CLASS_DATA (c)->attr.dimension)
7618 {
7619 nelems = gfc_conv_descriptor_size (src_data,
7620 CLASS_DATA (c)->as->rank);
7621 src_data = gfc_conv_descriptor_data_get (src_data);
7622 dst_data = gfc_conv_descriptor_data_get (dst_data);
7623 }
7624 else
7625 nelems = build_int_cst (size_type_node, 1);
7626
7627 gfc_init_block (&tmpblock);
7628
7629 /* We need to use CALLOC as _copy might try to free allocatable
7630 components of the destination. */
7631 ftn_tree = builtin_decl_explicit (BUILT_IN_CALLOC);
7632 tmp = build_call_expr_loc (input_location, ftn_tree, 2, nelems,
7633 size);
7634 gfc_add_modify (&tmpblock, dst_data,
7635 fold_convert (TREE_TYPE (dst_data), tmp));
7636
7637 tmp = gfc_copy_class_to_class (comp, dcmp, nelems);
7638 gfc_add_expr_to_block (&tmpblock, tmp);
7639 tmp = gfc_finish_block (&tmpblock);
7640
7641 gfc_init_block (&tmpblock);
7642 gfc_add_modify (&tmpblock, dst_data,
7643 fold_convert (TREE_TYPE (dst_data),
7644 null_pointer_node));
7645 null_data = gfc_finish_block (&tmpblock);
7646
7647 null_cond = fold_build2_loc (input_location, NE_EXPR,
7648 boolean_type_node, src_data,
7649 null_pointer_node);
7650
7651 gfc_add_expr_to_block (&fnblock, build3_v (COND_EXPR, null_cond,
7652 tmp, null_data));
7653 continue;
7654 }
7655
7656 if (c->attr.allocatable && !cmp_has_alloc_comps)
7657 {
7658 rank = c->as ? c->as->rank : 0;
7659 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
7660 gfc_add_expr_to_block (&fnblock, tmp);
7661 }
7662
7663 if (cmp_has_alloc_comps)
7664 {
7665 rank = c->as ? c->as->rank : 0;
7666 tmp = fold_convert (TREE_TYPE (dcmp), comp);
7667 gfc_add_modify (&fnblock, dcmp, tmp);
7668 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
7669 rank, purpose);
7670 gfc_add_expr_to_block (&fnblock, tmp);
7671 }
7672 break;
7673
7674 default:
7675 gcc_unreachable ();
7676 break;
7677 }
7678 }
7679
7680 return gfc_finish_block (&fnblock);
7681 }
7682
7683 /* Recursively traverse an object of derived type, generating code to
7684 nullify allocatable components. */
7685
7686 tree
7687 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
7688 {
7689 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7690 NULLIFY_ALLOC_COMP);
7691 }
7692
7693
7694 /* Recursively traverse an object of derived type, generating code to
7695 deallocate allocatable components. */
7696
7697 tree
7698 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
7699 {
7700 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
7701 DEALLOCATE_ALLOC_COMP);
7702 }
7703
7704
7705 /* Recursively traverse an object of derived type, generating code to
7706 copy it and its allocatable components. */
7707
7708 tree
7709 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7710 {
7711 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
7712 }
7713
7714
7715 /* Recursively traverse an object of derived type, generating code to
7716 copy only its allocatable components. */
7717
7718 tree
7719 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
7720 {
7721 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
7722 }
7723
7724
7725 /* Returns the value of LBOUND for an expression. This could be broken out
7726 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
7727 called by gfc_alloc_allocatable_for_assignment. */
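/* For illustration (hypothetical example): given

     real, allocatable :: a(:)
     allocate (a(3:7))

   the standard value of LBOUND (a, 1) is 3, whereas after allocate (a(5:4))
   the array is zero-sized and LBOUND (a, 1) is 1; the conditions built below
   pick between the stored lower bound and unity accordingly.  */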
7728 static tree
7729 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
7730 {
7731 tree lbound;
7732 tree ubound;
7733 tree stride;
7734 tree cond, cond1, cond3, cond4;
7735 tree tmp;
7736 gfc_ref *ref;
7737
7738 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
7739 {
7740 tmp = gfc_rank_cst[dim];
7741 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
7742 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
7743 stride = gfc_conv_descriptor_stride_get (desc, tmp);
7744 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7745 ubound, lbound);
7746 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
7747 stride, gfc_index_zero_node);
7748 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
7749 boolean_type_node, cond3, cond1);
7750 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
7751 stride, gfc_index_zero_node);
7752 if (assumed_size)
7753 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7754 tmp, build_int_cst (gfc_array_index_type,
7755 expr->rank - 1));
7756 else
7757 cond = boolean_false_node;
7758
7759 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7760 boolean_type_node, cond3, cond4);
7761 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
7762 boolean_type_node, cond, cond1);
7763
7764 return fold_build3_loc (input_location, COND_EXPR,
7765 gfc_array_index_type, cond,
7766 lbound, gfc_index_one_node);
7767 }
7768
7769 if (expr->expr_type == EXPR_FUNCTION)
7770 {
7771 /* A conversion function, so use the argument. */
7772 gcc_assert (expr->value.function.isym
7773 && expr->value.function.isym->conversion);
7774 expr = expr->value.function.actual->expr;
7775 }
7776
7777 if (expr->expr_type == EXPR_VARIABLE)
7778 {
7779 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
7780 for (ref = expr->ref; ref; ref = ref->next)
7781 {
7782 if (ref->type == REF_COMPONENT
7783 && ref->u.c.component->as
7784 && ref->next
7785 && ref->next->u.ar.type == AR_FULL)
7786 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
7787 }
7788 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
7789 }
7790
7791 return gfc_index_one_node;
7792 }
7793
7794
7795 /* Returns true if an expression represents an lhs that can be reallocated
7796 on assignment. */
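/* Hypothetical examples:

     real, allocatable :: a(:)
     type :: t
       real, allocatable :: r(:)
     end type
     type(t) :: x

   Both "a = ..." and "x%r = ..." are reallocatable lhs's, since the
   reference is to the whole allocatable array; a section such as
   "a(1:2) = ..." or a pointer/non-allocatable variable is not.  */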
7797
7798 bool
7799 gfc_is_reallocatable_lhs (gfc_expr *expr)
7800 {
7801 gfc_ref * ref;
7802
7803 if (!expr->ref)
7804 return false;
7805
7806 /* An allocatable variable. */
7807 if (expr->symtree->n.sym->attr.allocatable
7808 && expr->ref
7809 && expr->ref->type == REF_ARRAY
7810 && expr->ref->u.ar.type == AR_FULL)
7811 return true;
7812
7813 /* All that can be left are allocatable components. */
7814 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
7815 && expr->symtree->n.sym->ts.type != BT_CLASS)
7816 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
7817 return false;
7818
7819 /* Find a component ref followed by an array reference. */
7820 for (ref = expr->ref; ref; ref = ref->next)
7821 if (ref->next
7822 && ref->type == REF_COMPONENT
7823 && ref->next->type == REF_ARRAY
7824 && !ref->next->next)
7825 break;
7826
7827 if (!ref)
7828 return false;
7829
7830 /* Return true if valid reallocatable lhs. */
7831 if (ref->u.c.component->attr.allocatable
7832 && ref->next->u.ar.type == AR_FULL)
7833 return true;
7834
7835 return false;
7836 }
7837
7838
7839 /* If the lhs of an assignment to an allocatable array is unallocated,
7840 allocate it; otherwise reallocate it to the shape of the rhs. */
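/* For instance (hypothetical example), for

     real, allocatable :: a(:)
     a = [1.0, 2.0, 3.0]

   the code built here allocates 'a' with shape [3] when it is unallocated
   and, when it is allocated with a different shape, resets the bounds and
   calls realloc if the total size has changed; the scalarizer's offset,
   delta and data variables are updated to match.  */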
7841
7842 tree
7843 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
7844 gfc_expr *expr1,
7845 gfc_expr *expr2)
7846 {
7847 stmtblock_t realloc_block;
7848 stmtblock_t alloc_block;
7849 stmtblock_t fblock;
7850 gfc_ss *rss;
7851 gfc_ss *lss;
7852 gfc_array_info *linfo;
7853 tree realloc_expr;
7854 tree alloc_expr;
7855 tree size1;
7856 tree size2;
7857 tree array1;
7858 tree cond;
7859 tree tmp;
7860 tree tmp2;
7861 tree lbound;
7862 tree ubound;
7863 tree desc;
7864 tree desc2;
7865 tree offset;
7866 tree jump_label1;
7867 tree jump_label2;
7868 tree neq_size;
7869 tree lbd;
7870 int n;
7871 int dim;
7872 gfc_array_spec * as;
7873
7874 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
7875 Find the lhs expression in the loop chain and set expr1 and
7876 expr2 accordingly. */
7877 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
7878 {
7879 expr2 = expr1;
7880 /* Find the ss for the lhs. */
7881 lss = loop->ss;
7882 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7883 if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE)
7884 break;
7885 if (lss == gfc_ss_terminator)
7886 return NULL_TREE;
7887 expr1 = lss->info->expr;
7888 }
7889
7890 /* Bail out if this is not a valid allocate on assignment. */
7891 if (!gfc_is_reallocatable_lhs (expr1)
7892 || (expr2 && !expr2->rank))
7893 return NULL_TREE;
7894
7895 /* Find the ss for the lhs. */
7896 lss = loop->ss;
7897 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
7898 if (lss->info->expr == expr1)
7899 break;
7900
7901 if (lss == gfc_ss_terminator)
7902 return NULL_TREE;
7903
7904 linfo = &lss->info->data.array;
7905
7906 /* Find an ss for the rhs. For operator expressions, we see the
7907 ss's for the operands. Any one of these will do. */
7908 rss = loop->ss;
7909 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
7910 if (rss->info->expr != expr1 && rss != loop->temp_ss)
7911 break;
7912
7913 if (expr2 && rss == gfc_ss_terminator)
7914 return NULL_TREE;
7915
7916 gfc_start_block (&fblock);
7917
7918 /* Since the lhs is allocatable, this must be a descriptor type.
7919 Get the data and array size. */
7920 desc = linfo->descriptor;
7921 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
7922 array1 = gfc_conv_descriptor_data_get (desc);
7923
7924 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
7925 deallocated if expr is an array of different shape or any of the
7926 corresponding length type parameter values of variable and expr
7927 differ." This assures F95 compatibility. */
7928 jump_label1 = gfc_build_label_decl (NULL_TREE);
7929 jump_label2 = gfc_build_label_decl (NULL_TREE);
7930
7931 /* Allocate if data is NULL. */
7932 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
7933 array1, build_int_cst (TREE_TYPE (array1), 0));
7934 tmp = build3_v (COND_EXPR, cond,
7935 build1_v (GOTO_EXPR, jump_label1),
7936 build_empty_stmt (input_location));
7937 gfc_add_expr_to_block (&fblock, tmp);
7938
7939 /* Get arrayspec if expr is a full array. */
7940 if (expr2 && expr2->expr_type == EXPR_FUNCTION
7941 && expr2->value.function.isym
7942 && expr2->value.function.isym->conversion)
7943 {
7944 /* For conversion functions, take the arg. */
7945 gfc_expr *arg = expr2->value.function.actual->expr;
7946 as = gfc_get_full_arrayspec_from_expr (arg);
7947 }
7948 else if (expr2)
7949 as = gfc_get_full_arrayspec_from_expr (expr2);
7950 else
7951 as = NULL;
7952
7953 /* If the lhs shape is not the same as the rhs, jump to setting the
7954 bounds and doing the reallocation.... */
7955 for (n = 0; n < expr1->rank; n++)
7956 {
7957 /* Check the shape. */
7958 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
7959 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
7960 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7961 gfc_array_index_type,
7962 loop->to[n], loop->from[n]);
7963 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7964 gfc_array_index_type,
7965 tmp, lbound);
7966 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7967 gfc_array_index_type,
7968 tmp, ubound);
7969 cond = fold_build2_loc (input_location, NE_EXPR,
7970 boolean_type_node,
7971 tmp, gfc_index_zero_node);
7972 tmp = build3_v (COND_EXPR, cond,
7973 build1_v (GOTO_EXPR, jump_label1),
7974 build_empty_stmt (input_location));
7975 gfc_add_expr_to_block (&fblock, tmp);
7976 }
7977
7978 /* ....else jump past the (re)alloc code. */
7979 tmp = build1_v (GOTO_EXPR, jump_label2);
7980 gfc_add_expr_to_block (&fblock, tmp);
7981
7982 /* Add the label to start automatic (re)allocation. */
7983 tmp = build1_v (LABEL_EXPR, jump_label1);
7984 gfc_add_expr_to_block (&fblock, tmp);
7985
7986 size1 = gfc_conv_descriptor_size (desc, expr1->rank);
7987
7988 /* Get the rhs size. Fix both sizes. */
7989 if (expr2)
7990 desc2 = rss->info->data.array.descriptor;
7991 else
7992 desc2 = NULL_TREE;
7993 size2 = gfc_index_one_node;
7994 for (n = 0; n < expr2->rank; n++)
7995 {
7996 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7997 gfc_array_index_type,
7998 loop->to[n], loop->from[n]);
7999 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8000 gfc_array_index_type,
8001 tmp, gfc_index_one_node);
8002 size2 = fold_build2_loc (input_location, MULT_EXPR,
8003 gfc_array_index_type,
8004 tmp, size2);
8005 }
8006
8007 size1 = gfc_evaluate_now (size1, &fblock);
8008 size2 = gfc_evaluate_now (size2, &fblock);
8009
8010 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
8011 size1, size2);
8012 neq_size = gfc_evaluate_now (cond, &fblock);
8013
8014
8015 /* Now modify the lhs descriptor and the associated scalarizer
8016 variables. F2003 7.4.1.3: "If variable is or becomes an
8017 unallocated allocatable variable, then it is allocated with each
8018 deferred type parameter equal to the corresponding type parameters
8019 of expr, with the shape of expr, and with each lower bound equal
8020 to the corresponding element of LBOUND(expr)."
8021 Reuse size1 to keep a dimension-by-dimension track of the
8022 stride of the new array. */
8023 size1 = gfc_index_one_node;
8024 offset = gfc_index_zero_node;
8025
8026 for (n = 0; n < expr2->rank; n++)
8027 {
8028 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8029 gfc_array_index_type,
8030 loop->to[n], loop->from[n]);
8031 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8032 gfc_array_index_type,
8033 tmp, gfc_index_one_node);
8034
8035 lbound = gfc_index_one_node;
8036 ubound = tmp;
8037
8038 if (as)
8039 {
8040 lbd = get_std_lbound (expr2, desc2, n,
8041 as->type == AS_ASSUMED_SIZE);
8042 ubound = fold_build2_loc (input_location,
8043 MINUS_EXPR,
8044 gfc_array_index_type,
8045 ubound, lbound);
8046 ubound = fold_build2_loc (input_location,
8047 PLUS_EXPR,
8048 gfc_array_index_type,
8049 ubound, lbd);
8050 lbound = lbd;
8051 }
8052
8053 gfc_conv_descriptor_lbound_set (&fblock, desc,
8054 gfc_rank_cst[n],
8055 lbound);
8056 gfc_conv_descriptor_ubound_set (&fblock, desc,
8057 gfc_rank_cst[n],
8058 ubound);
8059 gfc_conv_descriptor_stride_set (&fblock, desc,
8060 gfc_rank_cst[n],
8061 size1);
8062 lbound = gfc_conv_descriptor_lbound_get (desc,
8063 gfc_rank_cst[n]);
8064 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
8065 gfc_array_index_type,
8066 lbound, size1);
8067 offset = fold_build2_loc (input_location, MINUS_EXPR,
8068 gfc_array_index_type,
8069 offset, tmp2);
8070 size1 = fold_build2_loc (input_location, MULT_EXPR,
8071 gfc_array_index_type,
8072 tmp, size1);
8073 }
8074
8075 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
8076 the array offset is saved and the info.offset is used for a
8077 running offset. Use the saved_offset instead. */
8078 tmp = gfc_conv_descriptor_offset (desc);
8079 gfc_add_modify (&fblock, tmp, offset);
8080 if (linfo->saved_offset
8081 && TREE_CODE (linfo->saved_offset) == VAR_DECL)
8082 gfc_add_modify (&fblock, linfo->saved_offset, tmp);
8083
8084 /* Now set the deltas for the lhs. */
8085 for (n = 0; n < expr1->rank; n++)
8086 {
8087 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8088 dim = lss->dim[n];
8089 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8090 gfc_array_index_type, tmp,
8091 loop->from[dim]);
8092 if (linfo->delta[dim]
8093 && TREE_CODE (linfo->delta[dim]) == VAR_DECL)
8094 gfc_add_modify (&fblock, linfo->delta[dim], tmp);
8095 }
8096
8097 /* Get the new lhs size in bytes. */
8098 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
8099 {
8100 tmp = expr2->ts.u.cl->backend_decl;
8101 gcc_assert (expr1->ts.u.cl->backend_decl);
8102 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
8103 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
8104 }
8105 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
8106 {
8107 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
8108 tmp = fold_build2_loc (input_location, MULT_EXPR,
8109 gfc_array_index_type, tmp,
8110 expr1->ts.u.cl->backend_decl);
8111 }
8112 else
8113 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
8114 tmp = fold_convert (gfc_array_index_type, tmp);
8115 size2 = fold_build2_loc (input_location, MULT_EXPR,
8116 gfc_array_index_type,
8117 tmp, size2);
8118 size2 = fold_convert (size_type_node, size2);
8119 size2 = gfc_evaluate_now (size2, &fblock);
8120
8121 /* Realloc expression. Note that the scalarizer uses desc.data
8122 in the array reference - (*desc.data)[<element>]. */
8123 gfc_init_block (&realloc_block);
8124 tmp = build_call_expr_loc (input_location,
8125 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
8126 fold_convert (pvoid_type_node, array1),
8127 size2);
8128 gfc_conv_descriptor_data_set (&realloc_block,
8129 desc, tmp);
8130 realloc_expr = gfc_finish_block (&realloc_block);
8131
8132 /* Only reallocate if sizes are different. */
8133 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
8134 build_empty_stmt (input_location));
8135 realloc_expr = tmp;
8136
8137
8138 /* Malloc expression. */
8139 gfc_init_block (&alloc_block);
8140 tmp = build_call_expr_loc (input_location,
8141 builtin_decl_explicit (BUILT_IN_MALLOC),
8142 1, size2);
8143 gfc_conv_descriptor_data_set (&alloc_block,
8144 desc, tmp);
8145 tmp = gfc_conv_descriptor_dtype (desc);
8146 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
8147 alloc_expr = gfc_finish_block (&alloc_block);
8148
8149 /* Malloc if not allocated; realloc otherwise. */
8150 tmp = build_int_cst (TREE_TYPE (array1), 0);
8151 cond = fold_build2_loc (input_location, EQ_EXPR,
8152 boolean_type_node,
8153 array1, tmp);
8154 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
8155 gfc_add_expr_to_block (&fblock, tmp);
8156
8157 /* Make sure that the scalarizer data pointer is updated. */
8158 if (linfo->data
8159 && TREE_CODE (linfo->data) == VAR_DECL)
8160 {
8161 tmp = gfc_conv_descriptor_data_get (desc);
8162 gfc_add_modify (&fblock, linfo->data, tmp);
8163 }
8164
8165 /* Add the exit label. */
8166 tmp = build1_v (LABEL_EXPR, jump_label2);
8167 gfc_add_expr_to_block (&fblock, tmp);
8168
8169 return gfc_finish_block (&fblock);
8170 }
8171
8172
8173 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
8174 Do likewise, recursively if necessary, with the allocatable components of
8175 derived types. */
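/* For example (hypothetical), a local, non-SAVEd variable declared as

     real, allocatable :: work(:)

   has its descriptor data pointer set to NULL in the initialization code and
   is freed again by the cleanup code when the procedure exits; dummies,
   use-associated symbols and results are left untouched.  */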
8176
8177 void
8178 gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
8179 {
8180 tree type;
8181 tree tmp;
8182 tree descriptor;
8183 stmtblock_t init;
8184 stmtblock_t cleanup;
8185 locus loc;
8186 int rank;
8187 bool sym_has_alloc_comp;
8188
8189 sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
8190 || sym->ts.type == BT_CLASS)
8191 && sym->ts.u.derived->attr.alloc_comp;
8192
8193 /* Make sure the frontend gets these right. */
8194 if (!(sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp))
8195 fatal_error ("Possible front-end bug: Deferred array size without pointer, "
8196 "allocatable attribute or derived type without allocatable "
8197 "components.");
8198
8199 gfc_save_backend_locus (&loc);
8200 gfc_set_backend_locus (&sym->declared_at);
8201 gfc_init_block (&init);
8202
8203 gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
8204 || TREE_CODE (sym->backend_decl) == PARM_DECL);
8205
8206 if (sym->ts.type == BT_CHARACTER
8207 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
8208 {
8209 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
8210 gfc_trans_vla_type_sizes (sym, &init);
8211 }
8212
8213 /* Dummy, use-associated and result variables don't need anything special. */
8214 if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
8215 {
8216 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
8217 gfc_restore_backend_locus (&loc);
8218 return;
8219 }
8220
8221 descriptor = sym->backend_decl;
8222
8223 /* Although static, derived types with default initializers and
8224 allocatable components must not be nulled wholesale; instead they
8225 are treated component by component. */
8226 if (TREE_STATIC (descriptor) && !sym_has_alloc_comp)
8227 {
8228 /* SAVEd variables are not freed on exit. */
8229 gfc_trans_static_array_pointer (sym);
8230
8231 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
8232 gfc_restore_backend_locus (&loc);
8233 return;
8234 }
8235
8236 /* Get the descriptor type. */
8237 type = TREE_TYPE (sym->backend_decl);
8238
8239 if (sym_has_alloc_comp && !(sym->attr.pointer || sym->attr.allocatable))
8240 {
8241 if (!sym->attr.save
8242 && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
8243 {
8244 if (sym->value == NULL
8245 || !gfc_has_default_initializer (sym->ts.u.derived))
8246 {
8247 rank = sym->as ? sym->as->rank : 0;
8248 tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
8249 descriptor, rank);
8250 gfc_add_expr_to_block (&init, tmp);
8251 }
8252 else
8253 gfc_init_default_dt (sym, &init, false);
8254 }
8255 }
8256 else if (!GFC_DESCRIPTOR_TYPE_P (type))
8257 {
8258 /* If the backend_decl is not a descriptor, we must have a pointer
8259 to one. */
8260 descriptor = build_fold_indirect_ref_loc (input_location,
8261 sym->backend_decl);
8262 type = TREE_TYPE (descriptor);
8263 }
8264
8265 /* NULLIFY the data pointer. */
8266 if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save)
8267 gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);
8268
8269 gfc_restore_backend_locus (&loc);
8270 gfc_init_block (&cleanup);
8271
8272 /* Allocatable arrays need to be freed when they go out of scope.
8273 The allocatable components of pointers must not be touched. */
8274 if (sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
8275 && !sym->attr.pointer && !sym->attr.save)
8276 {
8277 int rank;
8278 rank = sym->as ? sym->as->rank : 0;
8279 tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
8280 gfc_add_expr_to_block (&cleanup, tmp);
8281 }
8282
8283 if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
8284 && !sym->attr.save && !sym->attr.result)
8285 {
8286 tmp = gfc_trans_dealloc_allocated (sym->backend_decl,
8287 sym->attr.codimension);
8288 gfc_add_expr_to_block (&cleanup, tmp);
8289 }
8290
8291 gfc_add_init_cleanup (block, gfc_finish_block (&init),
8292 gfc_finish_block (&cleanup));
8293 }
8294
8295 /************ Expression Walking Functions ******************/
8296
8297 /* Walk a variable reference.
8298
8299 Possible extension - multiple component subscripts.
8300 x(:,:) = foo%a(:)%b(:)
8301 Transforms to
8302 forall (i=..., j=...)
8303 x(i,j) = foo%a(j)%b(i)
8304 end forall
8305 This adds a fair amount of complexity because you need to deal with more
8306 than one ref. Maybe handle in a similar manner to vector subscripts.
8307 Maybe not worth the effort. */
8308
8309
8310 static gfc_ss *
8311 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
8312 {
8313 gfc_ref *ref;
8314
8315 for (ref = expr->ref; ref; ref = ref->next)
8316 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
8317 break;
8318
8319 return gfc_walk_array_ref (ss, expr, ref);
8320 }
8321
8322
8323 gfc_ss *
8324 gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
8325 {
8326 gfc_array_ref *ar;
8327 gfc_ss *newss;
8328 int n;
8329
8330 for (; ref; ref = ref->next)
8331 {
8332 if (ref->type == REF_SUBSTRING)
8333 {
8334 ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
8335 ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
8336 }
8337
8338 /* We're only interested in array sections from now on. */
8339 if (ref->type != REF_ARRAY)
8340 continue;
8341
8342 ar = &ref->u.ar;
8343
8344 switch (ar->type)
8345 {
8346 case AR_ELEMENT:
8347 for (n = ar->dimen - 1; n >= 0; n--)
8348 ss = gfc_get_scalar_ss (ss, ar->start[n]);
8349 break;
8350
8351 case AR_FULL:
8352 newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
8353 newss->info->data.array.ref = ref;
8354
8355 /* Make sure array is the same as array(:,:); this way
8356 we don't need to special-case it all the time. */
8357 ar->dimen = ar->as->rank;
8358 for (n = 0; n < ar->dimen; n++)
8359 {
8360 ar->dimen_type[n] = DIMEN_RANGE;
8361
8362 gcc_assert (ar->start[n] == NULL);
8363 gcc_assert (ar->end[n] == NULL);
8364 gcc_assert (ar->stride[n] == NULL);
8365 }
8366 ss = newss;
8367 break;
8368
8369 case AR_SECTION:
8370 newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
8371 newss->info->data.array.ref = ref;
8372
8373 /* We add SS chains for all the subscripts in the section. */
8374 for (n = 0; n < ar->dimen; n++)
8375 {
8376 gfc_ss *indexss;
8377
8378 switch (ar->dimen_type[n])
8379 {
8380 case DIMEN_ELEMENT:
8381 /* Add SS for elemental (scalar) subscripts. */
8382 gcc_assert (ar->start[n]);
8383 indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
8384 indexss->loop_chain = gfc_ss_terminator;
8385 newss->info->data.array.subscript[n] = indexss;
8386 break;
8387
8388 case DIMEN_RANGE:
8389 /* We don't add anything for sections, just remember this
8390 dimension for later. */
8391 newss->dim[newss->dimen] = n;
8392 newss->dimen++;
8393 break;
8394
8395 case DIMEN_VECTOR:
8396 /* Create a GFC_SS_VECTOR index in which we can store
8397 the vector's descriptor. */
8398 indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
8399 1, GFC_SS_VECTOR);
8400 indexss->loop_chain = gfc_ss_terminator;
8401 newss->info->data.array.subscript[n] = indexss;
8402 newss->dim[newss->dimen] = n;
8403 newss->dimen++;
8404 break;
8405
8406 default:
8407 /* We should know what sort of section it is by now. */
8408 gcc_unreachable ();
8409 }
8410 }
8411 /* We should have at least one non-elemental dimension,
8412 unless we are creating a descriptor for a (scalar) coarray. */
8413 gcc_assert (newss->dimen > 0
8414 || newss->info->data.array.ref->u.ar.as->corank > 0);
8415 ss = newss;
8416 break;
8417
8418 default:
8419 /* We should know what sort of section it is by now. */
8420 gcc_unreachable ();
8421 }
8422
8423 }
8424 return ss;
8425 }
8426
8427
8428 /* Walk an expression operator. If only one operand of a binary expression is
8429 scalar, we must also add the scalar term to the SS chain. */
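/* For example (hypothetical), when walking "a + 1.0" with A an array, the
   walk of the constant yields no new SS, so a GFC_SS_SCALAR term is created
   for it here and linked into the chain together with the array term.  */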
8430
8431 static gfc_ss *
8432 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
8433 {
8434 gfc_ss *head;
8435 gfc_ss *head2;
8436
8437 head = gfc_walk_subexpr (ss, expr->value.op.op1);
8438 if (expr->value.op.op2 == NULL)
8439 head2 = head;
8440 else
8441 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
8442
8443 /* All operands are scalar. Pass back and let the caller deal with it. */
8444 if (head2 == ss)
8445 return head2;
8446
8447 /* All operands require scalarization. */
8448 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
8449 return head2;
8450
8451 /* One of the operands needs scalarization, the other is scalar.
8452 Create a gfc_ss for the scalar expression. */
8453 if (head == ss)
8454 {
8455 /* First operand is scalar. We build the chain in reverse order, so
8456 add the scalar SS after the second operand. */
8457 head = head2;
8458 while (head && head->next != ss)
8459 head = head->next;
8460 /* Check we haven't somehow broken the chain. */
8461 gcc_assert (head);
8462 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
8463 }
8464 else /* head2 == head */
8465 {
8466 gcc_assert (head2 == head);
8467 /* Second operand is scalar. */
8468 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
8469 }
8470
8471 return head2;
8472 }
8473
8474
8475 /* Reverse a SS chain. */
8476
8477 gfc_ss *
8478 gfc_reverse_ss (gfc_ss * ss)
8479 {
8480 gfc_ss *next;
8481 gfc_ss *head;
8482
8483 gcc_assert (ss != NULL);
8484
8485 head = gfc_ss_terminator;
8486 while (ss != gfc_ss_terminator)
8487 {
8488 next = ss->next;
8489 /* Check we didn't somehow break the chain. */
8490 gcc_assert (next != NULL);
8491 ss->next = head;
8492 head = ss;
8493 ss = next;
8494 }
8495
8496 return (head);
8497 }
8498
8499
8500 /* Given an expression referring to a procedure, return the symbol of its
8501 interface. We can't get the procedure symbol directly as we have to handle
8502 the case of (deferred) type-bound procedures. */
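/* For example (hypothetical), for a call through a procedure-pointer
   component such as x%proc (...), the interface symbol stored in the
   component (ts.interface) is returned rather than the symbol of X.  */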
8503
8504 gfc_symbol *
8505 gfc_get_proc_ifc_for_expr (gfc_expr *procedure_ref)
8506 {
8507 gfc_symbol *sym;
8508 gfc_ref *ref;
8509
8510 if (procedure_ref == NULL)
8511 return NULL;
8512
8513 /* Normal procedure case. */
8514 sym = procedure_ref->symtree->n.sym;
8515
8516 /* Typebound procedure case. */
8517 for (ref = procedure_ref->ref; ref; ref = ref->next)
8518 {
8519 if (ref->type == REF_COMPONENT
8520 && ref->u.c.component->attr.proc_pointer)
8521 sym = ref->u.c.component->ts.interface;
8522 else
8523 sym = NULL;
8524 }
8525
8526 return sym;
8527 }
8528
8529
8530 /* Walk the arguments of an elemental function.
8531 PROC_IFC is the procedure's interface, used to check whether an argument
8532 is permitted to be absent. If it is NULL, we don't do the check and the
8533 argument is assumed to be present. */
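/* For instance (hypothetical example), for an elemental reference
   f (a, 2.0) with A an array, the walk of A produces an array SS while the
   constant gets a scalar SS of the requested TYPE; if every argument turns
   out to be scalar, the chain is freed and the incoming SS is passed back
   unchanged.  */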
8534
8535 gfc_ss *
8536 gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
8537 gfc_symbol *proc_ifc, gfc_ss_type type)
8538 {
8539 gfc_formal_arglist *dummy_arg;
8540 int scalar;
8541 gfc_ss *head;
8542 gfc_ss *tail;
8543 gfc_ss *newss;
8544
8545 head = gfc_ss_terminator;
8546 tail = NULL;
8547
8548 if (proc_ifc)
8549 dummy_arg = proc_ifc->formal;
8550 else
8551 dummy_arg = NULL;
8552
8553 scalar = 1;
8554 for (; arg; arg = arg->next)
8555 {
8556 if (!arg->expr || arg->expr->expr_type == EXPR_NULL)
8557 continue;
8558
8559 newss = gfc_walk_subexpr (head, arg->expr);
8560 if (newss == head)
8561 {
8562 /* Scalar argument. */
8563 gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
8564 newss = gfc_get_scalar_ss (head, arg->expr);
8565 newss->info->type = type;
8566
8567 }
8568 else
8569 scalar = 0;
8570
8571 if (dummy_arg != NULL
8572 && dummy_arg->sym->attr.optional
8573 && arg->expr->expr_type == EXPR_VARIABLE
8574 && (gfc_expr_attr (arg->expr).optional
8575 || gfc_expr_attr (arg->expr).allocatable
8576 || gfc_expr_attr (arg->expr).pointer))
8577 newss->info->can_be_null_ref = true;
8578
8579 head = newss;
8580 if (!tail)
8581 {
8582 tail = head;
8583 while (tail->next != gfc_ss_terminator)
8584 tail = tail->next;
8585 }
8586
8587 if (dummy_arg != NULL)
8588 dummy_arg = dummy_arg->next;
8589 }
8590
8591 if (scalar)
8592 {
8593 /* If all the arguments are scalar we don't need the argument SS. */
8594 gfc_free_ss_chain (head);
8595 /* Pass it back. */
8596 return ss;
8597 }
8598
8599 /* Add it onto the existing chain. */
8600 tail->next = ss;
8601 return head;
8602 }
8603
8604
8605 /* Walk a function call. Scalar functions are passed back, and taken out of
8606 scalarization loops. For elemental functions we walk their arguments.
8607 The result of functions returning arrays is stored in a temporary outside
8608 the loop, so that the function is only called once. Hence we do not need
8609 to walk their arguments. */
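/* Illustrative cases: an intrinsic such as SUM is handled by
   gfc_walk_intrinsic_function; an elemental function has its actual
   arguments walked so that it can be evaluated inside the scalarization
   loop; a function whose array result is returned by reference yields a
   single GFC_SS_FUNCTION term; and a scalar function adds nothing, so the
   chain is passed back untouched.  */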
8610
8611 static gfc_ss *
8612 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
8613 {
8614 gfc_intrinsic_sym *isym;
8615 gfc_symbol *sym;
8616 gfc_component *comp = NULL;
8617
8618 isym = expr->value.function.isym;
8619
8620 /* Handle intrinsic functions separately. */
8621 if (isym)
8622 return gfc_walk_intrinsic_function (ss, expr, isym);
8623
8624 sym = expr->value.function.esym;
8625 if (!sym)
8626 sym = expr->symtree->n.sym;
8627
8628 /* A function that returns arrays. */
8629 gfc_is_proc_ptr_comp (expr, &comp);
8630 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
8631 || (comp && comp->attr.dimension))
8632 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
8633
8634 /* Walk the parameters of an elemental function. For now we always pass
8635 by reference. */
8636 if (sym->attr.elemental || (comp && comp->attr.elemental))
8637 return gfc_walk_elemental_function_args (ss, expr->value.function.actual,
8638 gfc_get_proc_ifc_for_expr (expr),
8639 GFC_SS_REFERENCE);
8640
8641 /* Scalar functions are OK as these are evaluated outside the scalarization
8642 loop. Pass back and let the caller deal with it. */
8643 return ss;
8644 }
8645
8646
8647 /* An array temporary is constructed for array constructors. */
8648
8649 static gfc_ss *
8650 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
8651 {
8652 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
8653 }
8654
8655
8656 /* Walk an expression. Add walked expressions to the head of the SS chain.
8657 A wholly scalar expression will not be added. */
8658
8659 gfc_ss *
8660 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
8661 {
8662 gfc_ss *head;
8663
8664 switch (expr->expr_type)
8665 {
8666 case EXPR_VARIABLE:
8667 head = gfc_walk_variable_expr (ss, expr);
8668 return head;
8669
8670 case EXPR_OP:
8671 head = gfc_walk_op_expr (ss, expr);
8672 return head;
8673
8674 case EXPR_FUNCTION:
8675 head = gfc_walk_function_expr (ss, expr);
8676 return head;
8677
8678 case EXPR_CONSTANT:
8679 case EXPR_NULL:
8680 case EXPR_STRUCTURE:
8681 /* Pass back and let the caller deal with it. */
8682 break;
8683
8684 case EXPR_ARRAY:
8685 head = gfc_walk_array_constructor (ss, expr);
8686 return head;
8687
8688 case EXPR_SUBSTRING:
8689 /* Pass back and let the caller deal with it. */
8690 break;
8691
8692 default:
8693 internal_error ("bad expression type during walk (%d)",
8694 expr->expr_type);
8695 }
8696 return ss;
8697 }
8698
8699
8700 /* Entry point for expression walking.
8701 A return value equal to the passed chain means this is
8702 a scalar expression. It is up to the caller to take whatever action is
8703 necessary to translate these. */
8704
8705 gfc_ss *
8706 gfc_walk_expr (gfc_expr * expr)
8707 {
8708 gfc_ss *res;
8709
8710 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
8711 return gfc_reverse_ss (res);
8712 }