1 /* Array translation routines
2 Copyright (C) 2002-2016 Free Software Foundation, Inc.
3 Contributed by Paul Brook <paul@nowt.org>
4 and Steven Bosscher <s.bosscher@student.tudelft.nl>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* trans-array.c-- Various array related code, including scalarization,
23 allocation, initialization and other support routines. */
24
25 /* How the scalarizer works.
26 In gfortran, array expressions use the same core routines as scalar
27 expressions.
28 First, a Scalarization State (SS) chain is built. This is done by walking
29 the expression tree, and building a linear list of the terms in the
30 expression. As the tree is walked, scalar subexpressions are translated.
31
32 The scalarization parameters are stored in a gfc_loopinfo structure.
33 First the start and stride of each term is calculated by
34 gfc_conv_ss_startstride. During this process the expressions for the array
35 descriptors and data pointers are also translated.
36
37 If the expression is an assignment, we must then resolve any dependencies.
38 In Fortran all the rhs values of an assignment must be evaluated before
39 any assignments take place. This can require a temporary array to store the
40 values. We also require a temporary when we are passing array expressions
41 or vector subscripts as procedure parameters.
42
43 Array sections are passed without copying to a temporary. These use the
44 scalarizer to determine the shape of the section. The flag
45 loop->array_parameter tells the scalarizer that the actual values and loop
46 variables will not be required.
47
48 The function gfc_conv_loop_setup generates the scalarization setup code.
49 It determines the range of the scalarizing loop variables. If a temporary
50 is required, this is created and initialized. Code for scalar expressions
51 taken outside the loop is also generated at this time. Next the offset and
52 scaling required to translate from loop variables to array indices for each
53 term is calculated.
54
55 A call to gfc_start_scalarized_body marks the start of the scalarized
56 expression. This creates a scope and declares the loop variables. Before
57 calling this gfc_make_ss_chain_used must be used to indicate which terms
58 will be used inside this loop.
59
60 The scalar gfc_conv_* functions are then used to build the main body of the
61 scalarization loop. Scalarization loop variables and precalculated scalar
62 values are automatically substituted. Note that gfc_advance_se_ss_chain
63 must be used, rather than changing the se->ss directly.
64
65 For assignment expressions requiring a temporary two sub loops are
66 generated. The first stores the result of the expression in the temporary,
67 the second copies it to the result. A call to
68 gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
69 the start of the copying loop. The temporary may be less than full rank.
70
71 Finally gfc_trans_scalarizing_loops is called to generate the implicit do
72 loops. The loops are added to the pre chain of the loopinfo. The post
73 chain may still contain cleanup code.
74
75 After the loop code has been added into its parent scope gfc_cleanup_loop
76 is called to free all the SS allocated by the scalarizer. */
77
78 #include "config.h"
79 #include "system.h"
80 #include "coretypes.h"
81 #include "options.h"
82 #include "tree.h"
83 #include "gfortran.h"
84 #include "gimple-expr.h"
85 #include "trans.h"
86 #include "fold-const.h"
87 #include "constructor.h"
88 #include "trans-types.h"
89 #include "trans-array.h"
90 #include "trans-const.h"
91 #include "dependency.h"
92
93 static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);
94
95 /* The contents of this structure aren't actually used, just the address. */
96 static gfc_ss gfc_ss_terminator_var;
97 gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;
98
99
100 static tree
gfc_array_dataptr_type(tree desc)101 gfc_array_dataptr_type (tree desc)
102 {
103 return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
104 }
105
106
107 /* Build expressions to access the members of an array descriptor.
108 It's surprisingly easy to mess up here, so never access
109 an array descriptor by "brute force", always use these
110 functions. This also avoids problems if we change the format
111 of an array descriptor.
112
113 To understand these magic numbers, look at the comments
114 before gfc_build_array_type() in trans-types.c.
115
116 The code within these defines should be the only code which knows the format
117 of an array descriptor.
118
   Any code just needing to read the bounds of an array should use
   gfc_conv_array_* rather than the following functions as these will return
   known constant values, and work with arrays which do not have descriptors.
122
123 Don't forget to #undef these! */
124
125 #define DATA_FIELD 0
126 #define OFFSET_FIELD 1
127 #define DTYPE_FIELD 2
128 #define DIMENSION_FIELD 3
129 #define CAF_TOKEN_FIELD 4
130
131 #define STRIDE_SUBFIELD 0
132 #define LBOUND_SUBFIELD 1
133 #define UBOUND_SUBFIELD 2
134
135 /* This provides READ-ONLY access to the data field. The field itself
136 doesn't have the proper type. */
137
138 tree
gfc_conv_descriptor_data_get(tree desc)139 gfc_conv_descriptor_data_get (tree desc)
140 {
141 tree field, type, t;
142
143 type = TREE_TYPE (desc);
144 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
145
146 field = TYPE_FIELDS (type);
147 gcc_assert (DATA_FIELD == 0);
148
149 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
150 field, NULL_TREE);
151 t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);
152
153 return t;
154 }
155
/* This provides WRITE access to the data field.

   VALUE is converted to the declared type of the data field before
   being stored, so callers may pass any pointer-compatible expression.  */
163
164 void
gfc_conv_descriptor_data_set(stmtblock_t * block,tree desc,tree value)165 gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
166 {
167 tree field, type, t;
168
169 type = TREE_TYPE (desc);
170 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
171
172 field = TYPE_FIELDS (type);
173 gcc_assert (DATA_FIELD == 0);
174
175 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
176 field, NULL_TREE);
177 gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
178 }
179
180
181 /* This provides address access to the data field. This should only be
182 used by array allocation, passing this on to the runtime. */
183
184 tree
gfc_conv_descriptor_data_addr(tree desc)185 gfc_conv_descriptor_data_addr (tree desc)
186 {
187 tree field, type, t;
188
189 type = TREE_TYPE (desc);
190 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
191
192 field = TYPE_FIELDS (type);
193 gcc_assert (DATA_FIELD == 0);
194
195 t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
196 field, NULL_TREE);
197 return gfc_build_addr_expr (NULL_TREE, t);
198 }
199
200 static tree
gfc_conv_descriptor_offset(tree desc)201 gfc_conv_descriptor_offset (tree desc)
202 {
203 tree type;
204 tree field;
205
206 type = TREE_TYPE (desc);
207 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
208
209 field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
210 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
211
212 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
213 desc, field, NULL_TREE);
214 }
215
216 tree
gfc_conv_descriptor_offset_get(tree desc)217 gfc_conv_descriptor_offset_get (tree desc)
218 {
219 return gfc_conv_descriptor_offset (desc);
220 }
221
222 void
gfc_conv_descriptor_offset_set(stmtblock_t * block,tree desc,tree value)223 gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
224 tree value)
225 {
226 tree t = gfc_conv_descriptor_offset (desc);
227 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
228 }
229
230
231 tree
gfc_conv_descriptor_dtype(tree desc)232 gfc_conv_descriptor_dtype (tree desc)
233 {
234 tree field;
235 tree type;
236
237 type = TREE_TYPE (desc);
238 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
239
240 field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
241 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
242
243 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
244 desc, field, NULL_TREE);
245 }
246
247
248 tree
gfc_conv_descriptor_rank(tree desc)249 gfc_conv_descriptor_rank (tree desc)
250 {
251 tree tmp;
252 tree dtype;
253
254 dtype = gfc_conv_descriptor_dtype (desc);
255 tmp = build_int_cst (TREE_TYPE (dtype), GFC_DTYPE_RANK_MASK);
256 tmp = fold_build2_loc (input_location, BIT_AND_EXPR, TREE_TYPE (dtype),
257 dtype, tmp);
258 return fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp);
259 }
260
261
262 tree
gfc_get_descriptor_dimension(tree desc)263 gfc_get_descriptor_dimension (tree desc)
264 {
265 tree type, field;
266
267 type = TREE_TYPE (desc);
268 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
269
270 field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
271 gcc_assert (field != NULL_TREE
272 && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
273 && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);
274
275 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
276 desc, field, NULL_TREE);
277 }
278
279
280 static tree
gfc_conv_descriptor_dimension(tree desc,tree dim)281 gfc_conv_descriptor_dimension (tree desc, tree dim)
282 {
283 tree tmp;
284
285 tmp = gfc_get_descriptor_dimension (desc);
286
287 return gfc_build_array_ref (tmp, dim, NULL);
288 }
289
290
291 tree
gfc_conv_descriptor_token(tree desc)292 gfc_conv_descriptor_token (tree desc)
293 {
294 tree type;
295 tree field;
296
297 type = TREE_TYPE (desc);
298 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
299 gcc_assert (flag_coarray == GFC_FCOARRAY_LIB);
300 field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);
301
302 /* Should be a restricted pointer - except in the finalization wrapper. */
303 gcc_assert (field != NULL_TREE
304 && (TREE_TYPE (field) == prvoid_type_node
305 || TREE_TYPE (field) == pvoid_type_node));
306
307 return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
308 desc, field, NULL_TREE);
309 }
310
311
312 static tree
gfc_conv_descriptor_stride(tree desc,tree dim)313 gfc_conv_descriptor_stride (tree desc, tree dim)
314 {
315 tree tmp;
316 tree field;
317
318 tmp = gfc_conv_descriptor_dimension (desc, dim);
319 field = TYPE_FIELDS (TREE_TYPE (tmp));
320 field = gfc_advance_chain (field, STRIDE_SUBFIELD);
321 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
322
323 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
324 tmp, field, NULL_TREE);
325 return tmp;
326 }
327
328 tree
gfc_conv_descriptor_stride_get(tree desc,tree dim)329 gfc_conv_descriptor_stride_get (tree desc, tree dim)
330 {
331 tree type = TREE_TYPE (desc);
332 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
333 if (integer_zerop (dim)
334 && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
335 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
336 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_RANK_CONT
337 ||GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
338 return gfc_index_one_node;
339
340 return gfc_conv_descriptor_stride (desc, dim);
341 }
342
343 void
gfc_conv_descriptor_stride_set(stmtblock_t * block,tree desc,tree dim,tree value)344 gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
345 tree dim, tree value)
346 {
347 tree t = gfc_conv_descriptor_stride (desc, dim);
348 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
349 }
350
351 static tree
gfc_conv_descriptor_lbound(tree desc,tree dim)352 gfc_conv_descriptor_lbound (tree desc, tree dim)
353 {
354 tree tmp;
355 tree field;
356
357 tmp = gfc_conv_descriptor_dimension (desc, dim);
358 field = TYPE_FIELDS (TREE_TYPE (tmp));
359 field = gfc_advance_chain (field, LBOUND_SUBFIELD);
360 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
361
362 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
363 tmp, field, NULL_TREE);
364 return tmp;
365 }
366
367 tree
gfc_conv_descriptor_lbound_get(tree desc,tree dim)368 gfc_conv_descriptor_lbound_get (tree desc, tree dim)
369 {
370 return gfc_conv_descriptor_lbound (desc, dim);
371 }
372
373 void
gfc_conv_descriptor_lbound_set(stmtblock_t * block,tree desc,tree dim,tree value)374 gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
375 tree dim, tree value)
376 {
377 tree t = gfc_conv_descriptor_lbound (desc, dim);
378 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
379 }
380
381 static tree
gfc_conv_descriptor_ubound(tree desc,tree dim)382 gfc_conv_descriptor_ubound (tree desc, tree dim)
383 {
384 tree tmp;
385 tree field;
386
387 tmp = gfc_conv_descriptor_dimension (desc, dim);
388 field = TYPE_FIELDS (TREE_TYPE (tmp));
389 field = gfc_advance_chain (field, UBOUND_SUBFIELD);
390 gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);
391
392 tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
393 tmp, field, NULL_TREE);
394 return tmp;
395 }
396
397 tree
gfc_conv_descriptor_ubound_get(tree desc,tree dim)398 gfc_conv_descriptor_ubound_get (tree desc, tree dim)
399 {
400 return gfc_conv_descriptor_ubound (desc, dim);
401 }
402
403 void
gfc_conv_descriptor_ubound_set(stmtblock_t * block,tree desc,tree dim,tree value)404 gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
405 tree dim, tree value)
406 {
407 tree t = gfc_conv_descriptor_ubound (desc, dim);
408 gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
409 }
410
411 /* Build a null array descriptor constructor. */
412
413 tree
gfc_build_null_descriptor(tree type)414 gfc_build_null_descriptor (tree type)
415 {
416 tree field;
417 tree tmp;
418
419 gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
420 gcc_assert (DATA_FIELD == 0);
421 field = TYPE_FIELDS (type);
422
423 /* Set a NULL data pointer. */
424 tmp = build_constructor_single (type, field, null_pointer_node);
425 TREE_CONSTANT (tmp) = 1;
426 /* All other fields are ignored. */
427
428 return tmp;
429 }
430
431
432 /* Modify a descriptor such that the lbound of a given dimension is the value
433 specified. This also updates ubound and offset accordingly. */
434
void
gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
				  int dim, tree new_lbound)
{
  tree offs, ubound, lbound, stride;
  tree diff, offs_diff;

  new_lbound = fold_convert (gfc_array_index_type, new_lbound);

  /* Read the current offset, bounds and stride for dimension DIM.  */
  offs = gfc_conv_descriptor_offset_get (desc);
  lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
  ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
  stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);

  /* Get difference (new - old) by which to shift stuff.  */
  diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			  new_lbound, lbound);

  /* Shift ubound and offset accordingly.  This has to be done before
     updating the lbound, as they depend on the lbound expression!  */
  ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			    ubound, diff);
  gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);
  /* offset -= diff * stride keeps every element's address unchanged
     after the bounds shift.  */
  offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			       diff, stride);
  offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
			  offs, offs_diff);
  gfc_conv_descriptor_offset_set (block, desc, offs);

  /* Finally set lbound to value we want.  */
  gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
}
467
468
469 /* Cleanup those #defines. */
470
471 #undef DATA_FIELD
472 #undef OFFSET_FIELD
473 #undef DTYPE_FIELD
474 #undef DIMENSION_FIELD
475 #undef CAF_TOKEN_FIELD
476 #undef STRIDE_SUBFIELD
477 #undef LBOUND_SUBFIELD
478 #undef UBOUND_SUBFIELD
479
480
481 /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
482 flags & 1 = Main loop body.
483 flags & 2 = temp copy loop. */
484
485 void
gfc_mark_ss_chain_used(gfc_ss * ss,unsigned flags)486 gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
487 {
488 for (; ss != gfc_ss_terminator; ss = ss->next)
489 ss->info->useflags = flags;
490 }
491
492
493 /* Free a gfc_ss chain. */
494
495 void
gfc_free_ss_chain(gfc_ss * ss)496 gfc_free_ss_chain (gfc_ss * ss)
497 {
498 gfc_ss *next;
499
500 while (ss != gfc_ss_terminator)
501 {
502 gcc_assert (ss != NULL);
503 next = ss->next;
504 gfc_free_ss (ss);
505 ss = next;
506 }
507 }
508
509
510 static void
free_ss_info(gfc_ss_info * ss_info)511 free_ss_info (gfc_ss_info *ss_info)
512 {
513 int n;
514
515 ss_info->refcount--;
516 if (ss_info->refcount > 0)
517 return;
518
519 gcc_assert (ss_info->refcount == 0);
520
521 switch (ss_info->type)
522 {
523 case GFC_SS_SECTION:
524 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
525 if (ss_info->data.array.subscript[n])
526 gfc_free_ss_chain (ss_info->data.array.subscript[n]);
527 break;
528
529 default:
530 break;
531 }
532
533 free (ss_info);
534 }
535
536
537 /* Free a SS. */
538
539 void
gfc_free_ss(gfc_ss * ss)540 gfc_free_ss (gfc_ss * ss)
541 {
542 free_ss_info (ss->info);
543 free (ss);
544 }
545
546
547 /* Creates and initializes an array type gfc_ss struct. */
548
549 gfc_ss *
gfc_get_array_ss(gfc_ss * next,gfc_expr * expr,int dimen,gfc_ss_type type)550 gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
551 {
552 gfc_ss *ss;
553 gfc_ss_info *ss_info;
554 int i;
555
556 ss_info = gfc_get_ss_info ();
557 ss_info->refcount++;
558 ss_info->type = type;
559 ss_info->expr = expr;
560
561 ss = gfc_get_ss ();
562 ss->info = ss_info;
563 ss->next = next;
564 ss->dimen = dimen;
565 for (i = 0; i < ss->dimen; i++)
566 ss->dim[i] = i;
567
568 return ss;
569 }
570
571
572 /* Creates and initializes a temporary type gfc_ss struct. */
573
574 gfc_ss *
gfc_get_temp_ss(tree type,tree string_length,int dimen)575 gfc_get_temp_ss (tree type, tree string_length, int dimen)
576 {
577 gfc_ss *ss;
578 gfc_ss_info *ss_info;
579 int i;
580
581 ss_info = gfc_get_ss_info ();
582 ss_info->refcount++;
583 ss_info->type = GFC_SS_TEMP;
584 ss_info->string_length = string_length;
585 ss_info->data.temp.type = type;
586
587 ss = gfc_get_ss ();
588 ss->info = ss_info;
589 ss->next = gfc_ss_terminator;
590 ss->dimen = dimen;
591 for (i = 0; i < ss->dimen; i++)
592 ss->dim[i] = i;
593
594 return ss;
595 }
596
597
598 /* Creates and initializes a scalar type gfc_ss struct. */
599
600 gfc_ss *
gfc_get_scalar_ss(gfc_ss * next,gfc_expr * expr)601 gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
602 {
603 gfc_ss *ss;
604 gfc_ss_info *ss_info;
605
606 ss_info = gfc_get_ss_info ();
607 ss_info->refcount++;
608 ss_info->type = GFC_SS_SCALAR;
609 ss_info->expr = expr;
610
611 ss = gfc_get_ss ();
612 ss->info = ss_info;
613 ss->next = next;
614
615 return ss;
616 }
617
618
619 /* Free all the SS associated with a loop. */
620
void
gfc_cleanup_loop (gfc_loopinfo * loop)
{
  gfc_loopinfo *loop_next, **ploop;
  gfc_ss *ss;
  gfc_ss *next;

  /* Free the SS chain as seen by this loop.  Note this walks
     loop_chain, not next: the two chains may differ.  */
  ss = loop->ss;
  while (ss != gfc_ss_terminator)
    {
      gcc_assert (ss != NULL);
      next = ss->loop_chain;
      gfc_free_ss (ss);
      ss = next;
    }

  /* Remove reference to self in the parent loop.  */
  if (loop->parent)
    for (ploop = &loop->parent->nested; *ploop; ploop = &(*ploop)->next)
      if (*ploop == loop)
	{
	  *ploop = loop->next;
	  break;
	}

  /* Free non-freed nested loops.  Each recursive call unlinks itself
     from this loop's nested list, so iterate via the saved next.  */
  for (loop = loop->nested; loop; loop = loop_next)
    {
      loop_next = loop->next;
      gfc_cleanup_loop (loop);
      free (loop);
    }
}
654
655
656 static void
set_ss_loop(gfc_ss * ss,gfc_loopinfo * loop)657 set_ss_loop (gfc_ss *ss, gfc_loopinfo *loop)
658 {
659 int n;
660
661 for (; ss != gfc_ss_terminator; ss = ss->next)
662 {
663 ss->loop = loop;
664
665 if (ss->info->type == GFC_SS_SCALAR
666 || ss->info->type == GFC_SS_REFERENCE
667 || ss->info->type == GFC_SS_TEMP)
668 continue;
669
670 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
671 if (ss->info->data.array.subscript[n] != NULL)
672 set_ss_loop (ss->info->data.array.subscript[n], loop);
673 }
674 }
675
676
677 /* Associate a SS chain with a loop. */
678
void
gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
{
  gfc_ss *ss;
  gfc_loopinfo *nested_loop;

  /* Nothing to do for an empty chain.  */
  if (head == gfc_ss_terminator)
    return;

  /* Make every SS in the chain point back at LOOP.  */
  set_ss_loop (head, loop);

  ss = head;
  for (; ss && ss != gfc_ss_terminator; ss = ss->next)
    {
      if (ss->nested_ss)
	{
	  nested_loop = ss->nested_ss->loop;

	  /* More than one ss can belong to the same loop.  Hence, we add the
	     loop to the chain only if it is different from the previously
	     added one, to avoid duplicate nested loops.  */
	  if (nested_loop != loop->nested)
	    {
	      gcc_assert (nested_loop->parent == NULL);
	      nested_loop->parent = loop;

	      gcc_assert (nested_loop->next == NULL);
	      nested_loop->next = loop->nested;
	      loop->nested = nested_loop;
	    }
	  else
	    gcc_assert (nested_loop->parent == loop);
	}

      /* Thread the chain onto the loop's existing SS list: the last
	 element of HEAD links to the loop's previous head.  */
      if (ss->next == gfc_ss_terminator)
	ss->loop_chain = loop->ss;
      else
	ss->loop_chain = ss->next;
    }
  gcc_assert (ss == gfc_ss_terminator);
  loop->ss = head;
}
721
722
723 /* Generate an initializer for a static pointer or allocatable array. */
724
725 void
gfc_trans_static_array_pointer(gfc_symbol * sym)726 gfc_trans_static_array_pointer (gfc_symbol * sym)
727 {
728 tree type;
729
730 gcc_assert (TREE_STATIC (sym->backend_decl));
731 /* Just zero the data member. */
732 type = TREE_TYPE (sym->backend_decl);
733 DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
734 }
735
736
737 /* If the bounds of SE's loop have not yet been set, see if they can be
738 determined from array spec AS, which is the array spec of a called
739 function. MAPPING maps the callee's dummy arguments to the values
740 that the caller is passing. Add any initialization and finalization
741 code to SE. */
742
void
gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
				     gfc_se * se, gfc_array_spec * as)
{
  int n, dim, total_dim;
  gfc_se tmpse;
  gfc_ss *ss;
  tree lower;
  tree upper;
  tree tmp;

  total_dim = 0;

  /* Only an explicit-shape spec provides usable bound expressions.  */
  if (!as || as->type != AS_EXPLICIT)
    return;

  for (ss = se->ss; ss; ss = ss->parent)
    {
      total_dim += ss->loop->dimen;
      for (n = 0; n < ss->loop->dimen; n++)
	{
	  /* The bound is known, nothing to do.  */
	  if (ss->loop->to[n] != NULL_TREE)
	    continue;

	  dim = ss->dim[n];
	  gcc_assert (dim < as->rank);
	  gcc_assert (ss->loop->dimen <= as->rank);

	  /* Evaluate the lower bound.  Side effects of the mapping go
	     into the caller's pre/post blocks.  */
	  gfc_init_se (&tmpse, NULL);
	  gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
	  gfc_add_block_to_block (&se->pre, &tmpse.pre);
	  gfc_add_block_to_block (&se->post, &tmpse.post);
	  lower = fold_convert (gfc_array_index_type, tmpse.expr);

	  /* ...and the upper bound.  */
	  gfc_init_se (&tmpse, NULL);
	  gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
	  gfc_add_block_to_block (&se->pre, &tmpse.pre);
	  gfc_add_block_to_block (&se->post, &tmpse.post);
	  upper = fold_convert (gfc_array_index_type, tmpse.expr);

	  /* Set the upper bound of the loop to UPPER - LOWER; loops are
	     normalized to run from zero.  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, upper, lower);
	  tmp = gfc_evaluate_now (tmp, &se->pre);
	  ss->loop->to[n] = tmp;
	}
    }

  gcc_assert (total_dim == as->rank);
}
796
797
798 /* Generate code to allocate an array temporary, or create a variable to
799 hold the data. If size is NULL, zero the descriptor so that the
800 callee will allocate the array. If DEALLOC is true, also generate code to
801 free the array afterwards.
802
   If INITIAL is not NULL, it is packed using internal_pack and the result used
   as data instead of allocating a fresh, uninitialized area of memory.
805
806 Initialization code is added to PRE and finalization code to POST.
807 DYNAMIC is true if the caller may want to extend the array later
808 using realloc. This prevents us from putting the array on the stack. */
809
static void
gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
				  gfc_array_info * info, tree size, tree nelem,
				  tree initial, bool dynamic, bool dealloc)
{
  tree tmp;
  tree desc;
  bool onstack;

  desc = info->descriptor;
  info->offset = gfc_index_zero_node;
  if (size == NULL_TREE || integer_zerop (size))
    {
      /* A callee allocated array: zero the data pointer and let the
	 callee do the allocation.  */
      gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
      onstack = FALSE;
    }
  else
    {
      /* Allocate the temporary.  Stack allocation is only possible
	 when the array will not be resized and no initial data needs
	 packing.  */
      onstack = !dynamic && initial == NULL_TREE
		 && (flag_stack_arrays
		    || gfc_can_put_var_on_stack (size));

      if (onstack)
	{
	  /* Make a temporary variable to hold the data.  The element
	     count NELEM - 1 becomes the upper bound of a zero-based
	     range type.  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
				 nelem, gfc_index_one_node);
	  tmp = gfc_evaluate_now (tmp, pre);
	  tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
				  tmp);
	  tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
				  tmp);
	  tmp = gfc_create_var (tmp, "A");
	  /* If we're here only because of -fstack-arrays we have to
	     emit a DECL_EXPR to make the gimplifier emit alloca calls.  */
	  if (!gfc_can_put_var_on_stack (size))
	    gfc_add_expr_to_block (pre,
				   fold_build1_loc (input_location,
						    DECL_EXPR, TREE_TYPE (tmp),
						    tmp));
	  tmp = gfc_build_addr_expr (NULL_TREE, tmp);
	  gfc_conv_descriptor_data_set (pre, desc, tmp);
	}
      else
	{
	  /* Allocate memory to hold the data or call internal_pack.  */
	  if (initial == NULL_TREE)
	    {
	      tmp = gfc_call_malloc (pre, NULL, size);
	      tmp = gfc_evaluate_now (tmp, pre);
	    }
	  else
	    {
	      tree packed;
	      tree source_data;
	      tree was_packed;
	      stmtblock_t do_copying;

	      tmp = TREE_TYPE (initial); /* Pointer to descriptor.  */
	      gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
	      tmp = TREE_TYPE (tmp); /* The descriptor itself.  */
	      tmp = gfc_get_element_type (tmp);
	      gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
	      packed = gfc_create_var (build_pointer_type (tmp), "data");

	      tmp = build_call_expr_loc (input_location,
					 gfor_fndecl_in_pack, 1, initial);
	      tmp = fold_convert (TREE_TYPE (packed), tmp);
	      gfc_add_modify (pre, packed, tmp);

	      tmp = build_fold_indirect_ref_loc (input_location,
						 initial);
	      source_data = gfc_conv_descriptor_data_get (tmp);

	      /* internal_pack may return source->data without any allocation
		 or copying if it is already packed.  If that's the case, we
		 need to allocate and copy manually.  */

	      gfc_start_block (&do_copying);
	      tmp = gfc_call_malloc (&do_copying, NULL, size);
	      tmp = fold_convert (TREE_TYPE (packed), tmp);
	      gfc_add_modify (&do_copying, packed, tmp);
	      tmp = gfc_build_memcpy_call (packed, source_data, size);
	      gfc_add_expr_to_block (&do_copying, tmp);

	      /* Guard the manual copy: only run it when internal_pack
		 returned the original (already packed) data pointer.  */
	      was_packed = fold_build2_loc (input_location, EQ_EXPR,
					    boolean_type_node, packed,
					    source_data);
	      tmp = gfc_finish_block (&do_copying);
	      tmp = build3_v (COND_EXPR, was_packed, tmp,
			      build_empty_stmt (input_location));
	      gfc_add_expr_to_block (pre, tmp);

	      tmp = fold_convert (pvoid_type_node, packed);
	    }

	  gfc_conv_descriptor_data_set (pre, desc, tmp);
	}
    }
  info->data = gfc_conv_descriptor_data_get (desc);

  /* The offset is zero because we create temporaries with a zero
     lower bound.  */
  gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);

  if (dealloc && !onstack)
    {
      /* Free the temporary.  */
      tmp = gfc_conv_descriptor_data_get (desc);
      tmp = gfc_call_free (tmp);
      gfc_add_expr_to_block (post, tmp);
    }
}
925
926
927 /* Get the scalarizer array dimension corresponding to actual array dimension
928 given by ARRAY_DIM.
929
930 For example, if SS represents the array ref a(1,:,:,1), it is a
931 bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
932 and 1 for ARRAY_DIM=2.
933 If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
934 scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
935 ARRAY_DIM=3.
936 If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
937 array. If called on the inner ss, the result would be respectively 0,1,2 for
938 ARRAY_DIM=0,1,2. If called on the outer ss, the result would be 0,1
939 for ARRAY_DIM=1,2. */
940
941 static int
get_scalarizer_dim_for_array_dim(gfc_ss * ss,int array_dim)942 get_scalarizer_dim_for_array_dim (gfc_ss *ss, int array_dim)
943 {
944 int array_ref_dim;
945 int n;
946
947 array_ref_dim = 0;
948
949 for (; ss; ss = ss->parent)
950 for (n = 0; n < ss->dimen; n++)
951 if (ss->dim[n] < array_dim)
952 array_ref_dim++;
953
954 return array_ref_dim;
955 }
956
957
958 static gfc_ss *
innermost_ss(gfc_ss * ss)959 innermost_ss (gfc_ss *ss)
960 {
961 while (ss->nested_ss != NULL)
962 ss = ss->nested_ss;
963
964 return ss;
965 }
966
967
968
969 /* Get the array reference dimension corresponding to the given loop dimension.
970 It is different from the true array dimension given by the dim array in
971 the case of a partial array reference (i.e. a(:,:,1,:) for example)
972 It is different from the loop dimension in the case of a transposed array.
973 */
974
975 static int
get_array_ref_dim_for_loop_dim(gfc_ss * ss,int loop_dim)976 get_array_ref_dim_for_loop_dim (gfc_ss *ss, int loop_dim)
977 {
978 return get_scalarizer_dim_for_array_dim (innermost_ss (ss),
979 ss->dim[loop_dim]);
980 }
981
982
/* Generate code to create and initialize the descriptor for a temporary
   array.  This is used for both temporaries needed by the scalarizer, and
   functions returning arrays.  Adjusts the loop variables to be
   zero-based, and calculates the loop bounds for callee allocated arrays.
   Allocate the array unless it's callee allocated (we have a callee
   allocated array if 'callee_alloc' is true, or if loop->to[n] is
   NULL_TREE for any n).  Also fills in the descriptor, data and offset
   fields of info if known.  Returns the size of the array, or NULL for a
   callee allocated array.

   'eltype' == NULL signals that the temporary should be a class object.
   The 'initial' expression is used to obtain the size of the dynamic
   type; otherwise the allocation and initialization proceeds as for any
   other expression

   PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
   gfc_trans_allocate_array_storage.  */

tree
gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post, gfc_ss * ss,
			     tree eltype, tree initial, bool dynamic,
			     bool dealloc, bool callee_alloc, locus * where)
{
  gfc_loopinfo *loop;
  gfc_ss *s;
  gfc_array_info *info;
  tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
  tree type;
  tree desc;
  tree tmp;
  tree size;
  tree nelem;
  tree cond;
  tree or_expr;
  tree class_expr = NULL_TREE;
  int n, dim, tmp_dim;
  int total_dim = 0;

  /* This signals a class array for which we need the size of the
     dynamic type.  Generate an eltype and then the class expression.  */
  if (eltype == NULL_TREE && initial)
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (initial)));
      class_expr = build_fold_indirect_ref_loc (input_location, initial);
      eltype = TREE_TYPE (class_expr);
      eltype = gfc_get_element_type (eltype);
      /* Obtain the structure (class) expression.  */
      class_expr = TREE_OPERAND (class_expr, 0);
      gcc_assert (class_expr);
    }

  /* FROM/TO collect per-array-dimension bounds; entries left NULL_TREE
     mark dimensions whose extent is not yet known.  */
  memset (from, 0, sizeof (from));
  memset (to, 0, sizeof (to));

  info = &ss->info->data.array;

  gcc_assert (ss->dimen > 0);
  gcc_assert (ss->loop->dimen == ss->dimen);

  if (warn_array_temporaries && where)
    gfc_warning (OPT_Warray_temporaries,
		 "Creating array temporary at %L", where);

  /* Set the lower bound to zero.  Walk the whole chain of parent SS so
     nested loops are normalized too.  */
  for (s = ss; s; s = s->parent)
    {
      loop = s->loop;

      total_dim += loop->dimen;
      for (n = 0; n < loop->dimen; n++)
	{
	  dim = s->dim[n];

	  /* Callee allocated arrays may not have a known bound yet.  */
	  if (loop->to[n])
	    loop->to[n] = gfc_evaluate_now (
			fold_build2_loc (input_location, MINUS_EXPR,
					 gfc_array_index_type,
					 loop->to[n], loop->from[n]),
			pre);
	  loop->from[n] = gfc_index_zero_node;

	  /* We have just changed the loop bounds, we must clear the
	     corresponding specloop, so that delta calculation is not skipped
	     later in gfc_set_delta.  */
	  loop->specloop[n] = NULL;

	  /* We are constructing the temporary's descriptor based on the loop
	     dimensions.  As the dimensions may be accessed in arbitrary order
	     (think of transpose) the size taken from the n'th loop may not map
	     to the n'th dimension of the array.  We need to reconstruct loop
	     infos in the right order before using it to set the descriptor
	     bounds.  */
	  tmp_dim = get_scalarizer_dim_for_array_dim (ss, dim);
	  from[tmp_dim] = loop->from[n];
	  to[tmp_dim] = loop->to[n];

	  /* The temporary is packed and zero-based, so no offsets and
	     unit strides.  */
	  info->delta[dim] = gfc_index_zero_node;
	  info->start[dim] = gfc_index_zero_node;
	  info->end[dim] = gfc_index_zero_node;
	  info->stride[dim] = gfc_index_one_node;
	}
    }

  /* Initialize the descriptor.  */
  type =
    gfc_get_array_type_bounds (eltype, total_dim, 0, from, to, 1,
			       GFC_ARRAY_UNKNOWN, true);
  desc = gfc_create_var (type, "atmp");
  GFC_DECL_PACKED_ARRAY (desc) = 1;

  info->descriptor = desc;
  size = gfc_index_one_node;

  /* Fill in the array dtype.  */
  tmp = gfc_conv_descriptor_dtype (desc);
  gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));

  /*
     Fill in the bounds and stride.  This is a packed array, so:

     size = 1;
     for (n = 0; n < rank; n++)
       {
	 stride[n] = size
	 delta = ubound[n] + 1 - lbound[n];
	 size = size * delta;
       }
     size = size * sizeof(element);
  */

  or_expr = NULL_TREE;

  /* If there is at least one null loop->to[n], it is a callee allocated
     array.  */
  for (n = 0; n < total_dim; n++)
    if (to[n] == NULL_TREE)
      {
	size = NULL_TREE;
	break;
      }

  if (size == NULL_TREE)
    /* Callee allocated: the bounds are only known from the descriptor
       at run time.  */
    for (s = ss; s; s = s->parent)
      for (n = 0; n < s->loop->dimen; n++)
	{
	  dim = get_scalarizer_dim_for_array_dim (ss, s->dim[n]);

	  /* For a callee allocated array express the loop bounds in terms
	     of the descriptor fields.  */
	  tmp = fold_build2_loc (input_location,
		MINUS_EXPR, gfc_array_index_type,
		gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
		gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
	  s->loop->to[n] = tmp;
	}
  else
    {
      for (n = 0; n < total_dim; n++)
	{
	  /* Store the stride and bound components in the descriptor.  */
	  gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);

	  gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
					  gfc_index_zero_node);

	  gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], to[n]);

	  /* Extent of this dimension: ubound + 1 (lbound is zero).  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type,
				 to[n], gfc_index_one_node);

	  /* Check whether the size for this dimension is negative.  */
	  cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
				  tmp, gfc_index_zero_node);
	  cond = gfc_evaluate_now (cond, pre);

	  if (n == 0)
	    or_expr = cond;
	  else
	    or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
				       boolean_type_node, or_expr, cond);

	  size = fold_build2_loc (input_location, MULT_EXPR,
				  gfc_array_index_type, size, tmp);
	  size = gfc_evaluate_now (size, pre);
	}
    }

  /* Get the size of the array.  */
  if (size && !callee_alloc)
    {
      tree elemsize;
      /* If or_expr is true, then the extent in at least one
	 dimension is zero and the size is set to zero.  */
      size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
			      or_expr, gfc_index_zero_node, size);

      nelem = size;
      /* For class objects the element size comes from the vtab of the
	 dynamic type; otherwise from the static element type.  */
      if (class_expr == NULL_TREE)
	elemsize = fold_convert (gfc_array_index_type,
			TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      else
	elemsize = gfc_class_vtab_size_get (class_expr);

      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      size, elemsize);
    }
  else
    {
      nelem = size;
      size = NULL_TREE;
    }

  gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
				    dynamic, dealloc);

  /* Record the temporary's rank on the outermost loop of the chain.  */
  while (ss->parent)
    ss = ss->parent;

  if (ss->dimen > ss->loop->temp_dim)
    ss->loop->temp_dim = ss->dimen;

  return size;
}
1208
1209
1210 /* Return the number of iterations in a loop that starts at START,
1211 ends at END, and has step STEP. */
1212
1213 static tree
gfc_get_iteration_count(tree start,tree end,tree step)1214 gfc_get_iteration_count (tree start, tree end, tree step)
1215 {
1216 tree tmp;
1217 tree type;
1218
1219 type = TREE_TYPE (step);
1220 tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
1221 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
1222 tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
1223 build_int_cst (type, 1));
1224 tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
1225 build_int_cst (type, 0));
1226 return fold_convert (gfc_array_index_type, tmp);
1227 }
1228
1229
/* Extend the data in array DESC by EXTRA elements.  Adjusts the upper
   bound of the first dimension and reallocates the data accordingly.  */

static void
gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
{
  tree arg0, arg1;
  tree tmp;
  tree size;
  tree ubound;

  /* Growing by zero elements is a no-op.  */
  if (integer_zerop (extra))
    return;

  ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);

  /* Add EXTRA to the upper bound.  */
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			 ubound, extra);
  gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);

  /* Get the value of the current data pointer.  */
  arg0 = gfc_conv_descriptor_data_get (desc);

  /* Calculate the new array size.
     NOTE(review): UBOUND is presumably a reference into the descriptor,
     so after the ubound_set above it evaluates to the grown bound here,
     making the new size (new_ubound + 1) * elemsize — confirm against
     gfc_conv_descriptor_ubound_get.  */
  size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			 ubound, gfc_index_one_node);
  arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
			  fold_convert (size_type_node, tmp),
			  fold_convert (size_type_node, size));

  /* Call the realloc() function.  */
  tmp = gfc_call_realloc (pblock, arg0, arg1);
  gfc_conv_descriptor_data_set (pblock, desc, tmp);
}
1265
1266
1267 /* Return true if the bounds of iterator I can only be determined
1268 at run time. */
1269
1270 static inline bool
gfc_iterator_has_dynamic_bounds(gfc_iterator * i)1271 gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
1272 {
1273 return (i->start->expr_type != EXPR_CONSTANT
1274 || i->end->expr_type != EXPR_CONSTANT
1275 || i->step->expr_type != EXPR_CONSTANT);
1276 }
1277
1278
1279 /* Split the size of constructor element EXPR into the sum of two terms,
1280 one of which can be determined at compile time and one of which must
1281 be calculated at run time. Set *SIZE to the former and return true
1282 if the latter might be nonzero. */
1283
1284 static bool
gfc_get_array_constructor_element_size(mpz_t * size,gfc_expr * expr)1285 gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
1286 {
1287 if (expr->expr_type == EXPR_ARRAY)
1288 return gfc_get_array_constructor_size (size, expr->value.constructor);
1289 else if (expr->rank > 0)
1290 {
1291 /* Calculate everything at run time. */
1292 mpz_set_ui (*size, 0);
1293 return true;
1294 }
1295 else
1296 {
1297 /* A single element. */
1298 mpz_set_ui (*size, 1);
1299 return false;
1300 }
1301 }
1302
1303
1304 /* Like gfc_get_array_constructor_element_size, but applied to the whole
1305 of array constructor C. */
1306
1307 static bool
gfc_get_array_constructor_size(mpz_t * size,gfc_constructor_base base)1308 gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
1309 {
1310 gfc_constructor *c;
1311 gfc_iterator *i;
1312 mpz_t val;
1313 mpz_t len;
1314 bool dynamic;
1315
1316 mpz_set_ui (*size, 0);
1317 mpz_init (len);
1318 mpz_init (val);
1319
1320 dynamic = false;
1321 for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
1322 {
1323 i = c->iterator;
1324 if (i && gfc_iterator_has_dynamic_bounds (i))
1325 dynamic = true;
1326 else
1327 {
1328 dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
1329 if (i)
1330 {
1331 /* Multiply the static part of the element size by the
1332 number of iterations. */
1333 mpz_sub (val, i->end->value.integer, i->start->value.integer);
1334 mpz_fdiv_q (val, val, i->step->value.integer);
1335 mpz_add_ui (val, val, 1);
1336 if (mpz_sgn (val) > 0)
1337 mpz_mul (len, len, val);
1338 else
1339 mpz_set_ui (len, 0);
1340 }
1341 mpz_add (*size, *size, len);
1342 }
1343 }
1344 mpz_clear (len);
1345 mpz_clear (val);
1346 return dynamic;
1347 }
1348
1349
1350 /* Make sure offset is a variable. */
1351
1352 static void
gfc_put_offset_into_var(stmtblock_t * pblock,tree * poffset,tree * offsetvar)1353 gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
1354 tree * offsetvar)
1355 {
1356 /* We should have already created the offset variable. We cannot
1357 create it here because we may be in an inner scope. */
1358 gcc_assert (*offsetvar != NULL_TREE);
1359 gfc_add_modify (pblock, *offsetvar, *poffset);
1360 *poffset = *offsetvar;
1361 TREE_USED (*offsetvar) = 1;
1362 }
1363
1364
/* Variables needed for bounds-checking.  */
/* True until the first CHARACTER element length of an array constructor
   has been recorded; cleared by gfc_trans_array_ctor_element once
   first_len_val is set.  */
static bool first_len;
/* Holds the string length of the first CHARACTER element, for comparison
   against the lengths of the following elements.  */
static tree first_len_val;
/* When set, the per-element CHARACTER length check in
   gfc_trans_array_ctor_element is skipped — presumably because the
   constructor carries an explicit type-spec; confirm at the setters.  */
static bool typespec_chararray_ctor;
1369
/* Translate the scalar expression EXPR and store its value at position
   OFFSET of the array behind descriptor DESC.  SE carries the
   translation context.  For CHARACTER elements, when -fcheck=bounds is
   active and no explicit type-spec was given, also verify that all
   constructor elements have the same length.  */

static void
gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
			      tree offset, gfc_se * se, gfc_expr * expr)
{
  tree tmp;

  gfc_conv_expr (se, expr);

  /* Store the value.  */
  tmp = build_fold_indirect_ref_loc (input_location,
				     gfc_conv_descriptor_data_get (desc));
  tmp = gfc_build_array_ref (tmp, offset, NULL);

  if (expr->ts.type == BT_CHARACTER)
    {
      int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
      tree esize;

      /* Element size in characters: size in bytes divided by the byte
	 width of the character kind.  */
      esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
      esize = fold_convert (gfc_charlen_type_node, esize);
      esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
			       gfc_charlen_type_node, esize,
			       build_int_cst (gfc_charlen_type_node,
					      gfc_character_kinds[i].bit_size / 8));

      gfc_conv_string_parameter (se);
      if (POINTER_TYPE_P (TREE_TYPE (tmp)))
	{
	  /* The temporary is an array of pointers.  */
	  se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
	  gfc_add_modify (&se->pre, tmp, se->expr);
	}
      else
	{
	  /* The temporary is an array of string values.  */
	  tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
	  /* We know the temporary and the value will be the same length,
	     so can use memcpy.  */
	  gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
				 se->string_length, se->expr, expr->ts.kind);
	}
      if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
	{
	  if (first_len)
	    {
	      /* Record the first element's length; subsequent elements
		 are compared against it.  */
	      gfc_add_modify (&se->pre, first_len_val,
			      se->string_length);
	      first_len = false;
	    }
	  else
	    {
	      /* Verify that all constructor elements are of the same
		 length.  */
	      tree cond = fold_build2_loc (input_location, NE_EXPR,
					   boolean_type_node, first_len_val,
					   se->string_length);
	      gfc_trans_runtime_check
		(true, false, cond, &se->pre, &expr->where,
		 "Different CHARACTER lengths (%ld/%ld) in array constructor",
		 fold_convert (long_integer_type_node, first_len_val),
		 fold_convert (long_integer_type_node, se->string_length));
	    }
	}
    }
  else if (GFC_CLASS_TYPE_P (TREE_TYPE (se->expr))
	   && !GFC_CLASS_TYPE_P (gfc_get_element_type (TREE_TYPE (desc))))
    {
      /* Assignment of a CLASS array constructor to a derived type array.  */
      if (expr->expr_type == EXPR_FUNCTION)
	se->expr = gfc_evaluate_now (se->expr, pblock);
      /* Strip the class wrapper down to the underlying data.  */
      se->expr = gfc_class_data_get (se->expr);
      se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
      se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
      gfc_add_modify (&se->pre, tmp, se->expr);
    }
  else
    {
      /* TODO: Should the frontend already have done this conversion?  */
      se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
      gfc_add_modify (&se->pre, tmp, se->expr);
    }

  gfc_add_block_to_block (pblock, &se->pre);
  gfc_add_block_to_block (pblock, &se->post);
}
1455
1456
/* Add the contents of an array to the constructor.  DYNAMIC is as for
   gfc_trans_array_constructor_value.  The array expression EXPR is
   scalarized and each element stored at *POFFSET, which is advanced
   past the copied data.  */

static void
gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
				      tree type ATTRIBUTE_UNUSED,
				      tree desc, gfc_expr * expr,
				      tree * poffset, tree * offsetvar,
				      bool dynamic)
{
  gfc_se se;
  gfc_ss *ss;
  gfc_loopinfo loop;
  stmtblock_t body;
  tree tmp;
  tree size;
  int n;

  /* We need this to be a variable so we can increment it.  */
  gfc_put_offset_into_var (pblock, poffset, offsetvar);

  gfc_init_se (&se, NULL);

  /* Walk the array expression.  */
  ss = gfc_walk_expr (expr);
  gcc_assert (ss != gfc_ss_terminator);

  /* Initialize the scalarizer.  */
  gfc_init_loopinfo (&loop);
  gfc_add_ss_to_loop (&loop, ss);

  /* Initialize the loop.  */
  gfc_conv_ss_startstride (&loop);
  gfc_conv_loop_setup (&loop, &expr->where);

  /* Make sure the constructed array has room for the new data.  */
  if (dynamic)
    {
      /* Set SIZE to the total number of elements in the subarray.  */
      size = gfc_index_one_node;
      for (n = 0; n < loop.dimen; n++)
	{
	  tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
					 gfc_index_one_node);
	  size = fold_build2_loc (input_location, MULT_EXPR,
				  gfc_array_index_type, size, tmp);
	}

      /* Grow the constructed array by SIZE elements.  */
      gfc_grow_array (&loop.pre, desc, size);
    }

  /* Make the loop body.  */
  gfc_mark_ss_chain_used (ss, 1);
  gfc_start_scalarized_body (&loop, &body);
  gfc_copy_loopinfo_to_se (&se, &loop);
  se.ss = ss;

  /* Store one element per iteration of the scalarized loop.  */
  gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
  gcc_assert (se.ss == gfc_ss_terminator);

  /* Increment the offset.  */
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			 *poffset, gfc_index_one_node);
  gfc_add_modify (&body, *poffset, tmp);

  /* Finish the loop.  */
  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&loop.pre, &loop.post);
  tmp = gfc_finish_block (&loop.pre);
  gfc_add_expr_to_block (pblock, tmp);

  gfc_cleanup_loop (&loop);
}
1531
1532
/* Assign the values to the elements of an array constructor.  DYNAMIC
   is true if descriptor DESC only contains enough data for the static
   size calculated by gfc_get_array_constructor_size.  When true, memory
   for the dynamic parts must be allocated using realloc.

   *POFFSET tracks the current element position; *OFFSETVAR is the
   variable it is forced into whenever it must be modified at run time.
   TYPE is the element type of the constructed array.  */

static void
gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
				   tree desc, gfc_constructor_base base,
				   tree * poffset, tree * offsetvar,
				   bool dynamic)
{
  tree tmp;
  tree start = NULL_TREE;
  tree end = NULL_TREE;
  tree step = NULL_TREE;
  stmtblock_t body;
  gfc_se se;
  mpz_t size;
  gfc_constructor *c;

  tree shadow_loopvar = NULL_TREE;
  gfc_saved_var saved_loopvar;

  mpz_init (size);
  for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
    {
      /* If this is an iterator or an array, the offset must be a variable.  */
      if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
	gfc_put_offset_into_var (pblock, poffset, offsetvar);

      /* Shadowing the iterator avoids changing its value and saves us from
	 keeping track of it.  Further, it makes sure that there's always a
	 backend-decl for the symbol, even if there wasn't one before,
	 e.g. in the case of an iterator that appears in a specification
	 expression in an interface mapping.  */
      if (c->iterator)
	{
	  gfc_symbol *sym;
	  tree type;

	  /* Evaluate loop bounds before substituting the loop variable
	     in case they depend on it.  Such a case is invalid, but it is
	     not more expensive to do the right thing here.
	     See PR 44354.  */
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_val (&se, c->iterator->start);
	  gfc_add_block_to_block (pblock, &se.pre);
	  start = gfc_evaluate_now (se.expr, pblock);

	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_val (&se, c->iterator->end);
	  gfc_add_block_to_block (pblock, &se.pre);
	  end = gfc_evaluate_now (se.expr, pblock);

	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_val (&se, c->iterator->step);
	  gfc_add_block_to_block (pblock, &se.pre);
	  step = gfc_evaluate_now (se.expr, pblock);

	  sym = c->iterator->var->symtree->n.sym;
	  type = gfc_typenode_for_spec (&sym->ts);

	  shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
	  gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
	}

      gfc_start_block (&body);

      if (c->expr->expr_type == EXPR_ARRAY)
	{
	  /* Array constructors can be nested.  */
	  gfc_trans_array_constructor_value (&body, type, desc,
					     c->expr->value.constructor,
					     poffset, offsetvar, dynamic);
	}
      else if (c->expr->rank > 0)
	{
	  gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
						poffset, offsetvar, dynamic);
	}
      else
	{
	  /* This code really upsets the gimplifier so don't bother for now.  */
	  gfc_constructor *p;
	  HOST_WIDE_INT n;
	  HOST_WIDE_INT size;

	  /* Count the run of consecutive scalar constants starting at C.  */
	  p = c;
	  n = 0;
	  while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
	    {
	      p = gfc_constructor_next (p);
	      n++;
	    }
	  if (n < 4)
	    {
	      /* Scalar values.  */
	      gfc_init_se (&se, NULL);
	      gfc_trans_array_ctor_element (&body, desc, *poffset,
					    &se, c->expr);

	      *poffset = fold_build2_loc (input_location, PLUS_EXPR,
					  gfc_array_index_type,
					  *poffset, gfc_index_one_node);
	    }
	  else
	    {
	      /* Collect multiple scalar constants into a constructor.  */
	      vec<constructor_elt, va_gc> *v = NULL;
	      tree init;
	      tree bound;
	      tree tmptype;
	      HOST_WIDE_INT idx = 0;

	      p = c;
	      /* Count the number of consecutive scalar constants.  */
	      while (p && !(p->iterator
			    || p->expr->expr_type != EXPR_CONSTANT))
		{
		  gfc_init_se (&se, NULL);
		  gfc_conv_constant (&se, p->expr);

		  if (c->expr->ts.type != BT_CHARACTER)
		    se.expr = fold_convert (type, se.expr);
		  /* For constant character array constructors we build
		     an array of pointers.  */
		  else if (POINTER_TYPE_P (type))
		    se.expr = gfc_build_addr_expr
			(gfc_get_pchar_type (p->expr->ts.kind),
			 se.expr);

		  CONSTRUCTOR_APPEND_ELT (v,
					  build_int_cst (gfc_array_index_type,
							 idx++),
					  se.expr);
		  /* Advance C to the last constant consumed, so the outer
		     loop continues after the whole run.  */
		  c = p;
		  p = gfc_constructor_next (p);
		}

	      bound = size_int (n - 1);
	      /* Create an array type to hold them.  */
	      tmptype = build_range_type (gfc_array_index_type,
					  gfc_index_zero_node, bound);
	      tmptype = build_array_type (type, tmptype);

	      init = build_constructor (tmptype, v);
	      TREE_CONSTANT (init) = 1;
	      TREE_STATIC (init) = 1;
	      /* Create a static variable to hold the data.  */
	      tmp = gfc_create_var (tmptype, "data");
	      TREE_STATIC (tmp) = 1;
	      TREE_CONSTANT (tmp) = 1;
	      TREE_READONLY (tmp) = 1;
	      DECL_INITIAL (tmp) = init;
	      init = tmp;

	      /* Use BUILTIN_MEMCPY to assign the values.  */
	      tmp = gfc_conv_descriptor_data_get (desc);
	      tmp = build_fold_indirect_ref_loc (input_location,
						 tmp);
	      tmp = gfc_build_array_ref (tmp, *poffset, NULL);
	      tmp = gfc_build_addr_expr (NULL_TREE, tmp);
	      init = gfc_build_addr_expr (NULL_TREE, init);

	      size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
	      bound = build_int_cst (size_type_node, n * size);
	      tmp = build_call_expr_loc (input_location,
					 builtin_decl_explicit (BUILT_IN_MEMCPY),
					 3, tmp, init, bound);
	      gfc_add_expr_to_block (&body, tmp);

	      *poffset = fold_build2_loc (input_location, PLUS_EXPR,
					  gfc_array_index_type, *poffset,
					  build_int_cst (gfc_array_index_type, n));
	    }
	  if (!INTEGER_CST_P (*poffset))
	    {
	      gfc_add_modify (&body, *offsetvar, *poffset);
	      *poffset = *offsetvar;
	    }
	}

      /* The frontend should already have done any expansions
	 at compile-time.  */
      if (!c->iterator)
	{
	  /* Pass the code as is.  */
	  tmp = gfc_finish_block (&body);
	  gfc_add_expr_to_block (pblock, tmp);
	}
      else
	{
	  /* Build the implied do-loop.  */
	  stmtblock_t implied_do_block;
	  tree cond;
	  tree exit_label;
	  tree loopbody;
	  tree tmp2;

	  loopbody = gfc_finish_block (&body);

	  /* Create a new block that holds the implied-do loop.  A temporary
	     loop-variable is used.  */
	  gfc_start_block(&implied_do_block);

	  /* Initialize the loop.  */
	  gfc_add_modify (&implied_do_block, shadow_loopvar, start);

	  /* If this array expands dynamically, and the number of iterations
	     is not constant, we won't have allocated space for the static
	     part of C->EXPR's size.  Do that now.  */
	  if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
	    {
	      /* Get the number of iterations.  */
	      tmp = gfc_get_iteration_count (shadow_loopvar, end, step);

	      /* Get the static part of C->EXPR's size.  */
	      gfc_get_array_constructor_element_size (&size, c->expr);
	      tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);

	      /* Grow the array by TMP * TMP2 elements.  */
	      tmp = fold_build2_loc (input_location, MULT_EXPR,
				     gfc_array_index_type, tmp, tmp2);
	      gfc_grow_array (&implied_do_block, desc, tmp);
	    }

	  /* Generate the loop body.  */
	  exit_label = gfc_build_label_decl (NULL_TREE);
	  gfc_start_block (&body);

	  /* Generate the exit condition.  Depending on the sign of
	     the step variable we have to generate the correct
	     comparison.  */
	  tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
				 step, build_int_cst (TREE_TYPE (step), 0));
	  cond = fold_build3_loc (input_location, COND_EXPR,
		      boolean_type_node, tmp,
		      fold_build2_loc (input_location, GT_EXPR,
				       boolean_type_node, shadow_loopvar, end),
		      fold_build2_loc (input_location, LT_EXPR,
				       boolean_type_node, shadow_loopvar, end));
	  tmp = build1_v (GOTO_EXPR, exit_label);
	  TREE_USED (exit_label) = 1;
	  tmp = build3_v (COND_EXPR, cond, tmp,
			  build_empty_stmt (input_location));
	  gfc_add_expr_to_block (&body, tmp);

	  /* The main loop body.  */
	  gfc_add_expr_to_block (&body, loopbody);

	  /* Increase loop variable by step.  */
	  tmp = fold_build2_loc (input_location, PLUS_EXPR,
				 TREE_TYPE (shadow_loopvar), shadow_loopvar,
				 step);
	  gfc_add_modify (&body, shadow_loopvar, tmp);

	  /* Finish the loop.  */
	  tmp = gfc_finish_block (&body);
	  tmp = build1_v (LOOP_EXPR, tmp);
	  gfc_add_expr_to_block (&implied_do_block, tmp);

	  /* Add the exit label.  */
	  tmp = build1_v (LABEL_EXPR, exit_label);
	  gfc_add_expr_to_block (&implied_do_block, tmp);

	  /* Finish the implied-do loop.  */
	  tmp = gfc_finish_block(&implied_do_block);
	  gfc_add_expr_to_block(pblock, tmp);

	  /* Undo the iterator shadowing set up above.  */
	  gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
	}
    }
  mpz_clear (size);
}
1807
1808
1809 /* The array constructor code can create a string length with an operand
1810 in the form of a temporary variable. This variable will retain its
1811 context (current_function_decl). If we store this length tree in a
1812 gfc_charlen structure which is shared by a variable in another
1813 context, the resulting gfc_charlen structure with a variable in a
1814 different context, we could trip the assertion in expand_expr_real_1
1815 when it sees that a variable has been created in one context and
1816 referenced in another.
1817
1818 If this might be the case, we create a new gfc_charlen structure and
1819 link it into the current namespace. */
1820
1821 static void
store_backend_decl(gfc_charlen ** clp,tree len,bool force_new_cl)1822 store_backend_decl (gfc_charlen **clp, tree len, bool force_new_cl)
1823 {
1824 if (force_new_cl)
1825 {
1826 gfc_charlen *new_cl = gfc_new_charlen (gfc_current_ns, *clp);
1827 *clp = new_cl;
1828 }
1829 (*clp)->backend_decl = len;
1830 }
1831
1832 /* A catch-all to obtain the string length for anything that is not
1833 a substring of non-constant length, a constant, array or variable. */
1834
1835 static void
get_array_ctor_all_strlen(stmtblock_t * block,gfc_expr * e,tree * len)1836 get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
1837 {
1838 gfc_se se;
1839
1840 /* Don't bother if we already know the length is a constant. */
1841 if (*len && INTEGER_CST_P (*len))
1842 return;
1843
1844 if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
1845 && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
1846 {
1847 /* This is easy. */
1848 gfc_conv_const_charlen (e->ts.u.cl);
1849 *len = e->ts.u.cl->backend_decl;
1850 }
1851 else
1852 {
1853 /* Otherwise, be brutal even if inefficient. */
1854 gfc_init_se (&se, NULL);
1855
1856 /* No function call, in case of side effects. */
1857 se.no_function_call = 1;
1858 if (e->rank == 0)
1859 gfc_conv_expr (&se, e);
1860 else
1861 gfc_conv_expr_descriptor (&se, e);
1862
1863 /* Fix the value. */
1864 *len = gfc_evaluate_now (se.string_length, &se.pre);
1865
1866 gfc_add_block_to_block (block, &se.pre);
1867 gfc_add_block_to_block (block, &se.post);
1868
1869 store_backend_decl (&e->ts.u.cl, *len, true);
1870 }
1871 }
1872
1873
/* Figure out the string length of a variable reference expression.
   Used by get_array_ctor_strlen.  */

static void
get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
{
  gfc_ref *ref;
  gfc_typespec *ts;
  mpz_t char_len;

  /* Don't bother if we already know the length is a constant.  */
  if (*len && INTEGER_CST_P (*len))
    return;

  /* Start from the symbol's own typespec; component references may
     replace it below.  */
  ts = &expr->symtree->n.sym->ts;
  for (ref = expr->ref; ref; ref = ref->next)
    {
      switch (ref->type)
	{
	case REF_ARRAY:
	  /* Array references don't change the string length.  */
	  break;

	case REF_COMPONENT:
	  /* Use the length of the component.  */
	  ts = &ref->u.c.component->ts;
	  break;

	case REF_SUBSTRING:
	  if (ref->u.ss.start->expr_type != EXPR_CONSTANT
	      || ref->u.ss.end->expr_type != EXPR_CONSTANT)
	    {
	      /* Note that this might evaluate expr.  */
	      get_array_ctor_all_strlen (block, expr, len);
	      return;
	    }
	  /* Constant substring: length is end - start + 1.  */
	  mpz_init_set_ui (char_len, 1);
	  mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
	  mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
	  *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
	  *len = convert (gfc_charlen_type_node, *len);
	  mpz_clear (char_len);
	  return;

	default:
	  gcc_unreachable ();
	}
    }

  /* No substring reference was found: the length comes from the last
     typespec seen above.  */
  *len = ts->u.cl->backend_decl;
}
1925
1926
1927 /* Figure out the string length of a character array constructor.
1928 If len is NULL, don't calculate the length; this happens for recursive calls
1929 when a sub-array-constructor is an element but not at the first position,
1930 so when we're not interested in the length.
1931 Returns TRUE if all elements are character constants. */
1932
1933 bool
get_array_ctor_strlen(stmtblock_t * block,gfc_constructor_base base,tree * len)1934 get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
1935 {
1936 gfc_constructor *c;
1937 bool is_const;
1938
1939 is_const = TRUE;
1940
1941 if (gfc_constructor_first (base) == NULL)
1942 {
1943 if (len)
1944 *len = build_int_cstu (gfc_charlen_type_node, 0);
1945 return is_const;
1946 }
1947
1948 /* Loop over all constructor elements to find out is_const, but in len we
1949 want to store the length of the first, not the last, element. We can
1950 of course exit the loop as soon as is_const is found to be false. */
1951 for (c = gfc_constructor_first (base);
1952 c && is_const; c = gfc_constructor_next (c))
1953 {
1954 switch (c->expr->expr_type)
1955 {
1956 case EXPR_CONSTANT:
1957 if (len && !(*len && INTEGER_CST_P (*len)))
1958 *len = build_int_cstu (gfc_charlen_type_node,
1959 c->expr->value.character.length);
1960 break;
1961
1962 case EXPR_ARRAY:
1963 if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
1964 is_const = false;
1965 break;
1966
1967 case EXPR_VARIABLE:
1968 is_const = false;
1969 if (len)
1970 get_array_ctor_var_strlen (block, c->expr, len);
1971 break;
1972
1973 default:
1974 is_const = false;
1975 if (len)
1976 get_array_ctor_all_strlen (block, c->expr, len);
1977 break;
1978 }
1979
1980 /* After the first iteration, we don't want the length modified. */
1981 len = NULL;
1982 }
1983
1984 return is_const;
1985 }
1986
1987 /* Check whether the array constructor C consists entirely of constant
1988 elements, and if so returns the number of those elements, otherwise
1989 return zero. Note, an empty or NULL array constructor returns zero. */
1990
1991 unsigned HOST_WIDE_INT
gfc_constant_array_constructor_p(gfc_constructor_base base)1992 gfc_constant_array_constructor_p (gfc_constructor_base base)
1993 {
1994 unsigned HOST_WIDE_INT nelem = 0;
1995
1996 gfc_constructor *c = gfc_constructor_first (base);
1997 while (c)
1998 {
1999 if (c->iterator
2000 || c->expr->rank > 0
2001 || c->expr->expr_type != EXPR_CONSTANT)
2002 return 0;
2003 c = gfc_constructor_next (c);
2004 nelem++;
2005 }
2006 return nelem;
2007 }
2008
2009
2010 /* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
2011 and the tree type of it's elements, TYPE, return a static constant
2012 variable that is compile-time initialized. */
2013
2014 tree
gfc_build_constant_array_constructor(gfc_expr * expr,tree type)2015 gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
2016 {
2017 tree tmptype, init, tmp;
2018 HOST_WIDE_INT nelem;
2019 gfc_constructor *c;
2020 gfc_array_spec as;
2021 gfc_se se;
2022 int i;
2023 vec<constructor_elt, va_gc> *v = NULL;
2024
2025 /* First traverse the constructor list, converting the constants
2026 to tree to build an initializer. */
2027 nelem = 0;
2028 c = gfc_constructor_first (expr->value.constructor);
2029 while (c)
2030 {
2031 gfc_init_se (&se, NULL);
2032 gfc_conv_constant (&se, c->expr);
2033 if (c->expr->ts.type != BT_CHARACTER)
2034 se.expr = fold_convert (type, se.expr);
2035 else if (POINTER_TYPE_P (type))
2036 se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
2037 se.expr);
2038 CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
2039 se.expr);
2040 c = gfc_constructor_next (c);
2041 nelem++;
2042 }
2043
2044 /* Next determine the tree type for the array. We use the gfortran
2045 front-end's gfc_get_nodesc_array_type in order to create a suitable
2046 GFC_ARRAY_TYPE_P that may be used by the scalarizer. */
2047
2048 memset (&as, 0, sizeof (gfc_array_spec));
2049
2050 as.rank = expr->rank;
2051 as.type = AS_EXPLICIT;
2052 if (!expr->shape)
2053 {
2054 as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2055 as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
2056 NULL, nelem - 1);
2057 }
2058 else
2059 for (i = 0; i < expr->rank; i++)
2060 {
2061 int tmp = (int) mpz_get_si (expr->shape[i]);
2062 as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
2063 as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
2064 NULL, tmp - 1);
2065 }
2066
2067 tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);
2068
2069 /* as is not needed anymore. */
2070 for (i = 0; i < as.rank + as.corank; i++)
2071 {
2072 gfc_free_expr (as.lower[i]);
2073 gfc_free_expr (as.upper[i]);
2074 }
2075
2076 init = build_constructor (tmptype, v);
2077
2078 TREE_CONSTANT (init) = 1;
2079 TREE_STATIC (init) = 1;
2080
2081 tmp = build_decl (input_location, VAR_DECL, create_tmp_var_name ("A"),
2082 tmptype);
2083 DECL_ARTIFICIAL (tmp) = 1;
2084 DECL_IGNORED_P (tmp) = 1;
2085 TREE_STATIC (tmp) = 1;
2086 TREE_CONSTANT (tmp) = 1;
2087 TREE_READONLY (tmp) = 1;
2088 DECL_INITIAL (tmp) = init;
2089 pushdecl (tmp);
2090
2091 return tmp;
2092 }
2093
2094
2095 /* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
2096 This mostly initializes the scalarizer state info structure with the
2097 appropriate values to directly use the array created by the function
2098 gfc_build_constant_array_constructor. */
2099
2100 static void
trans_constant_array_constructor(gfc_ss * ss,tree type)2101 trans_constant_array_constructor (gfc_ss * ss, tree type)
2102 {
2103 gfc_array_info *info;
2104 tree tmp;
2105 int i;
2106
2107 tmp = gfc_build_constant_array_constructor (ss->info->expr, type);
2108
2109 info = &ss->info->data.array;
2110
2111 info->descriptor = tmp;
2112 info->data = gfc_build_addr_expr (NULL_TREE, tmp);
2113 info->offset = gfc_index_zero_node;
2114
2115 for (i = 0; i < ss->dimen; i++)
2116 {
2117 info->delta[i] = gfc_index_zero_node;
2118 info->start[i] = gfc_index_zero_node;
2119 info->end[i] = gfc_index_zero_node;
2120 info->stride[i] = gfc_index_one_node;
2121 }
2122 }
2123
2124
2125 static int
get_rank(gfc_loopinfo * loop)2126 get_rank (gfc_loopinfo *loop)
2127 {
2128 int rank;
2129
2130 rank = 0;
2131 for (; loop; loop = loop->parent)
2132 rank += loop->dimen;
2133
2134 return rank;
2135 }
2136
2137
2138 /* Helper routine of gfc_trans_array_constructor to determine if the
2139 bounds of the loop specified by LOOP are constant and simple enough
2140 to use with trans_constant_array_constructor. Returns the
2141 iteration count of the loop if suitable, and NULL_TREE otherwise. */
2142
2143 static tree
constant_array_constructor_loop_size(gfc_loopinfo * l)2144 constant_array_constructor_loop_size (gfc_loopinfo * l)
2145 {
2146 gfc_loopinfo *loop;
2147 tree size = gfc_index_one_node;
2148 tree tmp;
2149 int i, total_dim;
2150
2151 total_dim = get_rank (l);
2152
2153 for (loop = l; loop; loop = loop->parent)
2154 {
2155 for (i = 0; i < loop->dimen; i++)
2156 {
2157 /* If the bounds aren't constant, return NULL_TREE. */
2158 if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
2159 return NULL_TREE;
2160 if (!integer_zerop (loop->from[i]))
2161 {
2162 /* Only allow nonzero "from" in one-dimensional arrays. */
2163 if (total_dim != 1)
2164 return NULL_TREE;
2165 tmp = fold_build2_loc (input_location, MINUS_EXPR,
2166 gfc_array_index_type,
2167 loop->to[i], loop->from[i]);
2168 }
2169 else
2170 tmp = loop->to[i];
2171 tmp = fold_build2_loc (input_location, PLUS_EXPR,
2172 gfc_array_index_type, tmp, gfc_index_one_node);
2173 size = fold_build2_loc (input_location, MULT_EXPR,
2174 gfc_array_index_type, size, tmp);
2175 }
2176 }
2177
2178 return size;
2179 }
2180
2181
2182 static tree *
get_loop_upper_bound_for_array(gfc_ss * array,int array_dim)2183 get_loop_upper_bound_for_array (gfc_ss *array, int array_dim)
2184 {
2185 gfc_ss *ss;
2186 int n;
2187
2188 gcc_assert (array->nested_ss == NULL);
2189
2190 for (ss = array; ss; ss = ss->parent)
2191 for (n = 0; n < ss->loop->dimen; n++)
2192 if (array_dim == get_array_ref_dim_for_loop_dim (ss, n))
2193 return &(ss->loop->to[n]);
2194
2195 gcc_unreachable ();
2196 }
2197
2198
2199 static gfc_loopinfo *
outermost_loop(gfc_loopinfo * loop)2200 outermost_loop (gfc_loopinfo * loop)
2201 {
2202 while (loop->parent != NULL)
2203 loop = loop->parent;
2204
2205 return loop;
2206 }
2207
2208
/* Array constructors are handled by constructing a temporary, then using that
   within the scalarization loop.  This is not optimal, but seems by far the
   simplest method.
   SS is the constructor's scalarization state; WHERE locates run-time
   error messages.  Saves and restores the file-scope globals first_len,
   first_len_val and typespec_chararray_ctor around nested invocations.  */

static void
trans_array_constructor (gfc_ss * ss, locus * where)
{
  gfc_constructor_base c;
  tree offset;
  tree offsetvar;
  tree desc;
  tree type;
  tree tmp;
  tree *loop_ubound0;
  bool dynamic;
  bool old_first_len, old_typespec_chararray_ctor;
  tree old_first_len_val;
  gfc_loopinfo *loop, *outer_loop;
  gfc_ss_info *ss_info;
  gfc_expr *expr;
  gfc_ss *s;

  /* Save the old values for nested checking.  */
  old_first_len = first_len;
  old_first_len_val = first_len_val;
  old_typespec_chararray_ctor = typespec_chararray_ctor;

  loop = ss->loop;
  outer_loop = outermost_loop (loop);
  ss_info = ss->info;
  expr = ss_info->expr;

  /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
     typespec was given for the array constructor.  */
  typespec_chararray_ctor = (expr->ts.u.cl
			     && expr->ts.u.cl->length_from_typespec);

  /* With bounds checking enabled, track the string length of the first
     element so later elements can be compared against it.  */
  if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
    {
      first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
      first_len = true;
    }

  gcc_assert (ss->dimen == ss->loop->dimen);

  c = expr->value.constructor;
  if (expr->ts.type == BT_CHARACTER)
    {
      bool const_string;
      bool force_new_cl = false;

      /* get_array_ctor_strlen walks the elements of the constructor; if a
	 typespec was given, we already know the string length and want the one
	 specified there.  */
      if (typespec_chararray_ctor && expr->ts.u.cl->length
	  && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
	{
	  gfc_se length_se;

	  const_string = false;
	  gfc_init_se (&length_se, NULL);
	  gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
			      gfc_charlen_type_node);
	  ss_info->string_length = length_se.expr;
	  gfc_add_block_to_block (&outer_loop->pre, &length_se.pre);
	  gfc_add_block_to_block (&outer_loop->post, &length_se.post);
	}
      else
	{
	  /* Derive the length from the constructor elements themselves.  */
	  const_string = get_array_ctor_strlen (&outer_loop->pre, c,
						&ss_info->string_length);
	  force_new_cl = true;
	}

      /* Complex character array constructors should have been taken care of
	 and not end up here.  */
      gcc_assert (ss_info->string_length);

      store_backend_decl (&expr->ts.u.cl, ss_info->string_length, force_new_cl);

      /* Fully constant strings are stored as pointers to their (shared)
	 constant data rather than by value.  */
      type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
      if (const_string)
	type = build_pointer_type (type);
    }
  else
    type = gfc_typenode_for_spec (&expr->ts);

  /* See if the constructor determines the loop bounds.  */
  dynamic = false;

  loop_ubound0 = get_loop_upper_bound_for_array (ss, 0);

  /* A NULL_TREE upper bound means the loop bounds are not yet known and
     must be derived from the constructor itself.  */
  if (expr->shape && get_rank (loop) > 1 && *loop_ubound0 == NULL_TREE)
    {
      /* We have a multidimensional parameter.  Set each loop to run
	 zero-based over the corresponding extent of the known shape.  */
      for (s = ss; s; s = s->parent)
	{
	  int n;
	  for (n = 0; n < s->loop->dimen; n++)
	    {
	      s->loop->from[n] = gfc_index_zero_node;
	      s->loop->to[n] = gfc_conv_mpz_to_tree (expr->shape[s->dim[n]],
						     gfc_index_integer_kind);
	      s->loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
						gfc_array_index_type,
						s->loop->to[n],
						gfc_index_one_node);
	    }
	}
    }

  if (*loop_ubound0 == NULL_TREE)
    {
      mpz_t size;

      /* We should have a 1-dimensional, zero-based loop.  */
      gcc_assert (loop->parent == NULL && loop->nested == NULL);
      gcc_assert (loop->dimen == 1);
      gcc_assert (integer_zerop (loop->from[0]));

      /* Split the constructor size into a static part and a dynamic part.
	 Allocate the static size up-front and record whether the dynamic
	 size might be nonzero.  */
      mpz_init (size);
      dynamic = gfc_get_array_constructor_size (&size, c);
      mpz_sub_ui (size, size, 1);
      loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
      mpz_clear (size);
    }

  /* Special case constant array constructors: when the element count
     matches the constant loop size exactly, reuse a compile-time
     initialized static array instead of building a temporary.  */
  if (!dynamic)
    {
      unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
      if (nelem > 0)
	{
	  tree size = constant_array_constructor_loop_size (loop);
	  if (size && compare_tree_int (size, nelem) == 0)
	    {
	      trans_constant_array_constructor (ss, type);
	      goto finish;
	    }
	}
    }

  /* General case: build a temporary array and fill it element by
     element.  */
  gfc_trans_create_temp_array (&outer_loop->pre, &outer_loop->post, ss, type,
			       NULL_TREE, dynamic, true, false, where);

  desc = ss_info->data.array.descriptor;
  offset = gfc_index_zero_node;
  offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
  TREE_NO_WARNING (offsetvar) = 1;
  TREE_USED (offsetvar) = 0;
  gfc_trans_array_constructor_value (&outer_loop->pre, type, desc, c,
				     &offset, &offsetvar, dynamic);

  /* If the array grows dynamically, the upper bound of the loop variable
     is determined by the array's final upper bound.  */
  if (dynamic)
    {
      tmp = fold_build2_loc (input_location, MINUS_EXPR,
			     gfc_array_index_type,
			     offsetvar, gfc_index_one_node);
      tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
      gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
      if (*loop_ubound0 && TREE_CODE (*loop_ubound0) == VAR_DECL)
	gfc_add_modify (&outer_loop->pre, *loop_ubound0, tmp);
      else
	*loop_ubound0 = tmp;
    }

  /* Only declare the offset variable if the constructor value code
     actually used it; otherwise the offset must have stayed constant.  */
  if (TREE_USED (offsetvar))
    pushdecl (offsetvar);
  else
    gcc_assert (INTEGER_CST_P (offset));

#if 0
  /* Disable bound checking for now because it's probably broken.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
    {
      gcc_unreachable ();
    }
#endif

finish:
  /* Restore old values of globals.  */
  first_len = old_first_len;
  first_len_val = old_first_len_val;
  typespec_chararray_ctor = old_typespec_chararray_ctor;
}
2400
2401
/* SS describes a GFC_SS_SECTION, and this function is called after
   evaluating all of its vector dimensions.  Go through each such vector
   dimension and see if we can now fill in any missing loop bounds.  */

static void
set_vector_loop_bounds (gfc_ss * ss)
{
  gfc_loopinfo *loop, *outer_loop;
  gfc_array_info *info;
  gfc_se se;
  tree tmp;
  tree desc;
  tree zero;
  int n;
  int dim;

  outer_loop = outermost_loop (ss->loop);

  /* NOTE: INFO always refers to the innermost SS's array info, even
     while the loop below walks up through the parent SS chain.  */
  info = &ss->info->data.array;

  for (; ss; ss = ss->parent)
    {
      loop = ss->loop;

      for (n = 0; n < loop->dimen; n++)
	{
	  dim = ss->dim[n];
	  /* Skip dimensions that are not vector subscripts, and those
	     whose loop bound is already known.  */
	  if (info->ref->u.ar.dimen_type[dim] != DIMEN_VECTOR
	      || loop->to[n] != NULL)
	    continue;

	  /* Loop variable N indexes vector dimension DIM, and we don't
	     yet know the upper bound of loop variable N.  Set it to the
	     difference between the vector's upper and lower bounds.  */
	  gcc_assert (loop->from[n] == gfc_index_zero_node);
	  gcc_assert (info->subscript[dim]
		      && info->subscript[dim]->info->type == GFC_SS_VECTOR);

	  gfc_init_se (&se, NULL);
	  desc = info->subscript[dim]->info->data.array.descriptor;
	  zero = gfc_rank_cst[0];
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
			     gfc_array_index_type,
			     gfc_conv_descriptor_ubound_get (desc, zero),
			     gfc_conv_descriptor_lbound_get (desc, zero));
	  /* Evaluate before the outermost loop so the bound is available
	     to all enclosing loop setup code.  */
	  tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
	  loop->to[n] = tmp;
	}
    }
}
2453
2454
2455 /* Tells whether a scalar argument to an elemental procedure is saved out
2456 of a scalarization loop as a value or as a reference. */
2457
2458 bool
gfc_scalar_elemental_arg_saved_as_reference(gfc_ss_info * ss_info)2459 gfc_scalar_elemental_arg_saved_as_reference (gfc_ss_info * ss_info)
2460 {
2461 if (ss_info->type != GFC_SS_REFERENCE)
2462 return false;
2463
2464 /* If the actual argument can be absent (in other words, it can
2465 be a NULL reference), don't try to evaluate it; pass instead
2466 the reference directly. */
2467 if (ss_info->can_be_null_ref)
2468 return true;
2469
2470 /* If the expression is of polymorphic type, it's actual size is not known,
2471 so we avoid copying it anywhere. */
2472 if (ss_info->data.scalar.dummy_arg
2473 && ss_info->data.scalar.dummy_arg->ts.type == BT_CLASS
2474 && ss_info->expr->ts.type == BT_CLASS)
2475 return true;
2476
2477 /* If the expression is a data reference of aggregate type,
2478 and the data reference is not used on the left hand side,
2479 avoid a copy by saving a reference to the content. */
2480 if (!ss_info->data.scalar.needs_temporary
2481 && (ss_info->expr->ts.type == BT_DERIVED
2482 || ss_info->expr->ts.type == BT_CLASS)
2483 && gfc_expr_is_variable (ss_info->expr))
2484 return true;
2485
2486 /* Otherwise the expression is evaluated to a temporary variable before the
2487 scalarization loop. */
2488 return false;
2489 }
2490
2491
/* Add the pre and post chains for all the scalar expressions in a SS chain
   to loop.  This is called after the loop parameters have been calculated,
   but before the actual scalarizing loops.
   SUBSCRIPT is true when the SS chain being processed consists of array
   subscript expressions; WHERE locates run-time diagnostics.  */

static void
gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
		      locus * where)
{
  gfc_loopinfo *nested_loop, *outer_loop;
  gfc_se se;
  gfc_ss_info *ss_info;
  gfc_array_info *info;
  gfc_expr *expr;
  int n;

  /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
     arguments could get evaluated multiple times.  */
  if (ss->is_alloc_lhs)
    return;

  outer_loop = outermost_loop (loop);

  /* TODO: This can generate bad code if there are ordering dependencies,
     e.g., a callee allocated function and an unknown size constructor.  */
  gcc_assert (ss != NULL);

  for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      gcc_assert (ss);

      /* Cross loop arrays are handled from within the most nested loop.  */
      if (ss->nested_ss != NULL)
	continue;

      ss_info = ss->info;
      expr = ss_info->expr;
      info = &ss_info->data.array;

      switch (ss_info->type)
	{
	case GFC_SS_SCALAR:
	  /* Scalar expression.  Evaluate this now.  This includes elemental
	     dimension indices, but not array section bounds.  */
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr (&se, expr);
	  gfc_add_block_to_block (&outer_loop->pre, &se.pre);

	  if (expr->ts.type != BT_CHARACTER
	      && !gfc_is_alloc_class_scalar_function (expr))
	    {
	      /* Move the evaluation of scalar expressions outside the
		 scalarization loop, except for WHERE assignments.  */
	      if (subscript)
		se.expr = convert(gfc_array_index_type, se.expr);
	      if (!ss_info->where)
		se.expr = gfc_evaluate_now (se.expr, &outer_loop->pre);
	      gfc_add_block_to_block (&outer_loop->pre, &se.post);
	    }
	  else
	    gfc_add_block_to_block (&outer_loop->post, &se.post);

	  ss_info->data.scalar.value = se.expr;
	  ss_info->string_length = se.string_length;
	  break;

	case GFC_SS_REFERENCE:
	  /* Scalar argument to elemental procedure.  */
	  gfc_init_se (&se, NULL);
	  if (gfc_scalar_elemental_arg_saved_as_reference (ss_info))
	    gfc_conv_expr_reference (&se, expr);
	  else
	    {
	      /* Evaluate the argument outside the loop and pass
		 a reference to the value.  */
	      gfc_conv_expr (&se, expr);
	    }

	  /* Ensure that a pointer to the string is stored.  */
	  if (expr->ts.type == BT_CHARACTER)
	    gfc_conv_string_parameter (&se);

	  gfc_add_block_to_block (&outer_loop->pre, &se.pre);
	  gfc_add_block_to_block (&outer_loop->post, &se.post);
	  if (gfc_is_class_scalar_expr (expr))
	    /* This is necessary because the dynamic type will always be
	       larger than the declared type.  In consequence, assigning
	       the value to a temporary could segfault.
	       OOP-TODO: see if this is generally correct or if the value
	       has to be written to an allocated temporary, whose address
	       is passed via ss_info.  */
	    ss_info->data.scalar.value = se.expr;
	  else
	    ss_info->data.scalar.value = gfc_evaluate_now (se.expr,
							   &outer_loop->pre);

	  ss_info->string_length = se.string_length;
	  break;

	case GFC_SS_SECTION:
	  /* Add the expressions for scalar and vector subscripts.  */
	  for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
	    if (info->subscript[n])
	      gfc_add_loop_ss_code (loop, info->subscript[n], true, where);

	  /* Vector subscripts may now allow missing loop bounds to be
	     filled in.  */
	  set_vector_loop_bounds (ss);
	  break;

	case GFC_SS_VECTOR:
	  /* Get the vector's descriptor and store it in SS.  */
	  gfc_init_se (&se, NULL);
	  gfc_conv_expr_descriptor (&se, expr);
	  gfc_add_block_to_block (&outer_loop->pre, &se.pre);
	  gfc_add_block_to_block (&outer_loop->post, &se.post);
	  info->descriptor = se.expr;
	  break;

	case GFC_SS_INTRINSIC:
	  gfc_add_intrinsic_ss_code (loop, ss);
	  break;

	case GFC_SS_FUNCTION:
	  /* Array function return value.  We call the function and save its
	     result in a temporary for use inside the loop.  */
	  gfc_init_se (&se, NULL);
	  se.loop = loop;
	  se.ss = ss;
	  if (gfc_is_class_array_function (expr))
	    expr->must_finalize = 1;
	  gfc_conv_expr (&se, expr);
	  gfc_add_block_to_block (&outer_loop->pre, &se.pre);
	  gfc_add_block_to_block (&outer_loop->post, &se.post);
	  ss_info->string_length = se.string_length;
	  break;

	case GFC_SS_CONSTRUCTOR:
	  /* If the string length comes from a constant typespec length
	     and is not yet set, evaluate it before translating the
	     constructor.  */
	  if (expr->ts.type == BT_CHARACTER
	      && ss_info->string_length == NULL
	      && expr->ts.u.cl
	      && expr->ts.u.cl->length
	      && expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
	    {
	      gfc_init_se (&se, NULL);
	      gfc_conv_expr_type (&se, expr->ts.u.cl->length,
				  gfc_charlen_type_node);
	      ss_info->string_length = se.expr;
	      gfc_add_block_to_block (&outer_loop->pre, &se.pre);
	      gfc_add_block_to_block (&outer_loop->post, &se.post);
	    }
	  trans_array_constructor (ss, where);
	  break;

	case GFC_SS_TEMP:
	case GFC_SS_COMPONENT:
	  /* Do nothing.  These are handled elsewhere.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Recurse into nested loops, but not while processing subscripts
     (their SS chains have no nested loops of interest here).  */
  if (!subscript)
    for (nested_loop = loop->nested; nested_loop;
	 nested_loop = nested_loop->next)
      gfc_add_loop_ss_code (nested_loop, nested_loop->ss, subscript, where);
}
2658
2659
/* Translate expressions for the descriptor and data pointer of a SS.
   If BASE is nonzero, also evaluate the data pointer and offset and
   record them in the SS's array info.  */
/*GCC ARRAYS*/

static void
gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
{
  gfc_se se;
  gfc_ss_info *ss_info;
  gfc_array_info *info;
  tree tmp;

  ss_info = ss->info;
  info = &ss_info->data.array;

  /* Get the descriptor for the array to be scalarized.  */
  gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
  gfc_init_se (&se, NULL);
  se.descriptor_only = 1;
  gfc_conv_expr_lhs (&se, ss_info->expr);
  gfc_add_block_to_block (block, &se.pre);
  info->descriptor = se.expr;
  ss_info->string_length = se.string_length;

  if (base)
    {
      /* Also the data pointer.  */
      tmp = gfc_conv_array_data (se.expr);
      /* If this is a variable or address of a variable we use it directly.
         Otherwise we must evaluate it now to avoid breaking dependency
	 analysis by pulling the expressions for elemental array indices
	 inside the loop.  */
      if (!(DECL_P (tmp)
	    || (TREE_CODE (tmp) == ADDR_EXPR
		&& DECL_P (TREE_OPERAND (tmp, 0)))))
	tmp = gfc_evaluate_now (tmp, block);
      info->data = tmp;

      /* Evaluate the base offset now, outside the loops.  */
      tmp = gfc_conv_array_offset (se.expr);
      info->offset = gfc_evaluate_now (tmp, block);

      /* Make absolutely sure that the saved_offset is indeed saved
	 so that the variable is still accessible after the loops
	 are translated.  */
      info->saved_offset = info->offset;
    }
}
2706
2707
2708 /* Initialize a gfc_loopinfo structure. */
2709
2710 void
gfc_init_loopinfo(gfc_loopinfo * loop)2711 gfc_init_loopinfo (gfc_loopinfo * loop)
2712 {
2713 int n;
2714
2715 memset (loop, 0, sizeof (gfc_loopinfo));
2716 gfc_init_block (&loop->pre);
2717 gfc_init_block (&loop->post);
2718
2719 /* Initially scalarize in order and default to no loop reversal. */
2720 for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
2721 {
2722 loop->order[n] = n;
2723 loop->reverse[n] = GFC_INHIBIT_REVERSE;
2724 }
2725
2726 loop->ss = gfc_ss_terminator;
2727 }
2728
2729
2730 /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
2731 chain. */
2732
2733 void
gfc_copy_loopinfo_to_se(gfc_se * se,gfc_loopinfo * loop)2734 gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
2735 {
2736 se->loop = loop;
2737 }
2738
2739
2740 /* Return an expression for the data pointer of an array. */
2741
2742 tree
gfc_conv_array_data(tree descriptor)2743 gfc_conv_array_data (tree descriptor)
2744 {
2745 tree type;
2746
2747 type = TREE_TYPE (descriptor);
2748 if (GFC_ARRAY_TYPE_P (type))
2749 {
2750 if (TREE_CODE (type) == POINTER_TYPE)
2751 return descriptor;
2752 else
2753 {
2754 /* Descriptorless arrays. */
2755 return gfc_build_addr_expr (NULL_TREE, descriptor);
2756 }
2757 }
2758 else
2759 return gfc_conv_descriptor_data_get (descriptor);
2760 }
2761
2762
2763 /* Return an expression for the base offset of an array. */
2764
2765 tree
gfc_conv_array_offset(tree descriptor)2766 gfc_conv_array_offset (tree descriptor)
2767 {
2768 tree type;
2769
2770 type = TREE_TYPE (descriptor);
2771 if (GFC_ARRAY_TYPE_P (type))
2772 return GFC_TYPE_ARRAY_OFFSET (type);
2773 else
2774 return gfc_conv_descriptor_offset_get (descriptor);
2775 }
2776
2777
2778 /* Get an expression for the array stride. */
2779
2780 tree
gfc_conv_array_stride(tree descriptor,int dim)2781 gfc_conv_array_stride (tree descriptor, int dim)
2782 {
2783 tree tmp;
2784 tree type;
2785
2786 type = TREE_TYPE (descriptor);
2787
2788 /* For descriptorless arrays use the array size. */
2789 tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
2790 if (tmp != NULL_TREE)
2791 return tmp;
2792
2793 tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
2794 return tmp;
2795 }
2796
2797
2798 /* Like gfc_conv_array_stride, but for the lower bound. */
2799
2800 tree
gfc_conv_array_lbound(tree descriptor,int dim)2801 gfc_conv_array_lbound (tree descriptor, int dim)
2802 {
2803 tree tmp;
2804 tree type;
2805
2806 type = TREE_TYPE (descriptor);
2807
2808 tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
2809 if (tmp != NULL_TREE)
2810 return tmp;
2811
2812 tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
2813 return tmp;
2814 }
2815
2816
2817 /* Like gfc_conv_array_stride, but for the upper bound. */
2818
2819 tree
gfc_conv_array_ubound(tree descriptor,int dim)2820 gfc_conv_array_ubound (tree descriptor, int dim)
2821 {
2822 tree tmp;
2823 tree type;
2824
2825 type = TREE_TYPE (descriptor);
2826
2827 tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
2828 if (tmp != NULL_TREE)
2829 return tmp;
2830
2831 /* This should only ever happen when passing an assumed shape array
2832 as an actual parameter. The value will never be used. */
2833 if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
2834 return gfc_index_zero_node;
2835
2836 tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
2837 return tmp;
2838 }
2839
2840
/* Generate code to perform an array index bound check.
   SS identifies the array being indexed; INDEX is the subscript value,
   N the (zero-based) dimension, WHERE the source location for the
   diagnostic, and CHECK_UPPER selects whether the upper bound is also
   verified.  Returns INDEX, evaluated to a temporary so the check and
   callers see the same value.  When bounds checking is disabled, INDEX
   is returned untouched.  */

static tree
trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
			 locus * where, bool check_upper)
{
  tree fault;
  tree tmp_lo, tmp_up;
  tree descriptor;
  char *msg;
  const char * name = NULL;

  if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
    return index;

  descriptor = ss->info->data.array.descriptor;

  index = gfc_evaluate_now (index, &se->pre);

  /* We find a name for the error message.  */
  name = ss->info->expr->symtree->n.sym->name;
  gcc_assert (name != NULL);

  /* Prefer the declaration name of the descriptor variable, when it
     has one.  */
  if (TREE_CODE (descriptor) == VAR_DECL)
    name = IDENTIFIER_POINTER (DECL_NAME (descriptor));

  /* If upper bound is present, include both bounds in the error message.  */
  if (check_upper)
    {
      tmp_lo = gfc_conv_array_lbound (descriptor, n);
      tmp_up = gfc_conv_array_ubound (descriptor, n);

      if (name)
	msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
			 "outside of expected range (%%ld:%%ld)", n+1, name);
      else
	msg = xasprintf ("Index '%%ld' of dimension %d "
			 "outside of expected range (%%ld:%%ld)", n+1);

      /* Emit separate checks for index < lbound and index > ubound,
	 both reusing the same message template.  */
      fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
			       index, tmp_lo);
      gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
			       fold_convert (long_integer_type_node, index),
			       fold_convert (long_integer_type_node, tmp_lo),
			       fold_convert (long_integer_type_node, tmp_up));
      fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
			       index, tmp_up);
      gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
			       fold_convert (long_integer_type_node, index),
			       fold_convert (long_integer_type_node, tmp_lo),
			       fold_convert (long_integer_type_node, tmp_up));
      free (msg);
    }
  else
    {
      /* Only the lower bound can be checked (e.g. the last dimension of
	 an assumed-size array has no upper bound).  */
      tmp_lo = gfc_conv_array_lbound (descriptor, n);

      if (name)
	msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
			 "below lower bound of %%ld", n+1, name);
      else
	msg = xasprintf ("Index '%%ld' of dimension %d "
			 "below lower bound of %%ld", n+1);

      fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
			       index, tmp_lo);
      gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
			       fold_convert (long_integer_type_node, index),
			       fold_convert (long_integer_type_node, tmp_lo));
      free (msg);
    }

  return index;
}
2915
2916
/* Return the offset for an index.  Performs bound checking for elemental
   dimensions.  Single element references are processed separately.
   DIM is the array dimension, I is the loop dimension.  AR is the array
   reference being scalarized, or NULL for a temporary array or derived
   type component; STRIDE, when non-NULL, multiplies the final index.  */

static tree
conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
			 gfc_array_ref * ar, tree stride)
{
  gfc_array_info *info;
  tree index;
  tree desc;
  tree data;

  info = &ss->info->data.array;

  /* Get the index into the array for this dimension.  */
  if (ar)
    {
      gcc_assert (ar->type != AR_ELEMENT);
      switch (ar->dimen_type[dim])
	{
	case DIMEN_THIS_IMAGE:
	  gcc_unreachable ();
	  break;
	case DIMEN_ELEMENT:
	  /* Elemental dimension.  */
	  gcc_assert (info->subscript[dim]
		      && info->subscript[dim]->info->type == GFC_SS_SCALAR);
	  /* We've already translated this value outside the loop.  */
	  index = info->subscript[dim]->info->data.scalar.value;

	  /* Skip the upper-bound check only for the last dimension of
	     an assumed-size array, which has no upper bound.  */
	  index = trans_array_bound_check (se, ss, index, dim, &ar->where,
					   ar->as->type != AS_ASSUMED_SIZE
					   || dim < ar->dimen - 1);
	  break;

	case DIMEN_VECTOR:
	  gcc_assert (info && se->loop);
	  gcc_assert (info->subscript[dim]
		      && info->subscript[dim]->info->type == GFC_SS_VECTOR);
	  desc = info->subscript[dim]->info->data.array.descriptor;

	  /* Get a zero-based index into the vector.  */
	  index = fold_build2_loc (input_location, MINUS_EXPR,
				   gfc_array_index_type,
				   se->loop->loopvar[i], se->loop->from[i]);

	  /* Multiply the index by the stride.  */
	  index = fold_build2_loc (input_location, MULT_EXPR,
				   gfc_array_index_type,
				   index, gfc_conv_array_stride (desc, 0));

	  /* Read the vector to get an index into info->descriptor.  */
	  data = build_fold_indirect_ref_loc (input_location,
					  gfc_conv_array_data (desc));
	  index = gfc_build_array_ref (data, index, NULL);
	  index = gfc_evaluate_now (index, &se->pre);
	  index = fold_convert (gfc_array_index_type, index);

	  /* Do any bounds checking on the final info->descriptor index.  */
	  index = trans_array_bound_check (se, ss, index, dim, &ar->where,
					   ar->as->type != AS_ASSUMED_SIZE
					   || dim < ar->dimen - 1);
	  break;

	case DIMEN_RANGE:
	  /* Scalarized dimension.  */
	  gcc_assert (info && se->loop);

	  /* Multiply the loop variable by the stride and delta.  */
	  index = se->loop->loopvar[i];
	  if (!integer_onep (info->stride[dim]))
	    index = fold_build2_loc (input_location, MULT_EXPR,
				     gfc_array_index_type, index,
				     info->stride[dim]);
	  if (!integer_zerop (info->delta[dim]))
	    index = fold_build2_loc (input_location, PLUS_EXPR,
				     gfc_array_index_type, index,
				     info->delta[dim]);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* Temporary array or derived type component.  */
      gcc_assert (se->loop);
      index = se->loop->loopvar[se->loop->order[i]];

      /* Pointer functions can have stride[0] different from unity.
	 Use the stride returned by the function call and stored in
	 the descriptor for the temporary.  */
      if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
	  && se->ss->info->expr
	  && se->ss->info->expr->symtree
	  && se->ss->info->expr->symtree->n.sym->result
	  && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
	stride = gfc_conv_descriptor_stride_get (info->descriptor,
						 gfc_rank_cst[dim]);

      /* Apply the delta, if any.  */
      if (info->delta[dim] && !integer_zerop (info->delta[dim]))
	index = fold_build2_loc (input_location, PLUS_EXPR,
				 gfc_array_index_type, index, info->delta[dim]);
    }

  /* Multiply by the stride.  */
  if (stride != NULL && !integer_onep (stride))
    index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			     index, stride);

  return index;
}
3031
3032
3033 /* Build a scalarized array reference using the vptr 'size'. */
3034
3035 static bool
build_class_array_ref(gfc_se * se,tree base,tree index)3036 build_class_array_ref (gfc_se *se, tree base, tree index)
3037 {
3038 tree type;
3039 tree size;
3040 tree offset;
3041 tree decl;
3042 tree tmp;
3043 gfc_expr *expr = se->ss->info->expr;
3044 gfc_ref *ref;
3045 gfc_ref *class_ref;
3046 gfc_typespec *ts;
3047
3048 if (expr == NULL
3049 || (expr->ts.type != BT_CLASS
3050 && !gfc_is_class_array_function (expr)))
3051 return false;
3052
3053 if (expr->symtree && expr->symtree->n.sym->ts.type == BT_CLASS)
3054 ts = &expr->symtree->n.sym->ts;
3055 else
3056 ts = NULL;
3057 class_ref = NULL;
3058
3059 for (ref = expr->ref; ref; ref = ref->next)
3060 {
3061 if (ref->type == REF_COMPONENT
3062 && ref->u.c.component->ts.type == BT_CLASS
3063 && ref->next && ref->next->type == REF_COMPONENT
3064 && strcmp (ref->next->u.c.component->name, "_data") == 0
3065 && ref->next->next
3066 && ref->next->next->type == REF_ARRAY
3067 && ref->next->next->u.ar.type != AR_ELEMENT)
3068 {
3069 ts = &ref->u.c.component->ts;
3070 class_ref = ref;
3071 break;
3072 }
3073 }
3074
3075 if (ts == NULL)
3076 return false;
3077
3078 if (class_ref == NULL && expr->symtree->n.sym->attr.function
3079 && expr->symtree->n.sym == expr->symtree->n.sym->result
3080 && expr->symtree->n.sym->backend_decl == current_function_decl)
3081 {
3082 decl = gfc_get_fake_result_decl (expr->symtree->n.sym, 0);
3083 }
3084 else if (gfc_is_class_array_function (expr))
3085 {
3086 size = NULL_TREE;
3087 decl = NULL_TREE;
3088 for (tmp = base; tmp; tmp = TREE_OPERAND (tmp, 0))
3089 {
3090 tree type;
3091 type = TREE_TYPE (tmp);
3092 while (type)
3093 {
3094 if (GFC_CLASS_TYPE_P (type))
3095 decl = tmp;
3096 if (type != TYPE_CANONICAL (type))
3097 type = TYPE_CANONICAL (type);
3098 else
3099 type = NULL_TREE;
3100 }
3101 if (TREE_CODE (tmp) == VAR_DECL)
3102 break;
3103 }
3104
3105 if (decl == NULL_TREE)
3106 return false;
3107
3108 se->class_vptr = gfc_evaluate_now (gfc_class_vptr_get (decl), &se->pre);
3109 }
3110 else if (class_ref == NULL)
3111 {
3112 decl = expr->symtree->n.sym->backend_decl;
3113 /* For class arrays the tree containing the class is stored in
3114 GFC_DECL_SAVED_DESCRIPTOR of the sym's backend_decl.
3115 For all others it's sym's backend_decl directly. */
3116 if (DECL_LANG_SPECIFIC (decl) && GFC_DECL_SAVED_DESCRIPTOR (decl))
3117 decl = GFC_DECL_SAVED_DESCRIPTOR (decl);
3118 }
3119 else
3120 {
3121 /* Remove everything after the last class reference, convert the
3122 expression and then recover its tailend once more. */
3123 gfc_se tmpse;
3124 ref = class_ref->next;
3125 class_ref->next = NULL;
3126 gfc_init_se (&tmpse, NULL);
3127 gfc_conv_expr (&tmpse, expr);
3128 decl = tmpse.expr;
3129 class_ref->next = ref;
3130 }
3131
3132 if (POINTER_TYPE_P (TREE_TYPE (decl)))
3133 decl = build_fold_indirect_ref_loc (input_location, decl);
3134
3135 if (!GFC_CLASS_TYPE_P (TREE_TYPE (decl)))
3136 return false;
3137
3138 size = gfc_class_vtab_size_get (decl);
3139
3140 /* Build the address of the element. */
3141 type = TREE_TYPE (TREE_TYPE (base));
3142 size = fold_convert (TREE_TYPE (index), size);
3143 offset = fold_build2_loc (input_location, MULT_EXPR,
3144 gfc_array_index_type,
3145 index, size);
3146 tmp = gfc_build_addr_expr (pvoid_type_node, base);
3147 tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
3148 tmp = fold_convert (build_pointer_type (type), tmp);
3149
3150 /* Return the element in the se expression. */
3151 se->expr = build_fold_indirect_ref_loc (input_location, tmp);
3152 return true;
3153 }
3154
3155
3156 /* Build a scalarized reference to an array. */
3157
3158 static void
gfc_conv_scalarized_array_ref(gfc_se * se,gfc_array_ref * ar)3159 gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
3160 {
3161 gfc_array_info *info;
3162 tree decl = NULL_TREE;
3163 tree index;
3164 tree tmp;
3165 gfc_ss *ss;
3166 gfc_expr *expr;
3167 int n;
3168
3169 ss = se->ss;
3170 expr = ss->info->expr;
3171 info = &ss->info->data.array;
3172 if (ar)
3173 n = se->loop->order[0];
3174 else
3175 n = 0;
3176
3177 index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
3178 /* Add the offset for this dimension to the stored offset for all other
3179 dimensions. */
3180 if (info->offset && !integer_zerop (info->offset))
3181 index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
3182 index, info->offset);
3183
3184 if (expr && (is_subref_array (expr)
3185 || (expr->ts.deferred && (expr->expr_type == EXPR_VARIABLE
3186 || expr->expr_type == EXPR_FUNCTION))))
3187 decl = expr->symtree->n.sym->backend_decl;
3188
3189 tmp = build_fold_indirect_ref_loc (input_location, info->data);
3190
3191 /* Use the vptr 'size' field to access a class the element of a class
3192 array. */
3193 if (build_class_array_ref (se, tmp, index))
3194 return;
3195
3196 se->expr = gfc_build_array_ref (tmp, index, decl);
3197 }
3198
3199
3200 /* Translate access of temporary array. */
3201
3202 void
gfc_conv_tmp_array_ref(gfc_se * se)3203 gfc_conv_tmp_array_ref (gfc_se * se)
3204 {
3205 se->string_length = se->ss->info->string_length;
3206 gfc_conv_scalarized_array_ref (se, NULL);
3207 gfc_advance_se_ss_chain (se);
3208 }
3209
3210 /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
3211
3212 static void
add_to_offset(tree * cst_offset,tree * offset,tree t)3213 add_to_offset (tree *cst_offset, tree *offset, tree t)
3214 {
3215 if (TREE_CODE (t) == INTEGER_CST)
3216 *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
3217 else
3218 {
3219 if (!integer_zerop (*offset))
3220 *offset = fold_build2_loc (input_location, PLUS_EXPR,
3221 gfc_array_index_type, *offset, t);
3222 else
3223 *offset = t;
3224 }
3225 }
3226
3227
3228 static tree
build_array_ref(tree desc,tree offset,tree decl,tree vptr)3229 build_array_ref (tree desc, tree offset, tree decl, tree vptr)
3230 {
3231 tree tmp;
3232 tree type;
3233 tree cdecl;
3234 bool classarray = false;
3235
3236 /* For class arrays the class declaration is stored in the saved
3237 descriptor. */
3238 if (INDIRECT_REF_P (desc)
3239 && DECL_LANG_SPECIFIC (TREE_OPERAND (desc, 0))
3240 && GFC_DECL_SAVED_DESCRIPTOR (TREE_OPERAND (desc, 0)))
3241 cdecl = gfc_class_data_get (GFC_DECL_SAVED_DESCRIPTOR (
3242 TREE_OPERAND (desc, 0)));
3243 else
3244 cdecl = desc;
3245
3246 /* Class container types do not always have the GFC_CLASS_TYPE_P
3247 but the canonical type does. */
3248 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (cdecl))
3249 && TREE_CODE (cdecl) == COMPONENT_REF)
3250 {
3251 type = TREE_TYPE (TREE_OPERAND (cdecl, 0));
3252 if (TYPE_CANONICAL (type)
3253 && GFC_CLASS_TYPE_P (TYPE_CANONICAL (type)))
3254 {
3255 type = TREE_TYPE (desc);
3256 classarray = true;
3257 }
3258 }
3259 else
3260 type = NULL;
3261
3262 /* Class array references need special treatment because the assigned
3263 type size needs to be used to point to the element. */
3264 if (classarray)
3265 {
3266 type = gfc_get_element_type (type);
3267 tmp = TREE_OPERAND (cdecl, 0);
3268 tmp = gfc_get_class_array_ref (offset, tmp, NULL_TREE);
3269 tmp = fold_convert (build_pointer_type (type), tmp);
3270 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3271 return tmp;
3272 }
3273
3274 tmp = gfc_conv_array_data (desc);
3275 tmp = build_fold_indirect_ref_loc (input_location, tmp);
3276 tmp = gfc_build_array_ref (tmp, offset, decl, vptr);
3277 return tmp;
3278 }
3279
3280
3281 /* Build an array reference. se->expr already holds the array descriptor.
3282 This should be either a variable, indirect variable reference or component
3283 reference. For arrays which do not have a descriptor, se->expr will be
3284 the data pointer.
3285 a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
3286
3287 void
gfc_conv_array_ref(gfc_se * se,gfc_array_ref * ar,gfc_expr * expr,locus * where)3288 gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_expr *expr,
3289 locus * where)
3290 {
3291 int n;
3292 tree offset, cst_offset;
3293 tree tmp;
3294 tree stride;
3295 gfc_se indexse;
3296 gfc_se tmpse;
3297 gfc_symbol * sym = expr->symtree->n.sym;
3298 char *var_name = NULL;
3299
3300 if (ar->dimen == 0)
3301 {
3302 gcc_assert (ar->codimen);
3303
3304 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
3305 se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
3306 else
3307 {
3308 if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
3309 && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
3310 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
3311
3312 /* Use the actual tree type and not the wrapped coarray. */
3313 if (!se->want_pointer)
3314 se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
3315 se->expr);
3316 }
3317
3318 return;
3319 }
3320
3321 /* Handle scalarized references separately. */
3322 if (ar->type != AR_ELEMENT)
3323 {
3324 gfc_conv_scalarized_array_ref (se, ar);
3325 gfc_advance_se_ss_chain (se);
3326 return;
3327 }
3328
3329 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3330 {
3331 size_t len;
3332 gfc_ref *ref;
3333
3334 len = strlen (sym->name) + 1;
3335 for (ref = expr->ref; ref; ref = ref->next)
3336 {
3337 if (ref->type == REF_ARRAY && &ref->u.ar == ar)
3338 break;
3339 if (ref->type == REF_COMPONENT)
3340 len += 1 + strlen (ref->u.c.component->name);
3341 }
3342
3343 var_name = XALLOCAVEC (char, len);
3344 strcpy (var_name, sym->name);
3345
3346 for (ref = expr->ref; ref; ref = ref->next)
3347 {
3348 if (ref->type == REF_ARRAY && &ref->u.ar == ar)
3349 break;
3350 if (ref->type == REF_COMPONENT)
3351 {
3352 strcat (var_name, "%%");
3353 strcat (var_name, ref->u.c.component->name);
3354 }
3355 }
3356 }
3357
3358 cst_offset = offset = gfc_index_zero_node;
3359 add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
3360
3361 /* Calculate the offsets from all the dimensions. Make sure to associate
3362 the final offset so that we form a chain of loop invariant summands. */
3363 for (n = ar->dimen - 1; n >= 0; n--)
3364 {
3365 /* Calculate the index for this dimension. */
3366 gfc_init_se (&indexse, se);
3367 gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
3368 gfc_add_block_to_block (&se->pre, &indexse.pre);
3369
3370 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
3371 {
3372 /* Check array bounds. */
3373 tree cond;
3374 char *msg;
3375
3376 /* Evaluate the indexse.expr only once. */
3377 indexse.expr = save_expr (indexse.expr);
3378
3379 /* Lower bound. */
3380 tmp = gfc_conv_array_lbound (se->expr, n);
3381 if (sym->attr.temporary)
3382 {
3383 gfc_init_se (&tmpse, se);
3384 gfc_conv_expr_type (&tmpse, ar->as->lower[n],
3385 gfc_array_index_type);
3386 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3387 tmp = tmpse.expr;
3388 }
3389
3390 cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
3391 indexse.expr, tmp);
3392 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
3393 "below lower bound of %%ld", n+1, var_name);
3394 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3395 fold_convert (long_integer_type_node,
3396 indexse.expr),
3397 fold_convert (long_integer_type_node, tmp));
3398 free (msg);
3399
3400 /* Upper bound, but not for the last dimension of assumed-size
3401 arrays. */
3402 if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
3403 {
3404 tmp = gfc_conv_array_ubound (se->expr, n);
3405 if (sym->attr.temporary)
3406 {
3407 gfc_init_se (&tmpse, se);
3408 gfc_conv_expr_type (&tmpse, ar->as->upper[n],
3409 gfc_array_index_type);
3410 gfc_add_block_to_block (&se->pre, &tmpse.pre);
3411 tmp = tmpse.expr;
3412 }
3413
3414 cond = fold_build2_loc (input_location, GT_EXPR,
3415 boolean_type_node, indexse.expr, tmp);
3416 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
3417 "above upper bound of %%ld", n+1, var_name);
3418 gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
3419 fold_convert (long_integer_type_node,
3420 indexse.expr),
3421 fold_convert (long_integer_type_node, tmp));
3422 free (msg);
3423 }
3424 }
3425
3426 /* Multiply the index by the stride. */
3427 stride = gfc_conv_array_stride (se->expr, n);
3428 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
3429 indexse.expr, stride);
3430
3431 /* And add it to the total. */
3432 add_to_offset (&cst_offset, &offset, tmp);
3433 }
3434
3435 if (!integer_zerop (cst_offset))
3436 offset = fold_build2_loc (input_location, PLUS_EXPR,
3437 gfc_array_index_type, offset, cst_offset);
3438
3439 se->expr = build_array_ref (se->expr, offset, sym->ts.type == BT_CLASS ?
3440 NULL_TREE : sym->backend_decl, se->class_vptr);
3441 }
3442
3443
3444 /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
3445 LOOP_DIM dimension (if any) to array's offset. */
3446
3447 static void
add_array_offset(stmtblock_t * pblock,gfc_loopinfo * loop,gfc_ss * ss,gfc_array_ref * ar,int array_dim,int loop_dim)3448 add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
3449 gfc_array_ref *ar, int array_dim, int loop_dim)
3450 {
3451 gfc_se se;
3452 gfc_array_info *info;
3453 tree stride, index;
3454
3455 info = &ss->info->data.array;
3456
3457 gfc_init_se (&se, NULL);
3458 se.loop = loop;
3459 se.expr = info->descriptor;
3460 stride = gfc_conv_array_stride (info->descriptor, array_dim);
3461 index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
3462 gfc_add_block_to_block (pblock, &se.pre);
3463
3464 info->offset = fold_build2_loc (input_location, PLUS_EXPR,
3465 gfc_array_index_type,
3466 info->offset, index);
3467 info->offset = gfc_evaluate_now (info->offset, pblock);
3468 }
3469
3470
/* Generate the code to be executed immediately before entering a
   scalarization loop.  LOOP is the loop nest, DIM the loop dimension
   being entered, FLAG the useflags mask selecting which SS entries to
   process (1 = main body, 2 = secondary/copying body), and PBLOCK the
   block receiving the setup code.  */

static void
gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
			 stmtblock_t * pblock)
{
  tree stride;
  gfc_ss_info *ss_info;
  gfc_array_info *info;
  gfc_ss_type ss_type;
  gfc_ss *ss, *pss;
  gfc_loopinfo *ploop;
  gfc_array_ref *ar;
  int i;

  /* This code will be executed before entering the scalarization loop
     for this dimension.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      ss_info = ss->info;

      /* Skip SS entries not selected for this pass.  */
      if ((ss_info->useflags & flag) == 0)
	continue;

      /* Only array-like SS types carry per-dimension offset state.  */
      ss_type = ss_info->type;
      if (ss_type != GFC_SS_SECTION
	  && ss_type != GFC_SS_FUNCTION
	  && ss_type != GFC_SS_CONSTRUCTOR
	  && ss_type != GFC_SS_COMPONENT)
	continue;

      info = &ss_info->data.array;

      gcc_assert (dim < ss->dimen);
      gcc_assert (ss->dimen == loop->dimen);

      if (info->ref)
	ar = &info->ref->u.ar;
      else
	ar = NULL;

      if (dim == loop->dimen - 1 && loop->parent != NULL)
	{
	  /* If we are in the outermost dimension of this loop, the previous
	     dimension shall be in the parent loop.  */
	  gcc_assert (ss->parent != NULL);

	  pss = ss->parent;
	  ploop = loop->parent;

	  /* ss and ss->parent are about the same array.  */
	  gcc_assert (ss_info == pss->info);
	}
      else
	{
	  ploop = loop;
	  pss = ss;
	}

      /* The "previous" dimension: the innermost loop for the outermost
	 dimension, otherwise the next-inner one.  */
      if (dim == loop->dimen - 1)
	i = 0;
      else
	i = dim + 1;

      /* For the time being, there is no loop reordering.  */
      gcc_assert (i == ploop->order[i]);
      i = ploop->order[i];

      if (dim == loop->dimen - 1 && loop->parent == NULL)
	{
	  stride = gfc_conv_array_stride (info->descriptor,
					  innermost_ss (ss)->dim[i]);

	  /* Calculate the stride of the innermost loop.  Hopefully this will
	     allow the backend optimizers to do their stuff more effectively.
	   */
	  info->stride0 = gfc_evaluate_now (stride, pblock);

	  /* For the outermost loop calculate the offset due to any
	     elemental dimensions.  It will have been initialized with the
	     base offset of the array.  */
	  if (info->ref)
	    {
	      for (i = 0; i < ar->dimen; i++)
		{
		  if (ar->dimen_type[i] != DIMEN_ELEMENT)
		    continue;

		  /* LOOP_DIM is irrelevant for elemental dimensions.  */
		  add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
		}
	    }
	}
      else
	/* Add the offset for the previous loop dimension.  */
	add_array_offset (pblock, ploop, ss, ar, pss->dim[i], i);

      /* Remember this offset for the second loop.  */
      if (dim == loop->temp_dim - 1 && loop->parent == NULL)
	info->saved_offset = info->offset;
    }
}
3573
3574
3575 /* Start a scalarized expression. Creates a scope and declares loop
3576 variables. */
3577
3578 void
gfc_start_scalarized_body(gfc_loopinfo * loop,stmtblock_t * pbody)3579 gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
3580 {
3581 int dim;
3582 int n;
3583 int flags;
3584
3585 gcc_assert (!loop->array_parameter);
3586
3587 for (dim = loop->dimen - 1; dim >= 0; dim--)
3588 {
3589 n = loop->order[dim];
3590
3591 gfc_start_block (&loop->code[n]);
3592
3593 /* Create the loop variable. */
3594 loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
3595
3596 if (dim < loop->temp_dim)
3597 flags = 3;
3598 else
3599 flags = 1;
3600 /* Calculate values that will be constant within this loop. */
3601 gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
3602 }
3603 gfc_start_block (pbody);
3604 }
3605
3606
/* Generates the actual loop code for a scalarization loop.  N is the
   dimension to close; PBODY holds the finished loop body.  Inside an
   OpenMP workshare construct the outermost loop is emitted as an
   OMP_FOR, otherwise an ordinary LOOP_EXPR with explicit exit test is
   built.  */

void
gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
			       stmtblock_t * pbody)
{
  stmtblock_t block;
  tree cond;
  tree tmp;
  tree loopbody;
  tree exit_label;
  tree stmt;
  tree init;
  tree incr;

  if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS
		      | OMPWS_SCALARIZER_BODY))
      == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
      && n == loop->dimen - 1)
    {
      /* We create an OMP_FOR construct for the outermost scalarized loop.  */
      init = make_tree_vec (1);
      cond = make_tree_vec (1);
      incr = make_tree_vec (1);

      /* Cycle statement is implemented with a goto.  Exit statement must not
	 be present for this loop.  */
      exit_label = gfc_build_label_decl (NULL_TREE);
      TREE_USED (exit_label) = 1;

      /* Label for cycle statements (if needed).  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (pbody, tmp);

      stmt = make_node (OMP_FOR);

      TREE_TYPE (stmt) = void_type_node;
      OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);

      /* Static scheduling keeps iteration-to-thread mapping consistent
	 across the workshare's scalarized loops.  */
      OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
						 OMP_CLAUSE_SCHEDULE);
      OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
	= OMP_CLAUSE_SCHEDULE_STATIC;
      if (ompws_flags & OMPWS_NOWAIT)
	OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
	  = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);

      /* Initialize the loopvar.  */
      TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
					 loop->from[n]);
      OMP_FOR_INIT (stmt) = init;
      /* The exit condition.  */
      TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
					   boolean_type_node,
					   loop->loopvar[n], loop->to[n]);
      SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
      OMP_FOR_COND (stmt) = cond;
      /* Increment the loopvar.  */
      tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
			loop->loopvar[n], gfc_index_one_node);
      TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
	  void_type_node, loop->loopvar[n], tmp);
      OMP_FOR_INCR (stmt) = incr;

      ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
      gfc_add_expr_to_block (&loop->code[n], stmt);
    }
  else
    {
      /* Run the loop backwards when a dependency requires it and no
	 temporary intervenes.  */
      bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
	&& (loop->temp_ss == NULL);

      loopbody = gfc_finish_block (pbody);

      if (reverse_loop)
	std::swap (loop->from[n], loop->to[n]);

      /* Initialize the loopvar.  */
      if (loop->loopvar[n] != loop->from[n])
	gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);

      exit_label = gfc_build_label_decl (NULL_TREE);

      /* Generate the loop body.  */
      gfc_init_block (&block);

      /* The exit condition.  */
      cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
			      boolean_type_node, loop->loopvar[n], loop->to[n]);
      tmp = build1_v (GOTO_EXPR, exit_label);
      TREE_USED (exit_label) = 1;
      tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
      gfc_add_expr_to_block (&block, tmp);

      /* The main body.  */
      gfc_add_expr_to_block (&block, loopbody);

      /* Increment the loopvar.  */
      tmp = fold_build2_loc (input_location,
			     reverse_loop ? MINUS_EXPR : PLUS_EXPR,
			     gfc_array_index_type, loop->loopvar[n],
			     gfc_index_one_node);

      gfc_add_modify (&block, loop->loopvar[n], tmp);

      /* Build the loop.  */
      tmp = gfc_finish_block (&block);
      tmp = build1_v (LOOP_EXPR, tmp);
      gfc_add_expr_to_block (&loop->code[n], tmp);

      /* Add the exit label.  */
      tmp = build1_v (LABEL_EXPR, exit_label);
      gfc_add_expr_to_block (&loop->code[n], tmp);
    }

}
3723
3724
3725 /* Finishes and generates the loops for a scalarized expression. */
3726
3727 void
gfc_trans_scalarizing_loops(gfc_loopinfo * loop,stmtblock_t * body)3728 gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
3729 {
3730 int dim;
3731 int n;
3732 gfc_ss *ss;
3733 stmtblock_t *pblock;
3734 tree tmp;
3735
3736 pblock = body;
3737 /* Generate the loops. */
3738 for (dim = 0; dim < loop->dimen; dim++)
3739 {
3740 n = loop->order[dim];
3741 gfc_trans_scalarized_loop_end (loop, n, pblock);
3742 loop->loopvar[n] = NULL_TREE;
3743 pblock = &loop->code[n];
3744 }
3745
3746 tmp = gfc_finish_block (pblock);
3747 gfc_add_expr_to_block (&loop->pre, tmp);
3748
3749 /* Clear all the used flags. */
3750 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3751 if (ss->parent == NULL)
3752 ss->info->useflags = 0;
3753 }
3754
3755
/* Finish the main body of a scalarized expression, and start the secondary
   copying body.  Closes the loops used by the temporary (keeping the
   outermost one open), restores the saved per-array offsets, reopens the
   inner loops with fresh loop variables and starts BODY for the copy
   pass.  */

void
gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
{
  int dim;
  int n;
  stmtblock_t *pblock;
  gfc_ss *ss;

  pblock = body;
  /* We finish as many loops as are used by the temporary.  */
  for (dim = 0; dim < loop->temp_dim - 1; dim++)
    {
      n = loop->order[dim];
      gfc_trans_scalarized_loop_end (loop, n, pblock);
      loop->loopvar[n] = NULL_TREE;
      pblock = &loop->code[n];
    }

  /* We don't want to finish the outermost loop entirely.  */
  n = loop->order[loop->temp_dim - 1];
  gfc_trans_scalarized_loop_end (loop, n, pblock);

  /* Restore the initial offsets.  */
  for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
    {
      gfc_ss_type ss_type;
      gfc_ss_info *ss_info;

      ss_info = ss->info;

      /* Only entries participating in the secondary pass (flag 2).  */
      if ((ss_info->useflags & 2) == 0)
	continue;

      ss_type = ss_info->type;
      if (ss_type != GFC_SS_SECTION
	  && ss_type != GFC_SS_FUNCTION
	  && ss_type != GFC_SS_CONSTRUCTOR
	  && ss_type != GFC_SS_COMPONENT)
	continue;

      /* saved_offset was recorded by gfc_trans_preloop_setup.  */
      ss_info->data.array.offset = ss_info->data.array.saved_offset;
    }

  /* Restart all the inner loops we just finished.  */
  for (dim = loop->temp_dim - 2; dim >= 0; dim--)
    {
      n = loop->order[dim];

      gfc_start_block (&loop->code[n]);

      /* Fresh loop variables ("Q") for the copying pass.  */
      loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");

      gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
    }

  /* Start a block for the secondary copying code.  */
  gfc_start_block (body);
}
3817
3818
3819 /* Precalculate (either lower or upper) bound of an array section.
3820 BLOCK: Block in which the (pre)calculation code will go.
3821 BOUNDS[DIM]: Where the bound value will be stored once evaluated.
3822 VALUES[DIM]: Specified bound (NULL <=> unspecified).
3823 DESC: Array descriptor from which the bound will be picked if unspecified
3824 (either lower or upper bound according to LBOUND). */
3825
3826 static void
evaluate_bound(stmtblock_t * block,tree * bounds,gfc_expr ** values,tree desc,int dim,bool lbound,bool deferred)3827 evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
3828 tree desc, int dim, bool lbound, bool deferred)
3829 {
3830 gfc_se se;
3831 gfc_expr * input_val = values[dim];
3832 tree *output = &bounds[dim];
3833
3834
3835 if (input_val)
3836 {
3837 /* Specified section bound. */
3838 gfc_init_se (&se, NULL);
3839 gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
3840 gfc_add_block_to_block (block, &se.pre);
3841 *output = se.expr;
3842 }
3843 else if (deferred && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
3844 {
3845 /* The gfc_conv_array_lbound () routine returns a constant zero for
3846 deferred length arrays, which in the scalarizer wreaks havoc, when
3847 copying to a (newly allocated) one-based array.
3848 Keep returning the actual result in sync for both bounds. */
3849 *output = lbound ? gfc_conv_descriptor_lbound_get (desc,
3850 gfc_rank_cst[dim]):
3851 gfc_conv_descriptor_ubound_get (desc,
3852 gfc_rank_cst[dim]);
3853 }
3854 else
3855 {
3856 /* No specific bound specified so use the bound of the array. */
3857 *output = lbound ? gfc_conv_array_lbound (desc, dim) :
3858 gfc_conv_array_ubound (desc, dim);
3859 }
3860 *output = gfc_evaluate_now (*output, block);
3861 }
3862
3863
3864 /* Calculate the lower bound of an array section. */
3865
3866 static void
gfc_conv_section_startstride(stmtblock_t * block,gfc_ss * ss,int dim)3867 gfc_conv_section_startstride (stmtblock_t * block, gfc_ss * ss, int dim)
3868 {
3869 gfc_expr *stride = NULL;
3870 tree desc;
3871 gfc_se se;
3872 gfc_array_info *info;
3873 gfc_array_ref *ar;
3874
3875 gcc_assert (ss->info->type == GFC_SS_SECTION);
3876
3877 info = &ss->info->data.array;
3878 ar = &info->ref->u.ar;
3879
3880 if (ar->dimen_type[dim] == DIMEN_VECTOR)
3881 {
3882 /* We use a zero-based index to access the vector. */
3883 info->start[dim] = gfc_index_zero_node;
3884 info->end[dim] = NULL;
3885 info->stride[dim] = gfc_index_one_node;
3886 return;
3887 }
3888
3889 gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
3890 || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
3891 desc = info->descriptor;
3892 stride = ar->stride[dim];
3893
3894
3895 /* Calculate the start of the range. For vector subscripts this will
3896 be the range of the vector. */
3897 evaluate_bound (block, info->start, ar->start, desc, dim, true,
3898 ar->as->type == AS_DEFERRED);
3899
3900 /* Similarly calculate the end. Although this is not used in the
3901 scalarizer, it is needed when checking bounds and where the end
3902 is an expression with side-effects. */
3903 evaluate_bound (block, info->end, ar->end, desc, dim, false,
3904 ar->as->type == AS_DEFERRED);
3905
3906
3907 /* Calculate the stride. */
3908 if (stride == NULL)
3909 info->stride[dim] = gfc_index_one_node;
3910 else
3911 {
3912 gfc_init_se (&se, NULL);
3913 gfc_conv_expr_type (&se, stride, gfc_array_index_type);
3914 gfc_add_block_to_block (block, &se.pre);
3915 info->stride[dim] = gfc_evaluate_now (se.expr, block);
3916 }
3917 }
3918
3919
3920 /* Calculates the range start and stride for a SS chain. Also gets the
3921 descriptor and data pointer. The range of vector subscripts is the size
3922 of the vector. Array bounds are also checked. */
3923
3924 void
gfc_conv_ss_startstride(gfc_loopinfo * loop)3925 gfc_conv_ss_startstride (gfc_loopinfo * loop)
3926 {
3927 int n;
3928 tree tmp;
3929 gfc_ss *ss;
3930 tree desc;
3931
3932 gfc_loopinfo * const outer_loop = outermost_loop (loop);
3933
3934 loop->dimen = 0;
3935 /* Determine the rank of the loop. */
3936 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3937 {
3938 switch (ss->info->type)
3939 {
3940 case GFC_SS_SECTION:
3941 case GFC_SS_CONSTRUCTOR:
3942 case GFC_SS_FUNCTION:
3943 case GFC_SS_COMPONENT:
3944 loop->dimen = ss->dimen;
3945 goto done;
3946
3947 /* As usual, lbound and ubound are exceptions!. */
3948 case GFC_SS_INTRINSIC:
3949 switch (ss->info->expr->value.function.isym->id)
3950 {
3951 case GFC_ISYM_LBOUND:
3952 case GFC_ISYM_UBOUND:
3953 case GFC_ISYM_LCOBOUND:
3954 case GFC_ISYM_UCOBOUND:
3955 case GFC_ISYM_THIS_IMAGE:
3956 loop->dimen = ss->dimen;
3957 goto done;
3958
3959 default:
3960 break;
3961 }
3962
3963 default:
3964 break;
3965 }
3966 }
3967
3968 /* We should have determined the rank of the expression by now. If
3969 not, that's bad news. */
3970 gcc_unreachable ();
3971
3972 done:
3973 /* Loop over all the SS in the chain. */
3974 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
3975 {
3976 gfc_ss_info *ss_info;
3977 gfc_array_info *info;
3978 gfc_expr *expr;
3979
3980 ss_info = ss->info;
3981 expr = ss_info->expr;
3982 info = &ss_info->data.array;
3983
3984 if (expr && expr->shape && !info->shape)
3985 info->shape = expr->shape;
3986
3987 switch (ss_info->type)
3988 {
3989 case GFC_SS_SECTION:
3990 /* Get the descriptor for the array. If it is a cross loops array,
3991 we got the descriptor already in the outermost loop. */
3992 if (ss->parent == NULL)
3993 gfc_conv_ss_descriptor (&outer_loop->pre, ss,
3994 !loop->array_parameter);
3995
3996 for (n = 0; n < ss->dimen; n++)
3997 gfc_conv_section_startstride (&outer_loop->pre, ss, ss->dim[n]);
3998 break;
3999
4000 case GFC_SS_INTRINSIC:
4001 switch (expr->value.function.isym->id)
4002 {
4003 /* Fall through to supply start and stride. */
4004 case GFC_ISYM_LBOUND:
4005 case GFC_ISYM_UBOUND:
4006 {
4007 gfc_expr *arg;
4008
4009 /* This is the variant without DIM=... */
4010 gcc_assert (expr->value.function.actual->next->expr == NULL);
4011
4012 arg = expr->value.function.actual->expr;
4013 if (arg->rank == -1)
4014 {
4015 gfc_se se;
4016 tree rank, tmp;
4017
4018 /* The rank (hence the return value's shape) is unknown,
4019 we have to retrieve it. */
4020 gfc_init_se (&se, NULL);
4021 se.descriptor_only = 1;
4022 gfc_conv_expr (&se, arg);
4023 /* This is a bare variable, so there is no preliminary
4024 or cleanup code. */
4025 gcc_assert (se.pre.head == NULL_TREE
4026 && se.post.head == NULL_TREE);
4027 rank = gfc_conv_descriptor_rank (se.expr);
4028 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4029 gfc_array_index_type,
4030 fold_convert (gfc_array_index_type,
4031 rank),
4032 gfc_index_one_node);
4033 info->end[0] = gfc_evaluate_now (tmp, &outer_loop->pre);
4034 info->start[0] = gfc_index_zero_node;
4035 info->stride[0] = gfc_index_one_node;
4036 continue;
4037 }
4038 /* Otherwise fall through GFC_SS_FUNCTION. */
4039 }
4040 case GFC_ISYM_LCOBOUND:
4041 case GFC_ISYM_UCOBOUND:
4042 case GFC_ISYM_THIS_IMAGE:
4043 break;
4044
4045 default:
4046 continue;
4047 }
4048
4049 case GFC_SS_CONSTRUCTOR:
4050 case GFC_SS_FUNCTION:
4051 for (n = 0; n < ss->dimen; n++)
4052 {
4053 int dim = ss->dim[n];
4054
4055 info->start[dim] = gfc_index_zero_node;
4056 info->end[dim] = gfc_index_zero_node;
4057 info->stride[dim] = gfc_index_one_node;
4058 }
4059 break;
4060
4061 default:
4062 break;
4063 }
4064 }
4065
4066 /* The rest is just runtime bound checking. */
4067 if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
4068 {
4069 stmtblock_t block;
4070 tree lbound, ubound;
4071 tree end;
4072 tree size[GFC_MAX_DIMENSIONS];
4073 tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
4074 gfc_array_info *info;
4075 char *msg;
4076 int dim;
4077
4078 gfc_start_block (&block);
4079
4080 for (n = 0; n < loop->dimen; n++)
4081 size[n] = NULL_TREE;
4082
4083 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4084 {
4085 stmtblock_t inner;
4086 gfc_ss_info *ss_info;
4087 gfc_expr *expr;
4088 locus *expr_loc;
4089 const char *expr_name;
4090
4091 ss_info = ss->info;
4092 if (ss_info->type != GFC_SS_SECTION)
4093 continue;
4094
4095 /* Catch allocatable lhs in f2003. */
4096 if (flag_realloc_lhs && ss->is_alloc_lhs)
4097 continue;
4098
4099 expr = ss_info->expr;
4100 expr_loc = &expr->where;
4101 expr_name = expr->symtree->name;
4102
4103 gfc_start_block (&inner);
4104
4105 /* TODO: range checking for mapped dimensions. */
4106 info = &ss_info->data.array;
4107
4108 /* This code only checks ranges. Elemental and vector
4109 dimensions are checked later. */
4110 for (n = 0; n < loop->dimen; n++)
4111 {
4112 bool check_upper;
4113
4114 dim = ss->dim[n];
4115 if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
4116 continue;
4117
4118 if (dim == info->ref->u.ar.dimen - 1
4119 && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
4120 check_upper = false;
4121 else
4122 check_upper = true;
4123
4124 /* Zero stride is not allowed. */
4125 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
4126 info->stride[dim], gfc_index_zero_node);
4127 msg = xasprintf ("Zero stride is not allowed, for dimension %d "
4128 "of array '%s'", dim + 1, expr_name);
4129 gfc_trans_runtime_check (true, false, tmp, &inner,
4130 expr_loc, msg);
4131 free (msg);
4132
4133 desc = info->descriptor;
4134
4135 /* This is the run-time equivalent of resolve.c's
4136 check_dimension(). The logical is more readable there
4137 than it is here, with all the trees. */
4138 lbound = gfc_conv_array_lbound (desc, dim);
4139 end = info->end[dim];
4140 if (check_upper)
4141 ubound = gfc_conv_array_ubound (desc, dim);
4142 else
4143 ubound = NULL;
4144
4145 /* non_zerosized is true when the selected range is not
4146 empty. */
4147 stride_pos = fold_build2_loc (input_location, GT_EXPR,
4148 boolean_type_node, info->stride[dim],
4149 gfc_index_zero_node);
4150 tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
4151 info->start[dim], end);
4152 stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4153 boolean_type_node, stride_pos, tmp);
4154
4155 stride_neg = fold_build2_loc (input_location, LT_EXPR,
4156 boolean_type_node,
4157 info->stride[dim], gfc_index_zero_node);
4158 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
4159 info->start[dim], end);
4160 stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4161 boolean_type_node,
4162 stride_neg, tmp);
4163 non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4164 boolean_type_node,
4165 stride_pos, stride_neg);
4166
4167 /* Check the start of the range against the lower and upper
4168 bounds of the array, if the range is not empty.
4169 If upper bound is present, include both bounds in the
4170 error message. */
4171 if (check_upper)
4172 {
4173 tmp = fold_build2_loc (input_location, LT_EXPR,
4174 boolean_type_node,
4175 info->start[dim], lbound);
4176 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4177 boolean_type_node,
4178 non_zerosized, tmp);
4179 tmp2 = fold_build2_loc (input_location, GT_EXPR,
4180 boolean_type_node,
4181 info->start[dim], ubound);
4182 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4183 boolean_type_node,
4184 non_zerosized, tmp2);
4185 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4186 "outside of expected range (%%ld:%%ld)",
4187 dim + 1, expr_name);
4188 gfc_trans_runtime_check (true, false, tmp, &inner,
4189 expr_loc, msg,
4190 fold_convert (long_integer_type_node, info->start[dim]),
4191 fold_convert (long_integer_type_node, lbound),
4192 fold_convert (long_integer_type_node, ubound));
4193 gfc_trans_runtime_check (true, false, tmp2, &inner,
4194 expr_loc, msg,
4195 fold_convert (long_integer_type_node, info->start[dim]),
4196 fold_convert (long_integer_type_node, lbound),
4197 fold_convert (long_integer_type_node, ubound));
4198 free (msg);
4199 }
4200 else
4201 {
4202 tmp = fold_build2_loc (input_location, LT_EXPR,
4203 boolean_type_node,
4204 info->start[dim], lbound);
4205 tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4206 boolean_type_node, non_zerosized, tmp);
4207 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4208 "below lower bound of %%ld",
4209 dim + 1, expr_name);
4210 gfc_trans_runtime_check (true, false, tmp, &inner,
4211 expr_loc, msg,
4212 fold_convert (long_integer_type_node, info->start[dim]),
4213 fold_convert (long_integer_type_node, lbound));
4214 free (msg);
4215 }
4216
4217 /* Compute the last element of the range, which is not
4218 necessarily "end" (think 0:5:3, which doesn't contain 5)
4219 and check it against both lower and upper bounds. */
4220
4221 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4222 gfc_array_index_type, end,
4223 info->start[dim]);
4224 tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
4225 gfc_array_index_type, tmp,
4226 info->stride[dim]);
4227 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4228 gfc_array_index_type, end, tmp);
4229 tmp2 = fold_build2_loc (input_location, LT_EXPR,
4230 boolean_type_node, tmp, lbound);
4231 tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4232 boolean_type_node, non_zerosized, tmp2);
4233 if (check_upper)
4234 {
4235 tmp3 = fold_build2_loc (input_location, GT_EXPR,
4236 boolean_type_node, tmp, ubound);
4237 tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
4238 boolean_type_node, non_zerosized, tmp3);
4239 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4240 "outside of expected range (%%ld:%%ld)",
4241 dim + 1, expr_name);
4242 gfc_trans_runtime_check (true, false, tmp2, &inner,
4243 expr_loc, msg,
4244 fold_convert (long_integer_type_node, tmp),
4245 fold_convert (long_integer_type_node, ubound),
4246 fold_convert (long_integer_type_node, lbound));
4247 gfc_trans_runtime_check (true, false, tmp3, &inner,
4248 expr_loc, msg,
4249 fold_convert (long_integer_type_node, tmp),
4250 fold_convert (long_integer_type_node, ubound),
4251 fold_convert (long_integer_type_node, lbound));
4252 free (msg);
4253 }
4254 else
4255 {
4256 msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
4257 "below lower bound of %%ld",
4258 dim + 1, expr_name);
4259 gfc_trans_runtime_check (true, false, tmp2, &inner,
4260 expr_loc, msg,
4261 fold_convert (long_integer_type_node, tmp),
4262 fold_convert (long_integer_type_node, lbound));
4263 free (msg);
4264 }
4265
4266 /* Check the section sizes match. */
4267 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4268 gfc_array_index_type, end,
4269 info->start[dim]);
4270 tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
4271 gfc_array_index_type, tmp,
4272 info->stride[dim]);
4273 tmp = fold_build2_loc (input_location, PLUS_EXPR,
4274 gfc_array_index_type,
4275 gfc_index_one_node, tmp);
4276 tmp = fold_build2_loc (input_location, MAX_EXPR,
4277 gfc_array_index_type, tmp,
4278 build_int_cst (gfc_array_index_type, 0));
4279 /* We remember the size of the first section, and check all the
4280 others against this. */
4281 if (size[n])
4282 {
4283 tmp3 = fold_build2_loc (input_location, NE_EXPR,
4284 boolean_type_node, tmp, size[n]);
4285 msg = xasprintf ("Array bound mismatch for dimension %d "
4286 "of array '%s' (%%ld/%%ld)",
4287 dim + 1, expr_name);
4288
4289 gfc_trans_runtime_check (true, false, tmp3, &inner,
4290 expr_loc, msg,
4291 fold_convert (long_integer_type_node, tmp),
4292 fold_convert (long_integer_type_node, size[n]));
4293
4294 free (msg);
4295 }
4296 else
4297 size[n] = gfc_evaluate_now (tmp, &inner);
4298 }
4299
4300 tmp = gfc_finish_block (&inner);
4301
4302 /* For optional arguments, only check bounds if the argument is
4303 present. */
4304 if (expr->symtree->n.sym->attr.optional
4305 || expr->symtree->n.sym->attr.not_always_present)
4306 tmp = build3_v (COND_EXPR,
4307 gfc_conv_expr_present (expr->symtree->n.sym),
4308 tmp, build_empty_stmt (input_location));
4309
4310 gfc_add_expr_to_block (&block, tmp);
4311
4312 }
4313
4314 tmp = gfc_finish_block (&block);
4315 gfc_add_expr_to_block (&outer_loop->pre, tmp);
4316 }
4317
4318 for (loop = loop->nested; loop; loop = loop->next)
4319 gfc_conv_ss_startstride (loop);
4320 }
4321
4322 /* Return true if both symbols could refer to the same data object. Does
4323 not take account of aliasing due to equivalence statements. */
4324
4325 static int
symbols_could_alias(gfc_symbol * lsym,gfc_symbol * rsym,bool lsym_pointer,bool lsym_target,bool rsym_pointer,bool rsym_target)4326 symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
4327 bool lsym_target, bool rsym_pointer, bool rsym_target)
4328 {
4329 /* Aliasing isn't possible if the symbols have different base types. */
4330 if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
4331 return 0;
4332
4333 /* Pointers can point to other pointers and target objects. */
4334
4335 if ((lsym_pointer && (rsym_pointer || rsym_target))
4336 || (rsym_pointer && (lsym_pointer || lsym_target)))
4337 return 1;
4338
4339 /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
4340 and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
4341 checked above. */
4342 if (lsym_target && rsym_target
4343 && ((lsym->attr.dummy && !lsym->attr.contiguous
4344 && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
4345 || (rsym->attr.dummy && !rsym->attr.contiguous
4346 && (!rsym->attr.dimension
4347 || rsym->as->type == AS_ASSUMED_SHAPE))))
4348 return 1;
4349
4350 return 0;
4351 }
4352
4353
4354 /* Return true if the two SS could be aliased, i.e. both point to the same data
4355 object. */
4356 /* TODO: resolve aliases based on frontend expressions. */
4357
4358 static int
gfc_could_be_alias(gfc_ss * lss,gfc_ss * rss)4359 gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
4360 {
4361 gfc_ref *lref;
4362 gfc_ref *rref;
4363 gfc_expr *lexpr, *rexpr;
4364 gfc_symbol *lsym;
4365 gfc_symbol *rsym;
4366 bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
4367
4368 lexpr = lss->info->expr;
4369 rexpr = rss->info->expr;
4370
4371 lsym = lexpr->symtree->n.sym;
4372 rsym = rexpr->symtree->n.sym;
4373
4374 lsym_pointer = lsym->attr.pointer;
4375 lsym_target = lsym->attr.target;
4376 rsym_pointer = rsym->attr.pointer;
4377 rsym_target = rsym->attr.target;
4378
4379 if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
4380 rsym_pointer, rsym_target))
4381 return 1;
4382
4383 if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
4384 && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
4385 return 0;
4386
4387 /* For derived types we must check all the component types. We can ignore
4388 array references as these will have the same base type as the previous
4389 component ref. */
4390 for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next)
4391 {
4392 if (lref->type != REF_COMPONENT)
4393 continue;
4394
4395 lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
4396 lsym_target = lsym_target || lref->u.c.sym->attr.target;
4397
4398 if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
4399 rsym_pointer, rsym_target))
4400 return 1;
4401
4402 if ((lsym_pointer && (rsym_pointer || rsym_target))
4403 || (rsym_pointer && (lsym_pointer || lsym_target)))
4404 {
4405 if (gfc_compare_types (&lref->u.c.component->ts,
4406 &rsym->ts))
4407 return 1;
4408 }
4409
4410 for (rref = rexpr->ref; rref != rss->info->data.array.ref;
4411 rref = rref->next)
4412 {
4413 if (rref->type != REF_COMPONENT)
4414 continue;
4415
4416 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4417 rsym_target = lsym_target || rref->u.c.sym->attr.target;
4418
4419 if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
4420 lsym_pointer, lsym_target,
4421 rsym_pointer, rsym_target))
4422 return 1;
4423
4424 if ((lsym_pointer && (rsym_pointer || rsym_target))
4425 || (rsym_pointer && (lsym_pointer || lsym_target)))
4426 {
4427 if (gfc_compare_types (&lref->u.c.component->ts,
4428 &rref->u.c.sym->ts))
4429 return 1;
4430 if (gfc_compare_types (&lref->u.c.sym->ts,
4431 &rref->u.c.component->ts))
4432 return 1;
4433 if (gfc_compare_types (&lref->u.c.component->ts,
4434 &rref->u.c.component->ts))
4435 return 1;
4436 }
4437 }
4438 }
4439
4440 lsym_pointer = lsym->attr.pointer;
4441 lsym_target = lsym->attr.target;
4442 lsym_pointer = lsym->attr.pointer;
4443 lsym_target = lsym->attr.target;
4444
4445 for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next)
4446 {
4447 if (rref->type != REF_COMPONENT)
4448 break;
4449
4450 rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
4451 rsym_target = lsym_target || rref->u.c.sym->attr.target;
4452
4453 if (symbols_could_alias (rref->u.c.sym, lsym,
4454 lsym_pointer, lsym_target,
4455 rsym_pointer, rsym_target))
4456 return 1;
4457
4458 if ((lsym_pointer && (rsym_pointer || rsym_target))
4459 || (rsym_pointer && (lsym_pointer || lsym_target)))
4460 {
4461 if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
4462 return 1;
4463 }
4464 }
4465
4466 return 0;
4467 }
4468
4469
4470 /* Resolve array data dependencies. Creates a temporary if required. */
4471 /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
4472 dependency.c. */
4473
void
gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
			       gfc_ss * rss)
{
  gfc_ss *ss;
  gfc_ref *lref;
  gfc_ref *rref;
  gfc_ss_info *ss_info;
  gfc_expr *dest_expr;
  gfc_expr *ss_expr;
  int nDepend = 0;	/* Set to 1 as soon as a temporary is required.  */
  int i, j;

  loop->temp_ss = NULL;
  dest_expr = dest->info->expr;

  /* Walk every term on the rhs chain, looking for a dependency against
     the destination DEST.  */
  for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
    {
      ss_info = ss->info;
      ss_expr = ss_info->expr;

      /* A term already known to carry an outer array dependency (e.g. from
	 a function call) forces a temporary unconditionally.  */
      if (ss_info->array_outer_dependency)
	{
	  nDepend = 1;
	  break;
	}

      if (ss_info->type != GFC_SS_SECTION)
	{
	  /* Non-section terms: a reallocatable lhs may change shape or be
	     reallocated mid-assignment, so check the dependency with the
	     reallocation-aware variant.  */
	  if (flag_realloc_lhs
	      && dest_expr != ss_expr
	      && gfc_is_reallocatable_lhs (dest_expr)
	      && ss_expr->rank)
	    nDepend = gfc_check_dependency (dest_expr, ss_expr, true);

	  /* Check for cases like c(:)(1:2) = c(2)(2:3) */
	  if (!nDepend && dest_expr->rank > 0
	      && dest_expr->ts.type == BT_CHARACTER
	      && ss_expr->expr_type == EXPR_VARIABLE)

	    nDepend = gfc_check_dependency (dest_expr, ss_expr, false);

	  /* A scalar reference that overlaps the destination only needs a
	     scalar temporary, not a whole array temporary.  */
	  if (ss_info->type == GFC_SS_REFERENCE
	      && gfc_check_dependency (dest_expr, ss_expr, false))
	    ss_info->data.scalar.needs_temporary = 1;

	  continue;
	}

      if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym)
	{
	  /* Different base symbols: dependency only via aliasing (pointers,
	     targets, derived-type components) or EQUIVALENCE.  */
	  if (gfc_could_be_alias (dest, ss)
	      || gfc_are_equivalenced_arrays (dest_expr, ss_expr))
	    {
	      nDepend = 1;
	      break;
	    }
	}
      else
	{
	  /* Same base symbol: resolve the dependency from the reference
	     chains; gfc_dep_resolver may also request loop reversal via
	     loop->reverse.  */
	  lref = dest_expr->ref;
	  rref = ss_expr->ref;

	  nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);

	  if (nDepend == 1)
	    break;

	  for (i = 0; i < dest->dimen; i++)
	    for (j = 0; j < ss->dimen; j++)
	      if (i != j
		  && dest->dim[i] == ss->dim[j])
		{
		  /* If we don't access array elements in the same order,
		     there is a dependency.  */
		  nDepend = 1;
		  goto temporary;
		}
#if 0
	  /* TODO : loop shifting.  */
	  if (nDepend == 1)
	    {
	      /* Mark the dimensions for LOOP SHIFTING */
	      for (n = 0; n < loop->dimen; n++)
		{
		  int dim = dest->data.info.dim[n];

		  if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
		    depends[n] = 2;
		  else if (! gfc_is_same_range (&lref->u.ar,
						&rref->u.ar, dim, 0))
		    depends[n] = 1;
		}

	      /* Put all the dimensions with dependencies in the
		 innermost loops.  */
	      dim = 0;
	      for (n = 0; n < loop->dimen; n++)
		{
		  gcc_assert (loop->order[n] == n);
		  if (depends[n])
		    loop->order[dim++] = n;
		}
	      for (n = 0; n < loop->dimen; n++)
		{
		  if (! depends[n])
		    loop->order[dim++] = n;
		}

	      gcc_assert (dim == loop->dimen);
	      break;
	    }
#endif
	}
    }

temporary:

  /* If any dependency was found, create a temporary SS of the destination's
     element type and attach it to the loop; otherwise record that no
     temporary is needed.  */
  if (nDepend == 1)
    {
      tree base_type = gfc_typenode_for_spec (&dest_expr->ts);
      if (GFC_ARRAY_TYPE_P (base_type)
	  || GFC_DESCRIPTOR_TYPE_P (base_type))
	base_type = gfc_get_element_type (base_type);
      loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length,
				       loop->dimen);
      gfc_add_ss_to_loop (loop, loop->temp_ss);
    }
  else
    loop->temp_ss = NULL;
}
4605
4606
4607 /* Browse through each array's information from the scalarizer and set the loop
4608 bounds according to the "best" one (per dimension), i.e. the one which
4609 provides the most information (constant bounds, shape, etc.). */
4610
static void
set_loop_bounds (gfc_loopinfo *loop)
{
  int n, dim, spec_dim;
  gfc_array_info *info;
  gfc_array_info *specinfo;
  gfc_ss *ss;
  tree tmp;
  gfc_ss **loopspec;	/* Chosen specifier SS, one per loop dimension.  */
  bool dynamic[GFC_MAX_DIMENSIONS];
  mpz_t *cshape;
  mpz_t i;
  bool nonoptional_arr;

  gfc_loopinfo * const outer_loop = outermost_loop (loop);

  loopspec = loop->specloop;

  mpz_init (i);
  for (n = 0; n < loop->dimen; n++)
    {
      loopspec[n] = NULL;
      dynamic[n] = false;

      /* If there are both optional and nonoptional array arguments, scalarize
	 over the nonoptional; otherwise, it does not matter as then all
	 (optional) arrays have to be present per F2008, 12.5.2.12p3(6).  */

      nonoptional_arr = false;

      for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
	if (ss->info->type != GFC_SS_SCALAR && ss->info->type != GFC_SS_TEMP
	    && ss->info->type != GFC_SS_REFERENCE && !ss->info->can_be_null_ref)
	  {
	    nonoptional_arr = true;
	    break;
	  }

      /* We use one SS term, and use that to determine the bounds of the
	 loop for this dimension.  We try to pick the simplest term.  */
      for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
	{
	  gfc_ss_type ss_type;

	  /* Scalars and scalar-like terms have no bounds to offer; also
	     skip possibly-absent arrays when a definitely-present one
	     exists (see above).  */
	  ss_type = ss->info->type;
	  if (ss_type == GFC_SS_SCALAR
	      || ss_type == GFC_SS_TEMP
	      || ss_type == GFC_SS_REFERENCE
	      || (ss->info->can_be_null_ref && nonoptional_arr))
	    continue;

	  info = &ss->info->data.array;
	  dim = ss->dim[n];

	  if (loopspec[n] != NULL)
	    {
	      specinfo = &loopspec[n]->info->data.array;
	      spec_dim = loopspec[n]->dim[n];
	    }
	  else
	    {
	      /* Silence uninitialized warnings.  */
	      specinfo = NULL;
	      spec_dim = 0;
	    }

	  if (info->shape)
	    {
	      gcc_assert (info->shape[dim]);
	      /* The frontend has worked out the size for us.  */
	      if (!loopspec[n]
		  || !specinfo->shape
		  || !integer_zerop (specinfo->start[spec_dim]))
		/* Prefer zero-based descriptors if possible.  */
		loopspec[n] = ss;
	      continue;
	    }

	  if (ss_type == GFC_SS_CONSTRUCTOR)
	    {
	      gfc_constructor_base base;
	      /* An unknown size constructor will always be rank one.
		 Higher rank constructors will either have known shape,
		 or still be wrapped in a call to reshape.  */
	      gcc_assert (loop->dimen == 1);

	      /* Always prefer to use the constructor bounds if the size
		 can be determined at compile time.  Prefer not to otherwise,
		 since the general case involves realloc, and it's better to
		 avoid that overhead if possible.  */
	      base = ss->info->expr->value.constructor;
	      dynamic[n] = gfc_get_array_constructor_size (&i, base);
	      if (!dynamic[n] || !loopspec[n])
		loopspec[n] = ss;
	      continue;
	    }

	  /* Avoid using an allocatable lhs in an assignment, since
	     there might be a reallocation coming.  */
	  if (loopspec[n] && ss->is_alloc_lhs)
	    continue;

	  if (!loopspec[n])
	    loopspec[n] = ss;
	  /* Criteria for choosing a loop specifier (most important first):
	     doesn't need realloc
	     stride of one
	     known stride
	     known lower bound
	     known upper bound
	   */
	  else if (loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
	    loopspec[n] = ss;
	  else if (integer_onep (info->stride[dim])
		   && !integer_onep (specinfo->stride[spec_dim]))
	    loopspec[n] = ss;
	  else if (INTEGER_CST_P (info->stride[dim])
		   && !INTEGER_CST_P (specinfo->stride[spec_dim]))
	    loopspec[n] = ss;
	  else if (INTEGER_CST_P (info->start[dim])
		   && !INTEGER_CST_P (specinfo->start[spec_dim])
		   && integer_onep (info->stride[dim])
		      == integer_onep (specinfo->stride[spec_dim])
		   && INTEGER_CST_P (info->stride[dim])
		      == INTEGER_CST_P (specinfo->stride[spec_dim]))
	    loopspec[n] = ss;
	  /* We don't work out the upper bound.
	     else if (INTEGER_CST_P (info->finish[n])
	     && ! INTEGER_CST_P (specinfo->finish[n]))
	     loopspec[n] = ss; */
	}

      /* We should have found the scalarization loop specifier.  If not,
	 that's bad news.  */
      gcc_assert (loopspec[n]);

      info = &loopspec[n]->info->data.array;
      dim = loopspec[n]->dim[n];

      /* Set the extents of this range.  */
      cshape = info->shape;
      if (cshape && INTEGER_CST_P (info->start[dim])
	  && INTEGER_CST_P (info->stride[dim]))
	{
	  /* Constant shape known to the frontend: compute the upper bound
	     directly from the extent.  */
	  loop->from[n] = info->start[dim];
	  mpz_set (i, cshape[get_array_ref_dim_for_loop_dim (loopspec[n], n)]);
	  mpz_sub_ui (i, i, 1);
	  /* To = from + (size - 1) * stride.  */
	  tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
	  if (!integer_onep (info->stride[dim]))
	    tmp = fold_build2_loc (input_location, MULT_EXPR,
				   gfc_array_index_type, tmp,
				   info->stride[dim]);
	  loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
					 gfc_array_index_type,
					 loop->from[n], tmp);
	}
      else
	{
	  /* Otherwise the upper bound depends on the kind of the chosen
	     specifier term.  */
	  loop->from[n] = info->start[dim];
	  switch (loopspec[n]->info->type)
	    {
	    case GFC_SS_CONSTRUCTOR:
	      /* The upper bound is calculated when we expand the
		 constructor.  */
	      gcc_assert (loop->to[n] == NULL_TREE);
	      break;

	    case GFC_SS_SECTION:
	      /* Use the end expression if it exists and is not constant,
		 so that it is only evaluated once.  */
	      loop->to[n] = info->end[dim];
	      break;

	    case GFC_SS_FUNCTION:
	      /* The loop bound will be set when we generate the call.  */
	      gcc_assert (loop->to[n] == NULL_TREE);
	      break;

	    case GFC_SS_INTRINSIC:
	      {
		gfc_expr *expr = loopspec[n]->info->expr;

		/* The {l,u}bound of an assumed rank.  */
		gcc_assert ((expr->value.function.isym->id == GFC_ISYM_LBOUND
			     || expr->value.function.isym->id == GFC_ISYM_UBOUND)
			     && expr->value.function.actual->next->expr == NULL
			     && expr->value.function.actual->expr->rank == -1);

		loop->to[n] = info->end[dim];
		break;
	      }

	    default:
	      gcc_unreachable ();
	    }
	}

      /* Transform everything so we have a simple incrementing variable.  */
      if (integer_onep (info->stride[dim]))
	info->delta[dim] = gfc_index_zero_node;
      else
	{
	  /* Set the delta for this section.  */
	  info->delta[dim] = gfc_evaluate_now (loop->from[n], &outer_loop->pre);
	  /* Number of iterations is (end - start + step) / step.
	     with start = 0, this simplifies to
	     last = end / step;
	     for (i = 0; i<=last; i++){...};  */
	  tmp = fold_build2_loc (input_location, MINUS_EXPR,
				 gfc_array_index_type, loop->to[n],
				 loop->from[n]);
	  tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
				 gfc_array_index_type, tmp, info->stride[dim]);
	  tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
				 tmp, build_int_cst (gfc_array_index_type, -1));
	  loop->to[n] = gfc_evaluate_now (tmp, &outer_loop->pre);
	  /* Make the loop variable start at 0.  */
	  loop->from[n] = gfc_index_zero_node;
	}
    }
  mpz_clear (i);

  /* Recurse into nested loops (cross-loop scalarization).  */
  for (loop = loop->nested; loop; loop = loop->next)
    set_loop_bounds (loop);
}
4837
4838
4839 /* Initialize the scalarization loop. Creates the loop variables. Determines
4840 the range of the loop variables. Creates a temporary if required.
4841 Also generates code for scalar expressions which have been
4842 moved outside the loop. */
4843
4844 void
gfc_conv_loop_setup(gfc_loopinfo * loop,locus * where)4845 gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
4846 {
4847 gfc_ss *tmp_ss;
4848 tree tmp;
4849
4850 set_loop_bounds (loop);
4851
4852 /* Add all the scalar code that can be taken out of the loops.
4853 This may include calculating the loop bounds, so do it before
4854 allocating the temporary. */
4855 gfc_add_loop_ss_code (loop, loop->ss, false, where);
4856
4857 tmp_ss = loop->temp_ss;
4858 /* If we want a temporary then create it. */
4859 if (tmp_ss != NULL)
4860 {
4861 gfc_ss_info *tmp_ss_info;
4862
4863 tmp_ss_info = tmp_ss->info;
4864 gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
4865 gcc_assert (loop->parent == NULL);
4866
4867 /* Make absolutely sure that this is a complete type. */
4868 if (tmp_ss_info->string_length)
4869 tmp_ss_info->data.temp.type
4870 = gfc_get_character_type_len_for_eltype
4871 (TREE_TYPE (tmp_ss_info->data.temp.type),
4872 tmp_ss_info->string_length);
4873
4874 tmp = tmp_ss_info->data.temp.type;
4875 memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
4876 tmp_ss_info->type = GFC_SS_SECTION;
4877
4878 gcc_assert (tmp_ss->dimen != 0);
4879
4880 gfc_trans_create_temp_array (&loop->pre, &loop->post, tmp_ss, tmp,
4881 NULL_TREE, false, true, false, where);
4882 }
4883
4884 /* For array parameters we don't have loop variables, so don't calculate the
4885 translations. */
4886 if (!loop->array_parameter)
4887 gfc_set_delta (loop);
4888 }
4889
4890
4891 /* Calculates how to transform from loop variables to array indices for each
4892 array: once loop bounds are chosen, sets the difference (DELTA field) between
4893 loop bounds and array reference bounds, for each array info. */
4894
4895 void
gfc_set_delta(gfc_loopinfo * loop)4896 gfc_set_delta (gfc_loopinfo *loop)
4897 {
4898 gfc_ss *ss, **loopspec;
4899 gfc_array_info *info;
4900 tree tmp;
4901 int n, dim;
4902
4903 gfc_loopinfo * const outer_loop = outermost_loop (loop);
4904
4905 loopspec = loop->specloop;
4906
4907 /* Calculate the translation from loop variables to array indices. */
4908 for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
4909 {
4910 gfc_ss_type ss_type;
4911
4912 ss_type = ss->info->type;
4913 if (ss_type != GFC_SS_SECTION
4914 && ss_type != GFC_SS_COMPONENT
4915 && ss_type != GFC_SS_CONSTRUCTOR)
4916 continue;
4917
4918 info = &ss->info->data.array;
4919
4920 for (n = 0; n < ss->dimen; n++)
4921 {
4922 /* If we are specifying the range the delta is already set. */
4923 if (loopspec[n] != ss)
4924 {
4925 dim = ss->dim[n];
4926
4927 /* Calculate the offset relative to the loop variable.
4928 First multiply by the stride. */
4929 tmp = loop->from[n];
4930 if (!integer_onep (info->stride[dim]))
4931 tmp = fold_build2_loc (input_location, MULT_EXPR,
4932 gfc_array_index_type,
4933 tmp, info->stride[dim]);
4934
4935 /* Then subtract this from our starting value. */
4936 tmp = fold_build2_loc (input_location, MINUS_EXPR,
4937 gfc_array_index_type,
4938 info->start[dim], tmp);
4939
4940 info->delta[dim] = gfc_evaluate_now (tmp, &outer_loop->pre);
4941 }
4942 }
4943 }
4944
4945 for (loop = loop->nested; loop; loop = loop->next)
4946 gfc_set_delta (loop);
4947 }
4948
4949
4950 /* Calculate the size of a given array dimension from the bounds. This
4951 is simply (ubound - lbound + 1) if this expression is positive
4952 or 0 if it is negative (pick either one if it is zero). Optionally
4953 (if or_expr is present) OR the (expression != 0) condition to it. */
4954
4955 tree
gfc_conv_array_extent_dim(tree lbound,tree ubound,tree * or_expr)4956 gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
4957 {
4958 tree res;
4959 tree cond;
4960
4961 /* Calculate (ubound - lbound + 1). */
4962 res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
4963 ubound, lbound);
4964 res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
4965 gfc_index_one_node);
4966
4967 /* Check whether the size for this dimension is negative. */
4968 cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
4969 gfc_index_zero_node);
4970 res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
4971 gfc_index_zero_node, res);
4972
4973 /* Build OR expression. */
4974 if (or_expr)
4975 *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
4976 boolean_type_node, *or_expr, cond);
4977
4978 return res;
4979 }
4980
4981
4982 /* For an array descriptor, get the total number of elements. This is just
4983 the product of the extents along from_dim to to_dim. */
4984
4985 static tree
gfc_conv_descriptor_size_1(tree desc,int from_dim,int to_dim)4986 gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
4987 {
4988 tree res;
4989 int dim;
4990
4991 res = gfc_index_one_node;
4992
4993 for (dim = from_dim; dim < to_dim; ++dim)
4994 {
4995 tree lbound;
4996 tree ubound;
4997 tree extent;
4998
4999 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
5000 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
5001
5002 extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
5003 res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5004 res, extent);
5005 }
5006
5007 return res;
5008 }
5009
5010
5011 /* Full size of an array. */
5012
tree
gfc_conv_descriptor_size (tree desc, int rank)
{
  /* Product of the extents of dimensions 0 .. rank-1.  */
  return gfc_conv_descriptor_size_1 (desc, 0, rank);
}
5018
5019
5020 /* Size of a coarray for all dimensions but the last. */
5021
tree
gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
{
  /* Product of the extents of the codimensions (dimensions rank ..
     rank+corank-2), deliberately excluding the last codimension.  */
  return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
}
5027
5028
5029 /* Fills in an array descriptor, and returns the size of the array.
5030 The size will be a simple_val, ie a variable or a constant. Also
5031 calculates the offset of the base. The pointer argument overflow,
5032 which should be of integer type, will increase in value if overflow
5033 occurs during the size calculation. Returns the size of the array.
5034 {
5035 stride = 1;
5036 offset = 0;
5037 for (n = 0; n < rank; n++)
5038 {
5039 a.lbound[n] = specified_lower_bound;
      offset = offset + a.lbound[n] * stride;
5041 size = 1 - lbound;
5042 a.ubound[n] = specified_upper_bound;
5043 a.stride[n] = stride;
5044 size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
5045 overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
5046 stride = stride * size;
5047 }
5048 for (n = rank; n < rank+corank; n++)
5049 (Set lcobound/ucobound as above.)
5050 element_size = sizeof (array element);
5051 if (!rank)
5052 return element_size
5053 stride = (size_t) stride;
5054 overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
5055 stride = stride * element_size;
5056 return (stride);
5057 } */
5058 /*GCC ARRAYS*/
5059
5060 static tree
gfc_array_init_size(tree descriptor,int rank,int corank,tree * poffset,gfc_expr ** lower,gfc_expr ** upper,stmtblock_t * pblock,stmtblock_t * descriptor_block,tree * overflow,tree expr3_elem_size,tree * nelems,gfc_expr * expr3,tree expr3_desc,bool e3_is_array_constr,gfc_expr * expr)5061 gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
5062 gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
5063 stmtblock_t * descriptor_block, tree * overflow,
5064 tree expr3_elem_size, tree *nelems, gfc_expr *expr3,
5065 tree expr3_desc, bool e3_is_array_constr, gfc_expr *expr)
5066 {
5067 tree type;
5068 tree tmp;
5069 tree size;
5070 tree offset;
5071 tree stride;
5072 tree element_size;
5073 tree or_expr;
5074 tree thencase;
5075 tree elsecase;
5076 tree cond;
5077 tree var;
5078 stmtblock_t thenblock;
5079 stmtblock_t elseblock;
5080 gfc_expr *ubound;
5081 gfc_se se;
5082 int n;
5083
5084 type = TREE_TYPE (descriptor);
5085
5086 stride = gfc_index_one_node;
5087 offset = gfc_index_zero_node;
5088
5089 /* Set the dtype. */
5090 if (expr->ts.type == BT_CHARACTER && expr->ts.deferred
5091 && TREE_CODE (expr->ts.u.cl->backend_decl) == VAR_DECL)
5092 {
5093 type = gfc_typenode_for_spec (&expr->ts);
5094 tmp = gfc_conv_descriptor_dtype (descriptor);
5095 gfc_add_modify (descriptor_block, tmp,
5096 gfc_get_dtype_rank_type (rank, type));
5097 }
5098 else
5099 {
5100 tmp = gfc_conv_descriptor_dtype (descriptor);
5101 gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (type));
5102 }
5103
5104 or_expr = boolean_false_node;
5105
5106 for (n = 0; n < rank; n++)
5107 {
5108 tree conv_lbound;
5109 tree conv_ubound;
5110
5111 /* We have 3 possibilities for determining the size of the array:
5112 lower == NULL => lbound = 1, ubound = upper[n]
5113 upper[n] = NULL => lbound = 1, ubound = lower[n]
5114 upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
5115 ubound = upper[n];
5116
5117 /* Set lower bound. */
5118 gfc_init_se (&se, NULL);
5119 if (expr3_desc != NULL_TREE)
5120 {
5121 if (e3_is_array_constr)
5122 /* The lbound of a constant array [] starts at zero, but when
5123 allocating it, the standard expects the array to start at
5124 one. */
5125 se.expr = gfc_index_one_node;
5126 else
5127 se.expr = gfc_conv_descriptor_lbound_get (expr3_desc,
5128 gfc_rank_cst[n]);
5129 }
5130 else if (lower == NULL)
5131 se.expr = gfc_index_one_node;
5132 else
5133 {
5134 gcc_assert (lower[n]);
5135 if (ubound)
5136 {
5137 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
5138 gfc_add_block_to_block (pblock, &se.pre);
5139 }
5140 else
5141 {
5142 se.expr = gfc_index_one_node;
5143 ubound = lower[n];
5144 }
5145 }
5146 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
5147 gfc_rank_cst[n], se.expr);
5148 conv_lbound = se.expr;
5149
5150 /* Work out the offset for this component. */
5151 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5152 se.expr, stride);
5153 offset = fold_build2_loc (input_location, MINUS_EXPR,
5154 gfc_array_index_type, offset, tmp);
5155
5156 /* Set upper bound. */
5157 gfc_init_se (&se, NULL);
5158 if (expr3_desc != NULL_TREE)
5159 {
5160 if (e3_is_array_constr)
5161 {
5162 /* The lbound of a constant array [] starts at zero, but when
5163 allocating it, the standard expects the array to start at
5164 one. Therefore fix the upper bound to be
5165 (desc.ubound - desc.lbound)+ 1. */
5166 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5167 gfc_array_index_type,
5168 gfc_conv_descriptor_ubound_get (
5169 expr3_desc, gfc_rank_cst[n]),
5170 gfc_conv_descriptor_lbound_get (
5171 expr3_desc, gfc_rank_cst[n]));
5172 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5173 gfc_array_index_type, tmp,
5174 gfc_index_one_node);
5175 se.expr = gfc_evaluate_now (tmp, pblock);
5176 }
5177 else
5178 se.expr = gfc_conv_descriptor_ubound_get (expr3_desc,
5179 gfc_rank_cst[n]);
5180 }
5181 else
5182 {
5183 gcc_assert (ubound);
5184 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
5185 gfc_add_block_to_block (pblock, &se.pre);
5186 if (ubound->expr_type == EXPR_FUNCTION)
5187 se.expr = gfc_evaluate_now (se.expr, pblock);
5188 }
5189 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
5190 gfc_rank_cst[n], se.expr);
5191 conv_ubound = se.expr;
5192
5193 /* Store the stride. */
5194 gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
5195 gfc_rank_cst[n], stride);
5196
5197 /* Calculate size and check whether extent is negative. */
5198 size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
5199 size = gfc_evaluate_now (size, pblock);
5200
5201 /* Check whether multiplying the stride by the number of
5202 elements in this dimension would overflow. We must also check
5203 whether the current dimension has zero size in order to avoid
5204 division by zero.
5205 */
5206 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
5207 gfc_array_index_type,
5208 fold_convert (gfc_array_index_type,
5209 TYPE_MAX_VALUE (gfc_array_index_type)),
5210 size);
5211 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
5212 boolean_type_node, tmp, stride),
5213 PRED_FORTRAN_OVERFLOW);
5214 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5215 integer_one_node, integer_zero_node);
5216 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5217 boolean_type_node, size,
5218 gfc_index_zero_node),
5219 PRED_FORTRAN_SIZE_ZERO);
5220 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5221 integer_zero_node, tmp);
5222 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5223 *overflow, tmp);
5224 *overflow = gfc_evaluate_now (tmp, pblock);
5225
5226 /* Multiply the stride by the number of elements in this dimension. */
5227 stride = fold_build2_loc (input_location, MULT_EXPR,
5228 gfc_array_index_type, stride, size);
5229 stride = gfc_evaluate_now (stride, pblock);
5230 }
5231
5232 for (n = rank; n < rank + corank; n++)
5233 {
5234 ubound = upper[n];
5235
5236 /* Set lower bound. */
5237 gfc_init_se (&se, NULL);
5238 if (lower == NULL || lower[n] == NULL)
5239 {
5240 gcc_assert (n == rank + corank - 1);
5241 se.expr = gfc_index_one_node;
5242 }
5243 else
5244 {
5245 if (ubound || n == rank + corank - 1)
5246 {
5247 gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
5248 gfc_add_block_to_block (pblock, &se.pre);
5249 }
5250 else
5251 {
5252 se.expr = gfc_index_one_node;
5253 ubound = lower[n];
5254 }
5255 }
5256 gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
5257 gfc_rank_cst[n], se.expr);
5258
5259 if (n < rank + corank - 1)
5260 {
5261 gfc_init_se (&se, NULL);
5262 gcc_assert (ubound);
5263 gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
5264 gfc_add_block_to_block (pblock, &se.pre);
5265 gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
5266 gfc_rank_cst[n], se.expr);
5267 }
5268 }
5269
5270 /* The stride is the number of elements in the array, so multiply by the
5271 size of an element to get the total size. Obviously, if there is a
5272 SOURCE expression (expr3) we must use its element size. */
5273 if (expr3_elem_size != NULL_TREE)
5274 tmp = expr3_elem_size;
5275 else if (expr3 != NULL)
5276 {
5277 if (expr3->ts.type == BT_CLASS)
5278 {
5279 gfc_se se_sz;
5280 gfc_expr *sz = gfc_copy_expr (expr3);
5281 gfc_add_vptr_component (sz);
5282 gfc_add_size_component (sz);
5283 gfc_init_se (&se_sz, NULL);
5284 gfc_conv_expr (&se_sz, sz);
5285 gfc_free_expr (sz);
5286 tmp = se_sz.expr;
5287 }
5288 else
5289 {
5290 tmp = gfc_typenode_for_spec (&expr3->ts);
5291 tmp = TYPE_SIZE_UNIT (tmp);
5292 }
5293 }
5294 else
5295 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5296
5297 /* Convert to size_t. */
5298 element_size = fold_convert (size_type_node, tmp);
5299
5300 if (rank == 0)
5301 return element_size;
5302
5303 *nelems = gfc_evaluate_now (stride, pblock);
5304 stride = fold_convert (size_type_node, stride);
5305
5306 /* First check for overflow. Since an array of type character can
5307 have zero element_size, we must check for that before
5308 dividing. */
5309 tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
5310 size_type_node,
5311 TYPE_MAX_VALUE (size_type_node), element_size);
5312 cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
5313 boolean_type_node, tmp, stride),
5314 PRED_FORTRAN_OVERFLOW);
5315 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5316 integer_one_node, integer_zero_node);
5317 cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
5318 boolean_type_node, element_size,
5319 build_int_cst (size_type_node, 0)),
5320 PRED_FORTRAN_SIZE_ZERO);
5321 tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
5322 integer_zero_node, tmp);
5323 tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
5324 *overflow, tmp);
5325 *overflow = gfc_evaluate_now (tmp, pblock);
5326
5327 size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
5328 stride, element_size);
5329
5330 if (poffset != NULL)
5331 {
5332 offset = gfc_evaluate_now (offset, pblock);
5333 *poffset = offset;
5334 }
5335
5336 if (integer_zerop (or_expr))
5337 return size;
5338 if (integer_onep (or_expr))
5339 return build_int_cst (size_type_node, 0);
5340
5341 var = gfc_create_var (TREE_TYPE (size), "size");
5342 gfc_start_block (&thenblock);
5343 gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
5344 thencase = gfc_finish_block (&thenblock);
5345
5346 gfc_start_block (&elseblock);
5347 gfc_add_modify (&elseblock, var, size);
5348 elsecase = gfc_finish_block (&elseblock);
5349
5350 tmp = gfc_evaluate_now (or_expr, pblock);
5351 tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
5352 gfc_add_expr_to_block (pblock, tmp);
5353
5354 return var;
5355 }
5356
5357
5358 /* Retrieve the last ref from the chain. This routine is specific to
5359 gfc_array_allocate ()'s needs. */
5360
5361 bool
retrieve_last_ref(gfc_ref ** ref_in,gfc_ref ** prev_ref_in)5362 retrieve_last_ref (gfc_ref **ref_in, gfc_ref **prev_ref_in)
5363 {
5364 gfc_ref *ref, *prev_ref;
5365
5366 ref = *ref_in;
5367 /* Prevent warnings for uninitialized variables. */
5368 prev_ref = *prev_ref_in;
5369 while (ref && ref->next != NULL)
5370 {
5371 gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
5372 || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
5373 prev_ref = ref;
5374 ref = ref->next;
5375 }
5376
5377 if (ref == NULL || ref->type != REF_ARRAY)
5378 return false;
5379
5380 *ref_in = ref;
5381 *prev_ref_in = prev_ref;
5382 return true;
5383 }
5384
5385 /* Initializes the descriptor and generates a call to _gfor_allocate. Does
5386 the work for an ALLOCATE statement. */
5387 /*GCC ARRAYS*/
5388
5389 bool
gfc_array_allocate(gfc_se * se,gfc_expr * expr,tree status,tree errmsg,tree errlen,tree label_finish,tree expr3_elem_size,tree * nelems,gfc_expr * expr3,tree e3_arr_desc,bool e3_is_array_constr)5390 gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
5391 tree errlen, tree label_finish, tree expr3_elem_size,
5392 tree *nelems, gfc_expr *expr3, tree e3_arr_desc,
5393 bool e3_is_array_constr)
5394 {
5395 tree tmp;
5396 tree pointer;
5397 tree offset = NULL_TREE;
5398 tree token = NULL_TREE;
5399 tree size;
5400 tree msg;
5401 tree error = NULL_TREE;
5402 tree overflow; /* Boolean storing whether size calculation overflows. */
5403 tree var_overflow = NULL_TREE;
5404 tree cond;
5405 tree set_descriptor;
5406 stmtblock_t set_descriptor_block;
5407 stmtblock_t elseblock;
5408 gfc_expr **lower;
5409 gfc_expr **upper;
5410 gfc_ref *ref, *prev_ref = NULL;
5411 bool allocatable, coarray, dimension, alloc_w_e3_arr_spec = false;
5412
5413 ref = expr->ref;
5414
5415 /* Find the last reference in the chain. */
5416 if (!retrieve_last_ref (&ref, &prev_ref))
5417 return false;
5418
5419 /* Take the allocatable and coarray properties solely from the expr-ref's
5420 attributes and not from source=-expression. */
5421 if (!prev_ref)
5422 {
5423 allocatable = expr->symtree->n.sym->attr.allocatable;
5424 coarray = expr->symtree->n.sym->attr.codimension;
5425 dimension = expr->symtree->n.sym->attr.dimension;
5426 }
5427 else
5428 {
5429 allocatable = prev_ref->u.c.component->attr.allocatable;
5430 coarray = prev_ref->u.c.component->attr.codimension;
5431 dimension = prev_ref->u.c.component->attr.dimension;
5432 }
5433
5434 if (!dimension)
5435 gcc_assert (coarray);
5436
5437 if (ref->u.ar.type == AR_FULL && expr3 != NULL)
5438 {
5439 gfc_ref *old_ref = ref;
5440 /* F08:C633: Array shape from expr3. */
5441 ref = expr3->ref;
5442
5443 /* Find the last reference in the chain. */
5444 if (!retrieve_last_ref (&ref, &prev_ref))
5445 {
5446 if (expr3->expr_type == EXPR_FUNCTION
5447 && gfc_expr_attr (expr3).dimension)
5448 ref = old_ref;
5449 else
5450 return false;
5451 }
5452 alloc_w_e3_arr_spec = true;
5453 }
5454
5455 /* Figure out the size of the array. */
5456 switch (ref->u.ar.type)
5457 {
5458 case AR_ELEMENT:
5459 if (!coarray)
5460 {
5461 lower = NULL;
5462 upper = ref->u.ar.start;
5463 break;
5464 }
5465 /* Fall through. */
5466
5467 case AR_SECTION:
5468 lower = ref->u.ar.start;
5469 upper = ref->u.ar.end;
5470 break;
5471
5472 case AR_FULL:
5473 gcc_assert (ref->u.ar.as->type == AS_EXPLICIT
5474 || alloc_w_e3_arr_spec);
5475
5476 lower = ref->u.ar.as->lower;
5477 upper = ref->u.ar.as->upper;
5478 break;
5479
5480 default:
5481 gcc_unreachable ();
5482 break;
5483 }
5484
5485 overflow = integer_zero_node;
5486
5487 gfc_init_block (&set_descriptor_block);
5488 size = gfc_array_init_size (se->expr, alloc_w_e3_arr_spec ? expr->rank
5489 : ref->u.ar.as->rank,
5490 coarray ? ref->u.ar.as->corank : 0,
5491 &offset, lower, upper,
5492 &se->pre, &set_descriptor_block, &overflow,
5493 expr3_elem_size, nelems, expr3, e3_arr_desc,
5494 e3_is_array_constr, expr);
5495
5496 if (dimension)
5497 {
5498 var_overflow = gfc_create_var (integer_type_node, "overflow");
5499 gfc_add_modify (&se->pre, var_overflow, overflow);
5500
5501 if (status == NULL_TREE)
5502 {
5503 /* Generate the block of code handling overflow. */
5504 msg = gfc_build_addr_expr (pchar_type_node,
5505 gfc_build_localized_cstring_const
5506 ("Integer overflow when calculating the amount of "
5507 "memory to allocate"));
5508 error = build_call_expr_loc (input_location,
5509 gfor_fndecl_runtime_error, 1, msg);
5510 }
5511 else
5512 {
5513 tree status_type = TREE_TYPE (status);
5514 stmtblock_t set_status_block;
5515
5516 gfc_start_block (&set_status_block);
5517 gfc_add_modify (&set_status_block, status,
5518 build_int_cst (status_type, LIBERROR_ALLOCATION));
5519 error = gfc_finish_block (&set_status_block);
5520 }
5521 }
5522
5523 gfc_start_block (&elseblock);
5524
5525 /* Allocate memory to store the data. */
5526 if (POINTER_TYPE_P (TREE_TYPE (se->expr)))
5527 se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
5528
5529 pointer = gfc_conv_descriptor_data_get (se->expr);
5530 STRIP_NOPS (pointer);
5531
5532 if (coarray && flag_coarray == GFC_FCOARRAY_LIB)
5533 token = gfc_build_addr_expr (NULL_TREE,
5534 gfc_conv_descriptor_token (se->expr));
5535
5536 /* The allocatable variant takes the old pointer as first argument. */
5537 if (allocatable)
5538 gfc_allocate_allocatable (&elseblock, pointer, size, token,
5539 status, errmsg, errlen, label_finish, expr);
5540 else
5541 gfc_allocate_using_malloc (&elseblock, pointer, size, status);
5542
5543 if (dimension)
5544 {
5545 cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
5546 boolean_type_node, var_overflow, integer_zero_node),
5547 PRED_FORTRAN_OVERFLOW);
5548 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
5549 error, gfc_finish_block (&elseblock));
5550 }
5551 else
5552 tmp = gfc_finish_block (&elseblock);
5553
5554 gfc_add_expr_to_block (&se->pre, tmp);
5555
5556 /* Update the array descriptors. */
5557 if (dimension)
5558 gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
5559
5560 set_descriptor = gfc_finish_block (&set_descriptor_block);
5561 if (status != NULL_TREE)
5562 {
5563 cond = fold_build2_loc (input_location, EQ_EXPR,
5564 boolean_type_node, status,
5565 build_int_cst (TREE_TYPE (status), 0));
5566 gfc_add_expr_to_block (&se->pre,
5567 fold_build3_loc (input_location, COND_EXPR, void_type_node,
5568 gfc_likely (cond, PRED_FORTRAN_FAIL_ALLOC),
5569 set_descriptor,
5570 build_empty_stmt (input_location)));
5571 }
5572 else
5573 gfc_add_expr_to_block (&se->pre, set_descriptor);
5574
5575 if (expr->ts.type == BT_DERIVED && expr->ts.u.derived->attr.alloc_comp
5576 && !coarray)
5577 {
5578 tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
5579 ref->u.ar.as->rank);
5580 gfc_add_expr_to_block (&se->pre, tmp);
5581 }
5582
5583 return true;
5584 }
5585
5586
5587 /* Deallocate an array variable. Also used when an allocated variable goes
5588 out of scope. */
5589 /*GCC ARRAYS*/
5590
5591 tree
gfc_array_deallocate(tree descriptor,tree pstat,tree errmsg,tree errlen,tree label_finish,gfc_expr * expr)5592 gfc_array_deallocate (tree descriptor, tree pstat, tree errmsg, tree errlen,
5593 tree label_finish, gfc_expr* expr)
5594 {
5595 tree var;
5596 tree tmp;
5597 stmtblock_t block;
5598 bool coarray = gfc_is_coarray (expr);
5599
5600 gfc_start_block (&block);
5601
5602 /* Get a pointer to the data. */
5603 var = gfc_conv_descriptor_data_get (descriptor);
5604 STRIP_NOPS (var);
5605
5606 /* Parameter is the address of the data component. */
5607 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, pstat, errmsg,
5608 errlen, label_finish, false, expr, coarray);
5609 gfc_add_expr_to_block (&block, tmp);
5610
5611 /* Zero the data pointer; only for coarrays an error can occur and then
5612 the allocation status may not be changed. */
5613 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
5614 var, build_int_cst (TREE_TYPE (var), 0));
5615 if (pstat != NULL_TREE && coarray && flag_coarray == GFC_FCOARRAY_LIB)
5616 {
5617 tree cond;
5618 tree stat = build_fold_indirect_ref_loc (input_location, pstat);
5619
5620 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
5621 stat, build_int_cst (TREE_TYPE (stat), 0));
5622 tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
5623 cond, tmp, build_empty_stmt (input_location));
5624 }
5625
5626 gfc_add_expr_to_block (&block, tmp);
5627
5628 return gfc_finish_block (&block);
5629 }
5630
5631
5632 /* Create an array constructor from an initialization expression.
5633 We assume the frontend already did any expansions and conversions. */
5634
5635 tree
gfc_conv_array_initializer(tree type,gfc_expr * expr)5636 gfc_conv_array_initializer (tree type, gfc_expr * expr)
5637 {
5638 gfc_constructor *c;
5639 tree tmp;
5640 offset_int wtmp;
5641 gfc_se se;
5642 tree index, range;
5643 vec<constructor_elt, va_gc> *v = NULL;
5644
5645 if (expr->expr_type == EXPR_VARIABLE
5646 && expr->symtree->n.sym->attr.flavor == FL_PARAMETER
5647 && expr->symtree->n.sym->value)
5648 expr = expr->symtree->n.sym->value;
5649
5650 switch (expr->expr_type)
5651 {
5652 case EXPR_CONSTANT:
5653 case EXPR_STRUCTURE:
5654 /* A single scalar or derived type value. Create an array with all
5655 elements equal to that value. */
5656 gfc_init_se (&se, NULL);
5657
5658 if (expr->expr_type == EXPR_CONSTANT)
5659 gfc_conv_constant (&se, expr);
5660 else
5661 gfc_conv_structure (&se, expr, 1);
5662
5663 wtmp = wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
5664 /* This will probably eat buckets of memory for large arrays. */
5665 while (wtmp != 0)
5666 {
5667 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
5668 wtmp -= 1;
5669 }
5670 break;
5671
5672 case EXPR_ARRAY:
5673 /* Create a vector of all the elements. */
5674 for (c = gfc_constructor_first (expr->value.constructor);
5675 c; c = gfc_constructor_next (c))
5676 {
5677 if (c->iterator)
5678 {
5679 /* Problems occur when we get something like
5680 integer :: a(lots) = (/(i, i=1, lots)/) */
5681 gfc_fatal_error ("The number of elements in the array "
5682 "constructor at %L requires an increase of "
5683 "the allowed %d upper limit. See "
5684 "%<-fmax-array-constructor%> option",
5685 &expr->where, flag_max_array_constructor);
5686 return NULL_TREE;
5687 }
5688 if (mpz_cmp_si (c->offset, 0) != 0)
5689 index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5690 else
5691 index = NULL_TREE;
5692
5693 if (mpz_cmp_si (c->repeat, 1) > 0)
5694 {
5695 tree tmp1, tmp2;
5696 mpz_t maxval;
5697
5698 mpz_init (maxval);
5699 mpz_add (maxval, c->offset, c->repeat);
5700 mpz_sub_ui (maxval, maxval, 1);
5701 tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5702 if (mpz_cmp_si (c->offset, 0) != 0)
5703 {
5704 mpz_add_ui (maxval, c->offset, 1);
5705 tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
5706 }
5707 else
5708 tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
5709
5710 range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
5711 mpz_clear (maxval);
5712 }
5713 else
5714 range = NULL;
5715
5716 gfc_init_se (&se, NULL);
5717 switch (c->expr->expr_type)
5718 {
5719 case EXPR_CONSTANT:
5720 gfc_conv_constant (&se, c->expr);
5721 break;
5722
5723 case EXPR_STRUCTURE:
5724 gfc_conv_structure (&se, c->expr, 1);
5725 break;
5726
5727 default:
5728 /* Catch those occasional beasts that do not simplify
5729 for one reason or another, assuming that if they are
5730 standard defying the frontend will catch them. */
5731 gfc_conv_expr (&se, c->expr);
5732 break;
5733 }
5734
5735 if (range == NULL_TREE)
5736 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5737 else
5738 {
5739 if (index != NULL_TREE)
5740 CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
5741 CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
5742 }
5743 }
5744 break;
5745
5746 case EXPR_NULL:
5747 return gfc_build_null_descriptor (type);
5748
5749 default:
5750 gcc_unreachable ();
5751 }
5752
5753 /* Create a constructor from the list of elements. */
5754 tmp = build_constructor (type, v);
5755 TREE_CONSTANT (tmp) = 1;
5756 return tmp;
5757 }
5758
5759
5760 /* Generate code to evaluate non-constant coarray cobounds. */
5761
5762 void
gfc_trans_array_cobounds(tree type,stmtblock_t * pblock,const gfc_symbol * sym)5763 gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
5764 const gfc_symbol *sym)
5765 {
5766 int dim;
5767 tree ubound;
5768 tree lbound;
5769 gfc_se se;
5770 gfc_array_spec *as;
5771
5772 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
5773
5774 for (dim = as->rank; dim < as->rank + as->corank; dim++)
5775 {
5776 /* Evaluate non-constant array bound expressions. */
5777 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5778 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5779 {
5780 gfc_init_se (&se, NULL);
5781 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5782 gfc_add_block_to_block (pblock, &se.pre);
5783 gfc_add_modify (pblock, lbound, se.expr);
5784 }
5785 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5786 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5787 {
5788 gfc_init_se (&se, NULL);
5789 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5790 gfc_add_block_to_block (pblock, &se.pre);
5791 gfc_add_modify (pblock, ubound, se.expr);
5792 }
5793 }
5794 }
5795
5796
5797 /* Generate code to evaluate non-constant array bounds. Sets *poffset and
5798 returns the size (in elements) of the array. */
5799
5800 static tree
gfc_trans_array_bounds(tree type,gfc_symbol * sym,tree * poffset,stmtblock_t * pblock)5801 gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
5802 stmtblock_t * pblock)
5803 {
5804 gfc_array_spec *as;
5805 tree size;
5806 tree stride;
5807 tree offset;
5808 tree ubound;
5809 tree lbound;
5810 tree tmp;
5811 gfc_se se;
5812
5813 int dim;
5814
5815 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
5816
5817 size = gfc_index_one_node;
5818 offset = gfc_index_zero_node;
5819 for (dim = 0; dim < as->rank; dim++)
5820 {
5821 /* Evaluate non-constant array bound expressions. */
5822 lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
5823 if (as->lower[dim] && !INTEGER_CST_P (lbound))
5824 {
5825 gfc_init_se (&se, NULL);
5826 gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
5827 gfc_add_block_to_block (pblock, &se.pre);
5828 gfc_add_modify (pblock, lbound, se.expr);
5829 }
5830 ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
5831 if (as->upper[dim] && !INTEGER_CST_P (ubound))
5832 {
5833 gfc_init_se (&se, NULL);
5834 gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
5835 gfc_add_block_to_block (pblock, &se.pre);
5836 gfc_add_modify (pblock, ubound, se.expr);
5837 }
5838 /* The offset of this dimension. offset = offset - lbound * stride. */
5839 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5840 lbound, size);
5841 offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
5842 offset, tmp);
5843
5844 /* The size of this dimension, and the stride of the next. */
5845 if (dim + 1 < as->rank)
5846 stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
5847 else
5848 stride = GFC_TYPE_ARRAY_SIZE (type);
5849
5850 if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
5851 {
5852 /* Calculate stride = size * (ubound + 1 - lbound). */
5853 tmp = fold_build2_loc (input_location, MINUS_EXPR,
5854 gfc_array_index_type,
5855 gfc_index_one_node, lbound);
5856 tmp = fold_build2_loc (input_location, PLUS_EXPR,
5857 gfc_array_index_type, ubound, tmp);
5858 tmp = fold_build2_loc (input_location, MULT_EXPR,
5859 gfc_array_index_type, size, tmp);
5860 if (stride)
5861 gfc_add_modify (pblock, stride, tmp);
5862 else
5863 stride = gfc_evaluate_now (tmp, pblock);
5864
5865 /* Make sure that negative size arrays are translated
5866 to being zero size. */
5867 tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
5868 stride, gfc_index_zero_node);
5869 tmp = fold_build3_loc (input_location, COND_EXPR,
5870 gfc_array_index_type, tmp,
5871 stride, gfc_index_zero_node);
5872 gfc_add_modify (pblock, stride, tmp);
5873 }
5874
5875 size = stride;
5876 }
5877
5878 gfc_trans_array_cobounds (type, pblock, sym);
5879 gfc_trans_vla_type_sizes (sym, pblock);
5880
5881 *poffset = offset;
5882 return size;
5883 }
5884
5885
5886 /* Generate code to initialize/allocate an array variable. */
5887
5888 void
gfc_trans_auto_array_allocation(tree decl,gfc_symbol * sym,gfc_wrapped_block * block)5889 gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
5890 gfc_wrapped_block * block)
5891 {
5892 stmtblock_t init;
5893 tree type;
5894 tree tmp = NULL_TREE;
5895 tree size;
5896 tree offset;
5897 tree space;
5898 tree inittree;
5899 bool onstack;
5900
5901 gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
5902
5903 /* Do nothing for USEd variables. */
5904 if (sym->attr.use_assoc)
5905 return;
5906
5907 type = TREE_TYPE (decl);
5908 gcc_assert (GFC_ARRAY_TYPE_P (type));
5909 onstack = TREE_CODE (type) != POINTER_TYPE;
5910
5911 gfc_init_block (&init);
5912
5913 /* Evaluate character string length. */
5914 if (sym->ts.type == BT_CHARACTER
5915 && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5916 {
5917 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5918
5919 gfc_trans_vla_type_sizes (sym, &init);
5920
5921 /* Emit a DECL_EXPR for this variable, which will cause the
5922 gimplifier to allocate storage, and all that good stuff. */
5923 tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
5924 gfc_add_expr_to_block (&init, tmp);
5925 }
5926
5927 if (onstack)
5928 {
5929 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5930 return;
5931 }
5932
5933 type = TREE_TYPE (type);
5934
5935 gcc_assert (!sym->attr.use_assoc);
5936 gcc_assert (!TREE_STATIC (decl));
5937 gcc_assert (!sym->module);
5938
5939 if (sym->ts.type == BT_CHARACTER
5940 && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
5941 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
5942
5943 size = gfc_trans_array_bounds (type, sym, &offset, &init);
5944
5945 /* Don't actually allocate space for Cray Pointees. */
5946 if (sym->attr.cray_pointee)
5947 {
5948 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5949 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5950
5951 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
5952 return;
5953 }
5954
5955 if (flag_stack_arrays)
5956 {
5957 gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
5958 space = build_decl (sym->declared_at.lb->location,
5959 VAR_DECL, create_tmp_var_name ("A"),
5960 TREE_TYPE (TREE_TYPE (decl)));
5961 gfc_trans_vla_type_sizes (sym, &init);
5962 }
5963 else
5964 {
5965 /* The size is the number of elements in the array, so multiply by the
5966 size of an element to get the total size. */
5967 tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
5968 size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
5969 size, fold_convert (gfc_array_index_type, tmp));
5970
5971 /* Allocate memory to hold the data. */
5972 tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
5973 gfc_add_modify (&init, decl, tmp);
5974
5975 /* Free the temporary. */
5976 tmp = gfc_call_free (decl);
5977 space = NULL_TREE;
5978 }
5979
5980 /* Set offset of the array. */
5981 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
5982 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
5983
5984 /* Automatic arrays should not have initializers. */
5985 gcc_assert (!sym->value);
5986
5987 inittree = gfc_finish_block (&init);
5988
5989 if (space)
5990 {
5991 tree addr;
5992 pushdecl (space);
5993
5994 /* Don't create new scope, emit the DECL_EXPR in exactly the scope
5995 where also space is located. */
5996 gfc_init_block (&init);
5997 tmp = fold_build1_loc (input_location, DECL_EXPR,
5998 TREE_TYPE (space), space);
5999 gfc_add_expr_to_block (&init, tmp);
6000 addr = fold_build1_loc (sym->declared_at.lb->location,
6001 ADDR_EXPR, TREE_TYPE (decl), space);
6002 gfc_add_modify (&init, decl, addr);
6003 gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
6004 tmp = NULL_TREE;
6005 }
6006 gfc_add_init_cleanup (block, inittree, tmp);
6007 }
6008
6009
6010 /* Generate entry and exit code for g77 calling convention arrays. */
6011
6012 void
gfc_trans_g77_array(gfc_symbol * sym,gfc_wrapped_block * block)6013 gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
6014 {
6015 tree parm;
6016 tree type;
6017 locus loc;
6018 tree offset;
6019 tree tmp;
6020 tree stmt;
6021 stmtblock_t init;
6022
6023 gfc_save_backend_locus (&loc);
6024 gfc_set_backend_locus (&sym->declared_at);
6025
6026 /* Descriptor type. */
6027 parm = sym->backend_decl;
6028 type = TREE_TYPE (parm);
6029 gcc_assert (GFC_ARRAY_TYPE_P (type));
6030
6031 gfc_start_block (&init);
6032
6033 if (sym->ts.type == BT_CHARACTER
6034 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
6035 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
6036
6037 /* Evaluate the bounds of the array. */
6038 gfc_trans_array_bounds (type, sym, &offset, &init);
6039
6040 /* Set the offset. */
6041 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
6042 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
6043
6044 /* Set the pointer itself if we aren't using the parameter directly. */
6045 if (TREE_CODE (parm) != PARM_DECL)
6046 {
6047 tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
6048 gfc_add_modify (&init, parm, tmp);
6049 }
6050 stmt = gfc_finish_block (&init);
6051
6052 gfc_restore_backend_locus (&loc);
6053
6054 /* Add the initialization code to the start of the function. */
6055
6056 if (sym->attr.optional || sym->attr.not_always_present)
6057 {
6058 tmp = gfc_conv_expr_present (sym);
6059 stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
6060 }
6061
6062 gfc_add_init_cleanup (block, stmt, NULL_TREE);
6063 }
6064
6065
/* Modify the descriptor of an array parameter so that it has the
   correct lower bound.  Also move the upper bound accordingly.
   If the array is not packed, it will be copied into a temporary.
   For each dimension we set the new lower and upper bounds.  Then we copy the
   stride and calculate the offset for this dimension.  We also work out
   what the stride of a packed array would be, and see if the two match.
   If the array needs repacking, we set the stride to the values we just
   calculated, recalculate the offset and copy the array data.
   Code is also added to copy the data back at the end of the function.
   */
6076
6077 void
gfc_trans_dummy_array_bias(gfc_symbol * sym,tree tmpdesc,gfc_wrapped_block * block)6078 gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
6079 gfc_wrapped_block * block)
6080 {
6081 tree size;
6082 tree type;
6083 tree offset;
6084 locus loc;
6085 stmtblock_t init;
6086 tree stmtInit, stmtCleanup;
6087 tree lbound;
6088 tree ubound;
6089 tree dubound;
6090 tree dlbound;
6091 tree dumdesc;
6092 tree tmp;
6093 tree stride, stride2;
6094 tree stmt_packed;
6095 tree stmt_unpacked;
6096 tree partial;
6097 gfc_se se;
6098 int n;
6099 int checkparm;
6100 int no_repack;
6101 bool optional_arg;
6102 gfc_array_spec *as;
6103 bool is_classarray = IS_CLASS_ARRAY (sym);
6104
6105 /* Do nothing for pointer and allocatable arrays. */
6106 if ((sym->ts.type != BT_CLASS && sym->attr.pointer)
6107 || (sym->ts.type == BT_CLASS && CLASS_DATA (sym)->attr.class_pointer)
6108 || sym->attr.allocatable
6109 || (is_classarray && CLASS_DATA (sym)->attr.allocatable))
6110 return;
6111
6112 if (!is_classarray && sym->attr.dummy && gfc_is_nodesc_array (sym))
6113 {
6114 gfc_trans_g77_array (sym, block);
6115 return;
6116 }
6117
6118 loc.nextc = NULL;
6119 gfc_save_backend_locus (&loc);
6120 /* loc.nextc is not set by save_backend_locus but the location routines
6121 depend on it. */
6122 if (loc.nextc == NULL)
6123 loc.nextc = loc.lb->line;
6124 gfc_set_backend_locus (&sym->declared_at);
6125
6126 /* Descriptor type. */
6127 type = TREE_TYPE (tmpdesc);
6128 gcc_assert (GFC_ARRAY_TYPE_P (type));
6129 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6130 if (is_classarray)
6131 /* For a class array the dummy array descriptor is in the _class
6132 component. */
6133 dumdesc = gfc_class_data_get (dumdesc);
6134 else
6135 dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
6136 as = IS_CLASS_ARRAY (sym) ? CLASS_DATA (sym)->as : sym->as;
6137 gfc_start_block (&init);
6138
6139 if (sym->ts.type == BT_CHARACTER
6140 && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
6141 gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
6142
6143 checkparm = (as->type == AS_EXPLICIT
6144 && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
6145
6146 no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
6147 || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
6148
6149 if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
6150 {
6151 /* For non-constant shape arrays we only check if the first dimension
6152 is contiguous. Repacking higher dimensions wouldn't gain us
6153 anything as we still don't know the array stride. */
6154 partial = gfc_create_var (boolean_type_node, "partial");
6155 TREE_USED (partial) = 1;
6156 tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
6157 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
6158 gfc_index_one_node);
6159 gfc_add_modify (&init, partial, tmp);
6160 }
6161 else
6162 partial = NULL_TREE;
6163
6164 /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
6165 here, however I think it does the right thing. */
6166 if (no_repack)
6167 {
6168 /* Set the first stride. */
6169 stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
6170 stride = gfc_evaluate_now (stride, &init);
6171
6172 tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
6173 stride, gfc_index_zero_node);
6174 tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
6175 tmp, gfc_index_one_node, stride);
6176 stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
6177 gfc_add_modify (&init, stride, tmp);
6178
6179 /* Allow the user to disable array repacking. */
6180 stmt_unpacked = NULL_TREE;
6181 }
6182 else
6183 {
6184 gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
6185 /* A library call to repack the array if necessary. */
6186 tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6187 stmt_unpacked = build_call_expr_loc (input_location,
6188 gfor_fndecl_in_pack, 1, tmp);
6189
6190 stride = gfc_index_one_node;
6191
6192 if (warn_array_temporaries)
6193 gfc_warning (OPT_Warray_temporaries,
6194 "Creating array temporary at %L", &loc);
6195 }
6196
6197 /* This is for the case where the array data is used directly without
6198 calling the repack function. */
6199 if (no_repack || partial != NULL_TREE)
6200 stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
6201 else
6202 stmt_packed = NULL_TREE;
6203
6204 /* Assign the data pointer. */
6205 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
6206 {
6207 /* Don't repack unknown shape arrays when the first stride is 1. */
6208 tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
6209 partial, stmt_packed, stmt_unpacked);
6210 }
6211 else
6212 tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
6213 gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
6214
6215 offset = gfc_index_zero_node;
6216 size = gfc_index_one_node;
6217
6218 /* Evaluate the bounds of the array. */
6219 for (n = 0; n < as->rank; n++)
6220 {
6221 if (checkparm || !as->upper[n])
6222 {
6223 /* Get the bounds of the actual parameter. */
6224 dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
6225 dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
6226 }
6227 else
6228 {
6229 dubound = NULL_TREE;
6230 dlbound = NULL_TREE;
6231 }
6232
6233 lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
6234 if (!INTEGER_CST_P (lbound))
6235 {
6236 gfc_init_se (&se, NULL);
6237 gfc_conv_expr_type (&se, as->lower[n],
6238 gfc_array_index_type);
6239 gfc_add_block_to_block (&init, &se.pre);
6240 gfc_add_modify (&init, lbound, se.expr);
6241 }
6242
6243 ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
6244 /* Set the desired upper bound. */
6245 if (as->upper[n])
6246 {
6247 /* We know what we want the upper bound to be. */
6248 if (!INTEGER_CST_P (ubound))
6249 {
6250 gfc_init_se (&se, NULL);
6251 gfc_conv_expr_type (&se, as->upper[n],
6252 gfc_array_index_type);
6253 gfc_add_block_to_block (&init, &se.pre);
6254 gfc_add_modify (&init, ubound, se.expr);
6255 }
6256
6257 /* Check the sizes match. */
6258 if (checkparm)
6259 {
6260 /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
6261 char * msg;
6262 tree temp;
6263
6264 temp = fold_build2_loc (input_location, MINUS_EXPR,
6265 gfc_array_index_type, ubound, lbound);
6266 temp = fold_build2_loc (input_location, PLUS_EXPR,
6267 gfc_array_index_type,
6268 gfc_index_one_node, temp);
6269 stride2 = fold_build2_loc (input_location, MINUS_EXPR,
6270 gfc_array_index_type, dubound,
6271 dlbound);
6272 stride2 = fold_build2_loc (input_location, PLUS_EXPR,
6273 gfc_array_index_type,
6274 gfc_index_one_node, stride2);
6275 tmp = fold_build2_loc (input_location, NE_EXPR,
6276 gfc_array_index_type, temp, stride2);
6277 msg = xasprintf ("Dimension %d of array '%s' has extent "
6278 "%%ld instead of %%ld", n+1, sym->name);
6279
6280 gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
6281 fold_convert (long_integer_type_node, temp),
6282 fold_convert (long_integer_type_node, stride2));
6283
6284 free (msg);
6285 }
6286 }
6287 else
6288 {
6289 /* For assumed shape arrays move the upper bound by the same amount
6290 as the lower bound. */
6291 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6292 gfc_array_index_type, dubound, dlbound);
6293 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6294 gfc_array_index_type, tmp, lbound);
6295 gfc_add_modify (&init, ubound, tmp);
6296 }
6297 /* The offset of this dimension. offset = offset - lbound * stride. */
6298 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
6299 lbound, stride);
6300 offset = fold_build2_loc (input_location, MINUS_EXPR,
6301 gfc_array_index_type, offset, tmp);
6302
6303 /* The size of this dimension, and the stride of the next. */
6304 if (n + 1 < as->rank)
6305 {
6306 stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
6307
6308 if (no_repack || partial != NULL_TREE)
6309 stmt_unpacked =
6310 gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
6311
6312 /* Figure out the stride if not a known constant. */
6313 if (!INTEGER_CST_P (stride))
6314 {
6315 if (no_repack)
6316 stmt_packed = NULL_TREE;
6317 else
6318 {
6319 /* Calculate stride = size * (ubound + 1 - lbound). */
6320 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6321 gfc_array_index_type,
6322 gfc_index_one_node, lbound);
6323 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6324 gfc_array_index_type, ubound, tmp);
6325 size = fold_build2_loc (input_location, MULT_EXPR,
6326 gfc_array_index_type, size, tmp);
6327 stmt_packed = size;
6328 }
6329
6330 /* Assign the stride. */
6331 if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
6332 tmp = fold_build3_loc (input_location, COND_EXPR,
6333 gfc_array_index_type, partial,
6334 stmt_unpacked, stmt_packed);
6335 else
6336 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
6337 gfc_add_modify (&init, stride, tmp);
6338 }
6339 }
6340 else
6341 {
6342 stride = GFC_TYPE_ARRAY_SIZE (type);
6343
6344 if (stride && !INTEGER_CST_P (stride))
6345 {
6346 /* Calculate size = stride * (ubound + 1 - lbound). */
6347 tmp = fold_build2_loc (input_location, MINUS_EXPR,
6348 gfc_array_index_type,
6349 gfc_index_one_node, lbound);
6350 tmp = fold_build2_loc (input_location, PLUS_EXPR,
6351 gfc_array_index_type,
6352 ubound, tmp);
6353 tmp = fold_build2_loc (input_location, MULT_EXPR,
6354 gfc_array_index_type,
6355 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
6356 gfc_add_modify (&init, stride, tmp);
6357 }
6358 }
6359 }
6360
6361 gfc_trans_array_cobounds (type, &init, sym);
6362
6363 /* Set the offset. */
6364 if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
6365 gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
6366
6367 gfc_trans_vla_type_sizes (sym, &init);
6368
6369 stmtInit = gfc_finish_block (&init);
6370
6371 /* Only do the entry/initialization code if the arg is present. */
6372 dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
6373 optional_arg = (sym->attr.optional
6374 || (sym->ns->proc_name->attr.entry_master
6375 && sym->attr.dummy));
6376 if (optional_arg)
6377 {
6378 tmp = gfc_conv_expr_present (sym);
6379 stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
6380 build_empty_stmt (input_location));
6381 }
6382
6383 /* Cleanup code. */
6384 if (no_repack)
6385 stmtCleanup = NULL_TREE;
6386 else
6387 {
6388 stmtblock_t cleanup;
6389 gfc_start_block (&cleanup);
6390
6391 if (sym->attr.intent != INTENT_IN)
6392 {
6393 /* Copy the data back. */
6394 tmp = build_call_expr_loc (input_location,
6395 gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
6396 gfc_add_expr_to_block (&cleanup, tmp);
6397 }
6398
6399 /* Free the temporary. */
6400 tmp = gfc_call_free (tmpdesc);
6401 gfc_add_expr_to_block (&cleanup, tmp);
6402
6403 stmtCleanup = gfc_finish_block (&cleanup);
6404
6405 /* Only do the cleanup if the array was repacked. */
6406 if (is_classarray)
6407 /* For a class array the dummy array descriptor is in the _class
6408 component. */
6409 tmp = gfc_class_data_get (dumdesc);
6410 else
6411 tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
6412 tmp = gfc_conv_descriptor_data_get (tmp);
6413 tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
6414 tmp, tmpdesc);
6415 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6416 build_empty_stmt (input_location));
6417
6418 if (optional_arg)
6419 {
6420 tmp = gfc_conv_expr_present (sym);
6421 stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
6422 build_empty_stmt (input_location));
6423 }
6424 }
6425
6426 /* We don't need to free any memory allocated by internal_pack as it will
6427 be freed at the end of the function by pop_context. */
6428 gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
6429
6430 gfc_restore_backend_locus (&loc);
6431 }
6432
6433
6434 /* Calculate the overall offset, including subreferences. */
static void
gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
			bool subref, gfc_expr *expr)
{
  tree tmp;
  tree field;
  tree stride;
  tree index;
  gfc_ref *ref;
  gfc_se start;
  int n;

  /* If offset is NULL and this is not a subreferenced array, there is
     nothing to do.  */
  if (offset == NULL_TREE)
    {
      if (subref)
	offset = gfc_index_zero_node;
      else
	return;
    }

  /* Start from the element of DESC that OFFSET designates.  */
  tmp = build_array_ref (desc, offset, NULL, NULL);

  /* Offset the data pointer for pointer assignments from arrays with
     subreferences; e.g. my_integer => my_type(:)%integer_component.  */
  if (subref)
    {
      /* Go past the array reference.  */
      for (ref = expr->ref; ref; ref = ref->next)
	if (ref->type == REF_ARRAY &&
	      ref->u.ar.type != AR_ELEMENT)
	  {
	    ref = ref->next;
	    break;
	  }

      /* Calculate the offset for each subsequent subreference.  */
      for (; ref; ref = ref->next)
	{
	  switch (ref->type)
	    {
	    case REF_COMPONENT:
	      /* Derived-type component: narrow to the FIELD_DECL.  */
	      field = ref->u.c.component->backend_decl;
	      gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
	      tmp = fold_build3_loc (input_location, COMPONENT_REF,
				     TREE_TYPE (field),
				     tmp, field, NULL_TREE);
	      break;

	    case REF_SUBSTRING:
	      /* Substring: index the character array by the start value.  */
	      gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
	      gfc_init_se (&start, NULL);
	      gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
	      gfc_add_block_to_block (block, &start.pre);
	      tmp = gfc_build_array_ref (tmp, start.expr, NULL);
	      break;

	    case REF_ARRAY:
	      gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
			    && ref->u.ar.type == AR_ELEMENT);

	      /* TODO - Add bounds checking.  */
	      /* Accumulate a linear index over all dimensions using the
		 declared bounds from the array spec.  */
	      stride = gfc_index_one_node;
	      index = gfc_index_zero_node;
	      for (n = 0; n < ref->u.ar.dimen; n++)
		{
		  tree itmp;
		  tree jtmp;

		  /* Update the index.  index += (start - lbound) * stride.  */
		  gfc_init_se (&start, NULL);
		  gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
		  itmp = gfc_evaluate_now (start.expr, block);
		  gfc_init_se (&start, NULL);
		  gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
		  jtmp = gfc_evaluate_now (start.expr, block);
		  itmp = fold_build2_loc (input_location, MINUS_EXPR,
					  gfc_array_index_type, itmp, jtmp);
		  itmp = fold_build2_loc (input_location, MULT_EXPR,
					  gfc_array_index_type, itmp, stride);
		  index = fold_build2_loc (input_location, PLUS_EXPR,
					  gfc_array_index_type, itmp, index);
		  index = gfc_evaluate_now (index, block);

		  /* Update the stride.  stride *= (ubound + 1 - lbound).  */
		  gfc_init_se (&start, NULL);
		  gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
		  itmp = fold_build2_loc (input_location, MINUS_EXPR,
					  gfc_array_index_type, start.expr,
					  jtmp);
		  itmp = fold_build2_loc (input_location, PLUS_EXPR,
					  gfc_array_index_type,
					  gfc_index_one_node, itmp);
		  stride = fold_build2_loc (input_location, MULT_EXPR,
					    gfc_array_index_type, stride, itmp);
		  stride = gfc_evaluate_now (stride, block);
		}

	      /* Apply the index to obtain the array element.  */
	      tmp = gfc_build_array_ref (tmp, index, NULL);
	      break;

	    default:
	      gcc_unreachable ();
	      break;
	    }
	}
    }

  /* Set the target data pointer.  */
  offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
  gfc_conv_descriptor_data_set (block, parm, offset);
}
6549
6550
/* gfc_conv_expr_descriptor needs the string length of an expression
   so that the size of the temporary can be obtained.  This is done
   by adding up the string lengths of all the elements in the
   expression.  Functions with non-constant expressions have their
   string lengths mapped onto the actual arguments using the
   interface mapping machinery in trans-expr.c.  */
static void
get_array_charlen (gfc_expr *expr, gfc_se *se)
{
  gfc_interface_mapping mapping;
  gfc_formal_arglist *formal;
  gfc_actual_arglist *arg;
  gfc_se tse;

  /* A constant length needs no mapping; just make sure its backend
     declaration exists.  */
  if (expr->ts.u.cl->length
	&& gfc_is_constant_expr (expr->ts.u.cl->length))
    {
      if (!expr->ts.u.cl->backend_decl)
	gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
      return;
    }

  switch (expr->expr_type)
    {
    case EXPR_OP:
      /* Recurse into the first operand; its length contributes to (or,
	 for parentheses, equals) this expression's length.  */
      get_array_charlen (expr->value.op.op1, se);

      /* For parentheses the expression ts.u.cl is identical.  */
      if (expr->value.op.op == INTRINSIC_PARENTHESES)
	return;

      expr->ts.u.cl->backend_decl =
		gfc_create_var (gfc_charlen_type_node, "sln");

      if (expr->value.op.op2)
	{
	  get_array_charlen (expr->value.op.op2, se);

	  /* Concatenation is the only binary character operator here.  */
	  gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);

	  /* Add the string lengths and assign them to the expression
	     string length backend declaration.  */
	  gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
			  fold_build2_loc (input_location, PLUS_EXPR,
				gfc_charlen_type_node,
				expr->value.op.op1->ts.u.cl->backend_decl,
				expr->value.op.op2->ts.u.cl->backend_decl));
	}
      else
	/* Unary operator: the length is that of the single operand.  */
	gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
			expr->value.op.op1->ts.u.cl->backend_decl);
      break;

    case EXPR_FUNCTION:
      /* Intrinsics and constant-length results need no interface
	 mapping.  */
      if (expr->value.function.esym == NULL
	    || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
	{
	  gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
	  break;
	}

      /* Map expressions involving the dummy arguments onto the actual
	 argument expressions.  */
      gfc_init_interface_mapping (&mapping);
      formal = gfc_sym_get_dummy_args (expr->symtree->n.sym);
      arg = expr->value.function.actual;

      /* Set se = NULL in the calls to the interface mapping, to suppress any
	 backend stuff.  */
      for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
	{
	  if (!arg->expr)
	    continue;
	  if (formal->sym)
	  gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
	}

      gfc_init_se (&tse, NULL);

      /* Build the expression for the character length and convert it.  */
      gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);

      gfc_add_block_to_block (&se->pre, &tse.pre);
      gfc_add_block_to_block (&se->post, &tse.post);
      tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
      /* Clamp a possibly negative mapped length to zero.  */
      tse.expr = fold_build2_loc (input_location, MAX_EXPR,
				  gfc_charlen_type_node, tse.expr,
				  build_int_cst (gfc_charlen_type_node, 0));
      expr->ts.u.cl->backend_decl = tse.expr;
      gfc_free_interface_mapping (&mapping);
      break;

    default:
      gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
      break;
    }
}
6648
6649
/* Helper function to check whether the scalarizer dimensions are in their
   natural order; returns true if any dimension is transposed.  */
6651 static bool
transposed_dims(gfc_ss * ss)6652 transposed_dims (gfc_ss *ss)
6653 {
6654 int n;
6655
6656 for (n = 0; n < ss->dimen; n++)
6657 if (ss->dim[n] != n)
6658 return true;
6659 return false;
6660 }
6661
6662
/* Convert the last ref of a scalar coarray from an AR_ELEMENT to an
   AR_SECTION, suitable for the scalarizer.  */
6665
6666 static gfc_ss *
walk_coarray(gfc_expr * e)6667 walk_coarray (gfc_expr *e)
6668 {
6669 gfc_ss *ss;
6670
6671 gcc_assert (gfc_get_corank (e) > 0);
6672
6673 ss = gfc_walk_expr (e);
6674
6675 /* Fix scalar coarray. */
6676 if (ss == gfc_ss_terminator)
6677 {
6678 gfc_ref *ref;
6679
6680 ref = e->ref;
6681 while (ref)
6682 {
6683 if (ref->type == REF_ARRAY
6684 && ref->u.ar.codimen > 0)
6685 break;
6686
6687 ref = ref->next;
6688 }
6689
6690 gcc_assert (ref != NULL);
6691 if (ref->u.ar.type == AR_ELEMENT)
6692 ref->u.ar.type = AR_SECTION;
6693 ss = gfc_reverse_ss (gfc_walk_array_ref (ss, e, ref));
6694 }
6695
6696 return ss;
6697 }
6698
6699
6700 /* Convert an array for passing as an actual argument. Expressions and
6701 vector subscripts are evaluated and stored in a temporary, which is then
6702 passed. For whole arrays the descriptor is passed. For array sections
6703 a modified copy of the descriptor is passed, but using the original data.
6704
6705 This function is also used for array pointer assignments, and there
6706 are three cases:
6707
6708 - se->want_pointer && !se->direct_byref
6709 EXPR is an actual argument. On exit, se->expr contains a
6710 pointer to the array descriptor.
6711
6712 - !se->want_pointer && !se->direct_byref
6713 EXPR is an actual argument to an intrinsic function or the
6714 left-hand side of a pointer assignment. On exit, se->expr
6715 contains the descriptor for EXPR.
6716
6717 - !se->want_pointer && se->direct_byref
6718 EXPR is the right-hand side of a pointer assignment and
6719 se->expr is the descriptor for the previously-evaluated
6720 left-hand side. The function creates an assignment from
6721 EXPR to se->expr.
6722
6723
6724 The se->force_tmp flag disables the non-copying descriptor optimization
6725 that is used for transpose. It may be used in cases where there is an
6726 alias between the transpose argument and another argument in the same
6727 function call. */
6728
6729 void
gfc_conv_expr_descriptor(gfc_se * se,gfc_expr * expr)6730 gfc_conv_expr_descriptor (gfc_se *se, gfc_expr *expr)
6731 {
6732 gfc_ss *ss;
6733 gfc_ss_type ss_type;
6734 gfc_ss_info *ss_info;
6735 gfc_loopinfo loop;
6736 gfc_array_info *info;
6737 int need_tmp;
6738 int n;
6739 tree tmp;
6740 tree desc;
6741 stmtblock_t block;
6742 tree start;
6743 tree offset;
6744 int full;
6745 bool subref_array_target = false;
6746 gfc_expr *arg, *ss_expr;
6747
6748 if (se->want_coarray)
6749 ss = walk_coarray (expr);
6750 else
6751 ss = gfc_walk_expr (expr);
6752
6753 gcc_assert (ss != NULL);
6754 gcc_assert (ss != gfc_ss_terminator);
6755
6756 ss_info = ss->info;
6757 ss_type = ss_info->type;
6758 ss_expr = ss_info->expr;
6759
6760 /* Special case: TRANSPOSE which needs no temporary. */
6761 while (expr->expr_type == EXPR_FUNCTION && expr->value.function.isym
6762 && NULL != (arg = gfc_get_noncopying_intrinsic_argument (expr)))
6763 {
6764 /* This is a call to transpose which has already been handled by the
6765 scalarizer, so that we just need to get its argument's descriptor. */
6766 gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
6767 expr = expr->value.function.actual->expr;
6768 }
6769
6770 /* Special case things we know we can pass easily. */
6771 switch (expr->expr_type)
6772 {
6773 case EXPR_VARIABLE:
6774 /* If we have a linear array section, we can pass it directly.
6775 Otherwise we need to copy it into a temporary. */
6776
6777 gcc_assert (ss_type == GFC_SS_SECTION);
6778 gcc_assert (ss_expr == expr);
6779 info = &ss_info->data.array;
6780
6781 /* Get the descriptor for the array. */
6782 gfc_conv_ss_descriptor (&se->pre, ss, 0);
6783 desc = info->descriptor;
6784
6785 subref_array_target = se->direct_byref && is_subref_array (expr);
6786 need_tmp = gfc_ref_needs_temporary_p (expr->ref)
6787 && !subref_array_target;
6788
6789 if (se->force_tmp)
6790 need_tmp = 1;
6791
6792 if (need_tmp)
6793 full = 0;
6794 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
6795 {
6796 /* Create a new descriptor if the array doesn't have one. */
6797 full = 0;
6798 }
6799 else if (info->ref->u.ar.type == AR_FULL || se->descriptor_only)
6800 full = 1;
6801 else if (se->direct_byref)
6802 full = 0;
6803 else
6804 full = gfc_full_array_ref_p (info->ref, NULL);
6805
6806 if (full && !transposed_dims (ss))
6807 {
6808 if (se->direct_byref && !se->byref_noassign)
6809 {
6810 /* Copy the descriptor for pointer assignments. */
6811 gfc_add_modify (&se->pre, se->expr, desc);
6812
6813 /* Add any offsets from subreferences. */
6814 gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
6815 subref_array_target, expr);
6816 }
6817 else if (se->want_pointer)
6818 {
6819 /* We pass full arrays directly. This means that pointers and
6820 allocatable arrays should also work. */
6821 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
6822 }
6823 else
6824 {
6825 se->expr = desc;
6826 }
6827
6828 if (expr->ts.type == BT_CHARACTER)
6829 se->string_length = gfc_get_expr_charlen (expr);
6830
6831 gfc_free_ss_chain (ss);
6832 return;
6833 }
6834 break;
6835
6836 case EXPR_FUNCTION:
6837 /* A transformational function return value will be a temporary
6838 array descriptor. We still need to go through the scalarizer
6839 to create the descriptor. Elemental functions are handled as
6840 arbitrary expressions, i.e. copy to a temporary. */
6841
6842 if (se->direct_byref)
6843 {
6844 gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);
6845
6846 /* For pointer assignments pass the descriptor directly. */
6847 if (se->ss == NULL)
6848 se->ss = ss;
6849 else
6850 gcc_assert (se->ss == ss);
6851 se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
6852 gfc_conv_expr (se, expr);
6853 gfc_free_ss_chain (ss);
6854 return;
6855 }
6856
6857 if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
6858 {
6859 if (ss_expr != expr)
6860 /* Elemental function. */
6861 gcc_assert ((expr->value.function.esym != NULL
6862 && expr->value.function.esym->attr.elemental)
6863 || (expr->value.function.isym != NULL
6864 && expr->value.function.isym->elemental)
6865 || gfc_inline_intrinsic_function_p (expr));
6866 else
6867 gcc_assert (ss_type == GFC_SS_INTRINSIC);
6868
6869 need_tmp = 1;
6870 if (expr->ts.type == BT_CHARACTER
6871 && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
6872 get_array_charlen (expr, se);
6873
6874 info = NULL;
6875 }
6876 else
6877 {
6878 /* Transformational function. */
6879 info = &ss_info->data.array;
6880 need_tmp = 0;
6881 }
6882 break;
6883
6884 case EXPR_ARRAY:
6885 /* Constant array constructors don't need a temporary. */
6886 if (ss_type == GFC_SS_CONSTRUCTOR
6887 && expr->ts.type != BT_CHARACTER
6888 && gfc_constant_array_constructor_p (expr->value.constructor))
6889 {
6890 need_tmp = 0;
6891 info = &ss_info->data.array;
6892 }
6893 else
6894 {
6895 need_tmp = 1;
6896 info = NULL;
6897 }
6898 break;
6899
6900 default:
6901 /* Something complicated. Copy it into a temporary. */
6902 need_tmp = 1;
6903 info = NULL;
6904 break;
6905 }
6906
6907 /* If we are creating a temporary, we don't need to bother about aliases
6908 anymore. */
6909 if (need_tmp)
6910 se->force_tmp = 0;
6911
6912 gfc_init_loopinfo (&loop);
6913
6914 /* Associate the SS with the loop. */
6915 gfc_add_ss_to_loop (&loop, ss);
6916
6917 /* Tell the scalarizer not to bother creating loop variables, etc. */
6918 if (!need_tmp)
6919 loop.array_parameter = 1;
6920 else
6921 /* The right-hand side of a pointer assignment mustn't use a temporary. */
6922 gcc_assert (!se->direct_byref);
6923
6924 /* Setup the scalarizing loops and bounds. */
6925 gfc_conv_ss_startstride (&loop);
6926
6927 if (need_tmp)
6928 {
6929 if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
6930 get_array_charlen (expr, se);
6931
6932 /* Tell the scalarizer to make a temporary. */
6933 loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
6934 ((expr->ts.type == BT_CHARACTER)
6935 ? expr->ts.u.cl->backend_decl
6936 : NULL),
6937 loop.dimen);
6938
6939 se->string_length = loop.temp_ss->info->string_length;
6940 gcc_assert (loop.temp_ss->dimen == loop.dimen);
6941 gfc_add_ss_to_loop (&loop, loop.temp_ss);
6942 }
6943
6944 gfc_conv_loop_setup (&loop, & expr->where);
6945
6946 if (need_tmp)
6947 {
6948 /* Copy into a temporary and pass that. We don't need to copy the data
6949 back because expressions and vector subscripts must be INTENT_IN. */
6950 /* TODO: Optimize passing function return values. */
6951 gfc_se lse;
6952 gfc_se rse;
6953
6954 /* Start the copying loops. */
6955 gfc_mark_ss_chain_used (loop.temp_ss, 1);
6956 gfc_mark_ss_chain_used (ss, 1);
6957 gfc_start_scalarized_body (&loop, &block);
6958
6959 /* Copy each data element. */
6960 gfc_init_se (&lse, NULL);
6961 gfc_copy_loopinfo_to_se (&lse, &loop);
6962 gfc_init_se (&rse, NULL);
6963 gfc_copy_loopinfo_to_se (&rse, &loop);
6964
6965 lse.ss = loop.temp_ss;
6966 rse.ss = ss;
6967
6968 gfc_conv_scalarized_array_ref (&lse, NULL);
6969 if (expr->ts.type == BT_CHARACTER)
6970 {
6971 gfc_conv_expr (&rse, expr);
6972 if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
6973 rse.expr = build_fold_indirect_ref_loc (input_location,
6974 rse.expr);
6975 }
6976 else
6977 gfc_conv_expr_val (&rse, expr);
6978
6979 gfc_add_block_to_block (&block, &rse.pre);
6980 gfc_add_block_to_block (&block, &lse.pre);
6981
6982 lse.string_length = rse.string_length;
6983 tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts,
6984 expr->expr_type == EXPR_VARIABLE
6985 || expr->expr_type == EXPR_ARRAY, false);
6986 gfc_add_expr_to_block (&block, tmp);
6987
6988 /* Finish the copying loops. */
6989 gfc_trans_scalarizing_loops (&loop, &block);
6990
6991 desc = loop.temp_ss->info->data.array.descriptor;
6992 }
6993 else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
6994 {
6995 desc = info->descriptor;
6996 se->string_length = ss_info->string_length;
6997 }
6998 else
6999 {
7000 /* We pass sections without copying to a temporary. Make a new
7001 descriptor and point it at the section we want. The loop variable
7002 limits will be the limits of the section.
7003 A function may decide to repack the array to speed up access, but
7004 we're not bothered about that here. */
7005 int dim, ndim, codim;
7006 tree parm;
7007 tree parmtype;
7008 tree stride;
7009 tree from;
7010 tree to;
7011 tree base;
7012 bool onebased = false, rank_remap;
7013
7014 ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
7015 rank_remap = ss->dimen < ndim;
7016
7017 if (se->want_coarray)
7018 {
7019 gfc_array_ref *ar = &info->ref->u.ar;
7020
7021 codim = gfc_get_corank (expr);
7022 for (n = 0; n < codim - 1; n++)
7023 {
7024 /* Make sure we are not lost somehow. */
7025 gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
7026
7027 /* Make sure the call to gfc_conv_section_startstride won't
7028 generate unnecessary code to calculate stride. */
7029 gcc_assert (ar->stride[n + ndim] == NULL);
7030
7031 gfc_conv_section_startstride (&loop.pre, ss, n + ndim);
7032 loop.from[n + loop.dimen] = info->start[n + ndim];
7033 loop.to[n + loop.dimen] = info->end[n + ndim];
7034 }
7035
7036 gcc_assert (n == codim - 1);
7037 evaluate_bound (&loop.pre, info->start, ar->start,
7038 info->descriptor, n + ndim, true,
7039 ar->as->type == AS_DEFERRED);
7040 loop.from[n + loop.dimen] = info->start[n + ndim];
7041 }
7042 else
7043 codim = 0;
7044
7045 /* Set the string_length for a character array. */
7046 if (expr->ts.type == BT_CHARACTER)
7047 se->string_length = gfc_get_expr_charlen (expr);
7048
7049 /* If we have an array section or are assigning make sure that
7050 the lower bound is 1. References to the full
7051 array should otherwise keep the original bounds. */
7052 if ((!info->ref || info->ref->u.ar.type != AR_FULL) && !se->want_pointer)
7053 for (dim = 0; dim < loop.dimen; dim++)
7054 if (!integer_onep (loop.from[dim]))
7055 {
7056 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7057 gfc_array_index_type, gfc_index_one_node,
7058 loop.from[dim]);
7059 loop.to[dim] = fold_build2_loc (input_location, PLUS_EXPR,
7060 gfc_array_index_type,
7061 loop.to[dim], tmp);
7062 loop.from[dim] = gfc_index_one_node;
7063 }
7064
7065 desc = info->descriptor;
7066 if (se->direct_byref && !se->byref_noassign)
7067 {
7068 /* For pointer assignments we fill in the destination. */
7069 parm = se->expr;
7070 parmtype = TREE_TYPE (parm);
7071 }
7072 else
7073 {
7074 /* Otherwise make a new one. */
7075 parmtype = gfc_get_element_type (TREE_TYPE (desc));
7076 parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
7077 loop.from, loop.to, 0,
7078 GFC_ARRAY_UNKNOWN, false);
7079 parm = gfc_create_var (parmtype, "parm");
7080 }
7081
7082 offset = gfc_index_zero_node;
7083
7084 /* The following can be somewhat confusing. We have two
7085 descriptors, a new one and the original array.
7086 {parm, parmtype, dim} refer to the new one.
7087 {desc, type, n, loop} refer to the original, which maybe
7088 a descriptorless array.
7089 The bounds of the scalarization are the bounds of the section.
7090 We don't have to worry about numeric overflows when calculating
7091 the offsets because all elements are within the array data. */
7092
7093 /* Set the dtype. */
7094 tmp = gfc_conv_descriptor_dtype (parm);
7095 gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
7096
7097 /* Set offset for assignments to pointer only to zero if it is not
7098 the full array. */
7099 if ((se->direct_byref || se->use_offset)
7100 && ((info->ref && info->ref->u.ar.type != AR_FULL)
7101 || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
7102 base = gfc_index_zero_node;
7103 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7104 base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
7105 else
7106 base = NULL_TREE;
7107
7108 for (n = 0; n < ndim; n++)
7109 {
7110 stride = gfc_conv_array_stride (desc, n);
7111
7112 /* Work out the offset. */
7113 if (info->ref
7114 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
7115 {
7116 gcc_assert (info->subscript[n]
7117 && info->subscript[n]->info->type == GFC_SS_SCALAR);
7118 start = info->subscript[n]->info->data.scalar.value;
7119 }
7120 else
7121 {
7122 /* Evaluate and remember the start of the section. */
7123 start = info->start[n];
7124 stride = gfc_evaluate_now (stride, &loop.pre);
7125 }
7126
7127 tmp = gfc_conv_array_lbound (desc, n);
7128 tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
7129 start, tmp);
7130 tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
7131 tmp, stride);
7132 offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
7133 offset, tmp);
7134
7135 if (info->ref
7136 && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
7137 {
7138 /* For elemental dimensions, we only need the offset. */
7139 continue;
7140 }
7141
7142 /* Vector subscripts need copying and are handled elsewhere. */
7143 if (info->ref)
7144 gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
7145
7146 /* look for the corresponding scalarizer dimension: dim. */
7147 for (dim = 0; dim < ndim; dim++)
7148 if (ss->dim[dim] == n)
7149 break;
7150
7151 /* loop exited early: the DIM being looked for has been found. */
7152 gcc_assert (dim < ndim);
7153
7154 /* Set the new lower bound. */
7155 from = loop.from[dim];
7156 to = loop.to[dim];
7157
7158 onebased = integer_onep (from);
7159 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
7160 gfc_rank_cst[dim], from);
7161
7162 /* Set the new upper bound. */
7163 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
7164 gfc_rank_cst[dim], to);
7165
7166 /* Multiply the stride by the section stride to get the
7167 total stride. */
7168 stride = fold_build2_loc (input_location, MULT_EXPR,
7169 gfc_array_index_type,
7170 stride, info->stride[n]);
7171
7172 if ((se->direct_byref || se->use_offset)
7173 && ((info->ref && info->ref->u.ar.type != AR_FULL)
7174 || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
7175 {
7176 base = fold_build2_loc (input_location, MINUS_EXPR,
7177 TREE_TYPE (base), base, stride);
7178 }
7179 else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)) || se->use_offset)
7180 {
7181 bool toonebased;
7182 tmp = gfc_conv_array_lbound (desc, n);
7183 toonebased = integer_onep (tmp);
7184 // lb(arr) - from (- start + 1)
7185 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7186 TREE_TYPE (base), tmp, from);
7187 if (onebased && toonebased)
7188 {
7189 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7190 TREE_TYPE (base), tmp, start);
7191 tmp = fold_build2_loc (input_location, PLUS_EXPR,
7192 TREE_TYPE (base), tmp,
7193 gfc_index_one_node);
7194 }
7195 tmp = fold_build2_loc (input_location, MULT_EXPR,
7196 TREE_TYPE (base), tmp,
7197 gfc_conv_array_stride (desc, n));
7198 base = fold_build2_loc (input_location, PLUS_EXPR,
7199 TREE_TYPE (base), tmp, base);
7200 }
7201
7202 /* Store the new stride. */
7203 gfc_conv_descriptor_stride_set (&loop.pre, parm,
7204 gfc_rank_cst[dim], stride);
7205 }
7206
7207 for (n = loop.dimen; n < loop.dimen + codim; n++)
7208 {
7209 from = loop.from[n];
7210 to = loop.to[n];
7211 gfc_conv_descriptor_lbound_set (&loop.pre, parm,
7212 gfc_rank_cst[n], from);
7213 if (n < loop.dimen + codim - 1)
7214 gfc_conv_descriptor_ubound_set (&loop.pre, parm,
7215 gfc_rank_cst[n], to);
7216 }
7217
7218 if (se->data_not_needed)
7219 gfc_conv_descriptor_data_set (&loop.pre, parm,
7220 gfc_index_zero_node);
7221 else
7222 /* Point the data pointer at the 1st element in the section. */
7223 gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
7224 subref_array_target, expr);
7225
7226 /* Force the offset to be -1, when the lower bound of the highest
7227 dimension is one and the symbol is present and is not a
7228 pointer/allocatable or associated. */
7229 if (((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7230 && !se->data_not_needed)
7231 || (se->use_offset && base != NULL_TREE))
7232 {
7233 /* Set the offset depending on base. */
7234 tmp = rank_remap && !se->direct_byref ?
7235 fold_build2_loc (input_location, PLUS_EXPR,
7236 gfc_array_index_type, base,
7237 offset)
7238 : base;
7239 gfc_conv_descriptor_offset_set (&loop.pre, parm, tmp);
7240 }
7241 else if (onebased && (!rank_remap || se->use_offset)
7242 && expr->symtree
7243 && !(expr->symtree->n.sym && expr->symtree->n.sym->ts.type == BT_CLASS
7244 && !CLASS_DATA (expr->symtree->n.sym)->attr.class_pointer)
7245 && !expr->symtree->n.sym->attr.allocatable
7246 && !expr->symtree->n.sym->attr.pointer
7247 && !expr->symtree->n.sym->attr.host_assoc
7248 && !expr->symtree->n.sym->attr.use_assoc)
7249 {
7250 /* Set the offset to -1. */
7251 mpz_t minus_one;
7252 mpz_init_set_si (minus_one, -1);
7253 tmp = gfc_conv_mpz_to_tree (minus_one, gfc_index_integer_kind);
7254 gfc_conv_descriptor_offset_set (&loop.pre, parm, tmp);
7255 }
7256 else
7257 {
7258 /* Only the callee knows what the correct offset it, so just set
7259 it to zero here. */
7260 gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
7261 }
7262 desc = parm;
7263 }
7264
7265 /* For class arrays add the class tree into the saved descriptor to
7266 enable getting of _vptr and the like. */
7267 if (expr->expr_type == EXPR_VARIABLE && VAR_P (desc)
7268 && IS_CLASS_ARRAY (expr->symtree->n.sym))
7269 {
7270 gfc_allocate_lang_decl (desc);
7271 GFC_DECL_SAVED_DESCRIPTOR (desc) =
7272 DECL_LANG_SPECIFIC (expr->symtree->n.sym->backend_decl) ?
7273 GFC_DECL_SAVED_DESCRIPTOR (expr->symtree->n.sym->backend_decl)
7274 : expr->symtree->n.sym->backend_decl;
7275 }
7276 if (!se->direct_byref || se->byref_noassign)
7277 {
7278 /* Get a pointer to the new descriptor. */
7279 if (se->want_pointer)
7280 se->expr = gfc_build_addr_expr (NULL_TREE, desc);
7281 else
7282 se->expr = desc;
7283 }
7284
7285 gfc_add_block_to_block (&se->pre, &loop.pre);
7286 gfc_add_block_to_block (&se->post, &loop.post);
7287
7288 /* Cleanup the scalarizer. */
7289 gfc_cleanup_loop (&loop);
7290 }
7291
7292 /* Helper function for gfc_conv_array_parameter if array size needs to be
7293 computed. */
7294
7295 static void
array_parameter_size(tree desc,gfc_expr * expr,tree * size)7296 array_parameter_size (tree desc, gfc_expr *expr, tree *size)
7297 {
7298 tree elem;
7299 if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
7300 *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
7301 else if (expr->rank > 1)
7302 *size = build_call_expr_loc (input_location,
7303 gfor_fndecl_size0, 1,
7304 gfc_build_addr_expr (NULL, desc));
7305 else
7306 {
7307 tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
7308 tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
7309
7310 *size = fold_build2_loc (input_location, MINUS_EXPR,
7311 gfc_array_index_type, ubound, lbound);
7312 *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7313 *size, gfc_index_one_node);
7314 *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
7315 *size, gfc_index_zero_node);
7316 }
7317 elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
7318 *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7319 *size, fold_convert (gfc_array_index_type, elem));
7320 }
7321
/* Convert an array for passing as an actual parameter.  */
/* TODO: Optimize passing g77 arrays.  */

/* SE receives the resulting argument expression for array expression EXPR.
   G77 requests f77-style passing (a bare data pointer rather than a
   pointer to a descriptor).  FSYM, if non-NULL, is the formal argument's
   symbol; PROC_NAME, if non-NULL, names the called procedure (both used
   for diagnostics).  If SIZE is non-NULL, *SIZE is set to an expression
   for the argument's size in bytes.  When the actual argument may be
   discontiguous but must be passed contiguously, a pack temporary is
   created in se->pre and unpacked/freed in se->post.  */

void
gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, bool g77,
			  const gfc_symbol *fsym, const char *proc_name,
			  tree *size)
{
  tree ptr;
  tree desc;
  tree tmp = NULL_TREE;
  tree stmt;
  tree parent = DECL_CONTEXT (current_function_decl);
  bool full_array_var;
  bool this_array_result;
  bool contiguous;
  bool no_pack;
  bool array_constructor;
  bool good_allocatable;
  bool ultimate_ptr_comp;
  bool ultimate_alloc_comp;
  gfc_symbol *sym;
  stmtblock_t block;
  gfc_ref *ref;

  ultimate_ptr_comp = false;
  ultimate_alloc_comp = false;

  /* Scan all but the last reference, recording whether the last component
     reference seen before the final reference is a pointer or allocatable
     component.  On exit REF points at the final reference (or is NULL).  */
  for (ref = expr->ref; ref; ref = ref->next)
    {
      if (ref->next == NULL)
	break;

      if (ref->type == REF_COMPONENT)
	{
	  ultimate_ptr_comp = ref->u.c.component->attr.pointer;
	  ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
	}
    }

  full_array_var = false;
  contiguous = false;

  if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
    full_array_var = gfc_full_array_ref_p (ref, &contiguous);

  sym = full_array_var ? expr->symtree->n.sym : NULL;

  /* The symbol should have an array specification.  */
  gcc_assert (!sym || sym->as || ref->u.ar.as);

  /* Character array constructors need their element length computed.  */
  if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
    {
      get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
      expr->ts.u.cl->backend_decl = tmp;
      se->string_length = tmp;
    }

  /* Is this the result of the enclosing procedure?  */
  this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
  if (this_array_result
      && (sym->backend_decl != current_function_decl)
      && (sym->backend_decl != parent))
    this_array_result = false;

  /* Passing address of the array if it is not pointer or assumed-shape.  */
  if (full_array_var && g77 && !this_array_result
      && sym->ts.type != BT_DERIVED && sym->ts.type != BT_CLASS)
    {
      tmp = gfc_get_symbol_decl (sym);

      if (sym->ts.type == BT_CHARACTER)
	se->string_length = sym->ts.u.cl->backend_decl;

      if (!sym->attr.pointer
	  && sym->as
	  && sym->as->type != AS_ASSUMED_SHAPE
	  && sym->as->type != AS_DEFERRED
	  && sym->as->type != AS_ASSUMED_RANK
	  && !sym->attr.allocatable)
	{
	  /* Some variables are declared directly, others are declared as
	     pointers and allocated on the heap.  */
	  if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
	    se->expr = tmp;
	  else
	    se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
	  if (size)
	    array_parameter_size (tmp, expr, size);
	  return;
	}

      if (sym->attr.allocatable)
	{
	  if (sym->attr.dummy || sym->attr.result)
	    {
	      gfc_conv_expr_descriptor (se, expr);
	      tmp = se->expr;
	    }
	  if (size)
	    array_parameter_size (tmp, expr, size);
	  se->expr = gfc_conv_array_data (tmp);
	  return;
	}
    }

  /* A convenient reduction in scope.  */
  contiguous = g77 && !this_array_result && contiguous;

  /* There is no need to pack and unpack the array, if it is contiguous
     and not a deferred- or assumed-shape array, or if it is simply
     contiguous.  */
  no_pack = ((sym && sym->as
	      && !sym->attr.pointer
	      && sym->as->type != AS_DEFERRED
	      && sym->as->type != AS_ASSUMED_RANK
	      && sym->as->type != AS_ASSUMED_SHAPE)
	     ||
	     (ref && ref->u.ar.as
	      && ref->u.ar.as->type != AS_DEFERRED
	      && ref->u.ar.as->type != AS_ASSUMED_RANK
	      && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
	     ||
	     gfc_is_simply_contiguous (expr, false, true));

  no_pack = contiguous && no_pack;

  /* Array constructors are always contiguous and do not need packing.  */
  array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;

  /* Same is true of contiguous sections from allocatable variables.  */
  good_allocatable = contiguous
		     && expr->symtree
		     && expr->symtree->n.sym->attr.allocatable;

  /* Or ultimate allocatable components.  */
  ultimate_alloc_comp = contiguous && ultimate_alloc_comp;

  if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
    {
      gfc_conv_expr_descriptor (se, expr);
      /* Deallocate the allocatable components of structures that are
	 not variable.  */
      if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
	  && expr->ts.u.derived->attr.alloc_comp
	  && expr->expr_type != EXPR_VARIABLE)
	{
	  tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, se->expr, expr->rank);

	  /* The components shall be deallocated before their containing entity.  */
	  gfc_prepend_expr_to_block (&se->post, tmp);
	}
      if (expr->ts.type == BT_CHARACTER)
	se->string_length = expr->ts.u.cl->backend_decl;
      if (size)
	array_parameter_size (se->expr, expr, size);
      se->expr = gfc_conv_array_data (se->expr);
      return;
    }

  if (this_array_result)
    {
      /* Result of the enclosing function.  */
      gfc_conv_expr_descriptor (se, expr);
      if (size)
	array_parameter_size (se->expr, expr, size);
      se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);

      if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
	  && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
	se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
								     se->expr));

      return;
    }
  else
    {
      /* Every other type of array.  */
      se->want_pointer = 1;
      gfc_conv_expr_descriptor (se, expr);
      if (size)
	array_parameter_size (build_fold_indirect_ref_loc (input_location,
							   se->expr),
			      expr, size);
    }

  /* Deallocate the allocatable components of structures that are
     not variable, for descriptorless arguments.
     Arguments with a descriptor are handled in gfc_conv_procedure_call.  */
  if (g77 && (expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
      && expr->ts.u.derived->attr.alloc_comp
      && expr->expr_type != EXPR_VARIABLE)
    {
      tmp = build_fold_indirect_ref_loc (input_location, se->expr);
      tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);

      /* The components shall be deallocated before their containing entity.  */
      gfc_prepend_expr_to_block (&se->post, tmp);
    }

  /* Pack the array when passing f77-style, or when the formal argument is
     CONTIGUOUS and the actual argument is not known to be.  */
  if (g77 || (fsym && fsym->attr.contiguous
	      && !gfc_is_simply_contiguous (expr, false, true)))
    {
      tree origptr = NULL_TREE;

      desc = se->expr;

      /* For contiguous arrays, save the original value of the descriptor.  */
      if (!g77)
	{
	  origptr = gfc_create_var (pvoid_type_node, "origptr");
	  tmp = build_fold_indirect_ref_loc (input_location, desc);
	  tmp = gfc_conv_array_data (tmp);
	  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
				 TREE_TYPE (origptr), origptr,
				 fold_convert (TREE_TYPE (origptr), tmp));
	  gfc_add_expr_to_block (&se->pre, tmp);
	}

      /* Repack the array.  */
      if (warn_array_temporaries)
	{
	  if (fsym)
	    gfc_warning (OPT_Warray_temporaries,
			 "Creating array temporary at %L for argument %qs",
			 &expr->where, fsym->name);
	  else
	    gfc_warning (OPT_Warray_temporaries,
			 "Creating array temporary at %L", &expr->where);
	}

      /* The library pack routine returns a contiguous copy (or the
	 original data if no copy was needed).  */
      ptr = build_call_expr_loc (input_location,
				 gfor_fndecl_in_pack, 1, desc);

      /* An absent optional argument must stay a null pointer.  */
      if (fsym && fsym->attr.optional && sym && sym->attr.optional)
	{
	  tmp = gfc_conv_expr_present (sym);
	  ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
			    tmp, fold_convert (TREE_TYPE (se->expr), ptr),
			    fold_convert (TREE_TYPE (se->expr),
					  null_pointer_node));
	}

      ptr = gfc_evaluate_now (ptr, &se->pre);

      /* Use the packed data for the actual argument, except for contiguous arrays,
	 where the descriptor's data component is set.  */
      if (g77)
	se->expr = ptr;
      else
	{
	  tmp = build_fold_indirect_ref_loc (input_location, desc);

	  gfc_ss * ss = gfc_walk_expr (expr);
	  if (!transposed_dims (ss))
	    gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
	  else
	    {
	      tree old_field, new_field;

	      /* The original descriptor has transposed dims so we can't reuse
		 it directly; we have to create a new one.  */
	      tree old_desc = tmp;
	      tree new_desc = gfc_create_var (TREE_TYPE (old_desc), "arg_desc");

	      old_field = gfc_conv_descriptor_dtype (old_desc);
	      new_field = gfc_conv_descriptor_dtype (new_desc);
	      gfc_add_modify (&se->pre, new_field, old_field);

	      old_field = gfc_conv_descriptor_offset (old_desc);
	      new_field = gfc_conv_descriptor_offset (new_desc);
	      gfc_add_modify (&se->pre, new_field, old_field);

	      /* Copy the dimension triples, un-transposing them.  */
	      for (int i = 0; i < expr->rank; i++)
		{
		  old_field = gfc_conv_descriptor_dimension (old_desc,
			gfc_rank_cst[get_array_ref_dim_for_loop_dim (ss, i)]);
		  new_field = gfc_conv_descriptor_dimension (new_desc,
			gfc_rank_cst[i]);
		  gfc_add_modify (&se->pre, new_field, old_field);
		}

	      if (flag_coarray == GFC_FCOARRAY_LIB
		  && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (old_desc))
		  && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (old_desc))
		     == GFC_ARRAY_ALLOCATABLE)
		{
		  old_field = gfc_conv_descriptor_token (old_desc);
		  new_field = gfc_conv_descriptor_token (new_desc);
		  gfc_add_modify (&se->pre, new_field, old_field);
		}

	      gfc_conv_descriptor_data_set (&se->pre, new_desc, ptr);
	      se->expr = gfc_build_addr_expr (NULL_TREE, new_desc);
	    }
	  gfc_free_ss (ss);
	}

      /* With -fcheck=array-temps, emit a runtime notification when a
	 temporary was actually created (packed pointer != original).  */
      if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
	{
	  char * msg;

	  if (fsym && proc_name)
	    msg = xasprintf ("An array temporary was created for argument "
			     "'%s' of procedure '%s'", fsym->name, proc_name);
	  else
	    msg = xasprintf ("An array temporary was created");

	  tmp = build_fold_indirect_ref_loc (input_location,
					     desc);
	  tmp = gfc_conv_array_data (tmp);
	  tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
				 fold_convert (TREE_TYPE (tmp), ptr), tmp);

	  if (fsym && fsym->attr.optional && sym && sym->attr.optional)
	    tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
				   boolean_type_node,
				   gfc_conv_expr_present (sym), tmp);

	  gfc_trans_runtime_check (false, true, tmp, &se->pre,
				   &expr->where, msg);
	  free (msg);
	}

      gfc_start_block (&block);

      /* Copy the data back.  */
      if (fsym == NULL || fsym->attr.intent != INTENT_IN)
	{
	  tmp = build_call_expr_loc (input_location,
				     gfor_fndecl_in_unpack, 2, desc, ptr);
	  gfc_add_expr_to_block (&block, tmp);
	}

      /* Free the temporary.  */
      tmp = gfc_call_free (ptr);
      gfc_add_expr_to_block (&block, tmp);

      stmt = gfc_finish_block (&block);

      gfc_init_block (&block);
      /* Only if it was repacked.  This code needs to be executed before the
	 loop cleanup code.  */
      tmp = build_fold_indirect_ref_loc (input_location,
					 desc);
      tmp = gfc_conv_array_data (tmp);
      tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			     fold_convert (TREE_TYPE (tmp), ptr), tmp);

      if (fsym && fsym->attr.optional && sym && sym->attr.optional)
	tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
			       boolean_type_node,
			       gfc_conv_expr_present (sym), tmp);

      tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));

      gfc_add_expr_to_block (&block, tmp);
      gfc_add_block_to_block (&block, &se->post);

      gfc_init_block (&se->post);

      /* Reset the descriptor pointer.  */
      if (!g77)
	{
	  tmp = build_fold_indirect_ref_loc (input_location, desc);
	  gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
	}

      gfc_add_block_to_block (&se->post, &block);
    }
}
7692
7693
7694 /* Generate code to deallocate an array, if it is allocated. */
7695
7696 tree
gfc_trans_dealloc_allocated(tree descriptor,bool coarray,gfc_expr * expr)7697 gfc_trans_dealloc_allocated (tree descriptor, bool coarray, gfc_expr *expr)
7698 {
7699 tree tmp;
7700 tree var;
7701 stmtblock_t block;
7702
7703 gfc_start_block (&block);
7704
7705 var = gfc_conv_descriptor_data_get (descriptor);
7706 STRIP_NOPS (var);
7707
7708 /* Call array_deallocate with an int * present in the second argument.
7709 Although it is ignored here, it's presence ensures that arrays that
7710 are already deallocated are ignored. */
7711 tmp = gfc_deallocate_with_status (coarray ? descriptor : var, NULL_TREE,
7712 NULL_TREE, NULL_TREE, NULL_TREE, true,
7713 expr, coarray);
7714 gfc_add_expr_to_block (&block, tmp);
7715
7716 /* Zero the data pointer. */
7717 tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
7718 var, build_int_cst (TREE_TYPE (var), 0));
7719 gfc_add_expr_to_block (&block, tmp);
7720
7721 return gfc_finish_block (&block);
7722 }
7723
7724
7725 /* This helper function calculates the size in words of a full array. */
7726
7727 tree
gfc_full_array_size(stmtblock_t * block,tree decl,int rank)7728 gfc_full_array_size (stmtblock_t *block, tree decl, int rank)
7729 {
7730 tree idx;
7731 tree nelems;
7732 tree tmp;
7733 idx = gfc_rank_cst[rank - 1];
7734 nelems = gfc_conv_descriptor_ubound_get (decl, idx);
7735 tmp = gfc_conv_descriptor_lbound_get (decl, idx);
7736 tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
7737 nelems, tmp);
7738 tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
7739 tmp, gfc_index_one_node);
7740 tmp = gfc_evaluate_now (tmp, block);
7741
7742 nelems = gfc_conv_descriptor_stride_get (decl, idx);
7743 tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
7744 nelems, tmp);
7745 return gfc_evaluate_now (tmp, block);
7746 }
7747
7748
/* Allocate dest to the same size as src, and copy src -> dest.
   If no_malloc is set, only the copy is done.  */

/* DEST and SRC are either bare data pointers or array descriptors,
   distinguished here via GFC_DESCRIPTOR_TYPE_P on their tree types.
   TYPE is used for sizing; RANK is the array rank for a descriptor SRC.
   NO_MALLOC suppresses allocation of DEST, NO_MEMCPY suppresses the data
   copy, STR_SZ (if non-NULL) overrides the computed element size, and
   ADD_WHEN_ALLOCATED is extra code appended to the "SRC is allocated"
   branch (used by callers for deep copies of components).  Returns a
   COND_EXPR that nullifies DEST when SRC is unallocated and otherwise
   runs the allocate/copy code.  */

static tree
duplicate_allocatable (tree dest, tree src, tree type, int rank,
		       bool no_malloc, bool no_memcpy, tree str_sz,
		       tree add_when_allocated)
{
  tree tmp;
  tree size;
  tree nelems;
  tree null_cond;
  tree null_data;
  stmtblock_t block;

  /* If the source is null, set the destination to null.  Then,
     allocate memory to the destination.  */
  gfc_init_block (&block);

  if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
    {
      /* Non-descriptor case: DEST is a plain pointer.  The first block
	 built here becomes the "source is null" branch.  */
      tmp = null_pointer_node;
      tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
      gfc_add_expr_to_block (&block, tmp);
      null_data = gfc_finish_block (&block);

      gfc_init_block (&block);
      if (str_sz != NULL_TREE)
	size = str_sz;
      else
	size = TYPE_SIZE_UNIT (TREE_TYPE (type));

      if (!no_malloc)
	{
	  tmp = gfc_call_malloc (&block, type, size);
	  tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
				 dest, fold_convert (type, tmp));
	  gfc_add_expr_to_block (&block, tmp);
	}

      if (!no_memcpy)
	{
	  tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  tmp = build_call_expr_loc (input_location, tmp, 3, dest, src,
				     fold_convert (size_type_node, size));
	  gfc_add_expr_to_block (&block, tmp);
	}
    }
  else
    {
      /* Descriptor case: null the data pointer in the "source is null"
	 branch, then size the copy as nelems * element size.  */
      gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
      null_data = gfc_finish_block (&block);

      gfc_init_block (&block);
      if (rank)
	nelems = gfc_full_array_size (&block, src, rank);
      else
	nelems = gfc_index_one_node;

      if (str_sz != NULL_TREE)
	tmp = fold_convert (gfc_array_index_type, str_sz);
      else
	tmp = fold_convert (gfc_array_index_type,
			    TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
			      nelems, tmp);
      if (!no_malloc)
	{
	  tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
	  tmp = gfc_call_malloc (&block, tmp, size);
	  gfc_conv_descriptor_data_set (&block, dest, tmp);
	}

      /* We know the temporary and the value will be the same length,
	 so can use memcpy.  */
      if (!no_memcpy)
	{
	  tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  tmp = build_call_expr_loc (input_location, tmp, 3,
				     gfc_conv_descriptor_data_get (dest),
				     gfc_conv_descriptor_data_get (src),
				     fold_convert (size_type_node, size));
	  gfc_add_expr_to_block (&block, tmp);
	}
    }

  /* Caller-supplied extra code runs only when the source is allocated.  */
  gfc_add_expr_to_block (&block, add_when_allocated);
  tmp = gfc_finish_block (&block);

  /* Null the destination if the source is null; otherwise do
     the allocate and copy.  */
  if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (src)))
    null_cond = src;
  else
    null_cond = gfc_conv_descriptor_data_get (src);

  null_cond = convert (pvoid_type_node, null_cond);
  null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
			       null_cond, null_pointer_node);
  return build3_v (COND_EXPR, null_cond, tmp, null_data);
}
7850
7851
7852 /* Allocate dest to the same size as src, and copy data src -> dest. */
7853
7854 tree
gfc_duplicate_allocatable(tree dest,tree src,tree type,int rank,tree add_when_allocated)7855 gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank,
7856 tree add_when_allocated)
7857 {
7858 return duplicate_allocatable (dest, src, type, rank, false, false,
7859 NULL_TREE, add_when_allocated);
7860 }
7861
7862
7863 /* Copy data src -> dest. */
7864
7865 tree
gfc_copy_allocatable_data(tree dest,tree src,tree type,int rank)7866 gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
7867 {
7868 return duplicate_allocatable (dest, src, type, rank, true, false,
7869 NULL_TREE, NULL_TREE);
7870 }
7871
7872 /* Allocate dest to the same size as src, but don't copy anything. */
7873
7874 tree
gfc_duplicate_allocatable_nocopy(tree dest,tree src,tree type,int rank)7875 gfc_duplicate_allocatable_nocopy (tree dest, tree src, tree type, int rank)
7876 {
7877 return duplicate_allocatable (dest, src, type, rank, false, true,
7878 NULL_TREE, NULL_TREE);
7879 }
7880
7881
/* Recursively traverse an object of derived type, generating code to
   deallocate, nullify or copy allocatable components.  This is the work horse
   function for the functions named in this enum.  */

/* Values for the PURPOSE argument of structure_alloc_comps; each selects
   which action is generated for the allocatable components.  */
enum {DEALLOCATE_ALLOC_COMP = 1, DEALLOCATE_ALLOC_COMP_NO_CAF,
      NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP, COPY_ONLY_ALLOC_COMP,
      COPY_ALLOC_COMP_CAF};
7889
7890 static tree
structure_alloc_comps(gfc_symbol * der_type,tree decl,tree dest,int rank,int purpose)7891 structure_alloc_comps (gfc_symbol * der_type, tree decl,
7892 tree dest, int rank, int purpose)
7893 {
7894 gfc_component *c;
7895 gfc_loopinfo loop;
7896 stmtblock_t fnblock;
7897 stmtblock_t loopbody;
7898 stmtblock_t tmpblock;
7899 tree decl_type;
7900 tree tmp;
7901 tree comp;
7902 tree dcmp;
7903 tree nelems;
7904 tree index;
7905 tree var;
7906 tree cdecl;
7907 tree ctype;
7908 tree vref, dref;
7909 tree null_cond = NULL_TREE;
7910 tree add_when_allocated;
7911 bool called_dealloc_with_status;
7912
7913 gfc_init_block (&fnblock);
7914
7915 decl_type = TREE_TYPE (decl);
7916
7917 if ((POINTER_TYPE_P (decl_type))
7918 || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
7919 {
7920 decl = build_fold_indirect_ref_loc (input_location, decl);
7921 /* Deref dest in sync with decl, but only when it is not NULL. */
7922 if (dest)
7923 dest = build_fold_indirect_ref_loc (input_location, dest);
7924 }
7925
7926 /* Just in case it gets dereferenced. */
7927 decl_type = TREE_TYPE (decl);
7928
7929 /* If this is an array of derived types with allocatable components
7930 build a loop and recursively call this function. */
7931 if (TREE_CODE (decl_type) == ARRAY_TYPE
7932 || (GFC_DESCRIPTOR_TYPE_P (decl_type) && rank != 0))
7933 {
7934 tmp = gfc_conv_array_data (decl);
7935 var = build_fold_indirect_ref_loc (input_location, tmp);
7936
7937 /* Get the number of elements - 1 and set the counter. */
7938 if (GFC_DESCRIPTOR_TYPE_P (decl_type))
7939 {
7940 /* Use the descriptor for an allocatable array. Since this
7941 is a full array reference, we only need the descriptor
7942 information from dimension = rank. */
7943 tmp = gfc_full_array_size (&fnblock, decl, rank);
7944 tmp = fold_build2_loc (input_location, MINUS_EXPR,
7945 gfc_array_index_type, tmp,
7946 gfc_index_one_node);
7947
7948 null_cond = gfc_conv_descriptor_data_get (decl);
7949 null_cond = fold_build2_loc (input_location, NE_EXPR,
7950 boolean_type_node, null_cond,
7951 build_int_cst (TREE_TYPE (null_cond), 0));
7952 }
7953 else
7954 {
7955 /* Otherwise use the TYPE_DOMAIN information. */
7956 tmp = array_type_nelts (decl_type);
7957 tmp = fold_convert (gfc_array_index_type, tmp);
7958 }
7959
7960 /* Remember that this is, in fact, the no. of elements - 1. */
7961 nelems = gfc_evaluate_now (tmp, &fnblock);
7962 index = gfc_create_var (gfc_array_index_type, "S");
7963
7964 /* Build the body of the loop. */
7965 gfc_init_block (&loopbody);
7966
7967 vref = gfc_build_array_ref (var, index, NULL);
7968
7969 if (purpose == COPY_ALLOC_COMP || purpose == COPY_ONLY_ALLOC_COMP)
7970 {
7971 tmp = build_fold_indirect_ref_loc (input_location,
7972 gfc_conv_array_data (dest));
7973 dref = gfc_build_array_ref (tmp, index, NULL);
7974 tmp = structure_alloc_comps (der_type, vref, dref, rank,
7975 COPY_ALLOC_COMP);
7976 }
7977 else
7978 tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
7979
7980 gfc_add_expr_to_block (&loopbody, tmp);
7981
7982 /* Build the loop and return. */
7983 gfc_init_loopinfo (&loop);
7984 loop.dimen = 1;
7985 loop.from[0] = gfc_index_zero_node;
7986 loop.loopvar[0] = index;
7987 loop.to[0] = nelems;
7988 gfc_trans_scalarizing_loops (&loop, &loopbody);
7989 gfc_add_block_to_block (&fnblock, &loop.pre);
7990
7991 tmp = gfc_finish_block (&fnblock);
      /* When copying allocatable components, the above implements the
	 deep copy.  Nevertheless, a deep copy is only allowed when the
	 current component is allocated; the check for that is generated in
	 gfc_duplicate_allocatable (), where the deep copy code is just added
	 into the if's body, by adding tmp (the deep copy code) as last
	 argument to gfc_duplicate_allocatable ().  */
7998 if (purpose == COPY_ALLOC_COMP
7999 && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
8000 tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank,
8001 tmp);
8002 else if (null_cond != NULL_TREE)
8003 tmp = build3_v (COND_EXPR, null_cond, tmp,
8004 build_empty_stmt (input_location));
8005
8006 return tmp;
8007 }
8008
8009 /* Otherwise, act on the components or recursively call self to
8010 act on a chain of components. */
8011 for (c = der_type->components; c; c = c->next)
8012 {
8013 bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
8014 || c->ts.type == BT_CLASS)
8015 && c->ts.u.derived->attr.alloc_comp;
8016 cdecl = c->backend_decl;
8017 ctype = TREE_TYPE (cdecl);
8018
8019 switch (purpose)
8020 {
8021 case DEALLOCATE_ALLOC_COMP:
8022 case DEALLOCATE_ALLOC_COMP_NO_CAF:
8023
8024 /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
8025 (i.e. this function) so generate all the calls and suppress the
8026 recursion from here, if necessary. */
8027 called_dealloc_with_status = false;
8028 gfc_init_block (&tmpblock);
8029
8030 if ((c->ts.type == BT_DERIVED && !c->attr.pointer)
8031 || (c->ts.type == BT_CLASS && !CLASS_DATA (c)->attr.class_pointer))
8032 {
8033 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8034 decl, cdecl, NULL_TREE);
8035
8036 /* The finalizer frees allocatable components. */
8037 called_dealloc_with_status
8038 = gfc_add_comp_finalizer_call (&tmpblock, comp, c,
8039 purpose == DEALLOCATE_ALLOC_COMP);
8040 }
8041 else
8042 comp = NULL_TREE;
8043
8044 if (c->attr.allocatable && !c->attr.proc_pointer
8045 && (c->attr.dimension
8046 || (c->attr.codimension
8047 && purpose != DEALLOCATE_ALLOC_COMP_NO_CAF)))
8048 {
8049 if (comp == NULL_TREE)
8050 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8051 decl, cdecl, NULL_TREE);
8052 tmp = gfc_trans_dealloc_allocated (comp, c->attr.codimension, NULL);
8053 gfc_add_expr_to_block (&tmpblock, tmp);
8054 }
8055 else if (c->attr.allocatable && !c->attr.codimension)
8056 {
8057 /* Allocatable scalar components. */
8058 if (comp == NULL_TREE)
8059 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8060 decl, cdecl, NULL_TREE);
8061
8062 tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
8063 c->ts);
8064 gfc_add_expr_to_block (&tmpblock, tmp);
8065 called_dealloc_with_status = true;
8066
8067 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8068 void_type_node, comp,
8069 build_int_cst (TREE_TYPE (comp), 0));
8070 gfc_add_expr_to_block (&tmpblock, tmp);
8071 }
8072 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable
8073 && (!CLASS_DATA (c)->attr.codimension
8074 || purpose != DEALLOCATE_ALLOC_COMP_NO_CAF))
8075 {
8076 /* Allocatable CLASS components. */
8077
8078 /* Add reference to '_data' component. */
8079 tmp = CLASS_DATA (c)->backend_decl;
8080 comp = fold_build3_loc (input_location, COMPONENT_REF,
8081 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
8082
8083 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
8084 tmp = gfc_trans_dealloc_allocated (comp,
8085 CLASS_DATA (c)->attr.codimension, NULL);
8086 else
8087 {
8088 tmp = gfc_deallocate_scalar_with_status (comp, NULL_TREE, true, NULL,
8089 CLASS_DATA (c)->ts);
8090 gfc_add_expr_to_block (&tmpblock, tmp);
8091 called_dealloc_with_status = true;
8092
8093 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8094 void_type_node, comp,
8095 build_int_cst (TREE_TYPE (comp), 0));
8096 }
8097 gfc_add_expr_to_block (&tmpblock, tmp);
8098
8099 /* Finally, reset the vptr to the declared type vtable and, if
8100 necessary reset the _len field.
8101
8102 First recover the reference to the component and obtain
8103 the vptr. */
8104 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8105 decl, cdecl, NULL_TREE);
8106 tmp = gfc_class_vptr_get (comp);
8107
8108 if (UNLIMITED_POLY (c))
8109 {
8110 /* Both vptr and _len field should be nulled. */
8111 gfc_add_modify (&tmpblock, tmp,
8112 build_int_cst (TREE_TYPE (tmp), 0));
8113 tmp = gfc_class_len_get (comp);
8114 gfc_add_modify (&tmpblock, tmp,
8115 build_int_cst (TREE_TYPE (tmp), 0));
8116 }
8117 else
8118 {
8119 /* Build the vtable address and set the vptr with it. */
8120 tree vtab;
8121 gfc_symbol *vtable;
8122 vtable = gfc_find_derived_vtab (c->ts.u.derived);
8123 vtab = vtable->backend_decl;
8124 if (vtab == NULL_TREE)
8125 vtab = gfc_get_symbol_decl (vtable);
8126 vtab = gfc_build_addr_expr (NULL, vtab);
8127 vtab = fold_convert (TREE_TYPE (tmp), vtab);
8128 gfc_add_modify (&tmpblock, tmp, vtab);
8129 }
8130 }
8131
8132 if (cmp_has_alloc_comps
8133 && !c->attr.pointer && !c->attr.proc_pointer
8134 && !called_dealloc_with_status)
8135 {
8136 /* Do not deallocate the components of ultimate pointer
8137 components or iteratively call self if call has been made
8138 to gfc_trans_dealloc_allocated */
8139 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8140 decl, cdecl, NULL_TREE);
8141 rank = c->as ? c->as->rank : 0;
8142 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
8143 rank, purpose);
8144 gfc_add_expr_to_block (&fnblock, tmp);
8145 }
8146
8147 /* Now add the deallocation of this component. */
8148 gfc_add_block_to_block (&fnblock, &tmpblock);
8149 break;
8150
8151 case NULLIFY_ALLOC_COMP:
8152 if (c->attr.pointer || c->attr.proc_pointer)
8153 continue;
8154 else if (c->attr.allocatable
8155 && (c->attr.dimension|| c->attr.codimension))
8156 {
8157 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8158 decl, cdecl, NULL_TREE);
8159 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
8160 }
8161 else if (c->attr.allocatable)
8162 {
8163 /* Allocatable scalar components. */
8164 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8165 decl, cdecl, NULL_TREE);
8166 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8167 void_type_node, comp,
8168 build_int_cst (TREE_TYPE (comp), 0));
8169 gfc_add_expr_to_block (&fnblock, tmp);
8170 if (gfc_deferred_strlen (c, &comp))
8171 {
8172 comp = fold_build3_loc (input_location, COMPONENT_REF,
8173 TREE_TYPE (comp),
8174 decl, comp, NULL_TREE);
8175 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8176 TREE_TYPE (comp), comp,
8177 build_int_cst (TREE_TYPE (comp), 0));
8178 gfc_add_expr_to_block (&fnblock, tmp);
8179 }
8180 }
8181 else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
8182 {
8183 /* Allocatable CLASS components. */
8184 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8185 decl, cdecl, NULL_TREE);
8186 /* Add reference to '_data' component. */
8187 tmp = CLASS_DATA (c)->backend_decl;
8188 comp = fold_build3_loc (input_location, COMPONENT_REF,
8189 TREE_TYPE (tmp), comp, tmp, NULL_TREE);
8190 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
8191 gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
8192 else
8193 {
8194 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8195 void_type_node, comp,
8196 build_int_cst (TREE_TYPE (comp), 0));
8197 gfc_add_expr_to_block (&fnblock, tmp);
8198 }
8199 }
8200 else if (cmp_has_alloc_comps)
8201 {
8202 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
8203 decl, cdecl, NULL_TREE);
8204 rank = c->as ? c->as->rank : 0;
8205 tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
8206 rank, purpose);
8207 gfc_add_expr_to_block (&fnblock, tmp);
8208 }
8209 break;
8210
8211 case COPY_ALLOC_COMP_CAF:
8212 if (!c->attr.codimension
8213 && (c->ts.type != BT_CLASS || CLASS_DATA (c)->attr.coarray_comp)
8214 && (c->ts.type != BT_DERIVED
8215 || !c->ts.u.derived->attr.coarray_comp))
8216 continue;
8217
8218 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
8219 cdecl, NULL_TREE);
8220 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
8221 cdecl, NULL_TREE);
8222
8223 if (c->attr.codimension)
8224 {
8225 if (c->ts.type == BT_CLASS)
8226 {
8227 comp = gfc_class_data_get (comp);
8228 dcmp = gfc_class_data_get (dcmp);
8229 }
8230 gfc_conv_descriptor_data_set (&fnblock, dcmp,
8231 gfc_conv_descriptor_data_get (comp));
8232 }
8233 else
8234 {
8235 tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
8236 rank, purpose);
8237 gfc_add_expr_to_block (&fnblock, tmp);
8238
8239 }
8240 break;
8241
8242 case COPY_ALLOC_COMP:
8243 if (c->attr.pointer)
8244 continue;
8245
8246 /* We need source and destination components. */
8247 comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
8248 cdecl, NULL_TREE);
8249 dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
8250 cdecl, NULL_TREE);
8251 dcmp = fold_convert (TREE_TYPE (comp), dcmp);
8252
8253 if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
8254 {
8255 tree ftn_tree;
8256 tree size;
8257 tree dst_data;
8258 tree src_data;
8259 tree null_data;
8260
8261 dst_data = gfc_class_data_get (dcmp);
8262 src_data = gfc_class_data_get (comp);
8263 size = fold_convert (size_type_node,
8264 gfc_class_vtab_size_get (comp));
8265
8266 if (CLASS_DATA (c)->attr.dimension)
8267 {
8268 nelems = gfc_conv_descriptor_size (src_data,
8269 CLASS_DATA (c)->as->rank);
8270 size = fold_build2_loc (input_location, MULT_EXPR,
8271 size_type_node, size,
8272 fold_convert (size_type_node,
8273 nelems));
8274 }
8275 else
8276 nelems = build_int_cst (size_type_node, 1);
8277
8278 if (CLASS_DATA (c)->attr.dimension
8279 || CLASS_DATA (c)->attr.codimension)
8280 {
8281 src_data = gfc_conv_descriptor_data_get (src_data);
8282 dst_data = gfc_conv_descriptor_data_get (dst_data);
8283 }
8284
8285 gfc_init_block (&tmpblock);
8286
	      /* Coarray components have to have the same allocation status and
		 shape/type-parameter/effective-type on the LHS and RHS of an
		 intrinsic assignment.  Hence, we did not deallocate them - and
		 do not allocate them here.  */
8291 if (!CLASS_DATA (c)->attr.codimension)
8292 {
8293 ftn_tree = builtin_decl_explicit (BUILT_IN_MALLOC);
8294 tmp = build_call_expr_loc (input_location, ftn_tree, 1, size);
8295 gfc_add_modify (&tmpblock, dst_data,
8296 fold_convert (TREE_TYPE (dst_data), tmp));
8297 }
8298
8299 tmp = gfc_copy_class_to_class (comp, dcmp, nelems,
8300 UNLIMITED_POLY (c));
8301 gfc_add_expr_to_block (&tmpblock, tmp);
8302 tmp = gfc_finish_block (&tmpblock);
8303
8304 gfc_init_block (&tmpblock);
8305 gfc_add_modify (&tmpblock, dst_data,
8306 fold_convert (TREE_TYPE (dst_data),
8307 null_pointer_node));
8308 null_data = gfc_finish_block (&tmpblock);
8309
8310 null_cond = fold_build2_loc (input_location, NE_EXPR,
8311 boolean_type_node, src_data,
8312 null_pointer_node);
8313
8314 gfc_add_expr_to_block (&fnblock, build3_v (COND_EXPR, null_cond,
8315 tmp, null_data));
8316 continue;
8317 }
8318
8319 /* To implement guarded deep copy, i.e., deep copy only allocatable
8320 components that are really allocated, the deep copy code has to
8321 be generated first and then added to the if-block in
8322 gfc_duplicate_allocatable (). */
8323 if (cmp_has_alloc_comps
8324 && !c->attr.proc_pointer)
8325 {
8326 rank = c->as ? c->as->rank : 0;
8327 tmp = fold_convert (TREE_TYPE (dcmp), comp);
8328 gfc_add_modify (&fnblock, dcmp, tmp);
8329 add_when_allocated = structure_alloc_comps (c->ts.u.derived,
8330 comp, dcmp,
8331 rank, purpose);
8332 }
8333 else
8334 add_when_allocated = NULL_TREE;
8335
8336 if (gfc_deferred_strlen (c, &tmp))
8337 {
8338 tree len, size;
8339 len = tmp;
8340 tmp = fold_build3_loc (input_location, COMPONENT_REF,
8341 TREE_TYPE (len),
8342 decl, len, NULL_TREE);
8343 len = fold_build3_loc (input_location, COMPONENT_REF,
8344 TREE_TYPE (len),
8345 dest, len, NULL_TREE);
8346 tmp = fold_build2_loc (input_location, MODIFY_EXPR,
8347 TREE_TYPE (len), len, tmp);
8348 gfc_add_expr_to_block (&fnblock, tmp);
8349 size = size_of_string_in_bytes (c->ts.kind, len);
8350 /* This component can not have allocatable components,
8351 therefore add_when_allocated of duplicate_allocatable ()
8352 is always NULL. */
8353 tmp = duplicate_allocatable (dcmp, comp, ctype, rank,
8354 false, false, size, NULL_TREE);
8355 gfc_add_expr_to_block (&fnblock, tmp);
8356 }
8357 else if (c->attr.allocatable && !c->attr.proc_pointer
8358 && (!(cmp_has_alloc_comps && c->as)
8359 || c->attr.codimension))
8360 {
8361 rank = c->as ? c->as->rank : 0;
8362 if (c->attr.codimension)
8363 tmp = gfc_copy_allocatable_data (dcmp, comp, ctype, rank);
8364 else
8365 tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank,
8366 add_when_allocated);
8367 gfc_add_expr_to_block (&fnblock, tmp);
8368 }
8369 else
8370 if (cmp_has_alloc_comps)
8371 gfc_add_expr_to_block (&fnblock, add_when_allocated);
8372
8373 break;
8374
8375 default:
8376 gcc_unreachable ();
8377 break;
8378 }
8379 }
8380
8381 return gfc_finish_block (&fnblock);
8382 }
8383
8384 /* Recursively traverse an object of derived type, generating code to
8385 nullify allocatable components. */
8386
8387 tree
gfc_nullify_alloc_comp(gfc_symbol * der_type,tree decl,int rank)8388 gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
8389 {
8390 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8391 NULLIFY_ALLOC_COMP);
8392 }
8393
8394
8395 /* Recursively traverse an object of derived type, generating code to
8396 deallocate allocatable components. */
8397
8398 tree
gfc_deallocate_alloc_comp(gfc_symbol * der_type,tree decl,int rank)8399 gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
8400 {
8401 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8402 DEALLOCATE_ALLOC_COMP);
8403 }
8404
8405
8406 /* Recursively traverse an object of derived type, generating code to
8407 deallocate allocatable components. But do not deallocate coarrays.
8408 To be used for intrinsic assignment, which may not change the allocation
8409 status of coarrays. */
8410
8411 tree
gfc_deallocate_alloc_comp_no_caf(gfc_symbol * der_type,tree decl,int rank)8412 gfc_deallocate_alloc_comp_no_caf (gfc_symbol * der_type, tree decl, int rank)
8413 {
8414 return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
8415 DEALLOCATE_ALLOC_COMP_NO_CAF);
8416 }
8417
8418
8419 tree
gfc_reassign_alloc_comp_caf(gfc_symbol * der_type,tree decl,tree dest)8420 gfc_reassign_alloc_comp_caf (gfc_symbol *der_type, tree decl, tree dest)
8421 {
8422 return structure_alloc_comps (der_type, decl, dest, 0, COPY_ALLOC_COMP_CAF);
8423 }
8424
8425
8426 /* Recursively traverse an object of derived type, generating code to
8427 copy it and its allocatable components. */
8428
8429 tree
gfc_copy_alloc_comp(gfc_symbol * der_type,tree decl,tree dest,int rank)8430 gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
8431 {
8432 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
8433 }
8434
8435
8436 /* Recursively traverse an object of derived type, generating code to
8437 copy only its allocatable components. */
8438
8439 tree
gfc_copy_only_alloc_comp(gfc_symbol * der_type,tree decl,tree dest,int rank)8440 gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
8441 {
8442 return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
8443 }
8444
8445
8446 /* Returns the value of LBOUND for an expression. This could be broken out
8447 from gfc_conv_intrinsic_bound but this seemed to be simpler. This is
8448 called by gfc_alloc_allocatable_for_assignment. */
8449 static tree
get_std_lbound(gfc_expr * expr,tree desc,int dim,bool assumed_size)8450 get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
8451 {
8452 tree lbound;
8453 tree ubound;
8454 tree stride;
8455 tree cond, cond1, cond3, cond4;
8456 tree tmp;
8457 gfc_ref *ref;
8458
8459 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
8460 {
8461 tmp = gfc_rank_cst[dim];
8462 lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
8463 ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
8464 stride = gfc_conv_descriptor_stride_get (desc, tmp);
8465 cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
8466 ubound, lbound);
8467 cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
8468 stride, gfc_index_zero_node);
8469 cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
8470 boolean_type_node, cond3, cond1);
8471 cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
8472 stride, gfc_index_zero_node);
8473 if (assumed_size)
8474 cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8475 tmp, build_int_cst (gfc_array_index_type,
8476 expr->rank - 1));
8477 else
8478 cond = boolean_false_node;
8479
8480 cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
8481 boolean_type_node, cond3, cond4);
8482 cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
8483 boolean_type_node, cond, cond1);
8484
8485 return fold_build3_loc (input_location, COND_EXPR,
8486 gfc_array_index_type, cond,
8487 lbound, gfc_index_one_node);
8488 }
8489
8490 if (expr->expr_type == EXPR_FUNCTION)
8491 {
8492 /* A conversion function, so use the argument. */
8493 gcc_assert (expr->value.function.isym
8494 && expr->value.function.isym->conversion);
8495 expr = expr->value.function.actual->expr;
8496 }
8497
8498 if (expr->expr_type == EXPR_VARIABLE)
8499 {
8500 tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
8501 for (ref = expr->ref; ref; ref = ref->next)
8502 {
8503 if (ref->type == REF_COMPONENT
8504 && ref->u.c.component->as
8505 && ref->next
8506 && ref->next->u.ar.type == AR_FULL)
8507 tmp = TREE_TYPE (ref->u.c.component->backend_decl);
8508 }
8509 return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
8510 }
8511
8512 return gfc_index_one_node;
8513 }
8514
8515
8516 /* Returns true if an expression represents an lhs that can be reallocated
8517 on assignment. */
8518
8519 bool
gfc_is_reallocatable_lhs(gfc_expr * expr)8520 gfc_is_reallocatable_lhs (gfc_expr *expr)
8521 {
8522 gfc_ref * ref;
8523
8524 if (!expr->ref)
8525 return false;
8526
8527 /* An allocatable variable. */
8528 if (expr->symtree->n.sym->attr.allocatable
8529 && expr->ref
8530 && expr->ref->type == REF_ARRAY
8531 && expr->ref->u.ar.type == AR_FULL)
8532 return true;
8533
8534 /* All that can be left are allocatable components. */
8535 if ((expr->symtree->n.sym->ts.type != BT_DERIVED
8536 && expr->symtree->n.sym->ts.type != BT_CLASS)
8537 || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
8538 return false;
8539
8540 /* Find a component ref followed by an array reference. */
8541 for (ref = expr->ref; ref; ref = ref->next)
8542 if (ref->next
8543 && ref->type == REF_COMPONENT
8544 && ref->next->type == REF_ARRAY
8545 && !ref->next->next)
8546 break;
8547
8548 if (!ref)
8549 return false;
8550
8551 /* Return true if valid reallocatable lhs. */
8552 if (ref->u.c.component->attr.allocatable
8553 && ref->next->u.ar.type == AR_FULL)
8554 return true;
8555
8556 return false;
8557 }
8558
8559
8560 static tree
concat_str_length(gfc_expr * expr)8561 concat_str_length (gfc_expr* expr)
8562 {
8563 tree type;
8564 tree len1;
8565 tree len2;
8566 gfc_se se;
8567
8568 type = gfc_typenode_for_spec (&expr->value.op.op1->ts);
8569 len1 = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8570 if (len1 == NULL_TREE)
8571 {
8572 if (expr->value.op.op1->expr_type == EXPR_OP)
8573 len1 = concat_str_length (expr->value.op.op1);
8574 else if (expr->value.op.op1->expr_type == EXPR_CONSTANT)
8575 len1 = build_int_cst (gfc_charlen_type_node,
8576 expr->value.op.op1->value.character.length);
8577 else if (expr->value.op.op1->ts.u.cl->length)
8578 {
8579 gfc_init_se (&se, NULL);
8580 gfc_conv_expr (&se, expr->value.op.op1->ts.u.cl->length);
8581 len1 = se.expr;
8582 }
8583 else
8584 {
8585 /* Last resort! */
8586 gfc_init_se (&se, NULL);
8587 se.want_pointer = 1;
8588 se.descriptor_only = 1;
8589 gfc_conv_expr (&se, expr->value.op.op1);
8590 len1 = se.string_length;
8591 }
8592 }
8593
8594 type = gfc_typenode_for_spec (&expr->value.op.op2->ts);
8595 len2 = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8596 if (len2 == NULL_TREE)
8597 {
8598 if (expr->value.op.op2->expr_type == EXPR_OP)
8599 len2 = concat_str_length (expr->value.op.op2);
8600 else if (expr->value.op.op2->expr_type == EXPR_CONSTANT)
8601 len2 = build_int_cst (gfc_charlen_type_node,
8602 expr->value.op.op2->value.character.length);
8603 else if (expr->value.op.op2->ts.u.cl->length)
8604 {
8605 gfc_init_se (&se, NULL);
8606 gfc_conv_expr (&se, expr->value.op.op2->ts.u.cl->length);
8607 len2 = se.expr;
8608 }
8609 else
8610 {
8611 /* Last resort! */
8612 gfc_init_se (&se, NULL);
8613 se.want_pointer = 1;
8614 se.descriptor_only = 1;
8615 gfc_conv_expr (&se, expr->value.op.op2);
8616 len2 = se.string_length;
8617 }
8618 }
8619
8620 gcc_assert(len1 && len2);
8621 len1 = fold_convert (gfc_charlen_type_node, len1);
8622 len2 = fold_convert (gfc_charlen_type_node, len2);
8623
8624 return fold_build2_loc (input_location, PLUS_EXPR,
8625 gfc_charlen_type_node, len1, len2);
8626 }
8627
8628
8629 /* Allocate the lhs of an assignment to an allocatable array, otherwise
8630 reallocate it. */
8631
8632 tree
gfc_alloc_allocatable_for_assignment(gfc_loopinfo * loop,gfc_expr * expr1,gfc_expr * expr2)8633 gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
8634 gfc_expr *expr1,
8635 gfc_expr *expr2)
8636 {
8637 stmtblock_t realloc_block;
8638 stmtblock_t alloc_block;
8639 stmtblock_t fblock;
8640 gfc_ss *rss;
8641 gfc_ss *lss;
8642 gfc_array_info *linfo;
8643 tree realloc_expr;
8644 tree alloc_expr;
8645 tree size1;
8646 tree size2;
8647 tree array1;
8648 tree cond_null;
8649 tree cond;
8650 tree tmp;
8651 tree tmp2;
8652 tree lbound;
8653 tree ubound;
8654 tree desc;
8655 tree old_desc;
8656 tree desc2;
8657 tree offset;
8658 tree jump_label1;
8659 tree jump_label2;
8660 tree neq_size;
8661 tree lbd;
8662 int n;
8663 int dim;
8664 gfc_array_spec * as;
8665
8666 /* x = f(...) with x allocatable. In this case, expr1 is the rhs.
8667 Find the lhs expression in the loop chain and set expr1 and
8668 expr2 accordingly. */
8669 if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
8670 {
8671 expr2 = expr1;
8672 /* Find the ss for the lhs. */
8673 lss = loop->ss;
8674 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
8675 if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE)
8676 break;
8677 if (lss == gfc_ss_terminator)
8678 return NULL_TREE;
8679 expr1 = lss->info->expr;
8680 }
8681
8682 /* Bail out if this is not a valid allocate on assignment. */
8683 if (!gfc_is_reallocatable_lhs (expr1)
8684 || (expr2 && !expr2->rank))
8685 return NULL_TREE;
8686
8687 /* Find the ss for the lhs. */
8688 lss = loop->ss;
8689 for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
8690 if (lss->info->expr == expr1)
8691 break;
8692
8693 if (lss == gfc_ss_terminator)
8694 return NULL_TREE;
8695
8696 linfo = &lss->info->data.array;
8697
8698 /* Find an ss for the rhs. For operator expressions, we see the
8699 ss's for the operands. Any one of these will do. */
8700 rss = loop->ss;
8701 for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
8702 if (rss->info->expr != expr1 && rss != loop->temp_ss)
8703 break;
8704
8705 if (expr2 && rss == gfc_ss_terminator)
8706 return NULL_TREE;
8707
8708 gfc_start_block (&fblock);
8709
8710 /* Since the lhs is allocatable, this must be a descriptor type.
8711 Get the data and array size. */
8712 desc = linfo->descriptor;
8713 gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
8714 array1 = gfc_conv_descriptor_data_get (desc);
8715
8716 /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
8717 deallocated if expr is an array of different shape or any of the
8718 corresponding length type parameter values of variable and expr
8719 differ." This assures F95 compatibility. */
8720 jump_label1 = gfc_build_label_decl (NULL_TREE);
8721 jump_label2 = gfc_build_label_decl (NULL_TREE);
8722
8723 /* Allocate if data is NULL. */
8724 cond_null = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
8725 array1, build_int_cst (TREE_TYPE (array1), 0));
8726
8727 if (expr1->ts.deferred)
8728 cond_null = gfc_evaluate_now (boolean_true_node, &fblock);
8729 else
8730 cond_null= gfc_evaluate_now (cond_null, &fblock);
8731
8732 tmp = build3_v (COND_EXPR, cond_null,
8733 build1_v (GOTO_EXPR, jump_label1),
8734 build_empty_stmt (input_location));
8735 gfc_add_expr_to_block (&fblock, tmp);
8736
8737 /* Get arrayspec if expr is a full array. */
8738 if (expr2 && expr2->expr_type == EXPR_FUNCTION
8739 && expr2->value.function.isym
8740 && expr2->value.function.isym->conversion)
8741 {
8742 /* For conversion functions, take the arg. */
8743 gfc_expr *arg = expr2->value.function.actual->expr;
8744 as = gfc_get_full_arrayspec_from_expr (arg);
8745 }
8746 else if (expr2)
8747 as = gfc_get_full_arrayspec_from_expr (expr2);
8748 else
8749 as = NULL;
8750
8751 /* If the lhs shape is not the same as the rhs jump to setting the
8752 bounds and doing the reallocation....... */
8753 for (n = 0; n < expr1->rank; n++)
8754 {
8755 /* Check the shape. */
8756 lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8757 ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
8758 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8759 gfc_array_index_type,
8760 loop->to[n], loop->from[n]);
8761 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8762 gfc_array_index_type,
8763 tmp, lbound);
8764 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8765 gfc_array_index_type,
8766 tmp, ubound);
8767 cond = fold_build2_loc (input_location, NE_EXPR,
8768 boolean_type_node,
8769 tmp, gfc_index_zero_node);
8770 tmp = build3_v (COND_EXPR, cond,
8771 build1_v (GOTO_EXPR, jump_label1),
8772 build_empty_stmt (input_location));
8773 gfc_add_expr_to_block (&fblock, tmp);
8774 }
8775
8776 /* ....else jump past the (re)alloc code. */
8777 tmp = build1_v (GOTO_EXPR, jump_label2);
8778 gfc_add_expr_to_block (&fblock, tmp);
8779
8780 /* Add the label to start automatic (re)allocation. */
8781 tmp = build1_v (LABEL_EXPR, jump_label1);
8782 gfc_add_expr_to_block (&fblock, tmp);
8783
8784 /* If the lhs has not been allocated, its bounds will not have been
8785 initialized and so its size is set to zero. */
8786 size1 = gfc_create_var (gfc_array_index_type, NULL);
8787 gfc_init_block (&alloc_block);
8788 gfc_add_modify (&alloc_block, size1, gfc_index_zero_node);
8789 gfc_init_block (&realloc_block);
8790 gfc_add_modify (&realloc_block, size1,
8791 gfc_conv_descriptor_size (desc, expr1->rank));
8792 tmp = build3_v (COND_EXPR, cond_null,
8793 gfc_finish_block (&alloc_block),
8794 gfc_finish_block (&realloc_block));
8795 gfc_add_expr_to_block (&fblock, tmp);
8796
8797 /* Get the rhs size and fix it. */
8798 if (expr2)
8799 desc2 = rss->info->data.array.descriptor;
8800 else
8801 desc2 = NULL_TREE;
8802
8803 size2 = gfc_index_one_node;
8804 for (n = 0; n < expr2->rank; n++)
8805 {
8806 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8807 gfc_array_index_type,
8808 loop->to[n], loop->from[n]);
8809 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8810 gfc_array_index_type,
8811 tmp, gfc_index_one_node);
8812 size2 = fold_build2_loc (input_location, MULT_EXPR,
8813 gfc_array_index_type,
8814 tmp, size2);
8815 }
8816 size2 = gfc_evaluate_now (size2, &fblock);
8817
8818 cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
8819 size1, size2);
8820
8821 /* If the lhs is deferred length, assume that the element size
8822 changes and force a reallocation. */
8823 if (expr1->ts.deferred)
8824 neq_size = gfc_evaluate_now (boolean_true_node, &fblock);
8825 else
8826 neq_size = gfc_evaluate_now (cond, &fblock);
8827
8828 /* Deallocation of allocatable components will have to occur on
8829 reallocation. Fix the old descriptor now. */
8830 if ((expr1->ts.type == BT_DERIVED)
8831 && expr1->ts.u.derived->attr.alloc_comp)
8832 old_desc = gfc_evaluate_now (desc, &fblock);
8833 else
8834 old_desc = NULL_TREE;
8835
8836 /* Now modify the lhs descriptor and the associated scalarizer
8837 variables. F2003 7.4.1.3: "If variable is or becomes an
8838 unallocated allocatable variable, then it is allocated with each
8839 deferred type parameter equal to the corresponding type parameters
8840 of expr , with the shape of expr , and with each lower bound equal
8841 to the corresponding element of LBOUND(expr)."
8842 Reuse size1 to keep a dimension-by-dimension track of the
8843 stride of the new array. */
8844 size1 = gfc_index_one_node;
8845 offset = gfc_index_zero_node;
8846
8847 for (n = 0; n < expr2->rank; n++)
8848 {
8849 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8850 gfc_array_index_type,
8851 loop->to[n], loop->from[n]);
8852 tmp = fold_build2_loc (input_location, PLUS_EXPR,
8853 gfc_array_index_type,
8854 tmp, gfc_index_one_node);
8855
8856 lbound = gfc_index_one_node;
8857 ubound = tmp;
8858
8859 if (as)
8860 {
8861 lbd = get_std_lbound (expr2, desc2, n,
8862 as->type == AS_ASSUMED_SIZE);
8863 ubound = fold_build2_loc (input_location,
8864 MINUS_EXPR,
8865 gfc_array_index_type,
8866 ubound, lbound);
8867 ubound = fold_build2_loc (input_location,
8868 PLUS_EXPR,
8869 gfc_array_index_type,
8870 ubound, lbd);
8871 lbound = lbd;
8872 }
8873
8874 gfc_conv_descriptor_lbound_set (&fblock, desc,
8875 gfc_rank_cst[n],
8876 lbound);
8877 gfc_conv_descriptor_ubound_set (&fblock, desc,
8878 gfc_rank_cst[n],
8879 ubound);
8880 gfc_conv_descriptor_stride_set (&fblock, desc,
8881 gfc_rank_cst[n],
8882 size1);
8883 lbound = gfc_conv_descriptor_lbound_get (desc,
8884 gfc_rank_cst[n]);
8885 tmp2 = fold_build2_loc (input_location, MULT_EXPR,
8886 gfc_array_index_type,
8887 lbound, size1);
8888 offset = fold_build2_loc (input_location, MINUS_EXPR,
8889 gfc_array_index_type,
8890 offset, tmp2);
8891 size1 = fold_build2_loc (input_location, MULT_EXPR,
8892 gfc_array_index_type,
8893 tmp, size1);
8894 }
8895
8896 /* Set the lhs descriptor and scalarizer offsets. For rank > 1,
8897 the array offset is saved and the info.offset is used for a
8898 running offset. Use the saved_offset instead. */
8899 tmp = gfc_conv_descriptor_offset (desc);
8900 gfc_add_modify (&fblock, tmp, offset);
8901 if (linfo->saved_offset
8902 && TREE_CODE (linfo->saved_offset) == VAR_DECL)
8903 gfc_add_modify (&fblock, linfo->saved_offset, tmp);
8904
8905 /* Now set the deltas for the lhs. */
8906 for (n = 0; n < expr1->rank; n++)
8907 {
8908 tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
8909 dim = lss->dim[n];
8910 tmp = fold_build2_loc (input_location, MINUS_EXPR,
8911 gfc_array_index_type, tmp,
8912 loop->from[dim]);
8913 if (linfo->delta[dim]
8914 && TREE_CODE (linfo->delta[dim]) == VAR_DECL)
8915 gfc_add_modify (&fblock, linfo->delta[dim], tmp);
8916 }
8917
8918 /* Get the new lhs size in bytes. */
8919 if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
8920 {
8921 if (expr2->ts.deferred)
8922 {
8923 if (TREE_CODE (expr2->ts.u.cl->backend_decl) == VAR_DECL)
8924 tmp = expr2->ts.u.cl->backend_decl;
8925 else
8926 tmp = rss->info->string_length;
8927 }
8928 else
8929 {
8930 tmp = expr2->ts.u.cl->backend_decl;
8931 if (!tmp && expr2->expr_type == EXPR_OP
8932 && expr2->value.op.op == INTRINSIC_CONCAT)
8933 {
8934 tmp = concat_str_length (expr2);
8935 expr2->ts.u.cl->backend_decl = gfc_evaluate_now (tmp, &fblock);
8936 }
8937 tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
8938 }
8939
8940 if (expr1->ts.u.cl->backend_decl
8941 && TREE_CODE (expr1->ts.u.cl->backend_decl) == VAR_DECL)
8942 gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
8943 else
8944 gfc_add_modify (&fblock, lss->info->string_length, tmp);
8945 }
8946 else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
8947 {
8948 tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
8949 tmp = fold_build2_loc (input_location, MULT_EXPR,
8950 gfc_array_index_type, tmp,
8951 expr1->ts.u.cl->backend_decl);
8952 }
8953 else
8954 tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
8955 tmp = fold_convert (gfc_array_index_type, tmp);
8956 size2 = fold_build2_loc (input_location, MULT_EXPR,
8957 gfc_array_index_type,
8958 tmp, size2);
8959 size2 = fold_convert (size_type_node, size2);
8960 size2 = fold_build2_loc (input_location, MAX_EXPR, size_type_node,
8961 size2, size_one_node);
8962 size2 = gfc_evaluate_now (size2, &fblock);
8963
8964 /* For deferred character length, the 'size' field of the dtype might
8965 have changed so set the dtype. */
8966 if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
8967 && expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
8968 {
8969 tree type;
8970 tmp = gfc_conv_descriptor_dtype (desc);
8971 if (expr2->ts.u.cl->backend_decl)
8972 type = gfc_typenode_for_spec (&expr2->ts);
8973 else
8974 type = gfc_typenode_for_spec (&expr1->ts);
8975
8976 gfc_add_modify (&fblock, tmp,
8977 gfc_get_dtype_rank_type (expr1->rank,type));
8978 }
8979
8980 /* Realloc expression. Note that the scalarizer uses desc.data
8981 in the array reference - (*desc.data)[<element>]. */
8982 gfc_init_block (&realloc_block);
8983
8984 if ((expr1->ts.type == BT_DERIVED)
8985 && expr1->ts.u.derived->attr.alloc_comp)
8986 {
8987 tmp = gfc_deallocate_alloc_comp_no_caf (expr1->ts.u.derived, old_desc,
8988 expr1->rank);
8989 gfc_add_expr_to_block (&realloc_block, tmp);
8990 }
8991
8992 tmp = build_call_expr_loc (input_location,
8993 builtin_decl_explicit (BUILT_IN_REALLOC), 2,
8994 fold_convert (pvoid_type_node, array1),
8995 size2);
8996 gfc_conv_descriptor_data_set (&realloc_block,
8997 desc, tmp);
8998
8999 if ((expr1->ts.type == BT_DERIVED)
9000 && expr1->ts.u.derived->attr.alloc_comp)
9001 {
9002 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
9003 expr1->rank);
9004 gfc_add_expr_to_block (&realloc_block, tmp);
9005 }
9006
9007 realloc_expr = gfc_finish_block (&realloc_block);
9008
9009 /* Only reallocate if sizes are different. */
9010 tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
9011 build_empty_stmt (input_location));
9012 realloc_expr = tmp;
9013
9014
9015 /* Malloc expression. */
9016 gfc_init_block (&alloc_block);
9017 tmp = build_call_expr_loc (input_location,
9018 builtin_decl_explicit (BUILT_IN_MALLOC),
9019 1, size2);
9020 gfc_conv_descriptor_data_set (&alloc_block,
9021 desc, tmp);
9022
9023 /* We already set the dtype in the case of deferred character
9024 length arrays. */
9025 if (!(GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
9026 && expr1->ts.type == BT_CHARACTER && expr1->ts.deferred))
9027 {
9028 tmp = gfc_conv_descriptor_dtype (desc);
9029 gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
9030 }
9031
9032 if ((expr1->ts.type == BT_DERIVED)
9033 && expr1->ts.u.derived->attr.alloc_comp)
9034 {
9035 tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
9036 expr1->rank);
9037 gfc_add_expr_to_block (&alloc_block, tmp);
9038 }
9039 alloc_expr = gfc_finish_block (&alloc_block);
9040
9041 /* Malloc if not allocated; realloc otherwise. */
9042 tmp = build_int_cst (TREE_TYPE (array1), 0);
9043 cond = fold_build2_loc (input_location, EQ_EXPR,
9044 boolean_type_node,
9045 array1, tmp);
9046 tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
9047 gfc_add_expr_to_block (&fblock, tmp);
9048
9049 /* Make sure that the scalarizer data pointer is updated. */
9050 if (linfo->data
9051 && TREE_CODE (linfo->data) == VAR_DECL)
9052 {
9053 tmp = gfc_conv_descriptor_data_get (desc);
9054 gfc_add_modify (&fblock, linfo->data, tmp);
9055 }
9056
9057 /* Add the exit label. */
9058 tmp = build1_v (LABEL_EXPR, jump_label2);
9059 gfc_add_expr_to_block (&fblock, tmp);
9060
9061 return gfc_finish_block (&fblock);
9062 }
9063
9064
9065 /* NULLIFY an allocatable/pointer array on function entry, free it on exit.
9066 Do likewise, recursively if necessary, with the allocatable components of
9067 derived types. */
9068
void
gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
{
  tree type;
  tree tmp;
  tree descriptor;
  stmtblock_t init;
  stmtblock_t cleanup;
  locus loc;
  int rank;
  bool sym_has_alloc_comp, has_finalizer;

  /* Derived/class symbols with allocatable components must be handled
     component by component rather than nulled/freed wholesale.  */
  sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
			|| sym->ts.type == BT_CLASS)
			  && sym->ts.u.derived->attr.alloc_comp;
  has_finalizer = sym->ts.type == BT_CLASS || sym->ts.type == BT_DERIVED
		   ? gfc_is_finalizable (sym->ts.u.derived, NULL) : false;

  /* Make sure the frontend gets these right.  */
  gcc_assert (sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp
	      || has_finalizer);

  /* Point the diagnostics/debug locus at the declaration while building
     the init code; restored before returning.  */
  gfc_save_backend_locus (&loc);
  gfc_set_backend_locus (&sym->declared_at);
  gfc_init_block (&init);

  gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
		|| TREE_CODE (sym->backend_decl) == PARM_DECL);

  /* Non-constant character lengths need their length expression (and any
     VLA type sizes) evaluated in the init block.  */
  if (sym->ts.type == BT_CHARACTER
      && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
    {
      gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
      gfc_trans_vla_type_sizes (sym, &init);
    }

  /* Dummy, use associated and result variables don't need anything special.  */
  if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
    {
      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      gfc_restore_backend_locus (&loc);
      return;
    }

  descriptor = sym->backend_decl;

  /* Although static, derived types with default initializers and
     allocatable components must not be nulled wholesale; instead they
     are treated component by component.  */
  if (TREE_STATIC (descriptor) && !sym_has_alloc_comp && !has_finalizer)
    {
      /* SAVEd variables are not freed on exit.  */
      gfc_trans_static_array_pointer (sym);

      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      gfc_restore_backend_locus (&loc);
      return;
    }

  /* Get the descriptor type.  */
  type = TREE_TYPE (sym->backend_decl);

  if ((sym_has_alloc_comp || (has_finalizer && sym->ts.type != BT_CLASS))
      && !(sym->attr.pointer || sym->attr.allocatable))
    {
      /* Nullify the allocatable components, or apply the default
	 initializer, for non-SAVEd local derived-type objects.  */
      if (!sym->attr.save
	  && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
	{
	  if (sym->value == NULL
	      || !gfc_has_default_initializer (sym->ts.u.derived))
	    {
	      rank = sym->as ? sym->as->rank : 0;
	      tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
					    descriptor, rank);
	      gfc_add_expr_to_block (&init, tmp);
	    }
	  else
	    gfc_init_default_dt (sym, &init, false);
	}
    }
  else if (!GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* If the backend_decl is not a descriptor, we must have a pointer
	 to one.  */
      descriptor = build_fold_indirect_ref_loc (input_location,
						sym->backend_decl);
      type = TREE_TYPE (descriptor);
    }

  /* NULLIFY the data pointer, for non-saved allocatables.  */
  if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save && sym->attr.allocatable)
    gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);

  gfc_restore_backend_locus (&loc);
  gfc_init_block (&cleanup);

  /* Allocatable arrays need to be freed when they go out of scope.
     The allocatable components of pointers must not be touched.  */
  if (!sym->attr.allocatable && has_finalizer && sym->ts.type != BT_CLASS
      && !sym->attr.pointer && !sym->attr.artificial && !sym->attr.save
      && !sym->ns->proc_name->attr.is_main_program)
    {
      /* Non-allocatable finalizable derived-type objects: call the
	 finalizer on scope exit.  */
      gfc_expr *e;
      sym->attr.referenced = 1;
      e = gfc_lval_expr_from_sym (sym);
      gfc_add_finalizer_call (&cleanup, e);
      gfc_free_expr (e);
    }
  else if ((!sym->attr.allocatable || !has_finalizer)
	   && sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
	   && !sym->attr.pointer && !sym->attr.save
	   && !sym->ns->proc_name->attr.is_main_program)
    {
      /* Deallocate the allocatable components on scope exit.  */
      int rank;
      rank = sym->as ? sym->as->rank : 0;
      tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
      gfc_add_expr_to_block (&cleanup, tmp);
    }

  if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
      && !sym->attr.save && !sym->attr.result
      && !sym->ns->proc_name->attr.is_main_program)
    {
      /* Free the array itself (finalizing first when a finalizer exists).  */
      gfc_expr *e;
      e = has_finalizer ? gfc_lval_expr_from_sym (sym) : NULL;
      tmp = gfc_trans_dealloc_allocated (sym->backend_decl,
					 sym->attr.codimension, e);
      if (e)
	gfc_free_expr (e);
      gfc_add_expr_to_block (&cleanup, tmp);
    }

  gfc_add_init_cleanup (block, gfc_finish_block (&init),
			gfc_finish_block (&cleanup));
}
9204
9205 /************ Expression Walking Functions ******************/
9206
9207 /* Walk a variable reference.
9208
9209 Possible extension - multiple component subscripts.
9210 x(:,:) = foo%a(:)%b(:)
9211 Transforms to
9212 forall (i=..., j=...)
9213 x(i,j) = foo%a(j)%b(i)
9214 end forall
9215 This adds a fair amount of complexity because you need to deal with more
9216 than one ref. Maybe handle in a similar manner to vector subscripts.
9217 Maybe not worth the effort. */
9218
9219
9220 static gfc_ss *
gfc_walk_variable_expr(gfc_ss * ss,gfc_expr * expr)9221 gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
9222 {
9223 gfc_ref *ref;
9224
9225 for (ref = expr->ref; ref; ref = ref->next)
9226 if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
9227 break;
9228
9229 return gfc_walk_array_ref (ss, expr, ref);
9230 }
9231
9232
/* Walk REF and the references following it, extending the SS chain with
   entries for substring bounds, scalar subscripts, full-array references
   and array sections of EXPR.  Returns the (possibly extended) chain.  */

gfc_ss *
gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
{
  gfc_array_ref *ar;
  gfc_ss *newss;
  int n;

  for (; ref; ref = ref->next)
    {
      if (ref->type == REF_SUBSTRING)
	{
	  /* Substring bounds are scalar expressions evaluated inside
	     the scalarization loop.  */
	  ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
	  ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
	}

      /* We're only interested in array sections from now on.  */
      if (ref->type != REF_ARRAY)
	continue;

      ar = &ref->u.ar;

      switch (ar->type)
	{
	case AR_ELEMENT:
	  /* Each subscript of an element reference is a scalar term.  */
	  for (n = ar->dimen - 1; n >= 0; n--)
	    ss = gfc_get_scalar_ss (ss, ar->start[n]);
	  break;

	case AR_FULL:
	  newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
	  newss->info->data.array.ref = ref;

	  /* Make sure array is the same as array(:,:), this way
	     we don't need to special case all the time.  */
	  ar->dimen = ar->as->rank;
	  for (n = 0; n < ar->dimen; n++)
	    {
	      ar->dimen_type[n] = DIMEN_RANGE;

	      gcc_assert (ar->start[n] == NULL);
	      gcc_assert (ar->end[n] == NULL);
	      gcc_assert (ar->stride[n] == NULL);
	    }
	  ss = newss;
	  break;

	case AR_SECTION:
	  /* Dimen is filled in below as the range dimensions are found.  */
	  newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
	  newss->info->data.array.ref = ref;

	  /* We add SS chains for all the subscripts in the section.  */
	  for (n = 0; n < ar->dimen; n++)
	    {
	      gfc_ss *indexss;

	      switch (ar->dimen_type[n])
		{
		case DIMEN_ELEMENT:
		  /* Add SS for elemental (scalar) subscripts.  */
		  gcc_assert (ar->start[n]);
		  indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
		  indexss->loop_chain = gfc_ss_terminator;
		  newss->info->data.array.subscript[n] = indexss;
		  break;

		case DIMEN_RANGE:
		  /* We don't add anything for sections, just remember this
		     dimension for later.  */
		  newss->dim[newss->dimen] = n;
		  newss->dimen++;
		  break;

		case DIMEN_VECTOR:
		  /* Create a GFC_SS_VECTOR index in which we can store
		     the vector's descriptor.  */
		  indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
					      1, GFC_SS_VECTOR);
		  indexss->loop_chain = gfc_ss_terminator;
		  newss->info->data.array.subscript[n] = indexss;
		  newss->dim[newss->dimen] = n;
		  newss->dimen++;
		  break;

		default:
		  /* We should know what sort of section it is by now.  */
		  gcc_unreachable ();
		}
	    }
	  /* We should have at least one non-elemental dimension,
	     unless we are creating a descriptor for a (scalar) coarray.  */
	  gcc_assert (newss->dimen > 0
		      || newss->info->data.array.ref->u.ar.as->corank > 0);
	  ss = newss;
	  break;

	default:
	  /* We should know what sort of section it is by now.  */
	  gcc_unreachable ();
	}

    }
  return ss;
}
9336
9337
9338 /* Walk an expression operator. If only one operand of a binary expression is
9339 scalar, we must also add the scalar term to the SS chain. */
9340
9341 static gfc_ss *
gfc_walk_op_expr(gfc_ss * ss,gfc_expr * expr)9342 gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
9343 {
9344 gfc_ss *head;
9345 gfc_ss *head2;
9346
9347 head = gfc_walk_subexpr (ss, expr->value.op.op1);
9348 if (expr->value.op.op2 == NULL)
9349 head2 = head;
9350 else
9351 head2 = gfc_walk_subexpr (head, expr->value.op.op2);
9352
9353 /* All operands are scalar. Pass back and let the caller deal with it. */
9354 if (head2 == ss)
9355 return head2;
9356
9357 /* All operands require scalarization. */
9358 if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
9359 return head2;
9360
9361 /* One of the operands needs scalarization, the other is scalar.
9362 Create a gfc_ss for the scalar expression. */
9363 if (head == ss)
9364 {
9365 /* First operand is scalar. We build the chain in reverse order, so
9366 add the scalar SS after the second operand. */
9367 head = head2;
9368 while (head && head->next != ss)
9369 head = head->next;
9370 /* Check we haven't somehow broken the chain. */
9371 gcc_assert (head);
9372 head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
9373 }
9374 else /* head2 == head */
9375 {
9376 gcc_assert (head2 == head);
9377 /* Second operand is scalar. */
9378 head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
9379 }
9380
9381 return head2;
9382 }
9383
9384
9385 /* Reverse a SS chain. */
9386
9387 gfc_ss *
gfc_reverse_ss(gfc_ss * ss)9388 gfc_reverse_ss (gfc_ss * ss)
9389 {
9390 gfc_ss *next;
9391 gfc_ss *head;
9392
9393 gcc_assert (ss != NULL);
9394
9395 head = gfc_ss_terminator;
9396 while (ss != gfc_ss_terminator)
9397 {
9398 next = ss->next;
9399 /* Check we didn't somehow break the chain. */
9400 gcc_assert (next != NULL);
9401 ss->next = head;
9402 head = ss;
9403 ss = next;
9404 }
9405
9406 return (head);
9407 }
9408
9409
9410 /* Given an expression referring to a procedure, return the symbol of its
9411 interface. We can't get the procedure symbol directly as we have to handle
9412 the case of (deferred) type-bound procedures. */
9413
9414 gfc_symbol *
gfc_get_proc_ifc_for_expr(gfc_expr * procedure_ref)9415 gfc_get_proc_ifc_for_expr (gfc_expr *procedure_ref)
9416 {
9417 gfc_symbol *sym;
9418 gfc_ref *ref;
9419
9420 if (procedure_ref == NULL)
9421 return NULL;
9422
9423 /* Normal procedure case. */
9424 if (procedure_ref->expr_type == EXPR_FUNCTION
9425 && procedure_ref->value.function.esym)
9426 sym = procedure_ref->value.function.esym;
9427 else
9428 sym = procedure_ref->symtree->n.sym;
9429
9430 /* Typebound procedure case. */
9431 for (ref = procedure_ref->ref; ref; ref = ref->next)
9432 {
9433 if (ref->type == REF_COMPONENT
9434 && ref->u.c.component->attr.proc_pointer)
9435 sym = ref->u.c.component->ts.interface;
9436 else
9437 sym = NULL;
9438 }
9439
9440 return sym;
9441 }
9442
9443
9444 /* Walk the arguments of an elemental function.
9445 PROC_EXPR is used to check whether an argument is permitted to be absent. If
9446 it is NULL, we don't do the check and the argument is assumed to be present.
9447 */
9448
/* Walk the arguments of an elemental function, building an SS chain for
   them.  PROC_IFC supplies the dummy arguments so that scalar actuals
   can be matched to their dummies; if it is NULL no matching is done and
   every argument is assumed present.  Returns SS unchanged when all
   arguments are scalar, otherwise the new chain spliced onto SS.  */

gfc_ss *
gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
				  gfc_symbol *proc_ifc, gfc_ss_type type)
{
  gfc_formal_arglist *dummy_arg;
  int scalar;
  gfc_ss *head;
  gfc_ss *tail;
  gfc_ss *newss;

  head = gfc_ss_terminator;
  tail = NULL;

  if (proc_ifc)
    dummy_arg = gfc_sym_get_dummy_args (proc_ifc);
  else
    dummy_arg = NULL;

  scalar = 1;
  for (; arg; arg = arg->next)
    {
      /* Absent/NULL() actuals contribute nothing, but the matching
	 dummy must still be advanced (hence the goto).  */
      if (!arg->expr || arg->expr->expr_type == EXPR_NULL)
	goto loop_continue;

      newss = gfc_walk_subexpr (head, arg->expr);
      if (newss == head)
	{
	  /* Scalar argument.  */
	  gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
	  newss = gfc_get_scalar_ss (head, arg->expr);
	  newss->info->type = type;
	  if (dummy_arg)
	    newss->info->data.scalar.dummy_arg = dummy_arg->sym;
	}
      else
	scalar = 0;

      /* An optional dummy matched by an optional/allocatable/pointer
	 variable may be absent at run time.  */
      if (dummy_arg != NULL
	  && dummy_arg->sym->attr.optional
	  && arg->expr->expr_type == EXPR_VARIABLE
	  && (gfc_expr_attr (arg->expr).optional
	      || gfc_expr_attr (arg->expr).allocatable
	      || gfc_expr_attr (arg->expr).pointer))
	newss->info->can_be_null_ref = true;

      head = newss;
      /* Remember the end of the chain so it can be spliced onto SS.  */
      if (!tail)
	{
	  tail = head;
	  while (tail->next != gfc_ss_terminator)
	    tail = tail->next;
	}

loop_continue:
      if (dummy_arg != NULL)
	dummy_arg = dummy_arg->next;
    }

  if (scalar)
    {
      /* If all the arguments are scalar we don't need the argument SS.  */
      gfc_free_ss_chain (head);
      /* Pass it back.  */
      return ss;
    }

  /* Add it onto the existing chain.  */
  tail->next = ss;
  return head;
}
9519
9520
9521 /* Walk a function call. Scalar functions are passed back, and taken out of
9522 scalarization loops. For elemental functions we walk their arguments.
9523 The result of functions returning arrays is stored in a temporary outside
9524 the loop, so that the function is only called once. Hence we do not need
9525 to walk their arguments. */
9526
9527 static gfc_ss *
gfc_walk_function_expr(gfc_ss * ss,gfc_expr * expr)9528 gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
9529 {
9530 gfc_intrinsic_sym *isym;
9531 gfc_symbol *sym;
9532 gfc_component *comp = NULL;
9533
9534 isym = expr->value.function.isym;
9535
9536 /* Handle intrinsic functions separately. */
9537 if (isym)
9538 return gfc_walk_intrinsic_function (ss, expr, isym);
9539
9540 sym = expr->value.function.esym;
9541 if (!sym)
9542 sym = expr->symtree->n.sym;
9543
9544 if (gfc_is_class_array_function (expr))
9545 return gfc_get_array_ss (ss, expr,
9546 CLASS_DATA (expr->value.function.esym->result)->as->rank,
9547 GFC_SS_FUNCTION);
9548
9549 /* A function that returns arrays. */
9550 comp = gfc_get_proc_ptr_comp (expr);
9551 if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
9552 || (comp && comp->attr.dimension))
9553 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);
9554
9555 /* Walk the parameters of an elemental function. For now we always pass
9556 by reference. */
9557 if (sym->attr.elemental || (comp && comp->attr.elemental))
9558 {
9559 gfc_ss *old_ss = ss;
9560
9561 ss = gfc_walk_elemental_function_args (old_ss,
9562 expr->value.function.actual,
9563 gfc_get_proc_ifc_for_expr (expr),
9564 GFC_SS_REFERENCE);
9565 if (ss != old_ss
9566 && (comp
9567 || sym->attr.proc_pointer
9568 || sym->attr.if_source != IFSRC_DECL
9569 || sym->attr.array_outer_dependency))
9570 ss->info->array_outer_dependency = 1;
9571 }
9572
9573 /* Scalar functions are OK as these are evaluated outside the scalarization
9574 loop. Pass back and let the caller deal with it. */
9575 return ss;
9576 }
9577
9578
9579 /* An array temporary is constructed for array constructors. */
9580
9581 static gfc_ss *
gfc_walk_array_constructor(gfc_ss * ss,gfc_expr * expr)9582 gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
9583 {
9584 return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
9585 }
9586
9587
9588 /* Walk an expression. Add walked expressions to the head of the SS chain.
9589 A wholly scalar expression will not be added. */
9590
9591 gfc_ss *
gfc_walk_subexpr(gfc_ss * ss,gfc_expr * expr)9592 gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
9593 {
9594 gfc_ss *head;
9595
9596 switch (expr->expr_type)
9597 {
9598 case EXPR_VARIABLE:
9599 head = gfc_walk_variable_expr (ss, expr);
9600 return head;
9601
9602 case EXPR_OP:
9603 head = gfc_walk_op_expr (ss, expr);
9604 return head;
9605
9606 case EXPR_FUNCTION:
9607 head = gfc_walk_function_expr (ss, expr);
9608 return head;
9609
9610 case EXPR_CONSTANT:
9611 case EXPR_NULL:
9612 case EXPR_STRUCTURE:
9613 /* Pass back and let the caller deal with it. */
9614 break;
9615
9616 case EXPR_ARRAY:
9617 head = gfc_walk_array_constructor (ss, expr);
9618 return head;
9619
9620 case EXPR_SUBSTRING:
9621 /* Pass back and let the caller deal with it. */
9622 break;
9623
9624 default:
9625 gfc_internal_error ("bad expression type during walk (%d)",
9626 expr->expr_type);
9627 }
9628 return ss;
9629 }
9630
9631
9632 /* Entry point for expression walking.
9633 A return value equal to the passed chain means this is
9634 a scalar expression. It is up to the caller to take whatever action is
9635 necessary to translate these. */
9636
9637 gfc_ss *
gfc_walk_expr(gfc_expr * expr)9638 gfc_walk_expr (gfc_expr * expr)
9639 {
9640 gfc_ss *res;
9641
9642 res = gfc_walk_subexpr (gfc_ss_terminator, expr);
9643 return gfc_reverse_ss (res);
9644 }
9645