1 /*
2 * Copyright (c) 1994-2019, NVIDIA CORPORATION. All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 *
16 */
17
18 /** \brief Fortran transformation module */
19
20 #include "gbldefs.h"
21 #include "global.h"
22 #include "error.h"
23 #include "comm.h"
24 #include "symtab.h"
25 #include "symutl.h"
26 #include "dtypeutl.h"
27 #include "soc.h"
28 #include "semant.h"
29 #include "ast.h"
30 #include "transfrm.h"
31 #include "gramtk.h"
32 #include "extern.h"
33 #include "hpfutl.h"
34 #include "dinit.h"
35 #include "ccffinfo.h"
36 #include "optimize.h"
37 #include "rte.h"
38 #include "rtlRtns.h"
39
40 static void rewrite_into_forall(void);
41 static void rewrite_block_where(void);
42 static void rewrite_block_forall(void);
43 static void find_allocatable_assignment(void);
44 static void rewrite_allocatable_assignment(int, int, bool, bool);
45 static void handle_allocatable_members(int, int, int, bool);
46 static void trans_get_descrs(void);
47 static int trans_getidx(void);
48 static void trans_clridx(void);
49 static void trans_freeidx(void);
50 static int collapse_assignment(int, int);
51 static int build_sdsc_node(int);
52 static int inline_spread_shifts(int, int, int);
53 static int copy_forall(int);
54 static void clear_dist_align(void);
55 static void transform_init(void);
56 static void declare_local_mode(void);
57 static void init_finfo(void);
58 static void distribute_fval(void);
59 static int get_newdist_with_newproc(int dist);
60 static void set_initial_s1(void);
61 static LOGICAL contains_non0_scope(int astSrc);
62 static LOGICAL is_non0_scope(int sptr);
63 static void gen_allocated_check(int, int, int, bool, bool, bool);
64 static int subscript_allocmem(int aref, int asd);
65 static int normalize_subscripts(int oldasd, int oldshape, int newshape);
66 static int gen_dos_over_shape(int shape, int std);
67 static void gen_do_ends(int docnt, int std);
68 static LOGICAL all_stride_one_shape(int shape);
69 static int mk_bounds_shape(int shape);
70 #if DEBUG
71 extern void dbg_print_stmts(FILE *);
72 #endif
73 static bool chk_assumed_subscr(int a);
74 static int mk_ptr_subscr(int subAst, int std);
75 static int get_sdsc_ast(SPTR sptrsrc, int astsrc);
76 static int build_poly_func_node(int dest, int src, int intrin_type);
77 static int mk_poly_test(int dest, int src, int optype, int intrin_type);
78 static int count_allocatable_members(int ast);
79
/* Table of per-forall information records (FINFO); grown on demand by
 * mk_finfo() and indexed from A_STARTG of a forall ast. */
FINFO_TBL finfot;
/* Cache of compiler-created scalar index symbols handed out by
 * get_init_idx(); cleared per routine by reset_init_idx(). */
static int init_idx[MAXSUBS + MAXSUBS];
static int num_init_idx; /* rotates the 'a'..'z' suffix of new index names */
struct pure_gbl pure_gbl;

extern int pghpf_type_sptr;
int pghpf_local_mode_sptr = 0;
87
/** \brief Driver for the transformation pass.
 *
 * For every subprogram except block data (gbl.rutype == RU_BDATA is
 * skipped), this runs the rewriting phases in a fixed order:
 * descriptor creation, block WHERE -> single-statement WHERE, block
 * FORALL -> single-statement FORALL, pure-forall and call rewriting,
 * allocatable-assignment discovery, array-assignment -> forall, and a
 * final forall cleanup.  Debug statement dumps are emitted between
 * phases when the corresponding DBGBIT(50, ...) flags are set.
 */
void
transform(void)
{
  pghpf_type_sptr = 0;
  pghpf_local_mode_sptr = 0;
  if (gbl.rutype != RU_BDATA) {
    transform_init();
    set_initial_s1();
    /* create descriptors */
    trans_get_descrs();

    /* turn block wheres into single wheres */
#if DEBUG
    if (DBGBIT(50, 4)) {
      fprintf(gbl.dbgfil, "Before rewrite_block_where\n");
      dstda();
    }
#endif
    rewrite_block_where();
#if DEBUG
    if (DBGBIT(50, 4)) {
      fprintf(gbl.dbgfil, "After rewrite_block_where\n");
      dstda();
    }
#endif

    /* turn block foralls into single foralls */
    rewrite_block_forall();
#if DEBUG
    if (DBGBIT(50, 4)) {
      fprintf(gbl.dbgfil, "After rewrite_block_forall\n");
      dstda();
    }
#endif

    /* transformational intrinsics */
    /* rewrite_forall_intrinsic();*/
    rewrite_forall_pure();
    if (flg.opt >= 2 && XBIT(53, 2)) {
      points_to();
    }
#if DEBUG
    if (DBGBIT(50, 4)) {
      fprintf(gbl.dbgfil, "After rewrite_forall_pure\n");
      dstdpa();
    }
#endif

    /* Rewrite arguments to subroutines and uses of array-valued
     * functions */
    rewrite_calls();
#if DEBUG
    if (DBGBIT(50, 4)) {
      fprintf(gbl.dbgfil, "After rewrite_calls\n");
      dstda();
    }
#endif

    find_allocatable_assignment();
#if DEBUG
    if (DBGBIT(50, 4)) {
      fprintf(gbl.dbgfil, "After find_allocatable_assignment\n");
      dstda();
    }
#endif

    /* Transform array assignments, etc. into forall */
    rewrite_into_forall();
#if DEBUG
    if (DBGBIT(50, 4)) {
      fprintf(gbl.dbgfil, "After rewrite_into_forall\n");
      dstda();
    }
#endif

    /* This routine rewrites those foralls
     * 1. forall with shape such as A(i,:)
     * 2. forall with dependency,
     * 3. forall with distributed indirection array at rhs.
     */
    rewrite_forall();
#if DEBUG
    if (DBGBIT(50, 4)) {
      fprintf(gbl.dbgfil, "After rewrite_forall\n");
      dstda();
    }
#endif

#if DEBUG
    if (DBGBIT(50, 2)) {
      fprintf(gbl.dbgfil, "Statements after transform pass\n");
      dbg_print_stmts(gbl.dbgfil);
    }
#endif
    if (flg.opt >= 2 && XBIT(53, 2)) {
      f90_fini_pointsto();
    }

    trans_freeidx();

    /* sanity check: everything queued for deallocation must be drained */
    if (sem.p_dealloc != 0) {
      interr("items were added to sem.p_dealloc but not freed", 0, ERR_Severe);
    }
  }
}
193
194 void
reset_init_idx(void)195 reset_init_idx(void)
196 {
197 int i;
198 for (i = 0; i < MAXSUBS + MAXSUBS; i++) {
199 init_idx[i] = 0;
200 }
201 }
202
203 static void
transform_init(void)204 transform_init(void)
205 {
206 int i;
207
208 init_finfo();
209 pure_gbl.local_mode = 0;
210 pghpf_type_sptr = 0;
211 pghpf_local_mode_sptr = 0;
212 init_region();
213 if (gbl.rutype != RU_BDATA) {
214 for (i = 0; i < MAXSUBS + MAXSUBS; i++) {
215 init_idx[i] = 0;
216 }
217 num_init_idx = 0;
218 }
219 }
220
/*
 * Set SDSCS1 for descriptors of user array pointers or array-member
 * pointers; for allocatables, assumed-shape, and fixed-shape arrays, the
 * associated descriptors will always have a linear stride in the 1st
 * dimension of one.  Also, set SDSCCONTIG for descriptors of user arrays
 * with the ALLOCATABLE attribute, assumed-shape dummies, or fixed-shape
 * arrays.  XBITs 54/2 and 58/0x400000 (with TARGET) suppress the
 * stride-1 assumption for assumed-shape dummies.
 */
static void
set_initial_s1(void)
{
  int sptr, sdsc, dtype, eldtype;
  /* scan every user symbol in the symbol table */
  for (sptr = stb.firstusym; sptr < stb.stg_avail; ++sptr) {
    switch (STYPEG(sptr)) {
    case ST_ARRAY:
    case ST_DESCRIPTOR:
    case ST_VAR:
    case ST_IDENT:
    case ST_STRUCT:
    case ST_MEMBER:
      if (IGNOREG(sptr))
        break;
      dtype = DTYPEG(sptr);
      if (dtype && DTY(dtype) == TY_ARRAY) {
        sdsc = SDSCG(sptr);
        if (sdsc != 0 && STYPEG(sdsc) != ST_PARAM) {
          /* an array with a section descriptor */
          if (!POINTERG(sptr)) {
            if ((SCG(sptr) == SC_DUMMY || SCG(sdsc) == SC_DUMMY) &&
                ASSUMSHPG(sptr)) {
              if (!XBIT(54, 2) && !(XBIT(58, 0x400000) && TARGETG(sptr))) {
                /* don't set S1 for assumed-shape if -x 54 2 */
                /* don't set S1 for assumed-shape if -x 58 0x400000 && target */
                SDSCS1P(sdsc, 1);
              }
            } else {
              SDSCS1P(sdsc, 1);
            }
          } else {
            /* set SDSCS1 for section descriptor if stride-1 */
            /* NOTE(review): s1 is always 0 here, so this branch is
             * provably dead -- it looks like a stubbed-out stride-1
             * analysis for POINTER arrays; confirm before relying on it. */
            long s1;
            s1 = 0;
            if (s1) {
              SDSCS1P(sdsc, 1);
              SDSCCONTIGP(sdsc, 1);
              BYTELENP(sdsc, s1);
            }
          }
          /* mark contiguous descriptors and record the element byte
           * length, except for assumed/adjustable/deferred char lengths */
          if ((ALLOCATTRG(sptr) || (ASSUMSHPG(sptr) && !XBIT(54, 2)
              && !(XBIT(58, 0x400000) && TARGETG(sptr))))
              &&
              !ASSUMLENG(sptr) && !ADJLENG(sptr) &&
              !(DDTG(DTYPEG(sptr)) == DT_DEFERCHAR ||
                DDTG(DTYPEG(sptr)) == DT_DEFERNCHAR)) {
            SDSCCONTIGP(sdsc, 1);
            eldtype = DTY(dtype + 1);
            BYTELENP(sdsc, size_of(eldtype));
          }
        }
        /* repeat the flag setting for the dummy's NEWDSC descriptor */
        if (SCG(sptr) == SC_DUMMY) {
          sdsc = NEWDSCG(sptr);
          if (sdsc != 0 && STYPEG(sdsc) != ST_PARAM) {
            if (!POINTERG(sptr) && !(XBIT(54, 2) && ASSUMSHPG(sptr)) &&
                !(XBIT(58, 0x400000) && TARGETG(sptr) && ASSUMSHPG(sptr))) {
              /* set SDSCS1 for section descriptor */
              /* don't set S1 for assumed-shape if -x 54 2 */
              /* don't set S1 for assumed-shape if -x 58 0x400000 && target */
              SDSCS1P(sdsc, 1);
            }
            if ((ALLOCATTRG(sptr) || (ASSUMSHPG(sptr) && !XBIT(54, 2) &&
                 !(XBIT(58, 0x400000) && TARGETG(sptr)))) &&
                !ASSUMLENG(sptr) && !ADJLENG(sptr) &&
                !(DDTG(DTYPEG(sptr)) == DT_DEFERCHAR ||
                  DDTG(DTYPEG(sptr)) == DT_DEFERNCHAR)) {
              SDSCCONTIGP(sdsc, 1);
              eldtype = DTY(dtype + 1);
              BYTELENP(sdsc, size_of(eldtype));
            }
          }
        }
      }
      break;
    default:;
    }
  }
} /* set_initial_s1 */
306
307 int
get_init_idx(int i,int dtype)308 get_init_idx(int i, int dtype)
309 {
310 if (init_idx[i] == 0 || SCG(init_idx[i]) != symutl.sc ||
311 DTYPEG(init_idx[i]) != dtype) {
312 char ci[2], cj[2];
313 ci[0] = 'i';
314 ci[1] = '\0';
315 cj[0] = 'a' + num_init_idx;
316 cj[1] = '\0';
317 init_idx[i] = sym_get_scalar(ci, cj, dtype);
318 ++num_init_idx;
319 if (num_init_idx >= 26)
320 num_init_idx = 0;
321 }
322 return init_idx[i];
323 } /* get_init_idx */
324
325 /* forall table */
326
327 static void
init_finfo(void)328 init_finfo(void)
329 {
330 finfot.size = 240;
331 NEW(finfot.base, FINFO, finfot.size);
332 finfot.avl = 1;
333 }
334
335 static int
mk_finfo(void)336 mk_finfo(void)
337 {
338 int nd;
339
340 nd = finfot.avl++;
341 /* finfot.avl += sizeof(FINFO); */
342 NEED(finfot.avl, finfot.base, FINFO, finfot.size, finfot.size + 240);
343 if (finfot.base == NULL)
344 errfatal(7);
345 return nd;
346 }
347
/* Look up ast 'a' among the FINFO entries attached to 'forall'
 * (the NCOUNT entries ending at START); returns the slot index, or 0
 * when the ast has no entry. */
int
get_finfo(int forall, int a)
{
  int lo = (int)(A_STARTG(forall) - A_NCOUNTG(forall));
  int slot;

  for (slot = A_STARTG(forall); slot > lo; slot--) {
    if (FINFO_AST(slot) == a)
      return slot;
  }
  return 0;
}
359
360 #define TRANS_AREA 10
361
362 static void
clear_dist_align(void)363 clear_dist_align(void)
364 {
365 int sptr;
366 int stype;
367
368 for (sptr = stb.firstusym; sptr < stb.stg_avail; sptr++) {
369 stype = STYPEG(sptr);
370 if (stype == ST_ARRAY) {
371 if (!ASSUMSHPG(sptr))
372 SEQP(sptr, 1);
373 }
374 }
375 }
376
/* Communication area for the WHERE-mask helpers: srch_sym() compares
 * each A_ID it visits against this symbol (set by mask_on_lhs()). */
static struct {
  int sptr;
} wherestuff;
380
381 static void
nice_mask(int ast,LOGICAL * nice)382 nice_mask(int ast, LOGICAL *nice)
383 {
384 switch (A_TYPEG(ast)) {
385 case A_BINOP:
386 if (A_OPTYPEG(ast) == OP_XTOX) /* real ** real */
387 *nice = FALSE;
388 break;
389 case A_SUBSCR:
390 case A_ID:
391 case A_PAREN:
392 case A_CONV:
393 case A_CNST:
394 case A_CMPLXC:
395 case A_UNOP:
396 case A_TRIPLE:
397 break;
398 default:
399 *nice = FALSE;
400 break;
401 }
402 }
403
404 static LOGICAL
nice_where_mask(int ast)405 nice_where_mask(int ast)
406 {
407 LOGICAL nice;
408
409 nice = TRUE;
410 ast_visit(1, 1);
411 ast_traverse(ast, NULL, nice_mask, &nice);
412 ast_unvisit();
413 return nice;
414 }
415
416 static void
srch_sym(int ast,LOGICAL * has_sym)417 srch_sym(int ast, LOGICAL *has_sym)
418 {
419 if (A_TYPEG(ast) == A_ID && wherestuff.sptr == A_SPTRG(ast))
420 *has_sym = TRUE;
421 }
422
423 static LOGICAL
mask_on_lhs(int mask,int lhs)424 mask_on_lhs(int mask, int lhs)
425 {
426 int sptr, stype;
427 LOGICAL has_sym;
428
429 /* find the LHS symbol */
430 if (A_TYPEG(lhs) == A_SUBSCR)
431 lhs = A_LOPG(lhs);
432 if (A_TYPEG(lhs) != A_ID)
433 return TRUE;
434 sptr = A_SPTRG(lhs);
435 stype = STYPEG(sptr);
436 assert(stype == ST_ARRAY, "mask_on_lhs: sptr not array", sptr, 4);
437 wherestuff.sptr = sptr;
438 has_sym = FALSE;
439 ast_visit(1, 1);
440 ast_traverse(mask, NULL, srch_sym, &has_sym);
441 ast_unvisit();
442 return has_sym;
443 }
444
/* Rewrite the mask expression of a block WHERE when its evaluation
 * produces auxiliary statements, making sure any temp-freeing code ends
 * up after the matching ENDWHERE rather than inside the construct. */
static void
rewrite_where_expr(int where_std, int endwhere_std)
{
  int ast, std;
  int astnew, stdnew;

  /* rewrite the where expression if it has transformationals, etc. */
  ast = STD_AST(where_std);
  /* If the expression requires a temporary as part of its
   * evaluation, must make sure that the temp is freed after
   * the WHERE, if it is a block where. An ugly way to
   * do this is to create a temp statement then move stuff
   * that gets added after it.
   */
  astnew = mk_stmt(A_CONTINUE, 0);
  stdnew = add_stmt_before(astnew, where_std);
  arg_gbl.std = stdnew;
  /* A_IFEXPRP(ast, rewrite_sub_ast(A_IFEXPRG(ast)));*/
  /* all the stuff from between stdnew and where_std needs
   * to be moved after the ENDWHERE
   */
  if (STD_NEXT(stdnew) != where_std) {
    /* link the chain in after endwhere_std */
    STD_PREV(STD_NEXT(endwhere_std)) = STD_PREV(where_std);
    STD_NEXT(STD_PREV(where_std)) = STD_NEXT(endwhere_std);
    STD_NEXT(endwhere_std) = STD_NEXT(stdnew);
    STD_PREV(STD_NEXT(endwhere_std)) = endwhere_std;
    /* remove the chain after stdnew */
    STD_NEXT(stdnew) = where_std;
    STD_PREV(where_std) = stdnew;
  }
  /* unlink the dummy statement */
  STD_NEXT(STD_PREV(stdnew)) = STD_NEXT(stdnew);
  STD_PREV(STD_NEXT(stdnew)) = STD_PREV(stdnew);
  arg_gbl.std = where_std;
}
481
/* One entry in the WHERE/FORALL nesting stack: the STDs of the open
 * construct's WHERE statement, its ELSEWHERE (0 if none), and an open
 * FORALL statement.  The where and forall stacks share this array but
 * use independent tops. */
typedef struct wherestackentry {
  int where, elsewhere, forall;
} wherestackentry;

/* The shared nesting stack; entry 0 is a sentinel set by init_where(). */
struct wherestack {
  wherestackentry *base;
  int size, topwhere, topforall;
} wherestack = {(wherestackentry *)0, 0, 0, 0};
490
491 /*
492 * allocate the wherestack; also, initialize it at entry zero
493 * with zero where/elsewhere statements
494 */
495 static void
init_where(void)496 init_where(void)
497 {
498 int top;
499 wherestack.size = 5;
500 NEW(wherestack.base, wherestackentry, wherestack.size);
501 top = wherestack.topwhere = wherestack.topforall = 0;
502 wherestack.base[top].where = 0;
503 wherestack.base[top].elsewhere = 0;
504 wherestack.base[top].forall = 0;
505 } /* init_where */
506
507 static void
push_where(int where_std)508 push_where(int where_std)
509 {
510 int top;
511 ++wherestack.topwhere;
512 NEED(wherestack.topwhere + 1, wherestack.base, wherestackentry,
513 wherestack.size, 2 * wherestack.size);
514 top = wherestack.topwhere;
515 wherestack.base[top].where = where_std;
516 wherestack.base[top].elsewhere = 0;
517 } /* push_where */
518
519 static void
push_elsewhere(int elsewhere_std)520 push_elsewhere(int elsewhere_std)
521 {
522 int top;
523 top = wherestack.topwhere;
524 if (top == 0)
525 interr("rewrite_block_forall: elsewhere with no where", elsewhere_std, 3);
526 if (wherestack.base[top].elsewhere != 0)
527 interr("rewrite_block_forall: two elsewheres", elsewhere_std, 3);
528 wherestack.base[top].elsewhere = elsewhere_std;
529 } /* push_elsewhere */
530
531 static void
pop_where(int * where,int * elsewhere)532 pop_where(int *where, int *elsewhere)
533 {
534 int top;
535 top = wherestack.topwhere;
536 if (top <= 0) {
537 *where = 0;
538 *elsewhere = 0;
539 } else {
540 *where = wherestack.base[top].where;
541 *elsewhere = wherestack.base[top].elsewhere;
542 --wherestack.topwhere;
543 }
544 } /* pop_where */
545
546 static void
push_forall(int forall_std)547 push_forall(int forall_std)
548 {
549 int top;
550 ++wherestack.topforall;
551 NEED(wherestack.topforall + 1, wherestack.base, wherestackentry,
552 wherestack.size, 2 * wherestack.size);
553 top = wherestack.topforall;
554 wherestack.base[top].forall = forall_std;
555 } /* push_forall */
556
557 static void
pop_forall(int * forall_std)558 pop_forall(int *forall_std)
559 {
560 int top;
561 top = wherestack.topforall;
562 if (top <= 0) {
563 *forall_std = 0;
564 } else {
565 *forall_std = wherestack.base[top].forall;
566 --wherestack.topforall;
567 }
568 } /* pop_forall */
569
570 static void
add_wheresym(ITEM ** wheresymlist,int wheresym)571 add_wheresym(ITEM **wheresymlist, int wheresym)
572 {
573 ITEM *itemp = (ITEM *)getitem(TRANS_AREA, sizeof(ITEM));
574 itemp->next = *wheresymlist;
575 itemp->t.sptr = wheresym;
576 *wheresymlist = itemp;
577 }
578
579 static LOGICAL
in_wheresymlist(ITEM * list,int sptr)580 in_wheresymlist(ITEM *list, int sptr)
581 {
582 ITEM *itemp;
583 for (itemp = list; itemp != ITEM_END; itemp = itemp->next) {
584 if (itemp->t.sptr == sptr) {
585 return TRUE;
586 }
587 }
588 return FALSE;
589 }
590
/*
 * Transform block WHERE statements to single-statement wheres.
 *
 * Walks the whole STD chain tracking OpenMP region nesting (to pick the
 * right descriptor storage class), FORALL index nesting, and WHERE
 * nesting.  At each ENDWHERE the construct body is rewritten: every
 * array assignment becomes a single-statement WHERE guarded by either
 * the original mask expression (when it is "nice" and unmodified in the
 * body) or by a compiler-created logical temp that is allocated,
 * assigned, negated at ELSEWHERE, and deallocated around the construct.
 */
static void
rewrite_block_where(void)
{
  int std, stdnext, std1;
  int shape;
  int ast, ast1, ast2, lhs, nestedwhere;
  int where_load;
  int list;
  int wheresym;
  int sptr_lhs;
  int subscr[MAXSUBS];
  int where_std, elsewhere_std, endwhere_std;
  int outer_where_std, outer_endwhere_std;
  LOGICAL nice_where;
  int shape1;
  int parallel_depth;
  int task_depth;
  ITEM *wheresymlist = ITEM_END;

  init_where();

  /* Transform block wheres */
  where_std = elsewhere_std = 0;
  parallel_depth = 0;
  task_depth = 0;
  for (std = STD_NEXT(0); std != 0; std = stdnext) {
    stdnext = STD_NEXT(std);
    gbl.lineno = STD_LINENO(std);
    ast = STD_AST(std);
    switch (A_TYPEG(ast)) {
    case A_MP_PARALLEL:
      ++parallel_depth;
      /*symutl.sc = SC_PRIVATE;*/
      set_descriptor_sc(SC_PRIVATE);
      break;
    case A_MP_ENDPARALLEL:
      --parallel_depth;
      if (parallel_depth == 0 && task_depth == 0) {
        /*symutl.sc = SC_LOCAL;*/
        set_descriptor_sc(SC_LOCAL);
      }
      break;
    case A_MP_TASK:
    case A_MP_TASKLOOP:
      ++task_depth;
      set_descriptor_sc(SC_PRIVATE);
      break;
    case A_MP_ENDTASK:
    case A_MP_ETASKLOOP:
      --task_depth;
      if (parallel_depth == 0 && task_depth == 0) {
        set_descriptor_sc(SC_LOCAL);
      }
      break;
    case A_FORALL:
      if (A_IFSTMTG(ast) == 0) {
        int astli, li;
        push_forall(std);
        /* mark the forall indices */
        astli = A_LISTG(ast);
        for (li = astli; li != 0; li = ASTLI_NEXT(li)) {
          int sptr = ASTLI_SPTR(li);
#if DEBUG
          if (FORALLNDXG(sptr)) {
            interr("rewrite_block_where: nested foralls with same index", std,
                   4);
          }
#endif
          FORALLNDXP(sptr, 1);
        }
      }
      break;
    case A_ENDFORALL: {
      int forall_std, forall_ast, astli, li;
      pop_forall(&forall_std);
      forall_ast = STD_AST(forall_std);
#if DEBUG
      if (A_TYPEG(forall_ast) != A_FORALL) {
        interr("rewrite_block_where: problem with endforall nesting", std, 4);
      }
#endif
      /* now unmark the forall indices */
      astli = A_LISTG(forall_ast);
      for (li = astli; li != 0; li = ASTLI_NEXT(li)) {
        int sptr = ASTLI_SPTR(li);
#if DEBUG
        if (!FORALLNDXG(sptr)) {
          interr("rewrite_block_where: forall index flag improperly reset", std,
                 4);
        }
#endif
        FORALLNDXP(sptr, 0);
      }
    } break;
    case A_WHERE:
      if (!A_IFSTMTG(ast)) {
        if (wherestack.topwhere == 0) {
          int std1, ast1, ast2, wherenest;
          /* this is the outermost WHERE, find outermost ENDWHERE */
          outer_where_std = std;
          outer_endwhere_std = 0;
          wherenest = 1;
          for (std1 = STD_NEXT(std); std1 > 0 && wherenest > 0;
               std1 = STD_NEXT(std1)) {
            ast1 = STD_AST(std1);
            switch (A_TYPEG(ast1)) {
            case A_WHERE:
              if (A_IFSTMTG(ast1) == 0) {
                ++wherenest;
              } else {
                /* Single-statement WHERE from nested where
                 * Rewrite to regular nested WHERE */
                ast2 = mk_stmt(A_ENDWHERE, 0);
                add_stmt_after(ast2, std1);
                ast2 = A_IFSTMTG(ast1);
                ast2 = mk_assn_stmt(A_DESTG(ast2), A_SRCG(ast2), A_DTYPEG(ast2));
                add_stmt_after(ast2, std1);
                ast2 = mk_stmt(A_WHERE, 0);
                A_IFEXPRP(ast2, A_IFEXPRG(ast1));
                add_stmt_after(ast2, std1);
                ast_to_comment(STD_AST(std1));
              }
              break;
            case A_ENDWHERE:
              --wherenest;
              if (wherenest == 0)
                outer_endwhere_std = std1;
              break;
            }
          }
          if (outer_endwhere_std == 0)
            interr("rewrite_block_where: no outer endwhere", std, 4);
        }
        push_where(std);
      }
      break;
    case A_ELSEWHERE:
      assert(wherestack.topwhere > 0,
             "rewrite_block_where: ELSEWHERE with no WHERE", 0, 4);
      push_elsewhere(std);
      break;
    case A_ENDWHERE:
      /* end of block where. Try to optimize mask creation. If the
       * mask expression is 'nice', and no variable in the mask
       * expr is modified in the WHERE, then just use the expression
       * and its negation. Otherwise create a temp and use that.
       *
       * Use-def would be nice here, we'll hack it for now.
       */
      pop_where(&where_std, &elsewhere_std);
      endwhere_std = std;
      /* first pass over the body: find an lhs with shape and check that
       * it conforms with the mask's shape */
      lhs = 0;
      for (std1 = where_std; std1 != endwhere_std; std1 = STD_NEXT(std1)) {

        if (std1 == where_std || std1 == elsewhere_std)
          continue;

        ast = STD_AST(std1);
        /* might be a call or an allocate here,
         * front end rewrites array-valued
         * functions.
         */
        switch (A_TYPEG(ast)) {
        case A_CALL:
        case A_ALLOC:
        case A_CONTINUE:
        case A_COMMENT:
        case A_COMSTR:
        case A_DO:
        case A_ENDDO:
          continue;
        case A_WHERE:
          /* could be single-statement WHERE from nested where */
          ast = A_IFSTMTG(ast);
          break;
        case A_ASN:
          break;
        default:
          error(510, 4, STD_LINENO(where_std), CNULL, CNULL);
        }

        /* assignment node, look at lhs */
        lhs = A_DESTG(ast);
        if (HCCSYMG(memsym_of_ast(lhs))) {
          /* assignments to compiler generated symbols to not need
           * to be conformable */
          continue;
        }
        shape = A_SHAPEG(lhs);
        if (shape == 0)
          continue;
        shape1 = A_SHAPEG(A_IFEXPRG(STD_AST(where_std)));
        if (!conform_shape(shape, shape1))
          error(511, 3, STD_LINENO(std), CNULL, CNULL);
        break;
      }
      if (!A_SHAPEG(A_IFEXPRG(STD_AST(where_std))))
        error(512, 4, STD_LINENO(where_std), CNULL, CNULL);
      rewrite_where_expr(where_std, endwhere_std);
      if (wherestack.topwhere > 0) {
        /* nested WHEREs always get temporary */
        nice_where = FALSE;
      } else {
        nice_where = nice_where_mask(A_IFEXPRG(STD_AST(where_std)));
      }

      /* second pass: a "nice" mask is disqualified if any assignment in
       * the body modifies a symbol appearing in the mask */
      where_load = A_IFEXPRG(STD_AST(where_std));
      for (std1 = where_std; nice_where && std1 != endwhere_std;
           std1 = STD_NEXT(std1)) {

        if (std1 == where_std || std1 == elsewhere_std)
          continue;

        ast = STD_AST(std1);
        /* might be a call or an allocate here,
         * front end rewrites array-valued
         * functions.
         */
        switch (A_TYPEG(ast)) {
        case A_CALL:
        case A_ALLOC:
        case A_CONTINUE:
        case A_COMMENT:
        case A_COMSTR:
        case A_DO:
        case A_ENDDO:
          continue;
        case A_WHERE:
          /* could be single-statement WHERE from nested where */
          ast = A_IFSTMTG(ast);
          break;
        case A_ASN:
          break;
        default:
          interr("rewrite_block_where: non assignment in WHERE", std1, 4);
        }

        /* assignment node, look at lhs */
        lhs = A_DESTG(ast);
        shape = A_SHAPEG(lhs);
        if (shape == 0)
          continue;
        /* this is an array assignment */
        if (mask_on_lhs(where_load, lhs))
          nice_where = FALSE;
      }
      if (!nice_where && lhs) {
        /* need a logical temp to hold the mask value */
        ast = STD_AST(where_std);
        shape = A_SHAPEG(A_IFEXPRG(ast));
        assert(shape != 0, "rewrite_block_where: bad where", std, 4);
        /* get a temp */
        assert(A_SHAPEG(lhs), "rewrite_block_where: no shape in WHERE", 0, 4);
        ast1 = lhs;
        if (ast1 == 0)
          ast1 = search_conform_array(A_IFEXPRG(ast), FALSE);
        if (ast1 == 0)
          ast1 = search_conform_array(A_IFEXPRG(ast), TRUE);
        assert(ast1 != 0, "rewrite_block_where: can't find array", 0, 4);
        /* where_load becomes a reference to the new temp */
        wheresym = mk_assign_sptr(ast1, "ww", subscr, DT_LOG, &where_load);
        add_wheresym(&wheresymlist, wheresym);
      }
      /* third pass: turn each array assignment into a single-statement
       * WHERE guarded by where_load (negated past the ELSEWHERE when the
       * mask is used directly) */
      for (std1 = where_std; std1 != endwhere_std; std1 = STD_NEXT(std1)) {

        if (std1 == where_std)
          continue;
        if (std1 == elsewhere_std) {
          if (nice_where)
            where_load = mk_unop(OP_LNOT, where_load, A_DTYPEG(where_load));
          continue;
        }
        ast = STD_AST(std1);

        nestedwhere = 0;
        switch (A_TYPEG(ast)) {
        case A_CALL:
        case A_ALLOC:
        case A_CONTINUE:
        case A_COMMENT:
        case A_COMSTR:
        case A_DO:
        case A_ENDDO:
          continue;
        case A_WHERE:
          /* could be single-statement WHERE from nested where */
          nestedwhere = A_IFEXPRG(ast);
          ast = A_IFSTMTG(ast);
          break;
        case A_ASN:
          break;
        default:
          interr("rewrite_block_where: non assignment in WHERE", std1, 4);
        }

        /* assignment node, look at lhs */
        lhs = A_DESTG(ast);

        sptr_lhs = memsym_of_ast(lhs);
        if (A_SHAPEG(A_DESTG(ast)) == 0 ||
            (HCCSYMG(sptr_lhs) && !in_wheresymlist(wheresymlist, sptr_lhs)))
          continue;

        /* this is an array assignment */

        /* make it a where */
        ast1 = mk_stmt(A_WHERE, 0);
        A_IFSTMTP(ast1, ast);
        if (nestedwhere) {
          /* make .AND. of condition; use SCAND as noncommutative AND */
          /* NOTE(review): the combined SCAND expression below is stored
           * into 'nestedwhere' but never used -- the WHERE keeps the
           * uncombined nested condition set just above.  Confirm whether
           * the A_IFEXPRP was meant to follow the mk_binop. */
          A_IFEXPRP(ast1, nestedwhere);
          nestedwhere =
              mk_binop(OP_SCAND, where_load, nestedwhere, A_DTYPEG(where_load));
        } else {
          A_IFEXPRP(ast1, where_load);
        }
        A_STDP(ast1, std1);
        STD_AST(std1) = ast1;
      }
      if (!nice_where && lhs) {
        /* make "wheresym = expr" */
        ast = STD_AST(where_std);
        ast2 = mk_stmt(A_ASN, DTYPEG(wheresym));
        A_DESTP(ast2, where_load);
        A_SRCP(ast2, A_IFEXPRG(ast));
        add_stmt_after(ast2, where_std);
        /* Insert the allocate statement */
        mk_mem_allocate(mk_id(wheresym), subscr, outer_where_std, 0);
        add_stmt_before(mk_assn_stmt(where_load, astb.i0, DT_LOG),
                        outer_where_std);

        if (elsewhere_std) {
          /* generate "where_sym = .not. where_sym" */
          ast2 = mk_unop(OP_LNOT, where_load, A_DTYPEG(where_load));
          ast1 = mk_stmt(A_ASN, DTYPEG(wheresym));
          A_DESTP(ast1, where_load);
          A_SRCP(ast1, ast2);
          add_stmt_after(ast1, elsewhere_std);
        }

        /* insert deallocate statement */
        mk_mem_deallocate(mk_id(wheresym), outer_endwhere_std);
      }
      /* retire the construct's own statements */
      if (where_std)
        ast_to_comment(STD_AST(where_std));
      if (elsewhere_std)
        ast_to_comment(STD_AST(elsewhere_std));
      if (endwhere_std)
        ast_to_comment(STD_AST(endwhere_std));
      break;
    default:
      break;
    }
  }
  FREE(wherestack.base);
}
949
950 static int ForallList;
951
952 /* This is the callback function for contains_forall_index(). */
953 static LOGICAL
_contains_forall_index(int ast,LOGICAL * flag)954 _contains_forall_index(int ast, LOGICAL *flag)
955 {
956 if (ast && A_TYPEG(ast) == A_ID) {
957 int list;
958 for (list = ForallList; list; list = ASTLI_NEXT(list)) {
959 if (A_SPTRG(ast) == ASTLI_SPTR(list)) {
960 *flag = TRUE;
961 return TRUE;
962 }
963 }
964 }
965 return FALSE;
966 } /* _contains_forall_index */
967
968 /* Return TRUE if any index in the forall_list occurs somewhere within ast.
969 * Modified from 'ast.c:contains_ast' */
970 static LOGICAL
contains_forall_index(int ast,int forall_list)971 contains_forall_index(int ast, int forall_list)
972 {
973 LOGICAL result = FALSE;
974
975 if (!ast)
976 return FALSE;
977
978 ForallList = forall_list;
979 ast_visit(1, 1);
980 ast_traverse(ast, _contains_forall_index, NULL, &result);
981 ast_unvisit();
982 return result;
983 } /* contains_forall_index */
984
985 static void
rewrite_block_forall(void)986 rewrite_block_forall(void)
987 {
988 int std, stdnext, std1;
989 int ast, ast1, ast2;
990 int list, stmt;
991 int expr, expr1, where_expr;
992 int subscr[MAXSUBS];
993 int forallb_std, endforall_std;
994 int stack[MAXSUBS], top;
995 int newforall;
996 int forallb;
997
998 /*
999 * Transform block FORALL constructs to single-statement FORALLs
1000 */
1001
1002 /* Transform block FORALLs */
1003 forallb_std = endforall_std = 0;
1004 top = 0;
1005 for (std = STD_NEXT(0); std != 0; std = stdnext) {
1006 stdnext = STD_NEXT(std);
1007 gbl.lineno = STD_LINENO(std);
1008 ast = STD_AST(std);
1009 if (A_TYPEG(ast) == A_FORALL && !A_IFSTMTG(ast)) {
1010 forallb_std = std;
1011 stack[top] = forallb_std;
1012 top++;
1013 assert(top <= MAXSUBS && top >= 0,
1014 "rewrite_block_forall: FORALL with no ENDFORALL", 0, 4);
1015 } else if (A_TYPEG(ast) == A_ENDFORALL) {
1016 endforall_std = std;
1017 top--;
1018 forallb_std = stack[top];
1019 assert(forallb_std, "rewrite_block_forall: FORALL with no ENDFORALL", 0,
1020 4);
1021 for (std1 = forallb_std; std1 != endforall_std; std1 = STD_NEXT(std1)) {
1022
1023 gbl.lineno = STD_LINENO(std1);
1024
1025 if (std1 == forallb_std) {
1026 forallb = STD_AST(forallb_std);
1027 continue;
1028 }
1029
1030 ast = STD_AST(std1);
1031 /* might be a call or an allocate here,
1032 * front end rewrites array-valued
1033 * functions.
1034 */
1035 if (A_TYPEG(ast) == A_CALL) {
1036 if (!contains_forall_index(ast, A_LISTG(forallb)))
1037 continue;
1038 }
1039 if (A_TYPEG(ast) == A_ALLOC || A_TYPEG(ast) == A_CONTINUE ||
1040 A_TYPEG(ast) == A_COMMENT || A_TYPEG(ast) == A_COMSTR)
1041 continue;
1042 /* or it may be like, z_b_0 = 1 */
1043 if (A_TYPEG(ast) == A_ASN && A_TYPEG(A_DESTG(ast)) == A_ID)
1044 continue;
1045
1046 switch (A_TYPEG(ast)) {
1047 case A_CALL:
1048 case A_ASN:
1049 case A_ICALL:
1050 expr = A_IFEXPRG(forallb);
1051 list = A_LISTG(forallb);
1052 stmt = ast;
1053 break;
1054 case A_WHERE:
1055 expr = A_IFEXPRG(forallb);
1056 where_expr = A_IFEXPRG(ast);
1057 if (expr)
1058 expr = mk_binop(OP_LAND, expr, where_expr, DT_LOG);
1059 else
1060 expr = where_expr;
1061 list = A_LISTG(forallb);
1062 stmt = A_IFSTMTG(ast);
1063 break;
1064 case A_FORALL:
1065 list = concatenate_list(A_LISTG(forallb), A_LISTG(ast));
1066 expr = A_IFEXPRG(forallb);
1067 expr1 = A_IFEXPRG(ast);
1068 if (expr && expr1)
1069 expr = mk_binop(OP_LAND, expr, expr1, DT_LOG);
1070 else if (expr1)
1071 expr = expr1;
1072 stmt = A_IFSTMTG(ast);
1073 break;
1074 default:
1075 interr("rewrite_block_forall: illegal statement in FORALL", ast, 3);
1076 }
1077
1078 assert(stmt && list, "rewrite_block_forall: someting is wrong", ast, 4);
1079 newforall = mk_stmt(A_FORALL, 0);
1080 A_IFSTMTP(newforall, stmt);
1081 A_IFEXPRP(newforall, expr);
1082 A_LISTP(newforall, list);
1083 A_SRCP(newforall, A_SRCG(forallb));
1084 add_stmt_before(newforall, std1);
1085 ast_to_comment(STD_AST(std1));
1086 }
1087 ast_to_comment(STD_AST(forallb_std));
1088 ast_to_comment(STD_AST(endforall_std));
1089 }
1090 }
1091 }
1092
1093 static void
check_subprogram(int std,int ast,int callast)1094 check_subprogram(int std, int ast, int callast)
1095 {
1096 int lop = A_LOPG(callast);
1097 int sptr = memsym_of_ast(lop);
1098 if (SEQUENTG(sptr)) { /* TPR 1786 */
1099 /* go through the arguments;
1100 * if any are array-valued, make forall */
1101 int shape, shapearg, i, cnt, argt, arg;
1102 shape = 0;
1103 cnt = A_ARGCNTG(callast);
1104 argt = A_ARGSG(callast);
1105 for (i = 0; i < cnt; ++i) {
1106 arg = ARGT_ARG(argt, i);
1107 if (arg > 0) {
1108 shape = A_SHAPEG(arg);
1109 shapearg = arg;
1110 if (shape)
1111 break;
1112 }
1113 }
1114 if (shape) { /* i is the argument with the shape */
1115 int ast1;
1116 ast1 = make_forall(shape, shapearg, 0, 0);
1117 for (i = 0; i < cnt; ++i) {
1118 arg = ARGT_ARG(argt, i);
1119 if (arg > 0) {
1120 arg = normalize_forall(ast1, arg, 0);
1121 ARGT_ARG(argt, i) = arg;
1122 }
1123 }
1124 A_IFSTMTP(ast1, ast);
1125 A_IFEXPRP(ast1, 0);
1126 A_STDP(ast1, std);
1127 STD_AST(std) = ast1;
1128 }
1129 }
1130 } /* check_subprogram */
1131
/* This routine is to find an array from expr which has constant bounds.
 * We currently allow simple expression with rhs rank 1.
 *
 * Walks the rhs expression tree looking for a rank-1 array reference whose
 * shape has constant lower/upper bounds and unit stride.  On success stores
 * the reference in *rhs and its shape in *shape and returns TRUE; otherwise
 * returns FALSE and leaves the out-parameters untouched.
 */

static LOGICAL
find_const_bound_rhs(int expr, int *rhs, int* shape)
{
  int i, nargs, argt; /* NOTE(review): unused at this scope */
  int asd;            /* NOTE(review): unused; shadowed in A_SUBSCR case */
  int ndim;           /* NOTE(review): unused */
  int list;           /* NOTE(review): unused */
  LOGICAL find1, find2; /* NOTE(review): find2 unused */

  if (expr == 0)
    return FALSE;

  switch (A_TYPEG(expr)) {
  case A_BINOP:
    /* search the left operand first; fall back to the right */
    find1 = find_const_bound_rhs(A_LOPG(expr), rhs, shape);
    if (find1)
      return TRUE;
    return find_const_bound_rhs(A_ROPG(expr), rhs, shape);
  case A_UNOP:
    return find_const_bound_rhs(A_LOPG(expr), rhs, shape);
  case A_CONV:
    return find_const_bound_rhs(A_LOPG(expr), rhs, shape);
  case A_PAREN:
    return find_const_bound_rhs(A_LOPG(expr), rhs, shape);
  case A_ID:
    /* whole-array reference: qualifies if rank 1 with constant bounds
     * and default (unit) stride in every dimension */
    if (DTY(A_DTYPEG(expr)) == TY_ARRAY) {
      int shd = A_SHAPEG(expr);
      if (shd) {
        int ii, arr_lb, arr_ub, arr_st;
        int nd = SHD_NDIM(shd);
        if (nd > 1)
          return FALSE;
        for (ii = 0; ii < nd; ++ii) {
          arr_lb = SHD_LWB(shd, ii);
          arr_ub = SHD_UPB(shd, ii);
          arr_st = SHD_STRIDE(shd, ii);
          if (A_TYPEG(arr_ub) != A_CNST)
            return FALSE;
          if (A_TYPEG(arr_lb) != A_CNST)
            return FALSE;
          /* stride 0 means "default", which is treated as 1 */
          if (arr_st != 0 && arr_st != astb.bnd.one)
            return FALSE;
        }
        *rhs = expr;
        *shape = shd;
        return TRUE;
      }
    }
    return FALSE;
  case A_SUBSCR:
    if (vector_member(expr)) {
      /* NOTE(review): expr is A_SUBSCR here, so this A_MEM test can
       * never be true; both branches return FALSE anyway. */
      if (A_TYPEG(expr) == A_MEM) {
        int sptr = A_SPTRG(A_MEMG(expr));
        if (DTY(DTYPEG(sptr)) == TY_ARRAY) {
          return FALSE;
        }
        return FALSE;
      }
      if (A_TYPEG(expr) == A_SUBSCR) {
        int asd, i, n;
        asd = A_ASDG(expr);
        n = ASD_NDIM(asd);
        if (n > 1)
          return FALSE;
        for (i = 0; i < n; ++i) {
          int ss = ASD_SUBS(asd, i);
          /* a shaped (vector) subscript disqualifies the section */
          if (A_SHAPEG(ss) > 0) {
            return FALSE;
          }
          if (A_TYPEG(ss) == A_TRIPLE) {
            /* Ignore non-stride 1 for now */
            /* check if triplet value is the same as array bounds */
            int dtype, lop;
            int lwb = A_LBDG(ss);
            int upb = A_UPBDG(ss);
            int st = A_STRIDEG(ss);
            if (st == 0)
              st = astb.bnd.one;
            if ( st != astb.bnd.one)
              return FALSE;

            lop = A_LOPG(expr);
            /* allow simple expression for now */
            if (A_TYPEG(lop) == A_ID && A_SHAPEG(lop)) {
              int ii, arr_lb, arr_ub, arr_st;
              int shd = A_SHAPEG(lop);
              int nd = SHD_NDIM(shd);
              if (nd > 1)
                return FALSE;
              for (ii = 0; ii < nd; ++ii) {
                arr_lb = SHD_LWB(shd, ii);
                arr_ub = SHD_UPB(shd, ii);
                arr_st = SHD_STRIDE(shd, ii);
                if (A_TYPEG(arr_ub) != A_CNST)
                  return FALSE;
                if (A_TYPEG(arr_lb) != A_CNST)
                  return FALSE;
                /* the triplet must span exactly the declared bounds */
                if (arr_lb != lwb ||
                    arr_ub != upb ||
                    arr_st != st) {
                  return FALSE;
                }
              }
              *rhs = expr;
              *shape = A_SHAPEG(lop);
              return TRUE;
            }
          }
        }
      }
    } else if (A_TYPEG(A_LOPG(expr)) == A_MEM) {
      /* scalar member subscript: the parent may still hold the section */
      return find_const_bound_rhs(A_PARENTG(expr), rhs, shape);
    }
    return FALSE;

  case A_MEM:
  case A_TRIPLE:
  case A_SUBSTR:
  case A_INTR:
  case A_FUNC:
  case A_CNST:
  case A_CMPLXC:
  default:
    return FALSE;
  }
}
1262
1263
1264 /* check if this current shape has constant bounds */
1265 static LOGICAL
constant_shape(int shape)1266 constant_shape(int shape)
1267 {
1268 int ii, lb, ub, st;
1269 int nd = SHD_NDIM(shape);
1270
1271 for (ii = 0; ii < nd; ++ii) {
1272 ub = SHD_UPB(shape, ii);
1273 lb = SHD_LWB(shape, ii);
1274 if (A_TYPEG(ub) != A_CNST)
1275 return FALSE;
1276 if (A_TYPEG(lb) != A_CNST)
1277 return FALSE;
1278 }
1279
1280 return TRUE;
1281 }
1282
1283
1284
/* Walk every statement (STD) in the program and rewrite array-valued
 * statements into single-statement FORALLs; also track OpenMP region
 * nesting so temporaries get the right storage class. */
static void
rewrite_into_forall(void)
{
  int std, stdnext;
  int shape;
  int ast, ast1, ast2, lhs, rhs;
  int where_load; /* NOTE(review): unused */
  int list;       /* NOTE(review): unused */
  int wheresym;   /* NOTE(review): unused */
  int sptr;       /* NOTE(review): unused */
  int shape1, shape2;
  int parallel_depth;
  int task_depth;
  int copy_ast = 0, dealloc_ast = 0; /* NOTE(review): unused */

  /*
   * Transform WHERE statements to foralls, and transform block-forall
   * statements to single-statement foralls.
   *
   * Block-foralls can be left alone when back end is prepared to handle
   * them.
   *
   * Subset HPF doesn't allow block foralls.
   *
   * IF statements are transformed to IF-THEN-ENDIF statements so that
   * communication calls can be inserted without trouble.
   *
   * Some call statements are inspected and elementalized if they
   * have array arguments (specifically, F90 IO routines).
   */

  parallel_depth = 0;
  task_depth = 0;
  /* stdnext is captured up front because the current statement's ast
   * may be replaced in place */
  for (std = STD_NEXT(0); std; std = stdnext) {
    stdnext = STD_NEXT(std);
    gbl.lineno = STD_LINENO(std);
    ast = STD_AST(std);
    switch (A_TYPEG(ast)) {
    case A_WHERE:
      /* only single-statement WHEREs should survive to this point;
       * block WHERE constructs were rewritten earlier */
      if (A_IFSTMTG(ast)) {
        if (!A_SHAPEG(A_IFEXPRG(ast)))
          error(512, 4, STD_LINENO(std), CNULL, CNULL);
        shape1 = A_SHAPEG(A_IFEXPRG(ast));
        shape2 = A_SHAPEG(A_DESTG(A_IFSTMTG(ast)));
        if (!conform_shape(shape1, shape2))
          error(511, 3, STD_LINENO(std), CNULL, CNULL);
        /* single-stmt where */
        /* create forall stmt */
        /* forall is normalized with respect to the LHS expression */
        ast1 = make_forall(A_SHAPEG(A_DESTG(A_IFSTMTG(ast))),
                           A_DESTG(A_IFSTMTG(ast)), A_IFEXPRG(ast), 0);
        /* flag to show that it is made from arrray assignment */
        A_ARRASNP(ast1, 1);

        ast2 = normalize_forall(ast1, A_IFSTMTG(ast), 0);
        /* replace this ast with forall */
        A_IFSTMTP(ast1, ast2);
        A_STDP(ast1, std);
        STD_AST(std) = ast1;
      } else {
        interr("rewrite_info_forall: WHERE construct", std, 4);
      }
      break;
    case A_ELSEWHERE:
    case A_ENDWHERE:
      interr("rewrite_info_forall: WHERE construct", std, 4);
      break;
    case A_MP_ATOMICUPDATE:
      /* array-valued atomic update: wrap it in a forall over the lhs */
      lhs = A_LOPG(ast);
      rhs = A_ROPG(ast);
      shape = A_SHAPEG(lhs);
      if (shape) {
        ast1 = make_forall(shape, lhs, 0, 0);
        ast2 = normalize_forall(ast1, ast, 0);
        A_IFSTMTP(ast1, ast2);
        A_IFEXPRP(ast1, 0);
        A_STDP(ast1, std);
        STD_AST(std) = ast1;

        /* flag to show that it is made from array assignment */
        A_ARRASNP(ast1, 1);
        STD_ZTRIP(std) = 1;
      }

      break;
    case A_ASN:
      /* assignment node, look at lhs */
      lhs = A_DESTG(ast);
      rhs = A_SRCG(ast);

      /* if it is string, don't touch it */
      if (A_TYPEG(lhs) == A_SUBSTR && A_TYPEG(A_LOPG(lhs)) == A_SUBSCR)
        lhs = A_LOPG(lhs);

      shape = A_SHAPEG(lhs);
      if (shape) {
        /*
         * check if array assignment can be collapsed into a single
         * memset/move
         */
        ast1 = collapse_assignment(ast, std);
        if (ast1) {
          std = add_stmt_after(ast1, std);
          ast_to_comment(ast);
        } else {
          /* this is an array assignment; need to create a forall */

          int newrhs, newshape;
          /* at -O2+, prefer a constant-bound rhs shape so the loop
           * bounds are compile-time constants */
          if (flg.opt >= 2 && !XBIT(58,0x1000000)
              && !constant_shape(shape) &&
              find_const_bound_rhs(rhs, &newrhs, &newshape)) {
            ast1 = make_forall(newshape, newrhs, 0, 0);
            A_CONSTBNDP(ast1, 1);
          } else {
            ast1 = make_forall(shape, lhs, 0, 0);
          }
          ast2 = normalize_forall(ast1, ast, 0);
          A_IFSTMTP(ast1, ast2);
          A_IFEXPRP(ast1, 0);
          A_STDP(ast1, std);
          STD_AST(std) = ast1;
          /* flag to show that it is made from array assignment */
          A_ARRASNP(ast1, 1);
          STD_ZTRIP(std) = 1;
        }
      } else {
        if (A_TYPEG(rhs) == A_FUNC) {
          check_subprogram(std, ast, rhs);
        }
      }
      break;
    case A_CALL:
      check_subprogram(std, ast, ast);
      break;
    case A_MP_PARALLEL:
      ++parallel_depth;
      /*symutl.sc = SC_PRIVATE;*/
      set_descriptor_sc(SC_PRIVATE);
      break;
    case A_MP_ENDPARALLEL:
      --parallel_depth;
      if (parallel_depth == 0 && task_depth == 0) {
        /*symutl.sc = SC_LOCAL;*/
        set_descriptor_sc(SC_LOCAL);
      }
      break;
    case A_MP_TASK:
    case A_MP_TASKLOOP:
      ++task_depth;
      set_descriptor_sc(SC_PRIVATE);
      break;
    case A_MP_ENDTASK:
    case A_MP_ETASKLOOP:
      --task_depth;
      if (parallel_depth == 0 && task_depth == 0) {
        set_descriptor_sc(SC_LOCAL);
      }
      break;
    default:
      break;
    }
  }
}
1448
1449 static int
search_arr(int ast)1450 search_arr(int ast)
1451 {
1452 int ast1;
1453
1454 if (A_TYPEG(ast) == A_SUBSCR)
1455 ast = A_LOPG(ast);
1456 /* assert(A_TYPEG(ast) == A_ID, "search_arr: not ID", ast, 4); */
1457 assert(DTY(A_DTYPEG(ast)) == TY_ARRAY, "search_arr: not TY_ARRAY", ast, 4);
1458 return ast;
1459 }
1460
1461 /* Convert ast from an index with oldlb and oldstride to one with
1462 * newlb and newstride. I.e.
1463 * (ast - oldlb) / oldstride * newstride + newlb
1464 */
1465 static int
normalize_subscript(int ast,int oldlb,int oldstride,int newlb,int newstride)1466 normalize_subscript(int ast, int oldlb, int oldstride, int newlb, int newstride)
1467 {
1468 if (oldstride == 0)
1469 oldstride = astb.bnd.one;
1470 if (newstride == 0)
1471 newstride = astb.bnd.one;
1472 if (oldstride == newstride) {
1473 if (oldlb != newlb) {
1474 ast = mk_binop(OP_SUB, ast, oldlb, astb.bnd.dtype);
1475 ast = mk_binop(OP_ADD, ast, newlb, astb.bnd.dtype);
1476 }
1477 } else {
1478 if (oldstride == mk_isz_cval(-1, astb.bnd.dtype)) {
1479 ast = mk_binop(OP_SUB, oldlb, ast, astb.bnd.dtype);
1480 } else {
1481 ast = mk_binop(OP_SUB, ast, oldlb, astb.bnd.dtype);
1482 ast = mk_binop(OP_DIV, ast, oldstride, astb.bnd.dtype);
1483 }
1484 ast = mk_binop(OP_MUL, ast, newstride, astb.bnd.dtype);
1485 ast = mk_binop(OP_ADD, ast, newlb, astb.bnd.dtype);
1486 }
1487 return ast;
1488 }
1489
1490 /** \brief Return TRUE if memast is an A_MEM for an array, or
1491 memast is an A_SUBSCR whose parent is an A_MEM and which
1492 has triplet subscripts */
1493 LOGICAL
vector_member(int memast)1494 vector_member(int memast)
1495 {
1496 if (A_TYPEG(memast) == A_MEM) {
1497 int sptr = A_SPTRG(A_MEMG(memast));
1498 if (DTY(DTYPEG(sptr)) == TY_ARRAY)
1499 return TRUE;
1500 return FALSE;
1501 }
1502 if (A_TYPEG(memast) == A_SUBSCR) {
1503 int asd, i, n;
1504 asd = A_ASDG(memast);
1505 n = ASD_NDIM(asd);
1506 for (i = 0; i < n; ++i) {
1507 int ss = ASD_SUBS(asd, i);
1508 if (A_SHAPEG(ss) > 0)
1509 return TRUE;
1510 if (A_TYPEG(ss) == A_TRIPLE)
1511 return TRUE;
1512 }
1513 }
1514 return FALSE;
1515 } /* vector_member */
1516
/* Rewrite one array reference (arr_ast) so that every vector dimension is
 * indexed by the matching forall index variable of forall_ast (or of the
 * explicit astli list 'inlist' for vector subscripts).  Returns the new,
 * fully-subscripted ast. */
static int
normalize_forall_array(int forall_ast, int arr_ast, int inlist)
{
  int i, j, triple;
  int list;
  int shape, vectmem;
  int ast;
  int ast1; /* NOTE(review): unused; shadowed in the A_MEM branch below */
  int asd;
  int subs[MAXSUBS];
  int numdim;
  int l; /* NOTE(review): unused */
  int lwb, stride;
  LOGICAL flag;

  /* arr_ast is an array subscript or a whole array reference.
   * Normalize the indices into arr_ast
   */
  shape = A_SHAPEG(arr_ast);
  assert(shape != 0, "normalize_forall_array: 0 shape", arr_ast, 4);
  if (A_TYPEG(arr_ast) == A_ID || A_TYPEG(arr_ast) == A_MEM) {
    asd = 0;
    numdim = SHD_NDIM(shape);
  } else if (A_TYPEG(arr_ast) == A_SUBSCR) {
    asd = A_ASDG(arr_ast);
    numdim = ASD_NDIM(asd);
    /* j walks the shape dimensions, which only count the vector
     * (triplet/shaped) subscripts, not the scalar ones */
    j = SHD_NDIM(shape);
  } else {
    interr("normalize_forall_array:bad ast type", arr_ast, 3);
  }

  if (numdim < 1 || numdim > MAXSUBS) {
    interr("normalize_forall_array:bad numdim", shape, 3);
    numdim = 0;
  }

  /* do this call now, instead of later, because arr_ast may
   * be changed in place */
  vectmem = vector_member(arr_ast);
  if (inlist != 0) {
    /* this is a vector subscript. Use the ast list that was passed in */
    list = inlist;
  } else {
    list = A_LISTG(forall_ast);
  }
  /* iterate dimensions right-to-left, consuming forall indices from
   * 'list' for each vector dimension */
  for (i = numdim - 1; i >= 0; i--) {
    flag = FALSE; /* TRUE when this dimension consumes a forall index */
    if (asd) {
      if (A_TYPEG(ASD_SUBS(asd, i)) == A_TRIPLE) {
        assert(j > 0, "normalize_forall_array: SHD/ASD mismatch", forall_ast,
               4);
        --j;
        lwb = SHD_LWB(shape, j);
        stride = SHD_STRIDE(shape, j);
        flag = TRUE;
      } else if (A_SHAPEG(ASD_SUBS(asd, i))) {
        /* vector subscript */
        lwb = normalize_forall(forall_ast, ASD_SUBS(asd, i), list);
        flag = FALSE;
        list = ASTLI_NEXT(list);
        --j;
      } else {
        /* scalar subscript */
        lwb = ASD_SUBS(asd, i);
        flag = FALSE;
      }
    } else {
      /* whole-array reference: bounds come straight from the shape */
      lwb = check_member(arr_ast, SHD_LWB(shape, i));
      stride = check_member(arr_ast, SHD_STRIDE(shape, i));
      flag = TRUE;
    }

    if (flag) {
      /* NOTE(review): ASTLI_SPTR(list) is read before the list != 0
       * assert below — confirm list can never be 0 here */
      int sptr = ASTLI_SPTR(list);
      assert(list != 0, "normalize_forall_array: non-conformable", arr_ast, 4);
      triple = ASTLI_TRIPLE(list);
      if (sptr == 0) {
        subs[i] = triple;
      } else {
        /* map the forall index variable from the forall's own range
         * onto this dimension's (lwb, stride) */
        subs[i] = normalize_subscript(mk_id(sptr), A_LBDG(triple),
                                      A_STRIDEG(triple), lwb, stride);
      }
      list = ASTLI_NEXT(list);
    } else {
      subs[i] = lwb;
    }
  }

  ast = search_arr(arr_ast);
  if (vectmem) {
    /* This is a%b(:), where a and b are both arrays. We want
     * a%b(i)
     */
    ast = mk_subscr(ast, subs, numdim, DDTG(A_DTYPEG(arr_ast)));
  } else if (A_TYPEG(ast) == A_MEM) {
    /* This is a%b(i), where a and b are both arrays. We want
     * a(j)%b(i)
     */
    int ast1;
    int subs1[MAXSUBS];
    int n1;
    ast1 =
        mk_subscr(A_PARENTG(ast), subs, numdim, DDTG(A_DTYPEG(A_PARENTG(ast))));
    ast = mk_member(ast1, A_MEMG(ast), DDTG(A_DTYPEG(A_MEMG(ast))));
    if (A_TYPEG(arr_ast) == A_SUBSCR) {
      /* re-apply the member's original (scalar) subscripts */
      asd = A_ASDG(arr_ast);
      n1 = ASD_NDIM(asd);
      for (i = 0; i < n1; ++i)
        subs1[i] = ASD_SUBS(asd, i);
      ast = mk_subscr(ast, subs1, n1, DDTG(A_DTYPEG(A_MEMG(ast))));
    } else
      ast = mk_subscr(ast, subs, numdim, DDTG(A_DTYPEG(arr_ast)));
  } else
    ast = mk_subscr(ast, subs, numdim, DDTG(A_DTYPEG(arr_ast)));
  return ast;
}
1633
1634 static int
normalize_id(int forall_ast,int asgn_ast,int inlist)1635 normalize_id(int forall_ast, int asgn_ast, int inlist)
1636 {
1637 int org_shape, newast, nd, nc;
1638 org_shape = A_SHAPEG(asgn_ast);
1639 newast = normalize_forall_array(forall_ast, asgn_ast, inlist);
1640 /* A_SECSHPP(newast, org_shape); */ /* keep original shape */
1641 /* put info into FINFO table */
1642 nd = mk_finfo();
1643 FINFO_AST(nd) = newast;
1644 FINFO_SHAPE(nd) = org_shape;
1645 FINFO_TYPE(nd) = 0;
1646 A_STARTP(forall_ast, nd);
1647 nc = A_NCOUNTG(forall_ast) + 1;
1648 A_NCOUNTP(forall_ast, nc);
1649 return newast;
1650 } /* normalize_id */
1651
int
normalize_forall(int forall_ast, int asgn_ast, int inlist)
{
  /* forall_ast represents a forall statement with one or more indices.
   * asgn_ast represents an array assignment with or without triple
   * expressions. Create a new ast, replacing the triples or whole-array
   * dimensions of the asgn_ast with indices representing the same
   * sections, expressed as functions of the forall_ast index variables */
  int ast, ast1, ast2;
  int dtype;
  int argt, nargs, i;
  int newast, org_shape; /* NOTE(review): unused */
  int nd, nc;            /* NOTE(review): unused */
  int shape;

  if (asgn_ast == 0)
    return 0;
  switch (A_TYPEG(asgn_ast)) {
  case A_ASN:
    /* recurse into both sides and rebuild the assignment */
    ast1 = normalize_forall(forall_ast, A_DESTG(asgn_ast), inlist);
    ast2 = normalize_forall(forall_ast, A_SRCG(asgn_ast), inlist);
    ast = mk_stmt(A_ASN, A_DTYPEG(ast1));
    A_DESTP(ast, ast1);
    A_SRCP(ast, ast2);
    return ast;
  case A_MP_ATOMICUPDATE:
    /* same as A_ASN, but preserve the atomic op and memory order */
    ast1 = normalize_forall(forall_ast, A_LOPG(asgn_ast), inlist);
    ast2 = normalize_forall(forall_ast, A_ROPG(asgn_ast), inlist);
    ast = mk_stmt(A_MP_ATOMICUPDATE, A_DTYPEG(ast1));
    A_LOPP(ast, ast1);
    A_ROPP(ast, ast2);
    A_OPTYPEP(ast, A_OPTYPEG(asgn_ast));
    A_MEM_ORDERP(ast, A_MEM_ORDERG(asgn_ast));
    return ast;
  case A_BINOP:
    ast1 = normalize_forall(forall_ast, A_LOPG(asgn_ast), inlist);
    ast2 = normalize_forall(forall_ast, A_ROPG(asgn_ast), inlist);
    /* the result becomes elemental: drop the array part of the dtype */
    dtype = A_DTYPEG(asgn_ast);
    if (DTY(dtype) == TY_ARRAY)
      dtype = DTY(dtype + 1);
    return mk_binop(A_OPTYPEG(asgn_ast), ast1, ast2, dtype);
  case A_UNOP:
    ast1 = normalize_forall(forall_ast, A_LOPG(asgn_ast), inlist);
    dtype = A_DTYPEG(asgn_ast);
    if (DTY(dtype) == TY_ARRAY)
      dtype = DTY(dtype + 1);
    return mk_unop(A_OPTYPEG(asgn_ast), ast1, dtype);
  case A_CONV:
    ast1 = normalize_forall(forall_ast, A_LOPG(asgn_ast), inlist);
    dtype = A_DTYPEG(asgn_ast);
    if (DTY(dtype) == TY_ARRAY)
      dtype = DTY(dtype + 1);
    /* iso_c_binding C_PTR conversions are really pointer-sized ints */
    if (is_iso_cptr(dtype) && A_OPTYPEG(A_LOPG(asgn_ast))) {
      A_DTYPEP(ast1, DT_PTR);
      dtype = DT_PTR;
    }
    return mk_convert(ast1, dtype);
  case A_CMPLXC:
  case A_CNST:
    /* constants pass through unchanged */
    return asgn_ast;
  case A_SUBSTR:
    ast = normalize_forall(forall_ast, A_LOPG(asgn_ast), inlist);
    return mk_substr(ast, A_LEFTG(asgn_ast), A_RIGHTG(asgn_ast),
                     A_DTYPEG(asgn_ast));
  case A_PAREN:
    ast = normalize_forall(forall_ast, A_LOPG(asgn_ast), inlist);
    return mk_paren(ast, A_DTYPEG(ast));

  case A_INTR:
    /* SPREAD/TRANSPOSE/CSHIFT/... get inlined or adjusted */
    return inline_spread_shifts(asgn_ast, forall_ast, inlist);
  case A_FUNC:
    shape = A_SHAPEG(asgn_ast);
    if (shape) {
      /* normalize each argument in place */
      argt = A_ARGSG(asgn_ast);
      nargs = A_ARGCNTG(asgn_ast);
      for (i = 0; i < nargs; ++i) {
        ARGT_ARG(argt, i) =
            normalize_forall(forall_ast, ARGT_ARG(argt, i), inlist);
      }
      dtype = A_DTYPEG(asgn_ast);
      /* an elemental call becomes scalar inside the forall */
      if (DTY(dtype) == TY_ARRAY && elemental_func_call(asgn_ast)) {
        A_DTYPEP(asgn_ast, DTY(dtype + 1));
        A_SHAPEP(asgn_ast, 0);
      }
    }
    return asgn_ast;
  case A_SUBSCR:
    /* does this subscript have any triplet entries */
    if (vector_member(asgn_ast)) {
      asgn_ast = normalize_id(forall_ast, asgn_ast, inlist);
    }
    if (A_TYPEG(A_LOPG(asgn_ast)) == A_MEM) {
      /* the parent might have an array index */
      int asd, i, n, subs[MAXSUBS], dtype;
      asd = A_ASDG(asgn_ast);
      ast = normalize_forall(forall_ast, A_PARENTG(A_LOPG(asgn_ast)), inlist);
      if (ast != A_PARENTG(A_LOPG(asgn_ast))) {
        /* parent changed: rebuild member + subscripts on top of it */
        dtype = A_DTYPEG(A_MEMG(A_LOPG(asgn_ast)));
        ast = mk_member(ast, A_MEMG(A_LOPG(asgn_ast)), dtype);
        if (DTY(dtype) == TY_ARRAY)
          dtype = DTY(dtype + 1);
        /* add the member subscripts */
        n = ASD_NDIM(asd);
        for (i = 0; i < n; ++i) {
          subs[i] = ASD_SUBS(asd, i);
        }
        asgn_ast = mk_subscr(ast, subs, n, dtype);
      }
    }
    return asgn_ast;
  case A_MEM:
    if (vector_member(asgn_ast)) {
      return normalize_id(forall_ast, asgn_ast, inlist);
    } else {
      /* the parent might have an array index */
      ast = normalize_forall(forall_ast, A_PARENTG(asgn_ast), inlist);
      /* member should be a scalar here */
      return mk_member(ast, A_MEMG(asgn_ast), A_DTYPEG(A_MEMG(asgn_ast)));
    }
  case A_ID:
    if (DTY(A_DTYPEG(asgn_ast)) == TY_ARRAY) {
      return normalize_id(forall_ast, asgn_ast, inlist);
    }
    return asgn_ast;
  default:
    interr("normalize_forall: bad opc", asgn_ast, 3);
    return asgn_ast;
  }
}
1781
1782 static LOGICAL
is_reshape(int ast)1783 is_reshape(int ast)
1784 {
1785 /* Is the input ast the source array section of a RESHAPE operation? */
1786
1787 if (A_TYPEG(ast) == A_SUBSCR &&
1788 A_TYPEG(A_LOPG(ast)) == A_ID &&
1789 A_SPTRG(A_LOPG(ast)) &&
1790 strncmp(SYMNAME(A_SPTRG(A_LOPG(ast))), "reshap", 6) == 0)
1791 return TRUE;
1792 return FALSE;
1793 }
1794
1795 /*
1796 * check if array assignment can be collapsed into a single memset/move
1797 */
1798 static int
collapse_assignment(int asn,int std)1799 collapse_assignment(int asn, int std)
1800 {
1801 int lhs, rhs;
1802 int rhs_allocatable;
1803 int shape;
1804 int ast;
1805 int cnst;
1806 int dtype;
1807 int dest;
1808 int src;
1809 int ndim;
1810 int i;
1811 int func;
1812 int sz;
1813 int szdtype;
1814 int one;
1815 int is_zero;
1816 int use_numelm;
1817 char *nm;
1818 FtnRtlEnum rtlRtn;
1819 int rhs_isptr, lhs_isptr;
1820
1821 if (flg.opt < 2)
1822 return 0;
1823
1824 if (XBIT(8, 0x8000000))
1825 return 0;
1826
1827 rhs_isptr = 0;
1828 lhs_isptr = 0;
1829 lhs = A_DESTG(asn);
1830 shape = A_SHAPEG(lhs);
1831 ndim = SHD_NDIM(shape);
1832 if (XBIT(34, 0x200) && ndim > 2) {
1833 /*
1834 * assume -Mconcur is better than collapsing an assignment of 3D
1835 * or greater array. For a >= 3D array:
1836 * + the backend replaces the innermost loop with an idiom, and
1837 * the idiom is now part of the next loop;
1838 * + autopar does not parallelize the loop containing the idiom;
1839 * + autopar parallelizes the outer (originally the 3rd) loop.
1840 */
1841 return 0;
1842 }
1843 /*
1844 * look at the rhs of the assignment; for now, limit it to a
1845 * constant, scalar, array, contiguous array section of a basic
1846 * numeric type.
1847 */
1848 rhs_allocatable = 0;
1849 src = 0;
1850 rhs = A_SRCG(asn);
1851 dtype = A_DTYPEG(rhs);
1852 switch (A_TYPEG(rhs)) {
1853 case A_CONV:
1854 src = 0;
1855 break;
1856 case A_ID:
1857 /* can only be rank 1 if assumed-shape */
1858 src = A_SPTRG(rhs);
1859 if (SCG(src) == SC_DUMMY && ASSUMSHPG(src) && ndim > 1 && !CONTIGATTRG(src))
1860 return 0;
1861 goto rhs_chk;
1862 case A_MEM:
1863 /* member must be array instead of some parent */
1864 src = A_SPTRG(A_MEMG(rhs));
1865 if (DTY(DTYPEG(src)) != TY_ARRAY)
1866 return 0;
1867 rhs_chk:
1868 if (POINTERG(src)) {
1869 rhs_isptr = 1;
1870 }
1871 if (ALLOCATTRG(src)) {
1872 rhs_allocatable = 1;
1873 }
1874 break;
1875 case A_SUBSCR:
1876 if (!contiguous_section(rhs))
1877 return 0;
1878 src = find_array(rhs, NULL);
1879 if (STYPEG(src) != ST_MEMBER && SCG(src) == SC_DUMMY && ASSUMSHPG(src) &&
1880 ndim > 1)
1881 return 0;
1882 if (POINTERG(src)) {
1883 rhs_isptr = 1;
1884 }
1885 rhs = first_element(rhs);
1886 break;
1887 default:
1888 return 0;
1889 }
1890
1891 if (!src) {
1892 /* WANT scalar rhs */
1893 rhs = A_LOPG(rhs);
1894 /* check for scalar to a array conversion */
1895 if (DTY(A_DTYPEG(rhs)) == TY_ARRAY)
1896 return 0;
1897 }
1898 dtype = DDTG(dtype);
1899 if (!DT_ISNUMERIC(dtype) && !DT_ISLOG(dtype))
1900 return 0;
1901 cnst = 0;
1902 if (A_TYPEG(rhs) == A_CNST)
1903 /* scalar constant */
1904 cnst = A_SPTRG(A_ALIASG(rhs));
1905
1906 /* look at the lhs of the assignment */
1907 use_numelm = 1;
1908 if (A_TYPEG(lhs) == A_ID) {
1909 /* can only be rank 1 if assumed-shape */
1910 dest = A_SPTRG(lhs);
1911 if (SCG(dest) == SC_DUMMY && ASSUMSHPG(dest)) {
1912 use_numelm = 0;
1913 /* the entire (type is A_ID) lhs array is referenced:
1914 take advantage of the convention that the passed in
1915 array is always contiguous and allow the collapse
1916 to proceed, (only if the rhs is a reshape array
1917 section for now) */
1918 if (!TARGETG(dest) && is_reshape(rhs)) {
1919 /* proceed with other checks */
1920 }
1921 else {
1922 if (ndim > 1 && !CONTIGATTRG(dest))
1923 return 0;
1924 }
1925 }
1926 } else if (A_TYPEG(lhs) == A_MEM) {
1927 dest = A_SPTRG(A_MEMG(lhs));
1928 /* member must be array instead of some parent */
1929 if (DTY(DTYPEG(dest)) != TY_ARRAY)
1930 return 0;
1931 } else {
1932 use_numelm = 0; /* section??? */
1933 return 0;
1934 }
1935 if (POINTERG(dest)) {
1936 use_numelm = 0;
1937 lhs_isptr = 1;
1938 }
1939 if ((ADD_NUMELM(DTYPEG(dest))) == 0) {
1940 use_numelm = 0;
1941 }
1942 if (ndim <= 1 && !DT_ISCMPLX(dtype) && !ASSUMSHPG(dest))
1943 return 0;
1944 if (ALLOCATTRG(dest)) {
1945 if (src && rhs_allocatable && XBIT(54, 0x1))
1946 /* allocatable <- allocatable & f2003 semantics */
1947 return 0;
1948 use_numelm = 0;
1949 } else if (ALLOCG(dest))
1950 use_numelm = 0;
1951
1952 /***********************************************************
1953 * scn (03 Oct 2014): -0.0 is not considered to be 0.0 here
1954 ***********************************************************/
1955 is_zero = 0;
1956 if (cnst) {
1957 switch (dtype) {
1958 case DT_CMPLX8:
1959 if (CONVAL1G(cnst) == 0 && CONVAL2G(cnst) == 0)
1960 is_zero = 1;
1961 break;
1962 case DT_CMPLX16:
1963 if (CONVAL1G(cnst) == stb.dbl0 && CONVAL2G(cnst) == stb.dbl0)
1964 is_zero = 1;
1965 break;
1966 case DT_BINT:
1967 case DT_SINT:
1968 case DT_INT4:
1969 case DT_BLOG:
1970 case DT_SLOG:
1971 case DT_LOG4:
1972 if (CONVAL2G(cnst) == 0)
1973 is_zero = 1;
1974 break;
1975 case DT_LOG8:
1976 if (CONVAL1G(cnst) == 0 && CONVAL2G(cnst) == 0)
1977 is_zero = 1;
1978 break;
1979 default:
1980 if (cnst == stb.i0 || cnst == stb.k0 || cnst == stb.flt0 ||
1981 cnst == stb.dbl0)
1982 is_zero = 1;
1983 break;
1984 }
1985 }
1986
1987 szdtype = DT_INT8;
1988 sz = one = astb.k1;
1989
1990 if (lhs_isptr || rhs_isptr) {
1991 if (lhs_isptr && rhs_isptr) { /* could have an overlap */
1992 /*** do work in progress ***/
1993 return 0;
1994 }
1995 if (lhs_isptr && !CONTIGATTRG(dest))
1996 return 0;
1997 if (rhs_isptr && !CONTIGATTRG(src))
1998 return 0;
1999
2000 /* For now, we disable this optimization if XBIT(4, 0x800000) is set or
2001 we have an expression such as WXI(N)%CR */
2002 if (XBIT(4, 0x800000) ||
2003 (A_TYPEG(lhs) == A_MEM && A_TYPEG(A_PARENTG(lhs)) == A_SUBSCR))
2004 return 0;
2005 }
2006
2007 if (use_numelm) {
2008 #if DEBUG
2009 if (ADD_NUMELM(DTYPEG(dest)) == 0)
2010 error(0, 2, gbl.lineno, "ADD_NUMELM(DTYPEG(dest) is 0 ", CNULL);
2011 #endif
2012 sz = convert_int(ADD_NUMELM(DTYPEG(dest)), szdtype);
2013 } else {
2014 /* compute size from shape descriptor */
2015 for (i = ndim - 1; i >= 0; i--) {
2016 int lwb, upb, aa;
2017 lwb = check_member(lhs, SHD_LWB(shape, i));
2018 lwb = convert_int(lwb, szdtype);
2019 upb = check_member(lhs, SHD_UPB(shape, i));
2020 upb = convert_int(upb, szdtype);
2021 aa = mk_binop(OP_SUB, upb, lwb, szdtype);
2022 aa = mk_binop(OP_ADD, aa, one, szdtype);
2023 sz = mk_binop(OP_MUL, sz, aa, szdtype);
2024 }
2025 }
2026 if (is_zero) {
2027 if (DT_ISCMPLX(dtype)) {
2028 switch (size_of(dtype)) {
2029 case 8:
2030 rtlRtn = RTE_mzeroz8;
2031 break;
2032 case 16:
2033 rtlRtn = RTE_mzeroz16;
2034 break;
2035 }
2036 } else {
2037 switch (size_of(dtype)) {
2038 case 1:
2039 rtlRtn = RTE_mzero1;
2040 break;
2041 case 2:
2042 rtlRtn = RTE_mzero2;
2043 break;
2044 case 4:
2045 rtlRtn = RTE_mzero4;
2046 break;
2047 case 8:
2048 rtlRtn = RTE_mzero8;
2049 break;
2050 }
2051 }
2052 nm = mkRteRtnNm(rtlRtn);
2053 func = sym_mkfunc_nodesc(nm, DT_INT);
2054 ast = begin_call(A_CALL, func, 2);
2055 add_arg(lhs);
2056 /*add_arg(sz);*/
2057 add_arg(mk_unop(OP_VAL, sz, szdtype));
2058 ccff_info(MSGOPT, "OPT008", gbl.findex, gbl.lineno,
2059 "Memory zero idiom, array assignment replaced by call to %mzero",
2060 "mzero=%s", nm, NULL);
2061 } else if (src) {
2062 if (DT_ISCMPLX(dtype)) {
2063 switch (size_of(dtype)) {
2064 case 8:
2065 rtlRtn = RTE_mcopyz8;
2066 break;
2067 case 16:
2068 rtlRtn = RTE_mcopyz16;
2069 break;
2070 }
2071 } else {
2072 switch (size_of(dtype)) {
2073 case 1:
2074 rtlRtn = RTE_mcopy1;
2075 break;
2076 case 2:
2077 rtlRtn = RTE_mcopy2;
2078 break;
2079 case 4:
2080 rtlRtn = RTE_mcopy4;
2081 break;
2082 case 8:
2083 rtlRtn = RTE_mcopy8;
2084 break;
2085 }
2086 }
2087 nm = mkRteRtnNm(rtlRtn);
2088 func = sym_mkfunc_nodesc(nm, DT_INT);
2089 ast = begin_call(A_CALL, func, 3);
2090 add_arg(lhs);
2091 add_arg(rhs);
2092 /*add_arg(sz);*/
2093 add_arg(mk_unop(OP_VAL, sz, szdtype));
2094 ccff_info(MSGOPT, "OPT006", gbl.findex, gbl.lineno,
2095 "Memory copy idiom, array assignment replaced by call to %mcopy",
2096 "mcopy=%s", nm, NULL);
2097 } else {
2098 if (DT_ISCMPLX(dtype)) {
2099 switch (size_of(dtype)) {
2100 case 8:
2101 rtlRtn = RTE_msetz8;
2102 break;
2103 case 16:
2104 rtlRtn = RTE_msetz16;
2105 break;
2106 }
2107 } else {
2108 switch (size_of(dtype)) {
2109 case 1:
2110 rtlRtn = RTE_mset1;
2111 break;
2112 case 2:
2113 rtlRtn = RTE_mset2;
2114 break;
2115 case 4:
2116 rtlRtn = RTE_mset4;
2117 break;
2118 case 8:
2119 rtlRtn = RTE_mset8;
2120 break;
2121 }
2122 }
2123 nm = mkRteRtnNm(rtlRtn);
2124 func = sym_mkfunc_nodesc(nm, DT_INT);
2125 ast = begin_call(A_CALL, func, 3);
2126 add_arg(lhs);
2127 add_arg(rhs);
2128 /*add_arg(sz);*/
2129 add_arg(mk_unop(OP_VAL, sz, szdtype));
2130 ccff_info(MSGOPT, "OPT007", gbl.findex, gbl.lineno,
2131 "Memory set idiom, array assignment replaced by call to %mset",
2132 "mset=%s", nm, NULL);
2133 }
2134 /*dbg_print_ast(ast, STDERR);*/
2135 return ast;
2136 }
2137
2138 static int
inline_spread_shifts(int asgn_ast,int forall_ast,int inlist)2139 inline_spread_shifts(int asgn_ast, int forall_ast, int inlist)
2140 {
2141 int argt, nargs;
2142 int list, listp, astli;
2143 int newlist;
2144 int count, nidx;
2145 int subs[MAXSUBS];
2146 int ndim;
2147 int dim, cdim, shd;
2148 int srcarray, maskarray;
2149 int newforall;
2150 int i, j;
2151 int asd;
2152 int retval, newast;
2153 int shift, cshift;
2154 int nd;
2155 int func_ast;
2156 int dtype;
2157 int boundary;
2158
2159 assert(A_TYPEG(asgn_ast) == A_INTR, "inline_spread_shifts: wrong ast type",
2160 asgn_ast, 3);
2161 if (INKINDG(A_SPTRG(A_LOPG(asgn_ast))) == IK_INQUIRY)
2162 return asgn_ast;
2163 argt = A_ARGSG(asgn_ast);
2164 nargs = A_ARGCNTG(asgn_ast);
2165 switch (A_OPTYPEG(asgn_ast)) {
2166 case I_SPREAD: /* spread(source, dim, ncopies) */
2167 srcarray = ARGT_ARG(argt, 0);
2168 dim = ARGT_ARG(argt, 1);
2169 if (!A_SHAPEG(srcarray))
2170 dim = astb.i1;
2171 if (A_TYPEG(dim) != A_CNST)
2172 goto ret_norm;
2173 cdim = get_int_cval(A_SPTRG(dim));
2174 newforall = copy_forall(forall_ast);
2175 list = A_LISTG(newforall);
2176 nidx = 1;
2177 for (listp = list; listp != 0; listp = ASTLI_NEXT(listp))
2178 nidx++;
2179 count = 1;
2180 astli = 0;
2181 for (listp = list; listp != 0; listp = ASTLI_NEXT(listp)) {
2182 if (count == nidx - cdim)
2183 astli = listp;
2184 count++;
2185 }
2186 assert(astli, "normalize_forall: something is wrong", astli, 3);
2187 list = delete_astli(list, astli);
2188 A_LISTP(newforall, list);
2189 newast = normalize_forall(newforall, srcarray, inlist);
2190 return newast;
2191
2192 case I_TRANSPOSE: /* transpose(matrix) */
2193 srcarray = ARGT_ARG(argt, 0);
2194 /* transpose the forall index */
2195 newforall = copy_forall(forall_ast);
2196 list = A_LISTG(newforall);
2197 count = 0;
2198 for (listp = list; listp != 0; listp = ASTLI_NEXT(listp)) {
2199 subs[count] = listp;
2200 count++;
2201 assert(count <= MAXSUBS, "inline_spread_shifts: wrong forall", newforall,
2202 4);
2203 }
2204
2205 /* only transpose the first two indices;
2206 * if there are more than two, we assume (hopefully) that
2207 * the others come from the indices added to handle
2208 * componentized array members of derived types */
2209 start_astli();
2210 if (count < 2) {
2211 listp = subs[0];
2212 newlist = add_astli();
2213 ASTLI_SPTR(newlist) = ASTLI_SPTR(listp);
2214 ASTLI_TRIPLE(newlist) = ASTLI_TRIPLE(listp);
2215 } else {
2216 /* switch 1 and 0 */
2217 for (i = 1; i >= 0; --i) {
2218 listp = subs[i];
2219 newlist = add_astli();
2220 ASTLI_SPTR(newlist) = ASTLI_SPTR(listp);
2221 ASTLI_TRIPLE(newlist) = ASTLI_TRIPLE(listp);
2222 }
2223 /* append 2 until the end */
2224 for (i = 2; i < count; ++i) {
2225 listp = subs[i];
2226 newlist = add_astli();
2227 ASTLI_SPTR(newlist) = ASTLI_SPTR(listp);
2228 ASTLI_TRIPLE(newlist) = ASTLI_TRIPLE(listp);
2229 }
2230 }
2231 list = ASTLI_HEAD;
2232 A_LISTP(newforall, list);
2233 newast = normalize_forall(newforall, srcarray, inlist);
2234 return newast;
2235
2236 case I_CSHIFT: /* cshift(array, shift, [dim]) */
2237 case I_EOSHIFT: /* eoshift(array, shift, [boundary, dim]); */
2238 if (A_OPTYPEG(asgn_ast) == I_CSHIFT)
2239 dim = ARGT_ARG(argt, 2);
2240 else
2241 dim = ARGT_ARG(argt, 3);
2242
2243 srcarray = ARGT_ARG(argt, 0);
2244 shift = ARGT_ARG(argt, 1);
2245
2246 if (A_OPTYPEG(asgn_ast) == I_EOSHIFT) {
2247 boundary = ARGT_ARG(argt, 2);
2248 if (!boundary)
2249 ARGT_ARG(argt, 2) = astb.ptr0;
2250 }
2251
2252 if (dim == 0)
2253 dim = mk_cval(1, DT_INT);
2254 assert(A_TYPEG(shift) == A_CNST,
2255 "inline_spread_shifts: shift must be constant", 3, shift);
2256 assert(A_TYPEG(dim) == A_CNST, "inline_spread_shifts: dim must be constant",
2257 3, dim);
2258 cdim = get_int_cval(A_SPTRG(dim));
2259 cshift = get_int_cval(A_SPTRG(shift));
2260 if (cshift <= 0)
2261 shift = mk_cval(-1 * cshift, DT_INT);
2262 retval = normalize_forall(forall_ast, srcarray, inlist);
2263 asd = A_ASDG(retval);
2264 ndim = ASD_NDIM(asd);
2265 list = A_LISTG(forall_ast);
2266 count = 0;
2267 for (i = 0; i < ndim; i++) {
2268 subs[i] = ASD_SUBS(asd, i);
2269 nidx = 0;
2270 astli = 0;
2271 search_forall_idx(ASD_SUBS(asd, i), list, &astli, &nidx);
2272 if (astli)
2273 count++;
2274 if (count == cdim) {
2275 if (cshift > 0)
2276 subs[i] = mk_binop(OP_ADD, ASD_SUBS(asd, i), shift, astb.bnd.dtype);
2277 else
2278 subs[i] = mk_binop(OP_SUB, ASD_SUBS(asd, i), shift, astb.bnd.dtype);
2279 count = 99;
2280 }
2281 }
2282 dtype = A_DTYPEG(retval);
2283 retval = mk_subscr(A_LOPG(retval), subs, ndim, dtype);
2284 ARGT_ARG(argt, 0) = retval;
2285 func_ast = asgn_ast;
2286 retval = mk_func_node(A_TYPEG(func_ast), A_LOPG(func_ast),
2287 A_ARGCNTG(func_ast), argt);
2288 A_DTYPEP(retval, dtype);
2289 A_SHAPEP(retval, 0);
2290 A_OPTYPEP(retval, A_OPTYPEG(func_ast));
2291 return retval;
2292 case I_SUM: /* sum(a+b,dim=1) */
2293 case I_PRODUCT:
2294 case I_MAXVAL:
2295 case I_MINVAL:
2296 case I_ALL:
2297 case I_ANY:
2298 case I_COUNT:
2299 srcarray = ARGT_ARG(argt, 0);
2300 maskarray = ARGT_ARG(argt, 2);
2301 dim = ARGT_ARG(argt, 1);
2302 cdim = 0;
2303 if (dim) {
2304 cdim = get_int_cval(A_SPTRG(dim));
2305 }
2306 assert(cdim, "inline_spread_shifts: reduction intrinsic without dimension",
2307 3, dim);
2308 shd = A_SHAPEG(srcarray);
2309 assert(shd, "inline_spread_shifts: reduction intrinsic without shape", 3,
2310 shd);
2311 list = A_LISTG(forall_ast);
2312 nidx = 1;
2313 for (listp = list; listp != 0; listp = ASTLI_NEXT(listp))
2314 ++nidx;
2315 start_astli();
2316 listp = list;
2317 while (nidx) {
2318 if (nidx == cdim) {
2319 astli = add_astli();
2320 ASTLI_SPTR(astli) = 0;
2321 ASTLI_TRIPLE(astli) =
2322 mk_triple(SHD_LWB(shd, cdim - 1), SHD_UPB(shd, cdim - 1),
2323 SHD_STRIDE(shd, cdim - 1));
2324 } else {
2325 astli = add_astli();
2326 ASTLI_SPTR(astli) = ASTLI_SPTR(listp);
2327 ASTLI_TRIPLE(astli) = ASTLI_TRIPLE(listp);
2328 listp = ASTLI_NEXT(listp);
2329 }
2330 --nidx;
2331 }
2332 newforall = mk_stmt(A_FORALL, 0);
2333 A_LISTP(newforall, ASTLI_HEAD);
2334 srcarray = normalize_forall(newforall, srcarray, inlist);
2335 ARGT_ARG(argt, 0) = srcarray;
2336 if (maskarray) {
2337 maskarray = normalize_forall(newforall, maskarray, inlist);
2338 ARGT_ARG(argt, 2) = maskarray;
2339 }
2340 ARGT_ARG(argt, 1) = 0;
2341 return asgn_ast;
2342 default:
2343 dtype = A_DTYPEG(asgn_ast);
2344 A_DTYPEP(asgn_ast, DDTG(dtype));
2345 A_SHAPEP(asgn_ast, 0);
2346 goto ret_norm;
2347 }
2348 ret_norm:
2349 for (i = 0; i < nargs; ++i) {
2350 ARGT_ARG(argt, i) = normalize_forall(forall_ast, ARGT_ARG(argt, i), inlist);
2351 }
2352 return asgn_ast;
2353 }
2354
2355 static int
copy_forall(int forall)2356 copy_forall(int forall)
2357 {
2358 int newforall;
2359
2360 assert(A_TYPEG(forall) == A_FORALL, "copy_forall:must be FORALL", forall, 3);
2361 newforall = mk_stmt(A_FORALL, 0);
2362 A_IFSTMTP(newforall, A_IFSTMTG(forall));
2363 A_IFEXPRP(newforall, A_IFEXPRG(forall));
2364 A_LISTP(newforall, A_LISTG(forall));
2365 return newforall;
2366 }
2367
2368 int
make_forall(int shape,int astmem,int mask_ast,int lc)2369 make_forall(int shape, int astmem, int mask_ast, int lc)
2370 {
2371 int i, j, l;
2372 int numdim;
2373 int sym;
2374 int list;
2375 int triple, triple1;
2376 int ast, ast1;
2377 int asd, lwb, upb, stride;
2378 int dtype;
2379 int nd;
2380 int dscast;
2381 /* Using the array section in shape, create a forall statement that
2382 * will index it, with the mask_ast as the mask
2383 */
2384
2385 numdim = SHD_NDIM(shape);
2386 if (numdim < 1 || numdim > MAXSUBS) {
2387 interr("make_forall:bad numdim", shape, 3);
2388 numdim = 0;
2389 }
2390 start_astli();
2391 #ifdef DSCASTG
2392 switch (A_TYPEG(astmem)) {
2393 case A_ID:
2394 case A_LABEL:
2395 case A_ENTRY:
2396 case A_SUBSCR:
2397 case A_SUBSTR:
2398 case A_MEM:
2399 dscast = sym_of_ast(astmem);
2400 dscast = (STYPEG(dscast) == ST_VAR || STYPEG(dscast) == ST_ARRAY)
2401 ? DSCASTG(dscast)
2402 : 0;
2403 break;
2404 default:
2405 dscast = 0;
2406 }
2407 #endif
2408
2409 for (i = numdim - 1; i >= 0; i--) {
2410 /* make each forall index */
2411 #ifdef DSCASTG
2412 lwb = check_member((dscast) ? dscast : astmem, SHD_LWB(shape, i));
2413 upb = check_member((dscast) ? dscast : astmem, SHD_UPB(shape, i));
2414 stride = check_member((dscast) ? dscast : astmem, SHD_STRIDE(shape, i));
2415 #else
2416 lwb = check_member(astmem, SHD_LWB(shape, i));
2417 upb = check_member(astmem, SHD_UPB(shape, i));
2418 stride = check_member(astmem, SHD_STRIDE(shape, i));
2419 #endif
2420 if (A_DTYPEG(lwb) == DT_INT8 || A_DTYPEG(upb) == DT_INT8 ||
2421 A_DTYPEG(stride) == DT_INT8)
2422 dtype = DT_INT8;
2423 else
2424 dtype = astb.bnd.dtype;
2425 /* add the triple */
2426 /* sym = trans_getidx();*/
2427 sym = get_init_idx((numdim - 1) - i + lc, dtype);
2428 if (flg.smp && SCG(sym) == SC_PRIVATE) {
2429 /* TASKP(sym, 1) if descriptor is TASKP
2430 * We need this because in host
2431 * routine where we allocate and copy firstprivate for task
2432 * which is done in the host and we need a flag to indicate
2433 * that this is TASKP variable even though it is SC_PRIVATE.
2434 * iliutil then we ignore the fact that it is private when
2435 * it is in host routine.
2436 */
2437 }
2438 list = add_astli();
2439 triple = mk_triple(lwb, upb, stride);
2440 ASTLI_SPTR(list) = sym;
2441 ASTLI_TRIPLE(list) = triple;
2442 }
2443 ast = mk_stmt(A_FORALL, 0);
2444 A_LISTP(ast, ASTLI_HEAD);
2445 /* now make the mask expression, if any */
2446 if (mask_ast) {
2447 ast1 = normalize_forall(ast, mask_ast, 0);
2448 A_IFEXPRP(ast, ast1);
2449 } else
2450 A_IFEXPRP(ast, 0);
2451 trans_clridx();
2452 return ast;
2453 }
2454
2455 void
init_tbl(void)2456 init_tbl(void)
2457 {
2458 tbl.size = 200;
2459 NEW(tbl.base, TABLE, tbl.size);
2460 tbl.avl = 0;
2461 }
2462
void
free_tbl(void)
{
  /* Release the table storage allocated by init_tbl()/get_tbl().
   * NOTE(review): tbl.base is not reset here; presumably callers
   * re-run init_tbl() before the table is used again — confirm. */
  FREE(tbl.base);
}
2468
2469 int
get_tbl(void)2470 get_tbl(void)
2471 {
2472 int nd;
2473
2474 nd = tbl.avl++;
2475 NEED(tbl.avl, tbl.base, TABLE, tbl.size, tbl.size + 100);
2476 if (nd > SPTR_MAX || tbl.base == NULL)
2477 errfatal(7);
2478 return nd;
2479 }
2480
#if DEBUG
/* Deliberately invalid objects used by trans_process_align() to provoke
 * crashes (null store, divide-by-zero, infinite loop) under the XBIT(4,...)
 * debug switches, for exercising the compiler's fault handling. */
int *badpointer1 = (int *)0;
long *badpointer2 = (long *)1;
long badnumerator = 99;
long baddenominator = 0;
#endif
2487
/* Strip distribution/alignment information from all symbols.  In DEBUG
 * builds this is also a convenient hook where -x 4 bit switches can force
 * a deliberate crash to test the compiler's fault handling. */
void
trans_process_align(void)
{
  clear_dist_align();
#if DEBUG
  /* convenient place for a segfault */
  if (XBIT(4, 0x2000)) {
    if (!XBIT(4, 0x1000) || gbl.func_count > 2) {
      /* store to null pointer */
      *badpointer1 = 99;
    }
  }
  if (XBIT(4, 0x4000)) {
    if (!XBIT(4, 0x1000) || gbl.func_count > 2) {
      /* divide by zero */
      badnumerator = badnumerator / baddenominator;
    }
  }
  if (XBIT(4, 0x8000)) {
    if (!XBIT(4, 0x1000) || gbl.func_count > 2) {
      /* infinite loop */
      while (badnumerator) {
        badnumerator = (badnumerator < 1) | 3;
      }
    }
  }
#endif
}
2517
2518 static void
trans_get_descrs(void)2519 trans_get_descrs(void)
2520 {
2521 int sptr, stype;
2522
2523 for (sptr = stb.firstusym; sptr < stb.stg_avail; sptr++) {
2524 stype = STYPEG(sptr);
2525 /* if (stype == ST_ARRAY && SCG(sptr) == SC_NONE)
2526 NODESCP(sptr, 1);
2527 */
2528 /* unused DYNAMIC should be SC_LOCAL */
2529
2530 if (is_array_type(sptr) && !NODESCG(sptr) && !IGNOREG(sptr)) {
2531 if (!is_bad_dtype(DTYPEG(sptr)))
2532 trans_mkdescr(sptr);
2533 }
2534 }
2535 }
2536
2537 /* ------------- Utilities ------------ */
2538
/* need to try to reuse indices */
/* Pool of compiler-created integer index scalars, allocated in TRANS_AREA.
 * trans_getidx() hands one out, trans_clridx() marks them all reusable,
 * trans_freeidx() discards the whole pool. */
static struct idxlist {
  int idx;              /* sptr of the index scalar */
  int free;             /* nonzero when available for reuse */
  struct idxlist *next; /* next pool entry */
} * idxlist;
2545
2546 static int
trans_getidx(void)2547 trans_getidx(void)
2548 {
2549 struct idxlist *p;
2550
2551 for (p = idxlist; p != 0; p = p->next)
2552 if (p->free) {
2553 p->free = 0;
2554 return p->idx;
2555 }
2556 p = (struct idxlist *)getitem(TRANS_AREA, sizeof(struct idxlist));
2557 p->idx = sym_get_scalar("i", 0, DT_INT);
2558 p->free = 0;
2559 p->next = idxlist;
2560 idxlist = p;
2561 return p->idx;
2562 }
2563
2564 static void
trans_clridx(void)2565 trans_clridx(void)
2566 {
2567 struct idxlist *p;
2568
2569 for (p = idxlist; p != 0; p = p->next)
2570 p->free = 1;
2571 }
2572
static void
trans_freeidx(void)
{
  /* Discard the whole index pool; the list nodes live in TRANS_AREA,
   * so freeing the area releases them all at once. */
  idxlist = 0;
  freearea(TRANS_AREA);
}
2579
2580 LOGICAL
is_bad_dtype(int dtype)2581 is_bad_dtype(int dtype)
2582 {
2583 if ((DTYG(dtype) != TY_NCHAR) && (DTYG(dtype) != TY_STRUCT) &&
2584 (DTYG(dtype) != TY_UNION))
2585 return FALSE;
2586 return TRUE;
2587 }
2588
2589 LOGICAL
is_array_type(int sptr)2590 is_array_type(int sptr)
2591 {
2592 int stype;
2593 LOGICAL result;
2594
2595 result = FALSE;
2596 stype = STYPEG(sptr);
2597 if ((stype == ST_ARRAY || stype == ST_MEMBER) &&
2598 DTY(DTYPEG(sptr)) == TY_ARRAY && !DESCARRAYG(sptr))
2599 result = TRUE;
2600 return result;
2601 }
2602
2603 static int
find_allocate(int findstd,int findast)2604 find_allocate(int findstd, int findast)
2605 {
2606 int std, ast;
2607 for (std = STD_PREV(findstd); std; std = STD_PREV(std)) {
2608 ast = STD_AST(std);
2609 if (A_TYPEG(ast) == A_ALLOC && A_TKNG(ast) == TK_ALLOCATE) {
2610 if (contains_ast(ast, findast)) {
2611 return std;
2612 }
2613 } else if (A_TYPEG(ast) != A_ASN) {
2614 break;
2615 }
2616 }
2617 return 0;
2618 } /* find_allocate */
2619
2620 static int
find_deallocate(int findstd,int findast)2621 find_deallocate(int findstd, int findast)
2622 {
2623 int std, ast;
2624 for (std = STD_NEXT(findstd); std; std = STD_NEXT(std)) {
2625 ast = STD_AST(std);
2626 if (A_TYPEG(ast) == A_ALLOC && A_TKNG(ast) == TK_DEALLOCATE) {
2627 if (contains_ast(ast, findast)) {
2628 return std;
2629 }
2630 }
2631 }
2632 return 0;
2633 } /* find_deallocate */
2634
/* the function of this routine is to use lhs for user-defined
 * array returning function,
 * allocate (tmp)
 * call user_func(tmp, ..)
 * lhs = tmp + ..
 * deallocate(tmp)
 * transformed if lhs can be useable
 * call user_func(lhs, ...)
 * lhs = lhs + ...
 *
 * lhs is useable
 * 1-lhs is not common
 * 2-lhs is not appear multiply times
 * 3-result is not arg of another function on rhs
 * (currently, this is checked with contain_calls(rhs)
 * which is very conservative)
 */
static LOGICAL
use_lhs_for_user_func(int std)
{

  int std1;
  int ast;
  int sptr, lhs_sptr;
  int entry, fval;
  int nargs, argt;
  int ele, a, asd, ndim, i;
  int asn, lhs, src;
  int asn_std, alloc_std, dealloc_std;

  /* only transform plain calls to functions with a result temp */
  ast = STD_AST(std);
  if (A_TYPEG(ast) != A_CALL)
    return FALSE;
  entry = A_SPTRG(A_LOPG(ast));
  if (!FVALG(entry))
    return FALSE;
  if (PUREG(entry))
    return FALSE;
  if (RECURG(entry))
    return FALSE;
  /* if we are calling an internal function, the internal
   * function might modify the LHS variable directly */
  if (gbl.internal == 1 && INTERNALG(entry))
    return FALSE;
  fval = FVALG(entry);
  if (POINTERG(fval))
    return FALSE;

  /* the result temp is passed as argument 0 of the call */
  nargs = A_ARGCNTG(ast);
  argt = A_ARGSG(ast);
  ele = ARGT_ARG(argt, 0);
  assert(A_TYPEG(ele) == A_ID, "use_lhs_for_user_func: fval not ID", ele, 4);
  sptr = A_SPTRG(ele);

  /* find where ele is used */
  asn_std = 0;
  for (std1 = STD_NEXT(std); std1; std1 = STD_NEXT(std1)) {
    if (asn_std)
      break;
    ast = STD_AST(std1);
    if (!contains_ast(ast, ele))
      continue;
    /* the temp must be consumed by a plain assignment */
    if (A_TYPEG(ast) != A_ASN)
      return FALSE;
    asn_std = std1;
  }
  if (!asn_std)
    return FALSE;
  assert(asn_std, "use_lhs_for_user_func: can not find asn", ele, 4);

  alloc_std = dealloc_std = 0;

  /* if the temp (but not the function result) is allocatable/pointer,
   * locate the compiler-generated allocate/deallocate bracketing it */
  if ((!POINTERG(fval) && !ALLOCG(fval)) && (POINTERG(sptr) || ALLOCG(sptr)) &&
      DTY(DTYPEG(sptr)) == TY_ARRAY) {
    /* find where ele is allocated */
    alloc_std = find_allocate(std, ele);
    if (!alloc_std)
      return FALSE;
    assert(alloc_std, "use_lhs_for_user_func: can not find allocate", ele, 4);

    /* find where ele is deallocated */
    dealloc_std = find_deallocate(std, ele);
    assert(dealloc_std, "use_lhs_for_user_func: can not find deallocate", ele,
           4);
  }

  /* decide about whether lhs can be used as function result */
  asn = STD_AST(asn_std);
  lhs = A_DESTG(asn);
  lhs_sptr = sym_of_ast(lhs);
  /* RHS or function might modify array through pointer association */
  if (POINTERG(lhs_sptr))
    return FALSE;
  /* RHS or function might modify array through pointer association */
  if (TARGETG(lhs_sptr))
    return FALSE;
  /* if we are calling an internal function from another internal
   * function and the LHS is from the host subprogram, no */
  if (gbl.internal > 1 && INTERNALG(entry) && !INTERNALG(lhs_sptr))
    return FALSE;
  src = A_SRCG(asn);

  /* need to have same type */
  if (DDTG(DTYPEG(sptr)) != DDTG(DTYPEG(lhs_sptr)))
    return FALSE;

  /* don't allow if lhs appears at rhs */
  if (contains_ast(src, mk_id(lhs_sptr)))
    return FALSE;

  /* don't allow if call has lhs */
  ast = STD_AST(std);
  if (contains_ast(ast, mk_id(lhs_sptr)))
    return FALSE;

  /* don't allow if lhs common */
  if (SCG(lhs_sptr) == SC_CMBLK)
    return FALSE;

  /* don't allow if rhs has call */
  if (contains_call(src))
    return FALSE;

  /* don't allow if the lhs was allocated after the call */
  for (std1 = STD_NEXT(std); std1; std1 = STD_NEXT(std1)) {
    if (std1 == asn_std)
      break;
    ast = STD_AST(std1);
    if (contains_ast(ast, lhs)) {
      return FALSE;
    }
  }

  /* don't allow if any subscript is nontriplet with shape */
  for (a = lhs; a;) {
    switch (A_TYPEG(a)) {
    case A_ID:
      a = 0;
      break;
    case A_MEM:
      a = A_PARENTG(a);
      break;
    case A_SUBSTR:
    default:
      return FALSE;

    case A_SUBSCR:
      asd = A_ASDG(a);
      ndim = ASD_NDIM(asd);
      for (i = 0; i < ndim; ++i) {
        int ss = ASD_SUBS(asd, i);
        if (A_SHAPEG(ss) != 0 && A_TYPEG(ss) != A_TRIPLE) {
          /* vector subscript, ugly */
          return FALSE;
        }
      }
      a = A_LOPG(a);
      break;
    }
  }

  /* all checks passed: substitute lhs for the result temp everywhere */
  ast_visit(1, 1);
  ast_replace(ele, lhs);
  if (A_SRCG(asn) == ele) {
    /* don't change tmp(:) = F(b(:)) ; a(:) = tmp(:)
     * into a(:) = F(b(:)) ; a(:) = a(:) */
    delete_stmt(asn_std);
  } else {
    /* change the asn */
    asn = ast_rewrite(asn);
    STD_AST(asn_std) = asn;
  }

  /* change the call */
  ast = STD_AST(std);
  ast = ast_rewrite(ast);
  STD_AST(std) = ast;

  ast_unvisit();

  /* delete allocate and deallocate */
  if (alloc_std)
    delete_stmt(alloc_std);
  if (dealloc_std)
    delete_stmt(dealloc_std);
  return TRUE;
}
2822
/* if the array bounds, or distribute arguments of this template
 * contain any variables, return TRUE */
static LOGICAL
variable_template(int tmpl)
{
  int dtype, dist, i, b;
  dtype = DTYPEG(tmpl);
  if (DTY(dtype) == TY_ARRAY) {
    for (i = 0; i < ADD_NUMDIM(dtype); ++i) {
      /* lower bound: only a *present* non-constant bound is variable */
      b = ADD_LWAST(dtype, i);
      if (b && A_ALIASG(b) == 0)
        return TRUE;
      /* upper bound: a *missing* bound also counts as variable.
       * NOTE(review): the lwb/upb conditions are deliberately asymmetric
       * (presumably a missing lower bound defaults to a constant 1) —
       * confirm before "fixing". */
      b = ADD_UPAST(dtype, i);
      if (!b || A_ALIASG(b) == 0)
        return TRUE;
    }
  }
  return FALSE;
} /* variable_template */
2842
/* replace dummy arguments in an alignment descriptor with actual arguments */
/* Traversal state shared between find_arguments() (which sets it up) and
 * the find_args() callback; valid only for the duration of one traversal. */
static int find_entry, find_nargs, find_argt, find_dpdsc, find_std;
2845
/* ast_traverse callback: if ast is an A_ID naming one of find_entry's
 * dummy arguments, register a replacement (via ast_replace) with either
 * the actual argument itself or a copy of it.  Uses the find_* statics
 * set up by find_arguments(). */
static void
find_args(int ast, int *extra)
{
  if (A_TYPEG(ast) == A_ID && A_REPLG(ast) == 0) {
    /* is this a dummy argument? */
    int sptr, i;
    sptr = A_SPTRG(ast);
    for (i = 0; i < find_nargs; ++i) {
      int arg;
      arg = aux.dpdsc_base[find_dpdsc + i];
      if (sptr == arg) {
        /* we need to make a copy; get a temp */
        int temp, dtype, assn, actual;
        char *tempname;
        dtype = DTYPEG(sptr);
        actual = ARGT_ARG(find_argt, i);
        if (DTY(dtype) != TY_ARRAY) {
          /* scalar dummy: only handled when the actual has the same type */
          if (actual && A_DTYPEG(actual) == dtype) {
            if (A_ALIASG(actual) && dtype == DT_INT) {
              /* constant actual: substitute the constant directly */
              ast_replace(ast, A_ALIASG(actual));
            } else {
              /* otherwise capture the actual's value in a local temp,
               * assigned just before find_std, and substitute the temp */
              tempname = mangle_name(SYMNAME(sptr), "t");
              temp = getsymbol(tempname);
              STYPEP(temp, ST_VAR);
              DCLDP(temp, 1);
              SCP(temp, SC_LOCAL);
              DTYPEP(temp, dtype);
              /* copy from i'th actual argument */
              assn = mk_assn_stmt(mk_id(temp), ARGT_ARG(find_argt, i), dtype);
              add_stmt_before(assn, find_std);
              ast_replace(ast, mk_id(temp));
            }
          }
        } else {
          /* only handle if the actual is itself an array */
          if (A_TYPEG(actual) == A_ID) {
            /* must be same type of array */
            int adtype;
            adtype = A_DTYPEG(actual);
            if (DTY(adtype + 1) == DTY(dtype + 1)) {
              /* use the actual argument */
              ast_replace(ast, actual);
            }
          }
        }
      }
    }
  }
} /* find_args */
2895
/* Set up the find_* statics and traverse ast, replacing references to
 * entry's dummy arguments with actual arguments from argt (see find_args).
 * A no-op when the argument counts disagree or there is no dummy
 * parameter descriptor. */
static void
find_arguments(int std, int entry, int nargs, int argt, int ast)
{
  if (PARAMCTG(entry) != nargs || ast == 0)
    return;
  find_entry = entry;
  find_dpdsc = DPDSCG(entry);
  if (find_dpdsc == 0)
    return;
  find_nargs = nargs;
  find_argt = argt;
  find_std = std;
  ast_traverse(ast, NULL, find_args, NULL);
} /* find_arguments */
2910
2911 static LOGICAL
is_non0_scope(int sptr)2912 is_non0_scope(int sptr)
2913 {
2914 int stype;
2915 int dtype;
2916 ADSC *ad;
2917 int ndim, i;
2918 int lb, ub, ast;
2919 int proc, tmpl;
2920 int dist, align;
2921
2922 stype = STYPEG(sptr);
2923 if (IGNOREG(sptr))
2924 return TRUE;
2925 if (stype == ST_ARRAY) {
2926 dtype = DTYPEG(sptr);
2927 ad = AD_DPTR(dtype);
2928 ndim = AD_NUMDIM(ad);
2929 for (i = 0; i < ndim; ++i) {
2930 lb = AD_LWBD(ad, i);
2931 if (contains_non0_scope(lb))
2932 return TRUE;
2933 lb = AD_LWAST(ad, i);
2934 if (contains_non0_scope(lb))
2935 return TRUE;
2936 ub = AD_UPBD(ad, i);
2937 if (contains_non0_scope(ub))
2938 return TRUE;
2939 ub = AD_UPAST(ad, i);
2940 if (contains_non0_scope(ub))
2941 return TRUE;
2942 }
2943 }
2944 return FALSE;
2945 }
2946
2947 /* This is the callback function for contains_non0_scope(). */
2948 static LOGICAL
_contains_non0_scope(int astSrc,LOGICAL * pflag)2949 _contains_non0_scope(int astSrc, LOGICAL *pflag)
2950 {
2951 if (astSrc && A_TYPEG(astSrc) == A_ID && IGNOREG(A_SPTRG(astSrc))) {
2952 *pflag = TRUE;
2953 return TRUE;
2954 }
2955 return FALSE;
2956 }
2957
2958 /* Return TRUE if astSrc has non zero scope ID somewhere within astSrc.
2959 */
2960 static LOGICAL
contains_non0_scope(int astSrc)2961 contains_non0_scope(int astSrc)
2962 {
2963 LOGICAL result = FALSE;
2964
2965 if (!astSrc)
2966 return FALSE;
2967
2968 ast_visit(1, 1);
2969 ast_traverse(astSrc, _contains_non0_scope, NULL, &result);
2970 ast_unvisit();
2971 return result;
2972 }
2973
/* ast_traverse callback for copy_nonconst(): for each integer-valued
 * variable reference, create a temp that is assigned the value at every
 * entry point, and register the temp as the replacement for the reference
 * (via ast_replace). */
static void
_copy(int ast, int *unused)
{
  if (DT_ISINT(A_DTYPEG(ast))) {
    int sptr;
    /* member reference, subscript, simple ID? */
    switch (A_TYPEG(ast)) {
    case A_ID:
    case A_SUBSCR:
    case A_MEM:
      /* not section descriptor, not compiler temp */
      sptr = memsym_of_ast(ast);
      if (!DESCARRAYG(sptr) && !CCSYMG(sptr) && !HCCSYMG(sptr)) {
        /* not already copied */
        if (A_REPLG(ast) == 0) {
          int tmp, newast, ent;
          tmp = getcctmp('d', ast, ST_VAR, DT_INT);
          newast = mk_id(tmp);
          /* assign the temp right after every entry of the subprogram so
           * it holds the value on all paths */
          for (ent = gbl.entries; ent != NOSYM; ent = SYMLKG(ent)) {
            int entry, asn;
            entry = ENTSTDG(ent);
            asn = mk_assn_stmt(newast, ast, DT_INT);
            add_stmt_after(asn, entry);
          }
          ast_replace(ast, newast);
        }
      }
      break;
    }
  }
} /* _copy */
3005
3006 static int
copy_nonconst(int ast)3007 copy_nonconst(int ast)
3008 {
3009 int newast;
3010 if (ast == 0)
3011 return 0;
3012 if (A_TYPEG(ast) == A_CNST)
3013 return ast;
3014
3015 /* anything else, search, replace */
3016 ast_traverse(ast, NULL, _copy, NULL);
3017 newast = ast_rewrite(ast);
3018 return newast;
3019 } /* copy_nonconst */
3020
3021 /* Make an AST id for the descriptor (SDSC or DESCR) of this symbol. */
3022 static int
mk_descr_id(SPTR sptr)3023 mk_descr_id(SPTR sptr)
3024 {
3025 if (SDSCG(sptr)) {
3026 return mk_id(SDSCG(sptr));
3027 } else if (DESCRG(sptr)) {
3028 return mk_id(DESCRG(sptr));
3029 } else {
3030 interr("no descriptor for symbol", sptr, ERR_Fatal);
3031 return 0;
3032 }
3033 }
3034
/* Build an AST referencing the section descriptor of ast: for a member
 * reference, a member access to the member's SDSC within the same parent;
 * otherwise the symbol's SDSC/DESCR id. */
static int
build_sdsc_node(int ast)
{
  /* NOTE(review): sptr is taken from the full ast before the A_SUBSCR is
   * stripped; sym_of_ast presumably resolves through the subscript to the
   * same base symbol — confirm. */
  SPTR sptr = sym_of_ast(ast);
  int astsdsc;
  if (A_TYPEG(ast) == A_SUBSCR)
    ast = A_LOPG(ast);
  if (A_TYPEG(ast) == A_MEM) {
    /* descriptor of a derived-type member: parent%member$sd */
    SPTR sptrmem = memsym_of_ast(ast);
    int astparent = A_PARENTG(ast);
    astsdsc = mk_id(SDSCG(sptrmem));
    astsdsc = mk_member(astparent, astsdsc, DTYPEG(sptr));
  } else {
    astsdsc = mk_descr_id(sptr);
  }
  return astsdsc;
}
3052
/* Build a call to the appropriate RTE_conformable_* runtime routine that
 * compares the shape of astdest against astsrc.  The routine variant is
 * chosen by which side has a usable section descriptor:
 *   dd - both descriptor-based;  dn/nd - one side by descriptor, the other
 *   by explicit extents;  nn - both by explicit extents.
 * When the source has no shape (array = scalar), the destination is
 * compared against itself, which degenerates into an "is allocated" test.
 * Returns the A_FUNC ast; callers compare its result against 0. */
static int
build_conformable_func_node(int astdest, int astsrc)
{
  int ast;
  int astfunc;
  int astdestsdsc;
  int astsrcsdsc;
  int sptrdestmem = memsym_of_ast(astdest);
  int sptrsrcmem = 0;
  int sptrfunc;
  int argt;
  int dtypesrc = A_DTYPEG(astsrc);
  int dtypedest = A_DTYPEG(astdest);
  int srcshape = A_SHAPEG(astsrc);
  int i;
  int nargs;
  /* rank-specialized runtime entry points; ranks above 3 use the
   * general "n" variants (index is ndim-1) */
  static FtnRtlEnum rtl_conformable_nn[] = {
      RTE_conformable_11v,
      RTE_conformable_22v,
      RTE_conformable_33v,
      RTE_conformable_nnv,
      RTE_conformable_nnv,
      RTE_conformable_nnv,
      RTE_conformable_nnv
  };
  static FtnRtlEnum rtl_conformable_dn[] = {
      RTE_conformable_d1v,
      RTE_conformable_d2v,
      RTE_conformable_d3v,
      RTE_conformable_dnv,
      RTE_conformable_dnv,
      RTE_conformable_dnv,
      RTE_conformable_dnv
  };
  static FtnRtlEnum rtl_conformable_nd[] = {
      RTE_conformable_1dv,
      RTE_conformable_2dv,
      RTE_conformable_3dv,
      RTE_conformable_ndv,
      RTE_conformable_ndv,
      RTE_conformable_ndv,
      RTE_conformable_ndv,
  };

  if (A_TYPEG(astsrc) == A_ID || A_TYPEG(astsrc) == A_CONV ||
      A_TYPEG(astsrc) == A_CNST || A_TYPEG(astsrc) == A_MEM) {
    sptrsrcmem = memsym_of_ast(astsrc);
  }

  /* pick the destination descriptor: section descriptor if in use,
   * otherwise the dummy's NEWDSC when applicable */
  astdestsdsc = 0;
  if (DESCUSEDG(sptrdestmem)) {
    astdestsdsc = build_sdsc_node(astdest);
  } else if (SCG(sptrdestmem) == SC_DUMMY && NEWDSCG(sptrdestmem) &&
             !ADJARRG(sptrdestmem)) {
    astdestsdsc = mk_id(NEWDSCG(sptrdestmem));
  }

  /* likewise for the source, when it is a simple object */
  astsrcsdsc = 0;
  if (sptrsrcmem) {
    if (DESCUSEDG(sptrsrcmem)) {
      astsrcsdsc = build_sdsc_node(astsrc);
    } else if (SCG(sptrsrcmem) == SC_DUMMY && NEWDSCG(sptrsrcmem) &&
               !srcshape) {
      astsrcsdsc = mk_id(NEWDSCG(sptrsrcmem));
    }
  }

  if (astdestsdsc) {
    if (astsrcsdsc) {
      /* both sides have descriptors: RTE_conformable_dd */
      nargs = 3;
      argt = mk_argt(nargs);
      ARGT_ARG(argt, 0) = astdest;
      ARGT_ARG(argt, 1) = astdestsdsc;
      ARGT_ARG(argt, 2) = astsrcsdsc;
      sptrfunc = sym_mkfunc(mkRteRtnNm(RTE_conformable_dd), DT_INT);
    } else {
      int ndim;
      if (srcshape) {
        /* dest by descriptor, source by explicit per-dimension extents */
        ndim = SHD_NDIM(srcshape);
        if(ndim <= 3) {
          nargs = 2 + ndim;
          argt = mk_argt(nargs);
          ARGT_ARG(argt, 0) = astdest;
          ARGT_ARG(argt, 1) = astdestsdsc;
          for (i = 0; i < ndim; i++) {
            ARGT_ARG(argt, 2 + i) = mk_unop(OP_VAL,
              mk_extent_expr(SHD_LWB(srcshape, i), SHD_UPB(srcshape, i)), astb.bnd.dtype);
          }
        } else {
          /* general-rank variant also passes ndim explicitly */
          nargs = 3 + ndim;
          argt = mk_argt(nargs);
          ARGT_ARG(argt, 0) = astdest;
          ARGT_ARG(argt, 1) = astdestsdsc;
          ARGT_ARG(argt, 2) = mk_unop(OP_VAL, mk_cval(ndim, astb.bnd.dtype), astb.bnd.dtype);
          for (i = 0; i < ndim; i++) {
            ARGT_ARG(argt, 3 + i) = mk_unop(OP_VAL,
              mk_extent_expr(SHD_LWB(srcshape, i), SHD_UPB(srcshape, i)), astb.bnd.dtype);
          }
        }
        sptrfunc = sym_mkfunc(mkRteRtnNm(rtl_conformable_dn[ndim-1]), DT_INT);
      } else {
        /* array = scalar
         * generate
         *   RTE_conformable_dd(dest_addr, dest_sdsc, dest_sdsc)
         * will return false iff array is not allocated (i.e., the conformable
         * call is an RTE_allocated call) */
        nargs = 3;
        argt = mk_argt(nargs);
        ARGT_ARG(argt, 0) = astdest;
        ARGT_ARG(argt, 1) = astdestsdsc;
        ARGT_ARG(argt, 2) = astdestsdsc;
        sptrfunc = sym_mkfunc(mkRteRtnNm(RTE_conformable_dd), DT_INT);
      }
    }
  } else {
    if (astsrcsdsc) {
      /* source by descriptor, dest by explicit extents from its dtype */
      int ndim = ADD_NUMDIM(dtypesrc);
      if(ndim <= 3) {
        nargs = 2 + ndim;
        argt = mk_argt(nargs);
        ARGT_ARG(argt, 0) = astdest;
        ARGT_ARG(argt, 1) = astsrcsdsc;
        for (i = 0; i < ndim; i++) {
          ARGT_ARG(argt, 2 + i) = mk_unop(OP_VAL,
            mk_extent_expr(ADD_LWAST(dtypedest, i), ADD_UPAST(dtypedest, i)), astb.bnd.dtype);
        }
      } else {
        nargs = 3 + ndim;
        argt = mk_argt(nargs);
        ARGT_ARG(argt, 0) = astdest;
        ARGT_ARG(argt, 1) = astsrcsdsc;
        ARGT_ARG(argt, 2) = mk_unop(OP_VAL, mk_cval(ndim, astb.bnd.dtype), astb.bnd.dtype);
        for (i = 0; i < ndim; i++) {
          ARGT_ARG(argt, 3 + i) = mk_unop(OP_VAL,
            mk_extent_expr(ADD_LWAST(dtypedest, i), ADD_UPAST(dtypedest, i)), astb.bnd.dtype);
        }
      }
      sptrfunc = sym_mkfunc(mkRteRtnNm(rtl_conformable_nd[ndim-1]), DT_INT);
    } else {
      int ndim;
      if (srcshape) {
        /* generate
         * RTE_conformable_nn(dest_addr, dest_sz, dest_sz, dest_ndim,
         *                    dest_extnt1,src_extnt1, ...,
         *                    dest_extntn,src_extntn) */
        ndim = SHD_NDIM(srcshape);
        if(ndim <= 3) {
          nargs = 1 + 2 * ndim;
          argt = mk_argt(nargs);
          ARGT_ARG(argt, 0) = astdest;
          for (i = 0; i < ndim; i++) {
            ARGT_ARG(argt, 1 + i * 2) = mk_unop(OP_VAL,
              mk_extent_expr(ADD_LWAST(dtypedest, i), ADD_UPAST(dtypedest, i)), astb.bnd.dtype);
            ARGT_ARG(argt, 2 + i * 2) = mk_unop(OP_VAL,
              mk_extent_expr(SHD_LWB(srcshape, i), SHD_UPB(srcshape, i)), astb.bnd.dtype);
          }
        } else {
          nargs = 2 + 2 * ndim;
          argt = mk_argt(nargs);
          ARGT_ARG(argt, 0) = astdest;
          ARGT_ARG(argt, 1) = mk_unop(OP_VAL, mk_cval(ndim, astb.bnd.dtype), astb.bnd.dtype);
          for (i = 0; i < ndim; i++) {
            ARGT_ARG(argt, 2 + i * 2) = mk_unop(OP_VAL,
              mk_extent_expr(ADD_LWAST(dtypedest, i), ADD_UPAST(dtypedest, i)), astb.bnd.dtype);
            ARGT_ARG(argt, 3 + i * 2) = mk_unop(OP_VAL,
              mk_extent_expr(SHD_LWB(srcshape, i), SHD_UPB(srcshape, i)), astb.bnd.dtype);
          }
        }
      } else {
        /* array = scalar
         * generate
         * RTE_conformable_nn(dest_addr, dest_sz, dest_sz, dest_ndim,
         *                    dest_extnt1,dest_extnt1, ..., dest_extntn,dest_extntn)
         * will return false iff array is not allocated (i.e., the conformable
         * call acts as a RTE_allocated call) */
        ndim = ADD_NUMDIM(dtypedest);
        if(ndim <= 3) {
          nargs = 1 + 2 * ndim;
          argt = mk_argt(nargs);
          ARGT_ARG(argt, 0) = astdest;
          for (i = 0; i < ndim; i++) {
            ARGT_ARG(argt, 1 + i * 2) = mk_unop(OP_VAL,
              mk_extent_expr(ADD_LWAST(dtypedest, i), ADD_UPAST(dtypedest, i)), astb.bnd.dtype);
            ARGT_ARG(argt, 2 + i * 2) = ARGT_ARG(argt, 1 + i * 2);
          }
        } else {
          nargs = 2 + 2 * ndim;
          argt = mk_argt(nargs);
          ARGT_ARG(argt, 0) = astdest;
          ARGT_ARG(argt, 1) = mk_unop(OP_VAL, mk_cval(ndim, astb.bnd.dtype), astb.bnd.dtype);
          for (i = 0; i < ndim; i++) {
            ARGT_ARG(argt, 2 + i * 2) = mk_unop(OP_VAL,
              mk_extent_expr(ADD_LWAST(dtypedest, i), ADD_UPAST(dtypedest, i)), astb.bnd.dtype);
            ARGT_ARG(argt, 3 + i * 2) = ARGT_ARG(argt, 2 + i * 2);
          }
        }
      }
      sptrfunc = sym_mkfunc(mkRteRtnNm(rtl_conformable_nn[ndim-1]), DT_INT);
    }
  }

  NODESCP(sptrfunc, 1);
  astfunc = mk_id(sptrfunc);
  A_DTYPEP(astfunc, DT_INT);
  ast = mk_func_node(A_FUNC, astfunc, nargs, argt);
  A_DTYPEP(ast, DT_INT);
  A_OPTYPEP(ast, INTASTG(sptrfunc));
  A_LOPP(ast, astfunc);

  return ast;
}
3264
3265 /* Generate a conformable test. optype is for a comparison against 0:
3266 * OP_GT => conformable
3267 * OP_EQ => not conformable but big enough
3268 * OP_LT => not conformable and not big enough (or not allocated)
3269 */
3270 int
mk_conformable_test(int dest,int src,int optype)3271 mk_conformable_test(int dest, int src, int optype)
3272 {
3273 int func = build_conformable_func_node(dest, src);
3274 int cmp = mk_binop(optype, func, astb.i0, DT_INT);
3275 int astif = mk_stmt(A_IFTHEN, 0);
3276 A_IFEXPRP(astif, cmp);
3277 return astif;
3278 }
3279
/** \brief Generate a call to poly_conform_types() that is used in polymorphic
 * allocatable assignment.
 *
 * \param dest is the ast representing the LHS of a polymorphic assignment.
 * \param src is the ast representing the RHS of a polymorphic assignment.
 * \param intrin_type is an ast that represents a descriptor for an
 *        intrinsic scalar object when src represents an intrinsic scalar
 *        object. It's zero if src is not a non-zero intrinsic object.
 *
 * \return the ast representing the function call to poly_conform_types().
 */
static int
build_poly_func_node(int dest, int src, int intrin_type)
{
  int ast, astfunc, src_sdsc_ast, dest_sdsc_ast;
  SPTR sptrsrc, sptrdest, sptrfunc;
  int argt;
  /* 4th runtime argument: nonzero when src's descriptor must be consulted */
  int flag_con = mk_cval1(1, DT_INT);

  sptrdest= memsym_of_ast(dest);
  sptrsrc = memsym_of_ast(src);

  if (intrin_type != 0) {
    /* intrinsic scalar source: its descriptor was supplied by the caller */
    src_sdsc_ast = intrin_type;
    flag_con = mk_cval1(0, DT_INT);
  } else {
    src_sdsc_ast = get_sdsc_ast(sptrsrc, src);
  }

  /* member destinations need the descriptor located through the parent */
  if (STYPEG(sptrdest) == ST_MEMBER) {
    dest_sdsc_ast = find_descriptor_ast(sptrdest, dest);
  } else {
    dest_sdsc_ast = mk_id(SDSCG(sptrdest));
  }

  argt = mk_argt(4);

  ARGT_ARG(argt, 0) = dest;
  ARGT_ARG(argt, 1) = dest_sdsc_ast;
  ARGT_ARG(argt, 2) = src_sdsc_ast;
  flag_con = mk_unop(OP_VAL, flag_con, DT_INT);
  ARGT_ARG(argt, 3) = flag_con;

  sptrfunc = sym_mkfunc(mkRteRtnNm(RTE_poly_conform_types), DT_INT);

  NODESCP(sptrfunc, 1);
  astfunc = mk_id(sptrfunc);
  A_DTYPEP(astfunc, DT_INT);
  ast = mk_func_node(A_FUNC, astfunc, 4, argt);
  A_DTYPEP(ast, DT_INT);
  A_OPTYPEP(ast, INTASTG(sptrfunc));
  A_LOPP(ast, astfunc);

  return ast;
}
3335
3336 /** \brief Same as mk_conformable_test() above, except it generates a test
3337 * between two polymorphic scalar objects.
3338 *
3339 * \param dest is the ast representing the LHS of a polymorphic assignment.
3340 * \param src is the ast representing the RHS of a polymorphic assignment.
3341 * \param optype is the type of check (see mk_conformable_test() above).
3342 * \param intrin_type is an ast that represents a descriptor for an
3343 * intrinsic scalar object when src represents an intrinsic scalar
3344 * object. It's zero if src is not a non-zero intrinsic object.
3345 *
3346 * \return an ast representing the "if statement" for the polymorphic test.
3347 */
3348 static int
mk_poly_test(int dest,int src,int optype,int intrin_type)3349 mk_poly_test(int dest, int src, int optype, int intrin_type)
3350 {
3351 int func = build_poly_func_node(dest, src, intrin_type);
3352 int cmp = mk_binop(optype, func, astb.i0, DT_INT);
3353 int astif = mk_stmt(A_IFTHEN, 0);
3354 A_IFEXPRP(astif, cmp);
3355 return astif;
3356 }
3357
3358 int
mk_allocate(int ast)3359 mk_allocate(int ast)
3360 {
3361 int alloc = mk_stmt(A_ALLOC, 0);
3362 A_TKNP(alloc, TK_ALLOCATE);
3363 A_SRCP(alloc, ast);
3364 return alloc;
3365 }
3366
3367 int
mk_deallocate(int ast)3368 mk_deallocate(int ast)
3369 {
3370 int dealloc = mk_stmt(A_ALLOC, 0);
3371 A_TKNP(dealloc, TK_DEALLOCATE);
3372 A_SRCP(dealloc, ast);
3373 return dealloc;
3374 }
3375
/* is_assign_lhs is set when this is for the LHS of an assignment */
/* Recursively generate DEALLOCATE statements (inserted before std) for
 * every allocatable component of the derived-type object ast, guarding
 * with an "allocated" check when ast itself is allocatable and looping
 * over the element shape when ast is an array. */
void
rewrite_deallocate(int ast, bool is_assign_lhs, int std)
{
  int i;
  int sptrmem;
  DTYPE dtype = A_DTYPEG(ast);
  int shape = A_SHAPEG(ast);
  int astparent = ast;
  int docnt = 0; /* number of generated DO loops to close at the end */
  LOGICAL need_endif = FALSE;

  assert(DTY(DDTG(dtype)) == TY_DERIVED, "unexpected dtype", dtype, ERR_Fatal);
  if (ALLOCATTRG(memsym_of_ast(ast))) {
    /* only deallocate components if the object itself is allocated */
    gen_allocated_check(ast, std, A_IFTHEN, false, is_assign_lhs, false);
    need_endif = TRUE;
  }
  if (shape != 0) {
    /* array of derived type: wrap the member deallocations in DO loops
     * over the shape and index the element being processed */
    int asd;
    assert(DTY(dtype) == TY_ARRAY, "expecting array dtype", 0, ERR_Fatal);
    asd = gen_dos_over_shape(shape, std);
    docnt = ASD_NDIM(asd);
    if (A_TYPEG(ast) == A_MEM) {
      astparent = subscript_allocmem(ast, asd);
    } else {
      astparent = mk_subscr_copy(ast, asd, DTY(dtype + 1));
    }
  }

  /* visit each member of the derived type */
  for (sptrmem = DTY(DDTG(dtype) + 1); sptrmem > NOSYM;
       sptrmem = SYMLKG(sptrmem)) {
    int astdealloc;
    int astmem;
    if (is_tbp_or_final(sptrmem)) {
      continue; /* skip tbp */
    }
    astmem = mk_id(sptrmem);
    astmem = mk_member(astparent, astmem, A_DTYPEG(astmem));
    /* recurse into non-pointer members that themselves contain
     * allocatable components */
    if (!POINTERG(sptrmem) && allocatable_member(sptrmem)) {
      rewrite_deallocate(astmem, false, std);
    }
    if (!ALLOCATTRG(sptrmem)) {
      continue;
    }
    astdealloc = mk_deallocate(astmem);
    A_DALLOCMEMP(astdealloc, 1);
    add_stmt_before(astdealloc, std);
  }

  /* close the generated DO loops and the allocated-check IF */
  gen_do_ends(docnt, std);
  if (need_endif) {
    int astendif = mk_stmt(A_ENDIF, 0);
    add_stmt_before(astendif, std);
  }
}
3431
3432 /** \brief Generate an IF to see if ast is allocated and insert before std.
3433 Caller is responsible for generating ENDIF.
3434 \param atype Type of AST to generate, A_IFTHEN or A_ELSEIF.
3435 \param negate Check for not allocated instead of allocated.
3436 \param is_assign_lhs True if this check is for the LHS of an assignment
3437 */
3438 static void
gen_allocated_check(int ast,int std,int atype,bool negate,bool is_assign_lhs,bool is_assign_lhs2)3439 gen_allocated_check(int ast, int std, int atype, bool negate,
3440 bool is_assign_lhs, bool is_assign_lhs2)
3441 {
3442 int astfunc;
3443 int funcid = mk_id(getsymbol("allocated"));
3444 int argt = mk_argt(1);
3445 int astif = mk_stmt(atype, 0);
3446 int allocstd;
3447
3448 assert(atype == A_IFTHEN || atype == A_ELSEIF, "Bad ast type", atype, ERR_Fatal);
3449 A_DTYPEP(funcid, DT_LOG);
3450 ARGT_ARG(argt, 0) = A_TYPEG(ast) == A_SUBSCR ? A_LOPG(ast) : ast;
3451 astfunc = mk_func_node(A_INTR, funcid, 1, argt);
3452 A_DTYPEP(astfunc, DT_LOG);
3453 A_OPTYPEP(astfunc, I_ALLOCATED);
3454 if (negate)
3455 astfunc = mk_unop(OP_LNOT, astfunc, DT_LOG);
3456 A_IFEXPRP(astif, astfunc);
3457 allocstd = add_stmt_before(astif, std);
3458 STD_RESCOPE(allocstd) = 1;
3459 }
3460
3461 /* Generate DOs over each dimension of shape, insert then before std,
3462 and return the temp loop variables as an ASD. */
3463 static int
gen_dos_over_shape(int shape,int std)3464 gen_dos_over_shape(int shape, int std)
3465 {
3466 int i;
3467 int subs[MAXSUBS];
3468 int ndim = SHD_NDIM(shape);
3469 for (i = 0; i < ndim; i++) {
3470 int astdo = mk_stmt(A_DO, 0);
3471 int sub = mk_id(get_temp(astb.bnd.dtype));
3472 A_DOVARP(astdo, sub);
3473 A_M1P(astdo, SHD_LWB(shape, i));
3474 A_M2P(astdo, SHD_UPB(shape, i));
3475 A_M3P(astdo, SHD_STRIDE(shape, i));
3476 A_M4P(astdo, 0);
3477 add_stmt_before(astdo, std);
3478 subs[i] = sub;
3479 }
3480 return mk_asd(subs, ndim);
3481 }
3482
3483 static void
gen_do_ends(int docnt,int std)3484 gen_do_ends(int docnt, int std)
3485 {
3486 int astdo;
3487 int i;
3488
3489 for (i = 0; i < docnt; i++) {
3490 astdo = mk_stmt(A_ENDDO, 0);
3491 add_stmt_before(astdo, std);
3492 }
3493 }
3494
/* Generate the assignments that give the destination member the bounds of
 * the source, inserted before std.  When the destination uses a runtime
 * descriptor the lower/upper/extent slots of that descriptor are assigned;
 * otherwise its compile-time bounds variables are assigned (when they are
 * not already the same asts).  Also assigns the deferred-length char length
 * of the LHS from the RHS when the statement at std assigns DT_DEFERCHAR.
 */
static void
gen_bounds_assignments(int astdestparent, int astdestmem, int astsrcparent,
                       int astsrcmem, int std)
{
  int sptrdest;
  int ndim = 0;
  int shape;

  if (is_array_dtype(A_DTYPEG(astdestmem)))
    ndim = ADD_NUMDIM(A_DTYPEG(astdestmem));

  /* fold a bare A_MEM destination into explicit parent + member parts */
  if (!astdestparent && A_TYPEG(astdestmem) == A_MEM) {
    astdestparent = A_PARENTG(astdestmem);
    astdestmem = A_MEMG(astdestmem);
  }

  /* pick the source shape: a member with a descriptor gets a shape built
   * from that descriptor; otherwise use the ast's own shape, falling back
   * to the parent's shape */
  if (astsrcparent && SDSCG(A_SPTRG(astsrcmem))) {
    shape = mk_mem_ptr_shape(astsrcparent, astsrcmem, A_DTYPEG(astsrcmem));
  } else {
    shape = A_SHAPEG(astsrcmem);
  }
  if (shape == 0 && astsrcparent != 0) {
    shape = A_SHAPEG(astsrcparent);
  }
  if (shape == 0) {
    /* scalar source: nothing to assign, and dest must be scalar too */
    assert(ndim == 0, "unexpected ndim", ndim, ERR_Fatal);
    return;
  }
  assert(ndim == SHD_NDIM(shape), "bad shape", 0, ERR_Fatal);
  if (A_SHAPEG(astsrcmem) == 0 || A_TYPEG(astsrcmem) == A_SUBSCR) {
    /* normalize to 1:extent bounds in each dimension */
    shape = mk_bounds_shape(shape);
  }

  sptrdest = memsym_of_ast(astdestmem);
  if (DESCUSEDG(sptrdest)) {
    /* destination has a runtime descriptor: store lb, ub, and extent into
     * the descriptor's global lower/upper/extent slots per dimension */
    int i;
    int astdest = mk_descr_id(sptrdest);
    if (astdestparent) {
      astdest = mk_member(astdestparent, astdest, astb.bnd.dtype);
    }
    for (i = 0; i < ndim; i++) {
      int stride = SHD_STRIDE(shape, i);
      int astlb = SHD_LWB(shape, i);
      int astub = SHD_UPB(shape, i);
      int astextnt = extent_of_shape(shape, i);
      int subscr = mk_cval(get_global_lower_index(i), astb.bnd.dtype);
      int ast = mk_subscr(astdest, &subscr, 1, astb.bnd.dtype);
      ast = mk_assn_stmt(ast, astlb, astb.bnd.dtype);
      add_stmt_before(ast, std);
      subscr = mk_cval(get_global_upper_index(i), astb.bnd.dtype);
      ast = mk_subscr(astdest, &subscr, 1, astb.bnd.dtype);
      ast = mk_assn_stmt(ast, astub, astb.bnd.dtype);
      add_stmt_before(ast, std);
      subscr = mk_cval(get_global_extent_index(i), astb.bnd.dtype);
      ast = mk_subscr(astdest, &subscr, 1, astb.bnd.dtype);
      ast = mk_assn_stmt(ast, astextnt, astb.bnd.dtype);
      add_stmt_before(ast, std);
    }
    /* deferred-length character LHS: assign its length from the RHS of the
     * statement at std */
    if (DDTG(A_DTYPEG(A_DESTG(STD_AST(std)))) == DT_DEFERCHAR) {
      int lhs_len = get_len_of_deferchar_ast(A_DESTG(STD_AST(std)));
      int rhs_len, ast;
      if (is_deferlenchar_ast(A_SRCG(STD_AST(std)))) {
        rhs_len = get_len_of_deferchar_ast(A_SRCG(STD_AST(std)));
      } else {
        rhs_len = string_expr_length(A_SRCG(STD_AST(std)));
      }
      ast = mk_assn_stmt(lhs_len, rhs_len, DT_INT);
      add_stmt_before(ast, std);
    }
  } else {
    /* no descriptor: assign the dtype's bounds/extent variables directly,
     * skipping any that are already the identical ast */
    int i;
    DTYPE dtypedest = DTYPEG(sptrdest);
    for (i = 0; i < ndim; i++) {
      int astlb = SHD_LWB(shape, i);
      int astub = SHD_UPB(shape, i);
      int astextnt = extent_of_shape(shape, i);
      int astlbv = ADD_LWBD(dtypedest, i);
      int astubv = ADD_UPBD(dtypedest, i);
      int astextntv = ADD_EXTNTAST(dtypedest, i);
      if (astlbv != astlb) {
        int ast = mk_assn_stmt(astlbv, astlb, astb.bnd.dtype);
        add_stmt_before(ast, std);
      }
      if (astubv != astub) {
        int ast = mk_assn_stmt(astubv, astub, astb.bnd.dtype);
        add_stmt_before(ast, std);
      }
      if (astextntv != astextnt) {
        int ast = mk_assn_stmt(astextntv, astextnt, astb.bnd.dtype);
        add_stmt_before(ast, std);
      }
    }
  }
}
3589
3590 /* Make a new shape that is 1:extent in each dimension. */
3591 static int
mk_bounds_shape(int shape)3592 mk_bounds_shape(int shape)
3593 {
3594 int i;
3595 int ndim = SHD_NDIM(shape);
3596 add_shape_rank(ndim);
3597 for (i = 0; i < ndim; i++) {
3598 int lb = astb.bnd.one;
3599 int ub = extent_of_shape(shape, i);
3600 add_shape_spec(lb, ub, astb.bnd.one);
3601 }
3602 return mk_shape();
3603 }
3604
3605 static int
build_allocation_item(int astdestparent,int astdestmem)3606 build_allocation_item(int astdestparent, int astdestmem)
3607 {
3608 int indx[MAXSUBS];
3609 int ndim;
3610 int astitem;
3611 int sptrdest;
3612 int sptrsdsc;
3613 int astdest;
3614 int astsdsc;
3615 int i;
3616 int subscr;
3617 int lbast;
3618 int ubast;
3619
3620 sptrdest = memsym_of_ast(astdestmem);
3621 if (DTY(DTYPEG(sptrdest)) != TY_ARRAY) {
3622 if (STYPEG(sptrdest) == ST_MEMBER && astdestparent) {
3623 /* FS#20128: astdestmem is an allocatable scalar */
3624 return mk_member(astdestparent, astdestmem, A_DTYPEG(astdestmem));
3625 }
3626 return astdestmem;
3627 }
3628
3629 if (A_TYPEG(astdestmem) == A_SUBSCR)
3630 astdestmem = A_LOPG(astdestmem);
3631 ndim = ADD_NUMDIM(A_DTYPEG(astdestmem));
3632
3633 astdest = astdestmem;
3634 if (astdestparent) {
3635 astdest = mk_member(astdestparent, astdest, astb.bnd.dtype);
3636 } else if (!astdestparent && A_TYPEG(astdestmem) == A_MEM) {
3637 astdestparent = A_PARENTG(astdestmem);
3638 astdestmem = A_MEMG(astdestmem);
3639 }
3640
3641 if (DESCUSEDG(sptrdest)) {
3642 astsdsc = mk_descr_id(memsym_of_ast(astdestmem));
3643 if (astdestparent) {
3644 astsdsc = mk_member(astdestparent, astsdsc, astb.bnd.dtype);
3645 }
3646 for (i = 0; i < ndim; i++) {
3647 subscr = mk_cval(get_global_lower_index(i), astb.bnd.dtype);
3648 lbast = mk_subscr(astsdsc, &subscr, 1, astb.bnd.dtype);
3649 subscr = mk_cval(get_global_upper_index(i), astb.bnd.dtype);
3650 ubast = mk_subscr(astsdsc, &subscr, 1, astb.bnd.dtype);
3651 indx[i] = mk_triple(lbast, ubast, astb.i1);
3652 }
3653 } else {
3654 int dtypedest = DTYPEG(sptrdest);
3655 for (i = 0; i < ndim; i++) {
3656 indx[i] =
3657 mk_triple(ADD_LWBD(dtypedest, i), ADD_UPBD(dtypedest, i), astb.i1);
3658 }
3659 }
3660 astitem = mk_subscr(astdest, indx, ndim, DTYG(A_DTYPEG(astdestmem)));
3661
3662 return astitem;
3663 }
3664
3665 static void
gen_alloc_mbr(int ast,int std)3666 gen_alloc_mbr(int ast, int std)
3667 {
3668 int astfunc = mk_allocate(ast);
3669 SPTR sptr = memsym_of_ast(ast);
3670 add_stmt_before(astfunc, std);
3671 if (is_unl_poly(sptr)) {
3672 check_alloc_ptr_type(sptr, std, A_DTYPEG(ast), 1, 0, ast, ast);
3673 } else {
3674 check_alloc_ptr_type(sptr, std, DTYPEG(sptr), 1, 0, 0, ast);
3675 }
3676 }
3677
3678 static void
gen_dealloc_mbr(int ast,int std)3679 gen_dealloc_mbr(int ast, int std)
3680 {
3681 int astfunc = mk_deallocate(ast);
3682 int std_dealloc = add_stmt_before(astfunc, std);
3683 A_DALLOCMEMP(astfunc, 1);
3684 if (allocatable_member(memsym_of_ast(ast))) {
3685 rewrite_deallocate(ast, true, std_dealloc);
3686 }
3687 }
3688
3689 static void
nullify_member(int ast,int std,int sptr)3690 nullify_member(int ast, int std, int sptr)
3691 {
3692 int dtype = DTYPEG(sptr);
3693 int sptrmem, aast, mem_sptr_id;
3694
3695 for (sptrmem = DTY(DDTG(dtype) + 1); sptrmem > NOSYM;
3696 sptrmem = SYMLKG(sptrmem)) {
3697 if (ALLOCATTRG(sptrmem)) {
3698 aast = mk_id(sptrmem);
3699 mem_sptr_id = mk_member(ast, aast, DTYPEG(sptrmem));
3700 add_stmt_before(add_nullify_ast(mem_sptr_id), std);
3701 }
3702 if (is_tbp_or_final(sptrmem)) {
3703 /* skip tbp */
3704 continue;
3705 }
3706 }
3707 }
3708
/* Expand a derived-type assignment astdest = astsrc into per-component
 * assignments so that allocatable/pointer components get the correct
 * semantics, inserting everything before std.  For array objects the
 * component assignments are wrapped in DO loops over the destination's
 * shape.  non_conformable is passed through to the recursive
 * rewrite_allocatable_assignment() calls on allocatable components.
 */
static void
handle_allocatable_members(int astdest, int astsrc, int std,
                           bool non_conformable)
{
  int sptrmem;
  int docnt = 0;
  int astdestparent = astdest;
  int astsrcparent = astsrc;
  DTYPE dtype = A_DTYPEG(astdest);
  int shape = A_SHAPEG(astdest);

  if (shape != 0) {
    /* array assignment: build DO loops over the destination shape and
     * subscript both sides with the loop indices */
    int destasd;
    int srcasd;
    if (A_TYPEG(astdest) == A_MEM) {
      int memsptr = A_SPTRG(A_MEMG(astdest));
      if (POINTERG(memsptr) || ALLOCATTRG(memsptr)) {
        /* pointer/allocatable member: shape comes from its descriptor */
        shape = mk_mem_ptr_shape(A_PARENTG(astdest), A_MEMG(astdest), dtype);
      }
    }
    destasd = gen_dos_over_shape(shape, std);
    docnt = ASD_NDIM(destasd);
    srcasd = normalize_subscripts(destasd, shape, A_SHAPEG(astsrc));
    astdestparent = subscript_allocmem(astdest, destasd);
    if (A_SHAPEG(astsrc)) {
      astsrcparent = subscript_allocmem(astsrc, srcasd);
    }
  }

  for (sptrmem = DTY(DDTG(dtype) + 1); sptrmem > NOSYM;
       sptrmem = SYMLKG(sptrmem)) {
    /* for allocatable components, build an assignment and recurse */
    int astmem;
    int astdestcmpnt;
    int astsrccmpnt;
    if (is_tbp_or_final(sptrmem)) {
      continue; /* skip tbp */
    }
    astmem = mk_id(sptrmem);
    astdestcmpnt = mk_member(astdestparent, astmem, A_DTYPEG(astmem));
    astsrccmpnt = mk_member(astsrcparent, astmem, A_DTYPEG(astmem));

    /* descriptor-based members get shapes rebuilt from their descriptors */
    if (A_SHAPEG(astmem) && DESCUSEDG(sptrmem) &&
        !(USELENG(sptrmem) && ALLOCG(sptrmem) && TPALLOCG(sptrmem))) {
      int destshape = mk_mem_ptr_shape(astdestparent, astmem, A_DTYPEG(astmem));
      int srcshape = mk_mem_ptr_shape(astsrcparent, astmem, A_DTYPEG(astmem));
      A_SHAPEP(astdestcmpnt, destshape);
      A_SHAPEP(astsrccmpnt, srcshape);
    }
    if (POINTERG(sptrmem) && !F90POINTERG(sptrmem)) {
      /* pointer component: pointer assignment, not a deep copy */
      int ptr_assign = add_ptr_assign(astdestcmpnt, astsrccmpnt, std);
      A_SHAPEP(ptr_assign, A_SHAPEG(astsrccmpnt));
      add_stmt_before(ptr_assign, std);
    } else {
      int stdassncmpnt;
      int sym = memsym_of_ast(astdest);
      int mem = memsym_of_ast(astdestcmpnt);
      int assn = mk_assn_stmt(astdestcmpnt, astsrccmpnt, A_DTYPEG(astsrccmpnt));
      A_SHAPEP(assn, A_SHAPEG(astsrccmpnt));
      stdassncmpnt = add_stmt_before(assn, std);

      if (SCG(sym) == SC_LOCAL && !INMODULEG(sym) && !SAVEG(sym) &&
          A_TYPEG(astdest) == A_SUBSCR &&
          (ALLOCATTRG(mem) || allocatable_member(mem))) {
        /* FS#19743: Make sure this member is NULL. Since we're
         * accessing a member in an individual element of an array
         * of derived type, we need to make sure member is initially
         * NULL here.
         */
        int i;
        LOGICAL const_subscript = FALSE;
        int asd = A_ASDG(astdest);
        int ndim = ASD_NDIM(asd);
        /* only do this when every subscript is a constant */
        for (i = 0; i < ndim; i++) {
          const_subscript = A_TYPEG(ASD_SUBS(asd, i)) == A_CNST;
          if (!const_subscript)
            break;
        }
        if (const_subscript) {
          /* nullify at subprogram entry, not at the assignment site */
          add_stmt_after(add_nullify_ast(astdestcmpnt), ENTSTDG(gbl.currsub));
        }
      }

      if ((ALLOCATTRG(sptrmem) || allocatable_member(sptrmem)) &&
          !TPALLOCG(sptrmem)) {
        /* recurse to apply allocatable-assignment semantics to the
         * component assignment just generated */
        rewrite_allocatable_assignment(assn, stdassncmpnt, non_conformable,
                                       true);
      }
    }

    if (ALLOCG(sptrmem) || (POINTERG(sptrmem) && !F90POINTERG(sptrmem))) {
      /* skip past $p, $o, $sd $td */
      /* NOTE(review): advancing sptrmem here relies on the symbol-table
       * link order of the compiler-created pointer/offset/descriptor
       * members immediately following the data member. */
      int osptr = sptrmem;
      int midnum = MIDNUMG(sptrmem);
      int offset = PTROFFG(sptrmem);
      int sdsc = SDSCG(sptrmem);
      if (sdsc && STYPEG(sdsc) == ST_MEMBER) {
        if (SYMLKG(sptrmem) == midnum) {
          sptrmem = SYMLKG(sptrmem);
        }
        if (SYMLKG(sptrmem) == offset) {
          sptrmem = SYMLKG(sptrmem);
        }
        if (SYMLKG(sptrmem) == sdsc) {
          sptrmem = SYMLKG(sptrmem);
        }
        if (CLASSG(osptr) && DESCARRAYG(sptrmem)) {
          sptrmem = SYMLKG(sptrmem);
        }
      } else {
        if (midnum && midnum == SYMLKG(sptrmem))
          sptrmem = SYMLKG(sptrmem);
        if (sdsc && sdsc == SYMLKG(sptrmem))
          sptrmem = SYMLKG(sptrmem);
      }
    }
  }

  gen_do_ends(docnt, std);
}
3829
3830 static int sptrMatch; /* sptr # for matching */
3831 static int parentMatch; /* sptr # for matching */
3832
3833 /* This is the callback function for contains_sptr(). */
3834 static LOGICAL
_contains_sptr(int astSrc,LOGICAL * pflag)3835 _contains_sptr(int astSrc, LOGICAL *pflag)
3836 {
3837 if (A_TYPEG(astSrc) == A_ID && sptrMatch == A_SPTRG(astSrc) &&
3838 parentMatch == 0) {
3839 *pflag = TRUE;
3840 return TRUE;
3841 } else if (A_TYPEG(astSrc) == A_MEM && sptrMatch == A_SPTRG(astSrc) &&
3842 parentMatch == A_PARENTG(astSrc)) {
3843 *pflag = TRUE;
3844 return TRUE;
3845 }
3846 return FALSE;
3847 }
3848
3849 /* Return TRUE if sptrDst occurs somewhere within astSrc. */
3850 static LOGICAL
contains_sptr(int astSrc,int sptrDst,int astparent)3851 contains_sptr(int astSrc, int sptrDst, int astparent)
3852 {
3853 LOGICAL result = FALSE;
3854
3855 if (!astSrc)
3856 return FALSE;
3857
3858 sptrMatch = sptrDst;
3859 parentMatch = astparent;
3860 ast_visit(1, 1);
3861 ast_traverse(astSrc, _contains_sptr, NULL, &result);
3862 ast_unvisit();
3863 return result;
3864 }
3865
3866 /** \brief Checks whether the user specified an empty array subscript such as
3867 * (:), (:,:), (:,:,:), etc.
3868 *
3869 * \param a is the array subscript ast (A_SUBSCR) to check.
3870 *
3871 * \returns true if \ref a is an empty subscript; else false.
3872 */
3873 static bool
chk_assumed_subscr(int a)3874 chk_assumed_subscr(int a)
3875 {
3876 int i, t, asd, ndim;
3877
3878 if (A_TYPEG(a) != A_SUBSCR)
3879 return false;
3880
3881 asd = A_ASDG(a);
3882 ndim = ASD_NDIM(asd);
3883
3884 assert(ndim >= 1 && ndim <= MAXDIMS, "chk_assumed_subscr: invalid ndim", ndim,
3885 ERR_Fatal);
3886
3887 for (i = 0; i < ndim; i++) {
3888 t = ASD_SUBS(asd, i);
3889 if (A_MASKG(t) != (lboundMask | uboundMask | strideMask))
3890 return false;
3891 }
3892 return true;
3893 }
3894
3895 /** \brief Create a non-subscripted "alias" or "replacement" ast to a
3896 * subscripted expression.
3897 *
3898 * This is used in a poly_asn() call where the source argument cannot
3899 * directly handle an A_SUBSCR which could be an array slice. This
3900 * function either returns the array object if it has an empty
3901 * subscript expression or a pointer to a contiguous shallow copy
3902 * of the array slice.
3903 *
3904 * \param subAst is the subscripted expression that we are processing.
3905 * \param std is the std for adding statements.
3906 *
3907 * \returns the replacement ast.
3908 */
3909 static int
mk_ptr_subscr(int subAst,int std)3910 mk_ptr_subscr(int subAst, int std)
3911 {
3912 SPTR ptr;
3913 int ptr_ast, ast;
3914 DTYPE dtype, eldtype;
3915 int asn_ast, temp_arr;
3916 int subscr[MAXRANK];
3917
3918 if (A_TYPEG(subAst) != A_SUBSCR) {
3919 return subAst;
3920 }
3921
3922 dtype = A_DTYPEG(subAst);
3923
3924 if (chk_assumed_subscr(subAst)) {
3925 /* The subscript references the whole array, so return just the array
3926 * symbol.
3927 */
3928 return A_LOPG(subAst);
3929 }
3930
3931 /* We have an array slice, so we want to create a shallow contiguous
3932 * copy of the array.
3933 */
3934 eldtype = DDTG(dtype);
3935 temp_arr = mk_assign_sptr(subAst, "a", subscr, eldtype, &ptr_ast);
3936 asn_ast = mk_assn_stmt(ptr_ast, subAst, eldtype);
3937 if (ALLOCG(temp_arr)) {
3938 ast = gen_alloc_dealloc(TK_ALLOCATE, ptr_ast, 0);
3939 std = add_stmt_before(ast, std);
3940 std = add_stmt_after(mk_stmt(A_CONTINUE, 0), std);
3941 }
3942 add_stmt_before(asn_ast, std);
3943 if (ALLOCG(temp_arr)) {
3944 check_and_add_auto_dealloc_from_ast(ptr_ast);
3945 }
3946
3947 return mk_id(temp_arr);
3948 }
3949
3950 /** \brief Computes the descriptor on the right hand side of an allocatable
3951 * polymorphic assignment.
3952 *
3953 * \param sptrsrc is the symbol table pointer of the object associated with
3954 * the right hand side of a polymorphic assignment.
3955 * \param astsrc is the AST representing the right hand side of a polymorphic
3956 * assignment.
3957 *
3958 * \return an AST representing the descriptor for the right hand side of the
3959 * polymorphic assignment.
3960 */
static int
get_sdsc_ast(SPTR sptrsrc, int astsrc)
{
  int src_sdsc_ast;

  if (!SDSCG(sptrsrc)) {
    /* no section descriptor attached yet: pick/create one based on what
     * kind of symbol the source is */
    DTYPE src_dtype = DTYPEG(sptrsrc);
    if (CLASSG(sptrsrc) && STYPEG(sptrsrc) != ST_MEMBER &&
        SCG(sptrsrc) == SC_DUMMY) {
      /* polymorphic dummy: use the type descriptor argument passed to
       * the current subprogram */
      src_sdsc_ast = mk_id(get_type_descr_arg(gbl.currsub, sptrsrc));
    } else if (DTY(src_dtype) == TY_ARRAY && DESCRG(sptrsrc)) {
      /* array with an existing descriptor symbol: use it and mark it used */
      src_sdsc_ast = mk_id(DESCRG(sptrsrc));
      DESCUSEDP(sptrsrc, TRUE);
      NODESCP(sptrsrc, FALSE);
    } else if (DTY(src_dtype) == TY_DERIVED) {
      /* derived-type scalar: use its static type descriptor */
      src_sdsc_ast = mk_id(get_static_type_descriptor(sptrsrc));
    } else {
      /* otherwise create a static descriptor; members are referenced
       * through the source's parent expression */
      get_static_descriptor(sptrsrc);
      src_sdsc_ast = STYPEG(sptrsrc) != ST_MEMBER ? mk_id(SDSCG(sptrsrc)) :
                     check_member(astsrc, mk_id(SDSCG(sptrsrc)));
    }
  } else if (STYPEG(sptrsrc) == ST_MEMBER) {
    /* member with a descriptor: locate the descriptor ast through the
     * member reference */
    src_sdsc_ast = find_descriptor_ast(sptrsrc, astsrc);
  } else {
    /* plain symbol with a descriptor already attached */
    src_sdsc_ast = mk_id(SDSCG(sptrsrc));
  }
  return src_sdsc_ast;
}
3989
3990 /** \brief This function counts the number of allocatable members/components in
3991 * a derived type member expression (e.g., a%b, a%b%c, a%b%c%d, etc.).
3992 *
3993 * \param ast is the AST of the expression that we are testing.
3994 *
3995 * \return an integer representing the number of allocatable members.
3996 */
3997 static int
count_allocatable_members(int ast)3998 count_allocatable_members(int ast)
3999 {
4000 SPTR sptr;
4001 int num_alloc_members = 0;
4002 while (1) {
4003 switch (A_TYPEG(ast)) {
4004 case A_ID:
4005 case A_LABEL:
4006 case A_ENTRY:
4007 if (ALLOCATTRG(A_SPTRG(ast))) {
4008 ++num_alloc_members;
4009 }
4010 return num_alloc_members;
4011 case A_FUNC:
4012 case A_CALL:
4013 case A_SUBSCR:
4014 case A_SUBSTR:
4015 ast = A_LOPG(ast);
4016 if (A_TYPEG(ast) == A_MEM)
4017 ast = A_MEMG(ast);
4018 break;
4019 case A_MEM:
4020 if (ALLOCATTRG(A_SPTRG(A_MEMG(ast)))) {
4021 ++num_alloc_members;
4022 }
4023 ast = A_PARENTG(ast);
4024 break;
4025 default:
4026 interr("count_allocatable_members: unexpected ast", ast, 3);
4027 return 0;
4028 }
4029 }
4030 }
4031
4032
4033 /* MORE - possible performance improvements:
4034 * 1) The RTE_conformable_* RTL functions' return values are ternary
4035 * returning
4036 * 1 ==> conformable
4037 * 0 ==> not conformable but big enough
4038 * -1 --> not conformable, no big enough
4039 * but the code generated below collapses values 0 and -1 into "not
4040 * conformable".
4041 * An "ALLOCATE" could be saved by separating these two states (would need
4042 * to
4043 * reset bounds variables and "remember" actual allocation size).
4044 * 2) check assignments to allocatable arrays where the shape of the RHS is
4045 * known to be compatiable with the LHS, e.g.,
4046 * alloc_array = alloc_array + scalar_value
4047 * in this case nothing needs to be done
4048 * 3) optimize assignments of derived type initializers, e.g.,
4049 * derived_type%alloc_component = (prototype instance)%alloc_component
4050 */
4051 static void
rewrite_allocatable_assignment(int astasgn,const int std,bool non_conformable,bool handle_alloc_members)4052 rewrite_allocatable_assignment(int astasgn, const int std,
4053 bool non_conformable,
4054 bool handle_alloc_members )
4055 {
4056 int sptrdest;
4057 int shape;
4058 int astdestparent;
4059 int astsrcparent;
4060 int astif;
4061 int ast;
4062 int targstd, newstd;
4063 SPTR sptrsrc = NOSYM;
4064 DTYPE dtype = A_DTYPEG(astasgn);
4065 int astdest = A_DESTG(astasgn);
4066 DTYPE dtypedest = A_DTYPEG(astdest);
4067 int astsrc = A_SRCG(astasgn);
4068 DTYPE dtypesrc = A_DTYPEG(astsrc);
4069 LOGICAL alloc_scalar_parent_only = FALSE;
4070 LOGICAL needFinalization;
4071 SPTR parentSrcSptr = NOSYM;
4072 SPTR parentDestSptr;
4073 bool is_poly_assign; /* true when we have an F2008 polymorphic assignment */
4074
4075 again:
4076 if (A_TYPEG(astdest) != A_ID && A_TYPEG(astdest) != A_MEM &&
4077 A_TYPEG(astdest) != A_CONV && A_TYPEG(astdest) != A_SUBSCR) {
4078 return;
4079 }
4080 if (A_TYPEG(astdest) == A_SUBSCR && DTYG(A_DTYPEG(astdest)) != TY_DERIVED) {
4081 return;
4082 }
4083 if (A_TYPEG(astsrc) == A_FUNC) {
4084 if (!XBIT(54, 0x1)) {
4085 if (A_DTYPEG(astdest) == DT_DEFERCHAR ||
4086 A_DTYPEG(astdest) == DT_DEFERNCHAR) {
4087 int fval = FVALG(A_SPTRG(A_LOPG(astsrc)));
4088 if (DTYPEG(fval) == DT_DEFERCHAR || DTYPEG(fval) == DT_DEFERNCHAR)
4089 return;
4090 } else {
4091 return;
4092 }
4093
4094 /* function calls assigned to allocatables are handled in
4095 * semfunc.c:func_call */
4096 }
4097 }
4098
4099 sptrdest = memsym_of_ast(astdest);
4100 parentDestSptr = sym_of_ast(astdest);
4101 needFinalization = has_finalized_component(sptrdest);
4102 if (XBIT(54, 0x1) && !XBIT(54, 0x4) && ALLOCATTRG(sptrdest) &&
4103 A_TYPEG(astdest) == A_SUBSCR && DTY(dtypesrc) == TY_ARRAY &&
4104 DTY(dtypedest) == TY_ARRAY) {
4105 /* FS#21080: destination array inherits shape from source array
4106 * under F2003 semantics, so we can disregard empty subscripts.
4107 */
4108 int i;
4109 int empty_subscript;
4110 int asd = A_ASDG(astdest);
4111 int ndim = ASD_NDIM(asd);
4112 for (empty_subscript = i = 0; i < ndim; i++) {
4113 if (A_TYPEG(ASD_SUBS(asd, i)) == A_TRIPLE &&
4114 A_MASKG(ASD_SUBS(asd, i)) == (lboundMask | uboundMask | strideMask)) {
4115 empty_subscript = 1;
4116 } else {
4117 empty_subscript = 0;
4118 break;
4119 }
4120 }
4121 if (empty_subscript) {
4122 astdest = A_LOPG(astdest);
4123 goto again;
4124 }
4125 }
4126
4127 while (A_TYPEG(astsrc) == A_CONV) {
4128 astsrc = A_LOPG(astsrc);
4129 }
4130
4131 if (ALLOCATTRG(sptrdest) && A_TYPEG(astsrc) == A_INTR &&
4132 A_OPTYPEG(astsrc) == I_NULL) {
4133 ast = mk_deallocate(astdest);
4134 A_DALLOCMEMP(ast, 1);
4135 add_stmt_before(ast, std);
4136 ast_to_comment(astasgn);
4137 return;
4138 }
4139
4140 if (A_TYPEG(astsrc) == A_ID || A_TYPEG(astsrc) == A_CONV ||
4141 A_TYPEG(astsrc) == A_SUBSCR || A_TYPEG(astsrc) == A_CNST ||
4142 A_TYPEG(astsrc) == A_MEM) {
4143 sptrsrc = memsym_of_ast(astsrc);
4144 parentSrcSptr = sym_of_ast(astsrc);
4145 if (STYPEG(sptrdest) == ST_MEMBER && STYPEG(sptrsrc) == ST_MEMBER &&
4146 ALLOCDESCG(sptrdest)) {
4147 /* FS#19589: Make sure we propagate type descriptor from source
4148 * to destination.
4149 */
4150 check_pointer_type(astdest, astsrc, std, 1);
4151 }
4152 }
4153
4154 is_poly_assign = (!handle_alloc_members ||
4155 count_allocatable_members(astdest) == 1) &&
4156 CLASSG(sptrdest) &&
4157 !MONOMORPHICG(sptrdest) && parentSrcSptr > NOSYM &&
4158 !CCSYMG(parentSrcSptr) && !HCCSYMG(parentDestSptr);
4159 if (XBIT(54, 0x1) && !XBIT(54, 0x4) && sptrsrc != NOSYM &&
4160 (A_TYPEG(astdest) == A_ID || A_TYPEG(astdest) == A_MEM) &&
4161 ALLOCATTRG(sptrdest) &&
4162 (is_poly_assign || (DTY(DTYPEG(sptrdest)) == TY_ARRAY &&
4163 DTY(DTYPEG(sptrsrc)) == TY_ARRAY && allocatable_member(sptrdest)
4164 && !has_vector_subscript_ast(astsrc)))) {
4165 int std2 = std;
4166 int alloc_std;
4167 int src_sdsc_ast = 0;
4168 int intrin_type = 0;
4169 int tmp_desc = 0; /* holds an intrinsic pseudo descriptor when non-zero */
4170 DTYPE src_dtype = DTYPEG(sptrsrc);
4171 int intrin_ast;
4172
4173 if (DT_ISBASIC(DDTG(src_dtype))) {
4174 /* DTYPE of right hand side is an intrinsic data type, so generate an
4175 * intrinsic pseudo descriptor (stored in the tmp_desc variable).
4176 */
4177 tmp_desc = getcctmp_sc('d', sem.dtemps++, ST_VAR, astb.bnd.dtype, sem.sc);
4178 intrin_type = mk_cval(dtype_to_arg(DDTG(src_dtype)),
4179 astb.bnd.dtype);
4180 tmp_desc = mk_id(tmp_desc);
4181 intrin_ast = mk_assn_stmt(tmp_desc, intrin_type, astb.bnd.dtype);
4182 intrin_type = mk_unop(OP_VAL, intrin_type, DT_INT);
4183 add_stmt_before(intrin_ast, std2);
4184 }
4185
4186 /* Allocate function result that's an array of derived types
4187 * with allocatable components and -Mallocatable=03.
4188 */
4189
4190 /* Generate statements like this:
4191 if (.not. allocated(src)) then
4192 if (allocated(dest)) deallocate(dest)
4193 else
4194 if (.not. conformable(src, dest)) then
4195 if (allocated(dest) deallocate(dest)
4196 allocate(dest, source=src)
4197 else // generated iff dest has final subroutines
4198 finalize(dest)
4199 end if
4200 poly_asn(src, dest)
4201 end if <-- std2
4202 ... <-- std
4203
4204 */
4205
4206 if (ALLOCATTRG(sptrsrc)) {
4207 /* if (.not. allocated(src)) then deallocate(dest) else ... end if */
4208 gen_allocated_check(astsrc, std, A_IFTHEN, true, false, false);
4209 gen_dealloc_if_allocated(astdest, std);
4210 add_stmt_before(mk_stmt(A_ELSE, 0), std);
4211 std2 = add_stmt_before(mk_stmt(A_ENDIF, 0), std);
4212 }
4213
4214 /* if (.not. conformable(src, dst)) then */
4215 astif = DTY(DTYPEG(sptrdest)) != TY_ARRAY ?
4216 mk_poly_test(astdest, astsrc, OP_LT, tmp_desc) :
4217 mk_conformable_test(astdest, astsrc, OP_LT);
4218 add_stmt_before(astif, std2);
4219 gen_dealloc_if_allocated(astdest, std2);
4220 /* allocate(dest, source=src) */
4221
4222 ast = mk_allocate(0);
4223 A_STARTP(ast, astsrc);
4224 A_DTYPEP(ast, DTY(DTYPEG(sptrdest)) != TY_ARRAY ? A_DTYPEG(astsrc) :
4225 dup_array_dtype(A_DTYPEG(astsrc)));
4226 if (DTY(dtypedest) == TY_ARRAY) {
4227 int astdest2 =
4228 add_shapely_subscripts(astdest, astsrc, A_DTYPEG(astsrc),
4229 DDTG(dtypedest));
4230 A_SRCP(ast, astdest2);
4231 } else {
4232 A_SRCP(ast, astdest);
4233 }
4234 alloc_std = add_stmt_before(ast, std2);
4235 src_sdsc_ast = get_sdsc_ast(sptrsrc, astsrc);
4236
4237 if (CLASSG(sptrdest) && DTY(DTYPEG(sptrdest)) == TY_ARRAY &&
4238 A_TYPEG(astsrc) == A_SUBSCR) {
4239 init_sdsc_bounds(sptrdest, A_DTYPEG(astsrc), alloc_std,
4240 sym_of_ast(astdest), astsrc, src_sdsc_ast);
4241 }
4242
4243 if (needFinalization) {
4244 /* Objects are conformable but we still need to finalize destination */
4245 int std3 = add_stmt_before(mk_stmt(A_ELSE, 0), std2);
4246 gen_finalization_for_sym(sptrdest, std3, astdest);
4247 needFinalization = FALSE;
4248 }
4249 add_stmt_before(mk_stmt(A_ENDIF, 0), std2);
4250
4251 if (CLASSG(sptrdest) || (STYPEG(SDSCG(sptrsrc)) == ST_MEMBER &&
4252 STYPEG(SDSCG(sptrdest)) == ST_MEMBER)) {
4253 /* Generate call to poly_asn(). This call takes care of
4254 * the member to member assignments. This includes propagating
4255 * the source descriptor values to the destination descriptor.
4256 */
4257 int dest_sdsc_ast;
4258 SPTR fsptr = sym_mkfunc_nodesc(mkRteRtnNm(RTE_poly_asn), DT_NONE);
4259 int argt = mk_argt(5);
4260 int std3;
4261 int flag_con = 2;
4262 int flag_ast;
4263
4264 if (STYPEG(sptrdest) == ST_MEMBER) {
4265 dest_sdsc_ast = find_descriptor_ast(sptrdest, astdest);
4266 } else {
4267 dest_sdsc_ast = mk_id(SDSCG(sptrdest));
4268 }
4269
4270 if (tmp_desc != 0 && DT_ISBASIC(src_dtype)) {
4271 src_sdsc_ast = tmp_desc;
4272 flag_con = 0;
4273 }
4274
4275 flag_ast = mk_cval1(flag_con, DT_INT);
4276 flag_ast = mk_unop(OP_VAL, flag_ast, DT_INT);
4277 std3 = add_stmt_before(mk_stmt(A_CONTINUE, 0), std2);
4278 ARGT_ARG(argt, 4) = flag_ast;
4279 ARGT_ARG(argt, 0) = A_TYPEG(astdest) == A_SUBSCR ? A_LOPG(astdest)
4280 : astdest;
4281 ARGT_ARG(argt, 1) = dest_sdsc_ast;
4282 ARGT_ARG(argt, 2) = mk_ptr_subscr(astsrc, std3);
4283 ARGT_ARG(argt, 3) = src_sdsc_ast;
4284 ast = mk_id(fsptr);
4285 ast = mk_func_node(A_CALL, ast, 5, argt);
4286 std2 = add_stmt_before(ast, std2);
4287 if (intrin_type != 0) {
4288 /* Assign intrinsic type to destination's (unlimited polymorphic)
4289 * descriptor.
4290 */
4291 ast = mk_set_type_call(dest_sdsc_ast, intrin_type, TRUE);
4292 add_stmt_before(ast, std2); /* before call to poly_asn() */
4293 if (flag_con == 2) {
4294 /* 2 for Flag argument means poly_asn() will copy source descriptor
4295 * to destination descriptor. Therefore, make sure we re-assign the
4296 * type after the call too.
4297 */
4298 ast = mk_set_type_call(dest_sdsc_ast, intrin_type, TRUE);
4299 add_stmt_after(ast, std2); /* after call to poly_asn() */
4300 }
4301 }
4302 ast_to_comment(astasgn);
4303 return;
4304 }
4305 }
4306
4307 /* ignore default initialization */
4308 if (sptrsrc > NOSYM) {
4309 SPTR sptr;
4310 if (A_TYPEG(astsrc) != A_MEM) {
4311 sptr = sptrsrc;
4312 } else if (A_TYPEG(A_PARENTG(astsrc)) == A_FUNC) {
4313 sptr = sym_of_ast(A_LOPG(A_PARENTG(astsrc)));
4314 } else {
4315 sptr = ast_is_sym(astsrc) ? sym_of_ast(astsrc) : 0;
4316 }
4317 /*
4318 * This little bit of once-undocumented magic (formerly a string
4319 * comparison on the name of the RHS symbol!) forces the use of a
4320 * block copy for a derived type assignment whose right-hand side is
4321 * a compiler-created initialized prototype object used for
4322 * filling in new instances. In such circumstances, the left-hand
4323 * side of the assignment must be assumed to be uninitialized
4324 * garbage.
4325 */
4326 if (sptr > NOSYM && INITIALIZERG(sptr))
4327 return;
4328 }
4329
4330 /* Notes for deciphering the following code:
4331 * XBIT(54, 0x1) -> enable "full F'03 allocatable attribute regularization"
4332 * XBIT(54, 0x4) -> *No* 2003 allocatable assignment semantics for
4333 * allocatable components
4334 */
4335 /* Per flyspray 15461, for user-defined type assignment:
4336 a[i] = b , A_TYPEG(astdest) is a A_SUBSCR, also need
4337 to check for allocatable member.
4338 */
4339 if (!ALLOCATTRG(sptrdest) || A_TYPEG(astdest) == A_SUBSCR) {
4340 if (DTYG(dtypedest) == TY_DERIVED && !HCCSYMG(sptrdest) && !XBIT(54, 0x4) &&
4341 allocatable_member(sptrdest)) {
4342 handle_allocatable_members(astdest, astsrc, std, false);
4343 ast_to_comment(astasgn);
4344 return;
4345 }
4346 if (STYPEG(sptrdest) == ST_MEMBER && !XBIT(54, 0x4) && XBIT(54, 0x1)) {
4347 /* FS#19118 - this typically occurs with an intrinsic assignment
4348 * that has a structure constructor on the right hand side. We need
4349 * to make sure the parent object is allocated when -Mallocatable=03
4350 * is used.
4351 */
4352 astdest = A_PARENTG(astdest);
4353 if (A_TYPEG(astdest) == A_SUBSCR)
4354 astdest = A_LOPG(astdest);
4355 if (A_TYPEG(astdest) == A_MEM) {
4356 sptrdest = A_SPTRG(A_MEMG(astdest));
4357 } else
4358 sptrdest = A_SPTRG(astdest);
4359 dtypedest = A_DTYPEG(astdest);
4360 if (!ALLOCATTRG(sptrdest) || DTY(dtypedest) == TY_ARRAY)
4361 return;
4362 alloc_scalar_parent_only = TRUE; /* not returning on this one path */
4363 } else {
4364 return;
4365 }
4366 }
4367
4368 /*
4369 * The test of absence of -Mallocatable=O3 is required here ...
4370 */
4371 if (!XBIT(54, 0x1) && A_TYPEG(astdest) == A_ID && ALLOCATTRG(sptrdest) &&
4372 DTYG(dtypedest) == TY_DERIVED && !POINTERG(sptrdest) && !XBIT(54, 0x4) &&
4373 allocatable_member(sptrdest)) {
4374 /*
4375 * bug1 of f15460 -- have an allocatable array of derived type
4376 * containing allocatable components; with pre-F2003 semantics,
4377 * still must handle the allocatable components.
4378 */
4379 /*add check here too ?*/
4380 handle_allocatable_members(astdest, astsrc, std, false);
4381 ast_to_comment(astasgn);
4382 }
4383
4384 if (DTY(DTYPEG(sptrdest)) == TY_ARRAY && DTY(A_DTYPEG(astsrc)) != TY_ARRAY) {
4385 /* By definition, for
4386 * array = scalar
4387 * the scalar has the same shape as the array.
4388 * Therefore, there is no need apply any allocatable
4389 * semantics.
4390 * NOTE: CANNOT move this check before the checks for an
4391 * array containing allocatable components.
4392 */
4393
4394 if (XBIT(54, 0x1)) {
4395 /* For F2003 allocatation semantics, if the LHS is not allocated, then
4396 * allocate it as a size one array. Otherwise, leave it alone and
4397 * perform any applicable finalization.
4398 */
4399 int subs[MAXDIMS];
4400 int astdest2, ndims, i;
4401 ADSC *ad;
4402 ad = AD_DPTR(DTYPEG(sptrdest));
4403 ndims = AD_NUMDIM(ad);
4404 gen_allocated_check(astdest, std, A_IFTHEN, true, true, true);
4405 for (i = 0; i < ndims; ++i) {
4406 subs[i] = mk_triple(astb.i1, astb.i1, 0);
4407 }
4408 astdest2 = mk_subscr(astdest, subs, ndims, DTYPEG(sptrdest));
4409 ast = mk_allocate(astdest2);
4410 newstd = add_stmt_before(ast, std);
4411 STD_RESCOPE(newstd) = 1;
4412 if (needFinalization) {
4413 int std2 = add_stmt_before(mk_stmt(A_ELSE, 0), std);
4414 gen_finalization_for_sym(sptrdest, std2, astdest);
4415 }
4416 newstd = add_stmt_before(mk_stmt(A_ENDIF, 0), std);
4417 STD_RESCOPE(newstd) = 1;
4418 }
4419
4420 if (XBIT(54, 0x1) && DTYG(dtypedest) == TY_DERIVED && !POINTERG(sptrdest) &&
4421 !XBIT(54, 0x4) && allocatable_member(sptrdest)) {
4422 /* FS#18432: F2003 allocatable semantics, handle the
4423 * allocatable components
4424 */
4425 handle_allocatable_members(astdest, astsrc, std, false);
4426 ast_to_comment(astasgn);
4427 }
4428
4429 return;
4430 }
4431
4432 if (!XBIT(54, 0x1) && A_TYPEG(astdest) != A_MEM) {
4433 if (DDTG(A_DTYPEG(astdest)) == DT_DEFERCHAR ||
4434 DDTG(A_DTYPEG(astdest)) == DT_DEFERNCHAR) {
4435 /* 03 semantics default for scalar allocatable deferred char */
4436 ;
4437 } else
4438 return; /* allocatable array assignment with pre F2003 semantics */
4439 }
4440
4441 if (XBIT(54, 0x4))
4442 return; /* not using F'03 assignment semantics for allocatable components */
4443
4444 /* move this block to a separate subroutine eventually */
4445 astdestparent = 0;
4446 if (A_TYPEG(astdest) == A_MEM) {
4447 astdestparent = A_PARENTG(astdest);
4448 }
4449
4450 if (ALLOCATTRG(sptrdest) &&
4451 (DTY(dtypedest) == TY_ARRAY || DTY(dtypedest) == TY_CHAR ||
4452 DTY(dtypedest) == TY_NCHAR) &&
4453 (contains_sptr(astsrc, sptrdest, astdestparent) ||
4454 A_TYPEG(astsrc) == A_FUNC || A_TYPEG(astsrc) == A_INTR)) {
4455 int temp_ast;
4456 SPTR temp_sptr;
4457 int std2;
4458 int stdlast = STD_LAST;
4459 int shape = A_SHAPEG(astsrc);
4460 if (shape != 0) {
4461 if (DDTG(A_DTYPEG(astsrc)) == DT_DEFERCHAR ||
4462 DDTG(A_DTYPEG(astsrc)) == DT_DEFERNCHAR) {
4463 DTYPE temp_dtype = get_type(2, TY_CHAR, string_expr_length(astsrc));
4464 temp_dtype = dtype_with_shape(temp_dtype, shape);
4465 temp_sptr = get_arr_temp(temp_dtype, FALSE, FALSE, FALSE);
4466 DTYPEP(temp_sptr, temp_dtype);
4467 } else {
4468 DTYPE temp_dtype = dtype_with_shape(dtype, shape);
4469 temp_sptr = get_arr_temp(temp_dtype, TRUE, TRUE, FALSE);
4470 }
4471 } else if (DTY(dtypedest) == TY_CHAR || DTY(dtypedest) == TY_NCHAR) {
4472 DTYPE temp_dtype = get_type(2, TY_CHAR, string_expr_length(astsrc));
4473 temp_sptr = get_ch_temp(temp_dtype);
4474 } else {
4475 /* error if it is TY_CHAR it must have shape */
4476 interr("transfrm: expecting shape for astsrc in assignment stmt", astasgn,
4477 ERR_Warning);
4478 goto no_lhs_on_rhs;
4479 }
4480 /*
4481 * NOTE - if the rhs warrants creating compiler allocatable, the
4482 * corresponding code will be added to the 'end' of the routine
4483 * since the routines being called, such as get_arr_temp(), are
4484 * 'semant' routines. Therefore, the generated statements need
4485 * to be 'moved' to the current position.
4486 */
4487 targstd = std;
4488 move_stmts_before(STD_NEXT(stdlast), targstd);
4489
4490 temp_ast = mk_id(temp_sptr);
4491 ast = mk_assn_stmt(temp_ast, astsrc, A_DTYPEG(astasgn));
4492 std2 = add_stmt_before(ast, std);
4493 rewrite_allocatable_assignment(ast, std2, false, handle_alloc_members);
4494 ast = mk_assn_stmt(astdest, temp_ast, A_DTYPEG(astasgn));
4495 std2 = add_stmt_after(ast, std2);
4496 rewrite_allocatable_assignment(ast, std2, false, handle_alloc_members);
4497 ast_to_comment(astasgn);
4498 gen_deallocate_arrays();
4499
4500 targstd = std;
4501 move_stmts_after(STD_NEXT(stdlast), targstd);
4502
4503 return;
4504 }
4505
4506 no_lhs_on_rhs:
4507 if (sptrsrc != NOSYM && ALLOCATTRG(sptrsrc)) {
4508 /* generate a check for an allocated source */
4509 gen_allocated_check(astsrc, std, A_IFTHEN, false, false, false);
4510 }
4511
4512 if (DTY(DTYPEG(sptrdest)) != TY_ARRAY) {
4513 /* Scalar assignment:
4514 * If the dest has not been allocated, then it must be.
4515 * Arrays will be handled based on conformability (below).
4516 */
4517 if (DTY(dtypedest) == TY_CHAR || DTY(dtypedest) == TY_NCHAR ) {
4518 if (!SDSCG(sptrdest)) {
4519 get_static_descriptor(sptrdest);
4520 }
4521 gen_automatic_reallocation(astdest, astsrc, std);
4522 } else {
4523 int istd;
4524 gen_allocated_check(astdest, std, A_IFTHEN, true, true, false);
4525 gen_alloc_mbr(build_allocation_item(0, astdest), std);
4526 astif = mk_stmt(A_ENDIF, 0);
4527 istd = add_stmt_before(astif, std);
4528 if (DTYG(dtypedest) == TY_DERIVED && !XBIT(54, 0x4) &&
4529 allocatable_member(sptrdest)) {
4530 nullify_member(astdest, istd, sptrdest);
4531 }
4532 }
4533 }
4534
4535 if (alloc_scalar_parent_only) {
4536 goto fin;
4537 }
4538
4539 shape = A_SHAPEG(astdest);
4540 if (shape != 0 && !non_conformable) {
4541 /* destination is array, generate conformability check */
4542 if (DTYG(dtypedest) == TY_DERIVED) {
4543 astif = mk_conformable_test(astdest, astsrc, OP_GT);
4544 add_stmt_before(astif, std);
4545 if (needFinalization) {
4546 /* Arrays are conformable but we still need to finalize destination */
4547 int std2 = add_stmt_before(mk_stmt(A_CONTINUE, 0), std);
4548 gen_finalization_for_sym(sptrdest, std2, astdest);
4549 needFinalization = FALSE;
4550 }
4551 } else {
4552 /* array of scalar, generate: if( tmp .le. 0 ) then => not conformable */
4553 astif = mk_conformable_test(astdest, astsrc, OP_LE);
4554 add_stmt_before(astif, std);
4555 if (DDTG(dtypedest) == DT_DEFERCHAR || DDTG(dtypedest) == DT_DEFERNCHAR) {
4556 /* Add length check for deferred char to the IF expr as well */
4557 int lhs_len = size_ast_of(astdest, DDTG(dtypedest));
4558 int rhs_len, binopast, ifexpr;
4559 if (is_deferlenchar_ast(astsrc)) {
4560 rhs_len = get_len_of_deferchar_ast(astsrc);
4561 } else {
4562 rhs_len = string_expr_length(astsrc);
4563 }
4564 binopast = mk_binop(OP_NE, lhs_len, rhs_len, DT_LOG);
4565 ifexpr = mk_binop(OP_LOR, binopast, A_IFEXPRG(astif), DT_LOG);
4566 A_IFEXPRP(astif, ifexpr);
4567 }
4568 }
4569 }
4570
4571 if (DTYG(dtypedest) == TY_DERIVED) {
4572 if (!XBIT(54, 0x4) && allocatable_member(sptrdest)) {
4573 handle_allocatable_members(astdest, astsrc, std, false);
4574 ast_to_comment(astasgn);
4575 }
4576 }
4577
4578 if (shape != 0) {
4579 if (A_TYPEG(astdest) == A_MEM) {
4580 shape = mk_mem_ptr_shape(A_PARENTG(astdest), A_MEMG(astdest), dtypedest);
4581 assert(shape != 0, "shape must not be 0", 0, ERR_Fatal);
4582 }
4583
4584 if (DTY(dtype) == TY_ARRAY && DTY(DTY(dtype + 1)) == TY_DERIVED) {
4585 int destasd, srcasd;
4586 /*
4587 * in the "else" of array of derived type conformability test
4588 * loop over array deallocating allocatable members
4589 */
4590 int sptrmem;
4591 gen_allocated_check(astsrc, std, A_ELSEIF, false, false, false);
4592 gen_allocated_check(astdest, std, A_IFTHEN, false, true, false);
4593
4594 /* deallocate/re-allocate array */
4595 gen_dealloc_mbr(astdest, std);
4596 astif = mk_stmt(A_ENDIF, 0); /* endif allocated dest */
4597 add_stmt_before(astif, std);
4598
4599 gen_bounds_assignments(0, astdest, 0, astsrc, std);
4600
4601 ast = build_allocation_item(0, astdest);
4602 gen_alloc_mbr(ast, std);
4603
4604 /* loop over array re-allocating allocatable members and assigning
4605 * the src components to the newly alloc'd dest components */
4606 destasd = gen_dos_over_shape(shape, std);
4607 srcasd = normalize_subscripts(destasd, shape, A_SHAPEG(astsrc));
4608 astdestparent = subscript_allocmem(astdest, destasd);
4609 astsrcparent = subscript_allocmem(astsrc, srcasd);
4610 for (sptrmem = DTY(DDTG(dtype) + 1); sptrmem > NOSYM;
4611 sptrmem = SYMLKG(sptrmem)) {
4612 int astmem = mk_id(sptrmem);
4613 int astdestcmpnt = mk_member(astdestparent, astmem, A_DTYPEG(astmem));
4614 int astsrccmpnt = mk_member(astsrcparent, astmem, A_DTYPEG(astmem));
4615 if (is_tbp_or_final(sptrmem)) {
4616 /* skip tbp */
4617 continue;
4618 }
4619 if (ALLOCATTRG(sptrmem)) {
4620 gen_allocated_check(astsrccmpnt, std, A_IFTHEN, false, false, false);
4621 gen_bounds_assignments(astdestparent, astmem, astsrcparent, astmem,
4622 std);
4623 if (DTY(A_DTYPEG(astmem)) == TY_CHAR ||
4624 DTY(A_DTYPEG(astmem)) == TY_NCHAR) {
4625 if (!SDSCG(sptrdest)) {
4626 get_static_descriptor(sptrdest);
4627 }
4628 gen_automatic_reallocation(astdestcmpnt, astsrccmpnt, std);
4629 } else {
4630 ast = build_allocation_item(astdestparent, astmem);
4631 gen_alloc_mbr(ast, std);
4632 }
4633 if (DTYG(DTYPEG(sptrmem)) == TY_DERIVED && !XBIT(54, 0x4) &&
4634 allocatable_member(sptrmem)) {
4635 handle_allocatable_members(astdestcmpnt, astsrccmpnt, std, true);
4636 } else {
4637 ast = mk_assn_stmt(astdestcmpnt, astsrccmpnt, A_DTYPEG(astmem));
4638 add_stmt_before(ast, std);
4639 }
4640 astif = mk_stmt(A_ELSE, 0);
4641 add_stmt_before(astif, std);
4642 ast = mk_member(astdestparent, mk_id(MIDNUMG(sptrmem)),
4643 DTYPEG(MIDNUMG(sptrmem)));
4644 {
4645 int aa = begin_call(A_ICALL, intast_sym[I_NULLIFY], 1);
4646 A_OPTYPEP(aa, I_NULLIFY);
4647 add_arg(ast);
4648 ast = aa;
4649 }
4650 add_stmt_before(ast, std);
4651 astif = mk_stmt(A_ENDIF, 0);
4652 add_stmt_before(astif, std);
4653 } else if (POINTERG(sptrmem) && !F90POINTERG(sptrmem)) {
4654 astsrccmpnt = mk_member(astsrcparent, astmem, A_DTYPEG(astmem));
4655 ast = add_ptr_assign(astdestcmpnt, astsrccmpnt, std);
4656 A_SHAPEP(ast, A_SHAPEG(astsrccmpnt));
4657 add_stmt_before(ast, std);
4658 } else if (DTYG(DTYPEG(sptrmem)) == TY_DERIVED && !XBIT(54, 0x4) &&
4659 allocatable_member(sptrmem)) {
4660 handle_allocatable_members(astdestcmpnt, astsrccmpnt, std, true);
4661 } else {
4662 astsrccmpnt = mk_member(astsrcparent, astmem, A_DTYPEG(astmem));
4663 ast = mk_assn_stmt(astdestcmpnt, astsrccmpnt, A_DTYPEG(astmem));
4664 add_stmt_before(ast, std);
4665 }
4666
4667 if (ALLOCG(sptrmem) || (POINTERG(sptrmem) && !F90POINTERG(sptrmem))) {
4668 sptrmem = SDSCG(sptrmem); /* set-up to move past $p, $o, $sd */
4669 }
4670 }
4671 gen_do_ends(ASD_NDIM(destasd), std);
4672 } else {
4673 /* in the "not conformable" path of conformability check for allocatable
4674 * array of intrinsic type, generate:
4675 * rewrite_deallocate(dest)
4676 * allocate(dest(lb(src): ub(src)))
4677 * endif */
4678 int astmem;
4679 int astsrcmem;
4680
4681 if (!non_conformable) {
4682 gen_dealloc_mbr(astdest, std);
4683 }
4684 if (A_TYPEG(astdest) == A_MEM) {
4685 astdestparent = A_PARENTG(astdest);
4686 astmem = A_MEMG(astdest);
4687 } else {
4688 astdestparent = 0;
4689 astmem = astdest;
4690 }
4691 if (A_TYPEG(astsrc) == A_MEM) {
4692 astsrcparent = A_PARENTG(astsrc);
4693 astsrcmem = A_MEMG(astsrc);
4694 } else {
4695 astsrcparent = 0;
4696 astsrcmem = astsrc;
4697 }
4698 gen_bounds_assignments(astdestparent, astmem, astsrcparent, astsrcmem,
4699 std);
4700 ast = build_allocation_item(astdestparent, astmem);
4701 gen_alloc_mbr(ast, std);
4702 }
4703 if (!non_conformable) {
4704 astif = mk_stmt(A_ENDIF, 0);
4705 add_stmt_before(astif, std);
4706 }
4707 }
4708 fin:
4709 if (sptrsrc != NOSYM && ALLOCATTRG(sptrsrc)) {
4710 /* Generate the ELSE part of "if (allocated(src))" to deallocate dest.
4711 * Ensure the lineno comes from std. */
4712 int stdend = add_stmt_after(mk_stmt(A_ENDIF, 0), std);
4713 gen_allocated_check(astdest, stdend, A_ELSEIF, false, true, false);
4714 gen_dealloc_mbr(astdest, stdend);
4715 }
4716 }
4717
4718 /* if (allocated(ast)) deallocate(ast) */
4719 void
gen_dealloc_if_allocated(int ast,int std)4720 gen_dealloc_if_allocated(int ast, int std)
4721 {
4722 int alloc_ast = mk_deallocate(ast);
4723 gen_allocated_check(ast, std, A_IFTHEN, false, true, false);
4724 add_stmt_before(alloc_ast, std);
4725 add_stmt_before(mk_stmt(A_ENDIF, 0), std);
4726 }
4727
4728 static void
find_allocatable_assignment(void)4729 find_allocatable_assignment(void)
4730 {
4731 int std;
4732 int stdnext;
4733 int workshare_depth;
4734
4735 sem.sc = SC_LOCAL;
4736 workshare_depth = 0;
4737 for (std = STD_NEXT(0); std != 0; std = stdnext) {
4738 int ast;
4739 int match;
4740
4741 ast = STD_AST(std);
4742 stdnext = STD_NEXT(std);
4743 switch (A_TYPEG(ast)) {
4744 case A_MP_PARALLEL:
4745 case A_MP_TASK:
4746 case A_MP_TASKLOOP:
4747 A_OPT1P(ast, sem.sc);
4748 sem.sc = SC_PRIVATE;
4749 break;
4750 case A_MP_ENDPARALLEL:
4751 case A_MP_ENDTASK:
4752 match = A_LOPG(ast);
4753 sem.sc = A_OPT1G(match);
4754 A_OPT1P(match, 0);
4755 break;
4756 case A_MP_WORKSHARE:
4757 workshare_depth++;
4758 break;
4759 case A_MP_ENDWORKSHARE:
4760 workshare_depth--;
4761 break;
4762 case A_ASN:
4763 if (!workshare_depth &&
4764 (A_TYPEG(A_DESTG(ast)) != A_SUBSCR
4765 /* Per flyspray 15461, for user-defined type assignment:
4766 a[i] = b , A_TYPEG(A_DESTG(ast)) is a A_SUBSCR, also need
4767 to check for allocatable member if it is user-defined type.
4768 */
4769 || DTYG(A_DTYPEG(A_DESTG(ast))) == TY_DERIVED)) {
4770 rewrite_allocatable_assignment(ast, std, false, false);
4771 }
4772 break;
4773 }
4774 }
4775 }
4776
4777 /* Create new asd from subscripts in oldasd by normalizing from oldshape to
4778 newshape. */
4779 static int
normalize_subscripts(int oldasd,int oldshape,int newshape)4780 normalize_subscripts(int oldasd, int oldshape, int newshape)
4781 {
4782 int i;
4783 int newsubs[MAXSUBS];
4784 int ndim = SHD_NDIM(oldshape);
4785
4786 assert(ndim == ASD_NDIM(oldasd), "ndim does not match", ndim, ERR_Fatal);
4787 for (i = 0; i < ndim; i++) {
4788 int oldsub = ASD_SUBS(oldasd, i);
4789 newsubs[i] = normalize_subscript(
4790 oldsub, SHD_LWB(oldshape, i), SHD_STRIDE(oldshape, i),
4791 SHD_LWB(newshape, i), SHD_STRIDE(newshape, i));
4792 }
4793 return mk_asd(newsubs, ndim);
4794 }
4795
4796 /* aref represents a reference to an allocatable component where its parent
4797 * has shape. asd represents subscripts to be applied.
4798 * Need to recurse through the parent to find the correct object
4799 * to which the subscripts are applied. After the subscripting has been
4800 * done, need to (re)apply the member and the subscript references which we
4801 * had recursed.
4802 */
4803 static int
subscript_allocmem(int aref,int asd)4804 subscript_allocmem(int aref, int asd)
4805 {
4806 int ndim = ASD_NDIM(asd);
4807 int subs[MAXSUBS];
4808
4809 switch (A_TYPEG(aref)) {
4810 case A_SUBSCR: {
4811 int asd2 = A_ASDG(aref);
4812 int n = ASD_NDIM(asd2);
4813 int ast, i, vector;
4814 for (i = 0, vector = 0; i < n; ++i) {
4815 int sub = ASD_SUBS(asd2, i);
4816 if (DTY(A_DTYPEG(sub)) == TY_ARRAY) {
4817 int tmp = ASD_SUBS(asd, vector);
4818 int subasd = mk_asd(&tmp, 1);
4819 if (A_TYPEG(sub) == A_SUBSCR) {
4820 sub = subscript_allocmem(sub, subasd);
4821 } else {
4822 sub = mk_subscr_copy(sub, subasd, DTY(A_DTYPEG(sub) + 1));
4823 }
4824 vector++;
4825 } else if (A_TYPEG(sub) == A_TRIPLE) {
4826 sub = ASD_SUBS(asd, vector);
4827 vector++;
4828 }
4829 subs[i] = sub;
4830 }
4831 ast = A_LOPG(aref);
4832 if (vector == 0) {
4833 ast = subscript_allocmem(ast, asd);
4834 }
4835 return mk_subscr(ast, subs, n, A_DTYPEG(aref));
4836 }
4837 case A_MEM:
4838 if (vector_member(aref)) {
4839 return mk_subscr_copy(aref, asd, DTY(A_DTYPEG(aref) + 1));
4840 } else {
4841 int ast = subscript_allocmem(A_PARENTG(aref), asd);
4842 return mk_member(ast, A_MEMG(aref), A_DTYPEG(A_MEMG(aref)));
4843 }
4844 case A_ID:
4845 assert(DTY(A_DTYPEG(aref)) == TY_ARRAY, "subscript_allocmem: not array", 0,
4846 4);
4847 return mk_subscr_copy(aref, asd, DTY(A_DTYPEG(aref) + 1));
4848 default:
4849 interr("subscript_allocmem: bad ast type", A_TYPEG(aref), ERR_Fatal);
4850 return 0;
4851 }
4852 }
4853