1 /*
2 * Copyright (c) 1995-2018, NVIDIA CORPORATION. All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 *
16 */
17
18 /* clang-format off */
19
20 /** \file
21 * \brief
22 * Collection of misc Fortran intrinsics (present, min, max, ajustl, adjustl, ...).
23 *
24 */
25
26 #include <time.h>
27 #include <string.h>
28 #include <sys/time.h>
29 #include <unistd.h>
30 #include "stdioInterf.h"
31 #include "fioMacros.h"
32 #include "llcrit.h"
33 #include "global.h"
34 #include "memops.h"
35
36 MP_SEMAPHORE(static, sem);
37 #include "type.h"
38
39 extern double __fort_second();
40 extern long __fort_getoptn(char *, long);
41
42 #define time(x) __fort_time(x)
43
44 typedef __INT8_T MXINT_T;
45 static void store_mxint_t(void *, F90_Desc *, MXINT_T);
46 static MXINT_T mxint(F90_Desc *);
47
__INT_T
ENTFTN(ILEN, ilen)(void *ib, __INT_T *size)
{
  /*
   * if i is nonnegative,
   * ilen(i) = ceiling(log2(i+1))
   * if i is negative,
   * ilen(i) = ceiling(log2(-i))
   */
  unsigned ui;
  int i, k, ln;

  /* fetch the integer argument; *size is its width in bytes */
  i = I8(__fort_varying_int)(ib, size);
  /* NOTE(review): i = INT_MIN would overflow on negation — presumably
   * never passed for the kinds supported here; confirm. */
  if (i < 0)
    i = -i;
  else
    ++i;

  /* find bit position (relative to 0) of the leftmost 1 bit */

  ui = i;
  ln = -1;
  k = (*size * 8) >> 1; /* binary search, starting at half the bit width */
  while (k) {
    if (ui >> k) {
      ui >>= k;
      ln += k;
    }
    k >>= 1;
  }
  if (ui)
    ++ln;

  /* if i is larger than 2**(bit pos), increase by one */

  if (i ^ (1 << ln))
    ++ln;
  return ln;
}
87
__LOG_T
ENTF90(PRESENT, present)(void *p)
{
  /* Fortran PRESENT(arg): an absent optional argument is passed either as
   * NULL or as a pointer into the special "absent" common block
   * (ENTCOMN(0,0) .. ENTCOMN(0,0)+3). */
  if (p == NULL) {
    return 0;
  }

#if defined(DESC_I8)
  if (!((__INT4_T *)(p) >= ENTCOMN(0, 0) &&
        (__INT4_T *)(p) <= (ENTCOMN(0, 0) + 3)))
#else
  if (!((__INT_T *)(p) >= ENTCOMN(0, 0) &&
        (__INT_T *)(p) <= (ENTCOMN(0, 0) + 3)))
#endif
    return GET_DIST_TRUE_LOG;
  else
    return 0;
}
106
__LOG_T
ENTF90(PRESENT_PTR, present_ptr)(void *p)
{
  /* PRESENT for pointer/allocatable optional dummies: absent when p is
   * NULL, when p itself points into the "absent" common block, or when
   * the pointer object it addresses points into that block. */
  if (p == NULL) {
    return 0;
  }

#if defined(DESC_I8)
  if (!((__INT4_T *)(p) >= ENTCOMN(0, 0) &&
        (__INT4_T *)(p) <= (ENTCOMN(0, 0) + 3)) &&
      !(*(__INT4_T **)(p) >= ENTCOMN(0, 0) &&
        *(__INT4_T **)(p) <= (ENTCOMN(0, 0) + 3)))

#else
  if (!((__INT_T *)(p) >= ENTCOMN(0, 0) &&
        (__INT_T *)(p) <= (ENTCOMN(0, 0) + 3)) &&
      !(*(__INT_T **)(p) >= ENTCOMN(0, 0) &&
        *(__INT_T **)(p) <= (ENTCOMN(0, 0) + 3)))
#endif
    return GET_DIST_TRUE_LOG;
  else
    return 0;
}
130
131 __LOG_T
ENTF90(PRESENTCA,presentca)132 ENTF90(PRESENTCA, presentca)(DCHAR(p) DCLEN64(p))
133 {
134 if (CADR(p) == NULL) {
135 return 0;
136 }
137
138 if (CADR(p) != ABSENTC)
139 return GET_DIST_TRUE_LOG;
140 else
141 return 0;
142 }
143
144 /* 32 bit CLEN version */
145 __LOG_T
ENTF90(PRESENTC,presentc)146 ENTF90(PRESENTC, presentc)(DCHAR(p) DCLEN(p))
147 {
148 ENTF90(PRESENTCA, presentca)(CADR(p), (__CLEN_T)CLEN(p));
149 }
150
151 /** \brief
152 * -i8 variant of present
153 */
154 __LOG8_T
ENTF90(KPRESENT,kpresent)155 ENTF90(KPRESENT, kpresent)(void *p)
156 {
157
158
159 return (__INT8_T)ISPRESENT(p) ? GET_DIST_TRUE_LOG : 0;
160 }
161
__LOG8_T
ENTF90(KPRESENT_PTR, kpresent_ptr)(void *p)
{

  /*
   * -i8 variant of present
   */

  /* Absent when p is NULL, when p points into the "absent" common block,
   * or when the pointer object it addresses points into that block. */
  if (p == NULL) {
    return 0;
  }

#if defined(DESC_I8)
  if (!((__INT4_T *)(p) >= ENTCOMN(0, 0) &&
        (__INT4_T *)(p) <= (ENTCOMN(0, 0) + 3)) &&
      !(*(__INT4_T **)(p) >= ENTCOMN(0, 0) &&
        *(__INT4_T **)(p) <= (ENTCOMN(0, 0) + 3)))

#else
  if (!((__INT_T *)(p) >= ENTCOMN(0, 0) &&
        (__INT_T *)(p) <= (ENTCOMN(0, 0) + 3)) &&
      !(*(__INT_T **)(p) >= ENTCOMN(0, 0) &&
        *(__INT_T **)(p) <= (ENTCOMN(0, 0) + 3)))
#endif
    return GET_DIST_TRUE_LOG;
  else
    return 0;
}
190
191 __LOG8_T
ENTF90(KPRESENTCA,kpresentca)192 ENTF90(KPRESENTCA, kpresentca)(DCHAR(p) DCLEN64(p))
193 {
194
195 /*
196 * -i8 variant of PRESENTC
197 */
198
199 return (__INT8_T)ISPRESENTC(p) ? GET_DIST_TRUE_LOG : 0;
200 }
201
202 /* 32 bit CLEN version */
203 __LOG8_T
ENTF90(KPRESENTC,kpresentc)204 ENTF90(KPRESENTC, kpresentc)(DCHAR(p) DCLEN(p))
205 {
206 return ENTF90(KPRESENTCA, kpresentca)(CADR(p), (__CLEN_T)CLEN(p));
207 }
208
209 __LOG_T
ENTF90(IS_IOSTAT_END,is_iostat_end)210 ENTF90(IS_IOSTAT_END, is_iostat_end)(__INT4_T i)
211 {
212
213 return (i == -1) ? GET_DIST_TRUE_LOG : 0;
214 }
215
216 __LOG8_T
ENTF90(KIS_IOSTAT_END,kis_iostat_end)217 ENTF90(KIS_IOSTAT_END, kis_iostat_end)(__INT4_T i)
218 {
219
220 return (i == -1) ? GET_DIST_TRUE_LOG : 0;
221 }
222
223 __LOG_T
ENTF90(IS_IOSTAT_EOR,is_iostat_eor)224 ENTF90(IS_IOSTAT_EOR, is_iostat_eor)(__INT4_T i)
225 {
226
227 return (i == -2) ? GET_DIST_TRUE_LOG : 0;
228 }
229
230 __LOG8_T
ENTF90(KIS_IOSTAT_EOR,kis_iostat_eor)231 ENTF90(KIS_IOSTAT_EOR, kis_iostat_eor)(__INT4_T i)
232 {
233
234 return (i == -2) ? GET_DIST_TRUE_LOG : 0;
235 }
236
237 void
ENTF90(LOC,loc)238 *ENTF90(LOC, loc)(void *p)
239 {
240 return p;
241 }
242
243 __INT_T
ENTF90(IMAX,imax)244 ENTF90(IMAX, imax)(__INT_T i, __INT_T j)
245 {
246 return (i > j) ? i : j;
247 }
248
249 #if !defined(DESC_I8)
void
ENTF90(MIN, min)(int *nargs, ...)
{
  /* Character MIN: varargs carry (result, str1, ..., strN, clen).  The
   * lexicographically smallest string is copied into result — clen bytes,
   * no NUL terminator (Fortran CHARACTER semantics).  A NULL string
   * argument is skipped. */
  char *nextstr;
  char *minstr;
  char *result;
  int i, j;
  __CLEN_T clen;
  va_list argp;

  va_start(argp, nargs);
  j = *nargs;

  /* First loop through the argument list to get character len */
  result = va_arg(argp, char *);
  minstr = va_arg(argp, char *);
  if (result == NULL)
    return;

  /* argument list */
  /* NOTE(review): this pass consumes j strings after minstr, while the
   * comparison pass below consumes only j-1; one of the two bounds looks
   * off by one, which would make clen read the wrong vararg slot.
   * Confirm against the compiler's MIN/MAX calling convention before
   * changing. */
  for (i = 0; i < j; ++i) {
    nextstr = va_arg(argp, char *);
  }
  clen = va_arg(argp, __CLEN_T);
  va_end(argp);

  /* start real comparison */
  va_start(argp, nargs);
  result = va_arg(argp, char *);
  minstr = va_arg(argp, char *);
  if (minstr == NULL)
    return;
  for (i = 0; i < j - 1; ++i) {
    nextstr = va_arg(argp, char *);
    if (nextstr) {
      if (strncmp(nextstr, minstr, clen) < 0)
        minstr = nextstr;
    }
  }
  strncpy(result, minstr, clen);
  va_end(argp);
}
292
void
ENTF90(MAX, max)(int *nargs, ...)
{
  /* Character MAX: varargs carry (result, str1, ..., strN, clen).  The
   * lexicographically largest string is copied into result — clen bytes,
   * no NUL terminator (Fortran CHARACTER semantics).  A NULL string
   * argument is skipped. */
  char *nextstr;
  char *maxstr;
  char *result;
  int i, j;
  __CLEN_T clen;
  va_list argp;

  va_start(argp, nargs);
  j = *nargs;

  /* First loop through the argument list to get character len */
  result = va_arg(argp, char *);
  maxstr = va_arg(argp, char *);
  if (result == NULL)
    return;

  /* argument list */
  /* NOTE(review): this pass consumes j strings after maxstr, while the
   * comparison pass below consumes only j-1 — same apparent off-by-one as
   * in MIN above; confirm against the calling convention. */
  for (i = 0; i < j; ++i) {
    nextstr = va_arg(argp, char *);
  }
  clen = va_arg(argp, __CLEN_T);
  va_end(argp);

  /* start real comparison */
  va_start(argp, nargs);
  result = va_arg(argp, char *);
  maxstr = va_arg(argp, char *);
  if (maxstr == NULL)
    return;
  for (i = 0; i < j - 1; ++i) {
    nextstr = va_arg(argp, char *);
    if (nextstr) {
      if (strncmp(nextstr, maxstr, clen) > 0)
        maxstr = nextstr;
    }
  }
  strncpy(result, maxstr, clen);
  va_end(argp);
}
335
336 #endif
337
338 __INT8_T
ENTF90(KICHARA,kichara)339 ENTF90(KICHARA, kichara)
340 (DCHAR(c) DCLEN64(c))
341 {
342 return (__INT8_T)(CADR(c)[0] & 0xff);
343 }
344 /* 32 bit CLEN version */
345 __INT8_T
ENTF90(KICHAR,kichar)346 ENTF90(KICHAR, kichar)
347 (DCHAR(c) DCLEN(c))
348 {
349 return ENTF90(KICHARA, kichara)(CADR(c), (__CLEN_T)CLEN(c));
350 }
351
352 __INT_T
ENTF90(LENA,lena)353 ENTF90(LENA, lena)(DCHAR(s) DCLEN64(s))
354 {
355 return (__INT_T)CLEN(s);
356 }
357 /* 32 bit CLEN version */
358 __INT_T
ENTF90(LEN,len)359 ENTF90(LEN, len)(DCHAR(s) DCLEN(s))
360 {
361 return (__INT_T) ENTF90(LENA, lena)(CADR(s), (__CLEN_T)CLEN(s));
362 }
363
364 __INT8_T
ENTF90(KLENA,klena)365 ENTF90(KLENA, klena)(DCHAR(s) DCLEN64(s))
366 {
367 return (__INT8_T)CLEN(s);
368 }
369 /* 32 bit CLEN version */
370 __INT8_T
ENTF90(KLEN,klen)371 ENTF90(KLEN, klen)(DCHAR(s) DCLEN(s))
372 {
373 return ENTF90(KLENA, klena)(CADR(s), (__CLEN_T)CLEN(s));
374 }
375
376 __INT_T
ENTF90(NLENA,nlena)377 ENTF90(NLENA, nlena)(DCHAR(s) DCLEN64(s))
378 {
379 return (__INT_T)CLEN(s);
380 }
381 /* 32 bit CLEN version */
382 __INT_T
ENTF90(NLEN,nlen)383 ENTF90(NLEN, nlen)(DCHAR(s) DCLEN(s))
384 {
385 return ENTF90(NLENA, nlena)(CADR(s), (__CLEN_T)CLEN(s));
386 }
387
388 __CLEN_T
ENTF90(ADJUSTLA,adjustla)389 ENTF90(ADJUSTLA, adjustla)
390 (DCHAR(res), DCHAR(expr) DCLEN64(res) DCLEN64(expr))
391 {
392 __CLEN_T i, j, elen, rlen;
393
394 elen = CLEN(expr);
395 rlen = CLEN(res);
396 for (i = 0; i < elen && CADR(expr)[i] == ' '; ++i)
397 ;
398 for (j = 0; i < elen; ++i, ++j)
399 CADR(res)[j] = CADR(expr)[i];
400 for (; j < rlen; ++j)
401 CADR(res)[j] = ' ';
402 return elen;
403 }
404 /* 32 bit CLEN version */
405 __INT_T
ENTF90(ADJUSTL,adjustl)406 ENTF90(ADJUSTL, adjustl)
407 (DCHAR(res), DCHAR(expr) DCLEN(res) DCLEN(expr))
408 {
409 return (__INT_T)ENTF90(ADJUSTLA, adjustla)(CADR(res), CADR(expr),
410 (__CLEN_T)CLEN(res), (__CLEN_T)CLEN(expr));
411 }
412
__CLEN_T
ENTF90(ADJUSTRA, adjustra)
(DCHAR(res), DCHAR(expr) DCLEN64(res) DCLEN64(expr))
{
  /* ADJUSTR: copy expr into res right-justified (trailing blanks moved to
   * the front); returns CLEN(expr).  Indexing uses len = CLEN(expr) only,
   * so res is assumed to be at least that long — TODO confirm callers
   * guarantee this. */
  __CLEN_T i, j, len;

  len = CLEN(expr);
  /* i ends at the index of the last non-blank character, or wraps to
   * (__CLEN_T)-1 when expr is all blanks (unsigned wrap is well defined
   * and makes the following copy loop run zero times) */
  for (i = len; i-- > 0 && CADR(expr)[i] == ' ';)
    ;
  /* copy the significant characters to the tail of res */
  for (++i, j = len-1; i-- > 0; --j)
    CADR(res)[j] = CADR(expr)[i];
  /* blank fill the leading positions */
  for (++j; j-- > 0; )
    CADR(res)[j] = ' ';
  return len;
}
428 /* 32 bit CLEN version */
429 __INT_T
ENTF90(ADJUSTR,adjustr)430 ENTF90(ADJUSTR, adjustr)
431 (DCHAR(res), DCHAR(expr) DCLEN(res) DCLEN(expr))
432 {
433 return (__INT_T)ENTF90(ADJUSTRA, adjustra)(CADR(res), CADR(expr),
434 (__CLEN_T)CLEN(res), (__CLEN_T)CLEN(expr));
435 }
436
437 static void
fstrcpy(char * s1,char * s2,__CLEN_T len1,__CLEN_T len2)438 fstrcpy(char *s1, char *s2, __CLEN_T len1, __CLEN_T len2)
439 {
440 __CLEN_T i;
441
442 if (len2 < len1) {
443 for (i = 0; i < len2; ++i)
444 s1[i] = s2[i];
445 for (; i < len1; ++i)
446 s1[i] = ' ';
447 } else {
448 for (i = 0; i < len1; ++i)
449 s1[i] = s2[i];
450 }
451 }
452
static char *month[12] = {"Jan", "Feb", "Mar", "Apr", "May", "Jun",
                          "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"};

/* Reduce a year value to its final two digits; values already in the
 * 0..99 range are returned unchanged. */
static int
yr2(int yr)
{
  if (yr > 99)
    return yr % 100;
  return yr;
}
464
void
ENTFTN(DATEA, datea)(DCHAR(date), F90_Desc *dated DCLEN64(date))
{
  /* VMS-style DATE intrinsic: store the current date as "dd-Mmm-yy"
   * (9 characters), blank padded/truncated to CLEN(date). */
  char loc_buf[16];
  time_t ltime;
  struct tm *lt;

  ltime = time(); /* expands to __fort_time() via the macro above */
  MP_P(sem); /* localtime() returns static storage; serialize access */
  ;
  lt = localtime(&ltime);
  sprintf(loc_buf, "%2d-%3s-%02d", lt->tm_mday, month[lt->tm_mon],
          yr2(lt->tm_year));
  MP_V(sem);
  fstrcpy(CADR(date), loc_buf, CLEN(date), 9);
}
481 /* 32 bit CLEN version */
482 void
ENTFTN(DATE,date)483 ENTFTN(DATE, date)(DCHAR(date), F90_Desc *dated DCLEN(date))
484 {
485 ENTFTN(DATEA, datea)(CADR(date), dated, (__CLEN_T)CLEN(date));
486 }
487
void
ENTFTN(DATEW, datew)(void *date, F90_Desc *dated)
{
  /* Word-addressed variant of DATE: store "dd-Mmm-yy" (exactly 9 chars)
   * at *date with no descriptor-supplied length. */
  char loc_buf[16];
  time_t ltime;
  struct tm *lt;

  ltime = time(); /* expands to __fort_time() via the macro above */
  MP_P(sem); /* localtime() returns static storage; serialize access */
  ;
  lt = localtime(&ltime);
  sprintf(loc_buf, "%2d-%3s-%02d", lt->tm_mday, month[lt->tm_mon],
          yr2(lt->tm_year));
  MP_V(sem);
  fstrcpy(date, loc_buf, 9, 9);
}
504
void
ENTFTN(JDATE, jdate)(__INT4_T *i, __INT4_T *j, __INT4_T *k, F90_Desc *id,
                     F90_Desc *jd, F90_Desc *kd)
{
  /* Store the current month (1-12), day of month, and two-digit year
   * into *i, *j, *k (INTEGER*4).  Descriptor arguments are unused. */
  time_t ltime;
  struct tm *ltimvar;

  ltime = time(); /* expands to __fort_time() via the macro above */
  MP_P(sem); /* localtime() returns static storage; serialize access */
  ;
  ltimvar = localtime(&ltime);
  *i = ltimvar->tm_mon + 1;
  *j = ltimvar->tm_mday;
  *k = yr2(ltimvar->tm_year);
  MP_V(sem);
}
521
void
ENTFTN(IDATE, idate)(__INT2_T *i, __INT2_T *j, __INT2_T *k, F90_Desc *id,
                     F90_Desc *jd, F90_Desc *kd)
{
  /* INTEGER*2 variant of JDATE: month (1-12), day of month, two-digit
   * year into *i, *j, *k.  Descriptor arguments are unused. */
  time_t ltime;
  struct tm *ltimvar;

  ltime = time(); /* expands to __fort_time() via the macro above */
  MP_P(sem); /* localtime() returns static storage; serialize access */
  ;
  ltimvar = localtime(&ltime);
  *i = ltimvar->tm_mon + 1;
  *j = ltimvar->tm_mday;
  *k = yr2(ltimvar->tm_year);
  MP_V(sem);
}
538
539 /* trying to deal with loss of significant digits in
540 real*4 version.
541 */
542 #define TIME_THRESHOLD2 1.033944E+09
543 #define TIME_THRESHOLD1 1.003944E+09
544
545 void
ENTFTN(CPU_TIME,cpu_time)546 ENTFTN(CPU_TIME, cpu_time)(__REAL4_T *x)
547 {
548 extern double __fort_second();
549 double secs;
550 __REAL4_T res;
551
552 secs = __fort_second();
553 if (secs > TIME_THRESHOLD2)
554 res = secs - TIME_THRESHOLD2;
555 else if (secs > TIME_THRESHOLD1)
556 res = secs - TIME_THRESHOLD1;
557 else
558 res = secs;
559 *x = res;
560 }
561
562 void
ENTFTN(CPU_TIMED,cpu_timed)563 ENTFTN(CPU_TIMED, cpu_timed)(__REAL8_T *x)
564 {
565 extern double __fort_second();
566 double secs;
567 __REAL8_T res;
568
569 secs = __fort_second();
570 /* probably not necessary for this version, except that
571 user could mix real*4 and real*8 versions.
572 */
573 if (secs > TIME_THRESHOLD2)
574 res = secs - TIME_THRESHOLD2;
575 else if (secs > TIME_THRESHOLD1)
576 res = secs - TIME_THRESHOLD1;
577 else
578 res = secs;
579 *x = res;
580 }
581
__REAL4_T
ENTFTN(SECNDS, secnds)(__REAL4_T *x, F90_Desc *xd)
{
  /* SECNDS(x): seconds since local midnight, minus x (REAL*4).  The
   * midnight offset is computed once on first call and cached; NOTE:
   * the cached value becomes stale if the process runs past midnight. */
  static int called = 0;
  static int diffs;
  int i;
  time_t ltime;
  struct tm *lt;
  __REAL4_T f;

  ltime = time(); /* expands to __fort_time() via the macro above */
  if (called == 0) {
    called = 1; /* first time called */
    /*
     * compute value to subtract from time(0) to give seconds since
     * midnight
     */
    MP_P(sem); /* localtime() returns static storage; serialize access */
    ;
    lt = localtime(&ltime);
    i = lt->tm_sec + (60 * lt->tm_min) + (3600 * lt->tm_hour);
    MP_V(sem);
    diffs = ltime - i;
  }
  f = (__REAL4_T)(ltime - diffs);
  return (f - *x);
}
609
__REAL8_T
ENTFTN(SECNDSD, secndsd)(__REAL8_T *x, F90_Desc *xd)
{
  /* REAL*8 variant of SECNDS: seconds since local midnight, minus x.
   * The midnight offset is computed once on first call and cached. */
  static int called = 0;
  static int diffs;
  int i;
  time_t ltime;
  struct tm *lt;
  __REAL8_T f;

  ltime = time(); /* expands to __fort_time() via the macro above */
  if (called == 0) {
    called = 1; /* first time called */
    /*
     * compute value to subtract from time() to give seconds since
     * midnight
     */
    MP_P(sem); /* localtime() returns static storage; serialize access */
    ;
    lt = localtime(&ltime);
    i = lt->tm_sec + (60 * lt->tm_min) + (3600 * lt->tm_hour);
    MP_V(sem);
    diffs = ltime - i;
  }
  f = (__REAL8_T)(ltime - diffs);
  return (f - *x);
}
637
void
ENTFTN(FTIMEA, ftimea)(DCHAR(tbuf), F90_Desc *tbufd DCLEN64(tbuf))
{
  /* TIME intrinsic: store the current wall-clock time as "hh:mm:ss"
   * (8 characters), blank padded/truncated to CLEN(tbuf). */
  char loc_buf[16];
  time_t ltime;
  struct tm *ltimvar;

  ltime = time(); /* expands to __fort_time() via the macro above */
  MP_P(sem); /* localtime() returns static storage; serialize access */
  ;
  ltimvar = localtime(&ltime);
  sprintf(loc_buf, "%2.2d:%2.2d:%2.2d", ltimvar->tm_hour, ltimvar->tm_min,
          ltimvar->tm_sec);
  MP_V(sem);
  fstrcpy(CADR(tbuf), loc_buf, CLEN(tbuf), 8);
}
654 /* 32 bit CLEN version */
655 void
ENTFTN(FTIME,ftime)656 ENTFTN(FTIME, ftime)(DCHAR(tbuf), F90_Desc *tbufd DCLEN(tbuf))
657 {
658 ENTFTN(FTIMEA, ftimea)(CADR(tbuf), tbufd, (__CLEN_T)CLEN(tbuf));
659 }
660
void
ENTFTN(FTIMEW, ftimew)(void *tbuf, F90_Desc *tbufd)
{
  /* Word-addressed variant of TIME: store "hh:mm:ss" (exactly 8 chars)
   * at *tbuf with no descriptor-supplied length. */
  char loc_buf[16];
  time_t ltime;
  struct tm *ltimvar;

  ltime = time(); /* expands to __fort_time() via the macro above */
  MP_P(sem); /* localtime() returns static storage; serialize access */
  ;
  ltimvar = localtime(&ltime);
  sprintf(loc_buf, "%2.2d:%2.2d:%2.2d", ltimvar->tm_hour, ltimvar->tm_min,
          ltimvar->tm_sec);
  MP_V(sem);
  fstrcpy(tbuf, loc_buf, 8, 8);
}
677
static int
I8(next_index)(__INT_T *index, F90_Desc *s)
{
  /* Advance a rank-dimensional index vector through the array described
   * by s in column-major (first dimension fastest) order.  Returns 1
   * while elements remain, 0 after the last dimension wraps. */
  __INT_T i;

  for (i = 0; i < F90_RANK_G(s); i++) {
    index[i]++;
    if (index[i] <= DIM_UBOUND_G(s, i)) {
      return 1; /* keep going */
    }
    index[i] = F90_DIM_LBOUND_G(s, i);
  }
  return 0; /* finished */
}
692
void
ENTFTN(DANDTA, dandta)(DCHAR(date), DCHAR(tbuf), DCHAR(zone),
                       __STAT_T *values, F90_Desc *dated, F90_Desc *tbufd,
                       F90_Desc *zoned,
                       F90_Desc *valuesd DCLEN64(date) DCLEN64(tbuf) DCLEN64(zone))
{
  /* DATE_AND_TIME intrinsic.  All arguments are optional:
   *   date   - "ccyymmdd" (8 chars)
   *   tbuf   - "hhmmss.sss" (10 chars, millisecond resolution)
   *   zone   - "+hhmm"/"-hhmm" offset from UTC (5 chars)
   *   values - 8 integers: year, month, day, UTC offset in minutes,
   *            hour, minute, second, millisecond */
  int tvalues[8];
  int i;
  char c;
  time_t ltime;
  struct tm *tm, tmx;
  char loc_buf[16];
  int ms;
#if defined(TARGET_OSX)
  struct timeval t;
  struct timezone tz0;
#else
  struct timeval t;
#endif

  /* wall-clock time with sub-second resolution */
#if defined(TARGET_OSX)
  gettimeofday(&t, &tz0);
  ltime = t.tv_sec;
  ms = t.tv_usec / 1000;
#else
  gettimeofday(&t, (void *)0);
  ltime = t.tv_sec;
  ms = t.tv_usec / 1000;
#endif
  MP_P(sem); /* localtime() returns static storage; serialize access */
  ;
  tm = localtime(&ltime);
  if (tm == NULL) {
    fprintf(__io_stderr(), "BAD return value from localtime(0x%lx)\n",
            (long)ltime);
    perror("localtime: ");
    exit(1);
  }
  /* copy out of localtime()'s static buffer before releasing the lock */
  memcpy(&tmx, tm, sizeof(struct tm));
  tm = &tmx;
  MP_V(sem);
  if (ISPRESENTC(date) && CLEN(date) > 0) {
    sprintf(loc_buf, "%04d%02d%02d", tm->tm_year + 1900, tm->tm_mon + 1,
            tm->tm_mday);
    fstrcpy(CADR(date), loc_buf, CLEN(date), 8);
  }
  if (ISPRESENTC(tbuf) && CLEN(tbuf) > 0) {
    sprintf(loc_buf, "%02d%02d%02d.%03d", tm->tm_hour, tm->tm_min, tm->tm_sec,
            ms);
    fstrcpy(CADR(tbuf), loc_buf, CLEN(tbuf), 10);
  }
  if (ISPRESENTC(zone) && CLEN(zone) > 0) {
    /* __io_timezone() is presumed to return the UTC offset in seconds,
     * sign included — confirm against its definition */
    i = __io_timezone(tm);
    c = '+';
    if (i < 0) {
      i = -i;
      c = '-';
    }
    i /= 60; /* seconds -> minutes */
    sprintf(loc_buf, "%c%02d%02d", c, i / 60, i % 60);
    fstrcpy(CADR(zone), loc_buf, CLEN(zone), 5);
  }
  if (ISPRESENT(values)) {
    tvalues[0] = tm->tm_year + 1900;
    tvalues[1] = tm->tm_mon + 1;
    tvalues[2] = tm->tm_mday;
    i = __io_timezone(tm);
    c = '+';
    if (i < 0) {
      i = -i;
      c = '-';
    }
    i /= 60;
    if (c == '-')
      i = -i;
    tvalues[3] = i; /* signed offset in minutes */
    tvalues[4] = tm->tm_hour;
    tvalues[5] = tm->tm_min;
    tvalues[6] = tm->tm_sec;
    tvalues[7] = ms;
    if (valuesd && F90_TAG_G(valuesd) == __DESC) {
      /* values is a distributed/descriptor-based array: walk its index
       * space and store element by element */
      char *la;
      __INT_T index[7];

      for (i = 0; i < F90_RANK_G(valuesd); ++i) {
        if (DIM_UBOUND_G(valuesd, i) < F90_DIM_LBOUND_G(valuesd, i))
          return; /* zero-size array: nothing to store */
        index[i] = F90_DIM_LBOUND_G(valuesd, i);
      }
      for (i = 0; i < 8; ++i) {
        la = I8(__fort_local_address)(values, valuesd, index);
        if (la) {
          /* *((int *)la) = tvalues[i]; */
          store_mxint_t(la, valuesd, tvalues[i]);
        }
        if (I8(next_index)(index, valuesd) == 0)
          break;
      }
    } else {
      /* plain contiguous array */
      for (i = 0; i < 8; ++i)
        values[i] = tvalues[i];
    }
  }
}
797 /* 32 bit CLEN version */
798 void
ENTFTN(DANDT,dandt)799 ENTFTN(DANDT, dandt)(DCHAR(date), DCHAR(tbuf), DCHAR(zone),
800 __STAT_T *values, F90_Desc *dated, F90_Desc *tbufd,
801 F90_Desc *zoned,
802 F90_Desc *valuesd DCLEN(date) DCLEN(tbuf) DCLEN(zone))
803 {
804 ENTFTN(DANDTA, dandta)(CADR(date), CADR(tbuf), CADR(zone), values, dated,
805 tbufd, zoned, valuesd, (__CLEN_T)CLEN(date),
806 (__CLEN_T)CLEN(tbuf), (__CLEN_T)CLEN(zone));
807 }
808
void
ENTFTN(SYSCLK, sysclk)(__STAT_T *count, __STAT_T *count_rate,
                       __STAT_T *count_max, F90_Desc *countd,
                       F90_Desc *count_rated, F90_Desc *count_maxd)
{
  /* SYSTEM_CLOCK intrinsic: optionally returns the current count, the
   * count rate (ticks per second) and the maximum representable count.
   * The rate is chosen from the integer kind of count (or count_rate),
   * overriding the -system_clock_rate option value. */
  static MXINT_T resol; /* resolution, tics per second */
  int sz;

  if (resol == 0) {
    /* one-time initialization from the command-line option / default */
    int def;
#if defined(TARGET_X8664)
    def = 1000000;
#else
    def = sizeof(__STAT_T) < 8 ? 1000 : 1000000;
#endif
    resol = __fort_getoptn("-system_clock_rate", def);
    if (resol <= 0)
      __fort_abort("invalid value given for system_clock rate");
  }
  if (ISPRESENT(count_rate)) {
    /* pick a resolution the requested integer kind can hold */
    if (ISPRESENT(count)) {
      sz = GET_DIST_SIZE_OF(TYPEKIND(countd));
    } else {
      sz = GET_DIST_SIZE_OF(TYPEKIND(count_rated));
    }
    switch (sz) {
    case 1:
      resol = 10;
      break;
    case 2:
      resol = 1000;
      break;
    case 4:
      resol = 1000000;
      break;
    default: /* big*/
      resol = 10000000;
      break;
    }
  }
  if (ISPRESENT(count)) {
    double t = __fort_second();
    MXINT_T mxt;
    mxt = mxint(countd);
    /* rebase the clock when the scaled count would overflow the kind */
    if (t * resol > mxt) {
      t = 0;
      __fort_set_second(t);
    }
    store_mxint_t(count, countd, (t)*resol);
  }
  if (ISPRESENT(count_rate)) {
    store_mxint_t(count_rate, count_rated, resol);
  }
  if (ISPRESENT(count_max)) {
    if (ISPRESENT(count)) {
      store_mxint_t(count_max, count_maxd, mxint(countd));
    } else {
      store_mxint_t(count_max, count_maxd, mxint(count_maxd));
    }
  }
}
870
void
ENTF90(MVBITS, mvbits)(void *from, void *frompos, void *len, void *to,
                       void *topos, __INT_T *szfrom, __INT_T *szfrompos,
                       __INT_T *szlen, __INT_T *sztopos)
{
  /* MVBITS intrinsic: copy ln bits of *from starting at bit fp into *to
   * starting at bit tp, leaving the other bits of *to unchanged.
   * *szfrom selects the integer kind (1/2/4/8 bytes); out-of-range
   * positions/lengths are silently ignored. */
  __INT1_T f1, t1, m1;
  __INT2_T f2, t2, m2;
  __INT4_T f4, t4, m4;
  __INT8_T f8, t8, m8;
  int fp = I8(__fort_varying_int)(frompos, szfrompos);
  int ln = I8(__fort_varying_int)(len, szlen);
  int tp = I8(__fort_varying_int)(topos, sztopos);

/* true when the requested bit field spills past a w-bit integer */
#undef MVBIT_OVFL
#define MVBIT_OVFL(w) ((fp + ln) > (w) || (tp + ln) > (w))

  if (fp < 0 || tp < 0 || ln <= 0)
    return;
  /* NOTE(review): the (~(-1 << ln)) mask idiom left-shifts a negative
   * value, which is formally undefined in ISO C though well behaved on
   * the targeted compilers; the full-width case is special-cased above
   * each mask so ln < width here. */
  switch (*szfrom) {
  case 1:
    if (MVBIT_OVFL(8))
      break;
    if (ln == 8) {
      /* whole-register move */
      *(__INT1_T *)to = *(__INT1_T *)from;
      break;
    }
    f1 = *(__INT1_T *)from;
    t1 = *(__INT1_T *)to;
    m1 = (~(-1 << ln)) << tp;
    *(__INT1_T *)to = (t1 & ~m1) | (((f1 >> fp) << tp) & m1);
    break;
  case 2:
    if (MVBIT_OVFL(16))
      break;
    if (ln == 16) {
      *(__INT2_T *)to = *(__INT2_T *)from;
      break;
    }
    f2 = *(__INT2_T *)from;
    t2 = *(__INT2_T *)to;
    m2 = (~(-1 << ln)) << tp;
    *(__INT2_T *)to = (t2 & ~m2) | (((f2 >> fp) << tp) & m2);
    break;
  case 4:
    if (MVBIT_OVFL(32))
      break;
    if (ln == 32) {
      *(__INT4_T *)to = *(__INT4_T *)from;
      break;
    }
    f4 = *(__INT4_T *)from;
    t4 = *(__INT4_T *)to;
    m4 = (~(-1 << ln)) << tp;
    *(__INT4_T *)to = (t4 & ~m4) | (((f4 >> fp) << tp) & m4);
    break;
  case 8:
    if (MVBIT_OVFL(64))
      break;
    if (ln == 64) {
      *(__INT8_T *)to = *(__INT8_T *)from;
      break;
    }
    f8 = *(__INT8_T *)from;
    t8 = *(__INT8_T *)to;
    m8 = (~((__INT8_T)-1 << ln)) << tp;
    *(__INT8_T *)to = (t8 & ~m8) | (((f8 >> fp) << tp) & m8);
    break;
  default:
    __fort_abort("MVBITS: unsupported from/to integer size");
  }
}
942
943 /** \brief
944 *Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank>
945 */
946 __INT_T
ENTF90(LB,lb)947 ENTF90(LB, lb)(__INT4_T *rank, __INT4_T *dim, ...)
948 {
949 va_list va;
950 __INT_T *lb;
951 __INT_T *ub;
952 __INT_T d;
953
954 d = *dim;
955 if (d < 1 || d > *rank)
956 __fort_abort("LBOUND: invalid dim");
957 va_start(va, dim);
958 while (d-- > 0) {
959 lb = va_arg(va, __INT_T *);
960 ub = va_arg(va, __INT_T *);
961 }
962 va_end(va);
963 if (!ISPRESENT(lb))
964 __fort_abort("LBOUND: lower bound not present for specified dim");
965 if (!ISPRESENT(ub))
966 /* presumably, it's the last dimension of an assumed array */
967 return *lb;
968 return (*lb <= *ub) ? *lb : 1;
969 }
970
971 /* Varargs:
972 * __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank>
973 */
974 __INT1_T
ENTF90(LB1,lb1)975 ENTF90(LB1, lb1)(__INT4_T *rank, __INT4_T *dim, ...)
976 {
977 va_list va;
978 __INT_T *lb;
979 __INT_T *ub;
980 __INT_T d;
981
982 d = *dim;
983 if (d < 1 || d > *rank)
984 __fort_abort("LBOUND: invalid dim");
985 va_start(va, dim);
986 while (d-- > 0) {
987 lb = va_arg(va, __INT_T *);
988 ub = va_arg(va, __INT_T *);
989 }
990 va_end(va);
991 if (!ISPRESENT(lb))
992 __fort_abort("LBOUND: lower bound not present for specified dim");
993 if (!ISPRESENT(ub))
994 /* presumably, it's the last dimension of an assumed array */
995 return (__INT1_T)*lb;
996 return (__INT1_T)(*lb <= *ub) ? *lb : 1;
997 }
998
999 /* Varargs:
1000 * __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank>
1001 */
1002 __INT2_T
ENTF90(LB2,lb2)1003 ENTF90(LB2, lb2)(__INT4_T *rank, __INT4_T *dim, ...)
1004 {
1005 va_list va;
1006 __INT_T *lb;
1007 __INT_T *ub;
1008 __INT_T d;
1009
1010 d = *dim;
1011 if (d < 1 || d > *rank)
1012 __fort_abort("LBOUND: invalid dim");
1013 va_start(va, dim);
1014 while (d-- > 0) {
1015 lb = va_arg(va, __INT_T *);
1016 ub = va_arg(va, __INT_T *);
1017 }
1018 va_end(va);
1019 if (!ISPRESENT(lb))
1020 __fort_abort("LBOUND: lower bound not present for specified dim");
1021 if (!ISPRESENT(ub))
1022 /* presumably, it's the last dimension of an assumed array */
1023 return (__INT2_T)*lb;
1024 return (__INT2_T)(*lb <= *ub) ? *lb : 1;
1025 }
1026
1027 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1028 __INT4_T
ENTF90(LB4,lb4)1029 ENTF90(LB4, lb4)(__INT4_T *rank, __INT4_T *dim, ...)
1030 {
1031 va_list va;
1032 __INT_T *lb;
1033 __INT_T *ub;
1034 __INT_T d;
1035
1036 d = *dim;
1037 if (d < 1 || d > *rank)
1038 __fort_abort("LBOUND: invalid dim");
1039 va_start(va, dim);
1040 while (d-- > 0) {
1041 lb = va_arg(va, __INT_T *);
1042 ub = va_arg(va, __INT_T *);
1043 }
1044 va_end(va);
1045 if (!ISPRESENT(lb))
1046 __fort_abort("LBOUND: lower bound not present for specified dim");
1047 if (!ISPRESENT(ub))
1048 /* presumably, it's the last dimension of an assumed array */
1049 return (__INT4_T)*lb;
1050 return (__INT4_T)(*lb <= *ub) ? *lb : 1;
1051 }
1052
1053 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1054 __INT8_T
ENTF90(LB8,lb8)1055 ENTF90(LB8, lb8)(__INT4_T *rank, __INT4_T *dim, ...)
1056 {
1057 va_list va;
1058 __INT_T *lb;
1059 __INT_T *ub;
1060 __INT_T d;
1061
1062 d = *dim;
1063 if (d < 1 || d > *rank)
1064 __fort_abort("LBOUND: invalid dim");
1065 va_start(va, dim);
1066 while (d-- > 0) {
1067 lb = va_arg(va, __INT_T *);
1068 ub = va_arg(va, __INT_T *);
1069 }
1070 va_end(va);
1071 if (!ISPRESENT(lb))
1072 __fort_abort("LBOUND: lower bound not present for specified dim");
1073 if (!ISPRESENT(ub))
1074 /* presumably, it's the last dimension of an assumed array */
1075 return (__INT8_T)*lb;
1076 return (__INT8_T)(*lb <= *ub) ? *lb : 1;
1077 }
1078
1079 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1080 __INT8_T
ENTF90(KLB,klb)1081 ENTF90(KLB, klb)(__INT4_T *rank, __INT4_T *dim, ...)
1082 {
1083
1084 /*
1085 * -i8 variant of LB
1086 */
1087
1088 va_list va;
1089 __INT_T *lb;
1090 __INT_T *ub;
1091 __INT_T d;
1092
1093 d = *dim;
1094 if (d < 1 || d > *rank)
1095 __fort_abort("LBOUND: invalid dim");
1096 va_start(va, dim);
1097 while (d-- > 0) {
1098 lb = va_arg(va, __INT_T *);
1099 ub = va_arg(va, __INT_T *);
1100 }
1101 va_end(va);
1102 if (!ISPRESENT(lb))
1103 __fort_abort("LBOUND: lower bound not present for specified dim");
1104 if (!ISPRESENT(ub))
1105 /* presumably, it's the last dimension of an assumed array */
1106 return *lb;
1107 return (__INT8_T)(*lb <= *ub) ? *lb : 1;
1108 }
1109
1110 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1111 __INT_T
ENTF90(UB,ub)1112 ENTF90(UB, ub)(__INT4_T *rank, __INT4_T *dim, ...)
1113 {
1114 va_list va;
1115 __INT_T *lb;
1116 __INT_T *ub;
1117 __INT_T d;
1118
1119 d = *dim;
1120 if (d < 1 || d > *rank)
1121 __fort_abort("UBOUND: invalid dim");
1122 va_start(va, dim);
1123 while (d-- > 0) {
1124 lb = va_arg(va, __INT_T *);
1125 ub = va_arg(va, __INT_T *);
1126 }
1127 va_end(va);
1128 if (!ISPRESENT(ub))
1129 __fort_abort("UBOUND: upper bound not present for specified dim");
1130 return (*lb <= *ub) ? *ub : 0;
1131 }
1132
1133 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1134 __INT1_T
ENTF90(UB1,ub1)1135 ENTF90(UB1, ub1)(__INT4_T *rank, __INT4_T *dim, ...)
1136 {
1137 va_list va;
1138 __INT_T *lb;
1139 __INT_T *ub;
1140 __INT_T d;
1141
1142 d = *dim;
1143 if (d < 1 || d > *rank)
1144 __fort_abort("UBOUND: invalid dim");
1145 va_start(va, dim);
1146 while (d-- > 0) {
1147 lb = va_arg(va, __INT_T *);
1148 ub = va_arg(va, __INT_T *);
1149 }
1150 va_end(va);
1151 if (!ISPRESENT(ub))
1152 __fort_abort("UBOUND: upper bound not present for specified dim");
1153 return (__INT1_T)(*lb <= *ub) ? *ub : 0;
1154 }
1155
1156 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1157 __INT2_T
ENTF90(UB2,ub2)1158 ENTF90(UB2, ub2)(__INT4_T *rank, __INT4_T *dim, ...)
1159 {
1160 va_list va;
1161 __INT_T *lb;
1162 __INT_T *ub;
1163 __INT_T d;
1164
1165 d = *dim;
1166 if (d < 1 || d > *rank)
1167 __fort_abort("UBOUND: invalid dim");
1168 va_start(va, dim);
1169 while (d-- > 0) {
1170 lb = va_arg(va, __INT_T *);
1171 ub = va_arg(va, __INT_T *);
1172 }
1173 va_end(va);
1174 if (!ISPRESENT(ub))
1175 __fort_abort("UBOUND: upper bound not present for specified dim");
1176 return (__INT2_T)(*lb <= *ub) ? *ub : 0;
1177 }
1178
1179 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1180 __INT4_T
ENTF90(UB4,ub4)1181 ENTF90(UB4, ub4)(__INT4_T *rank, __INT4_T *dim, ...)
1182 {
1183 va_list va;
1184 __INT_T *lb;
1185 __INT_T *ub;
1186 __INT_T d;
1187
1188 d = *dim;
1189 if (d < 1 || d > *rank)
1190 __fort_abort("UBOUND: invalid dim");
1191 va_start(va, dim);
1192 while (d-- > 0) {
1193 lb = va_arg(va, __INT_T *);
1194 ub = va_arg(va, __INT_T *);
1195 }
1196 va_end(va);
1197 if (!ISPRESENT(ub))
1198 __fort_abort("UBOUND: upper bound not present for specified dim");
1199 return (__INT4_T)(*lb <= *ub) ? *ub : 0;
1200 }
1201
1202 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1203 __INT8_T
ENTF90(UB8,ub8)1204 ENTF90(UB8, ub8)(__INT4_T *rank, __INT4_T *dim, ...)
1205 {
1206 va_list va;
1207 __INT_T *lb;
1208 __INT_T *ub;
1209 __INT_T d;
1210
1211 d = *dim;
1212 if (d < 1 || d > *rank)
1213 __fort_abort("UBOUND: invalid dim");
1214 va_start(va, dim);
1215 while (d-- > 0) {
1216 lb = va_arg(va, __INT_T *);
1217 ub = va_arg(va, __INT_T *);
1218 }
1219 va_end(va);
1220 if (!ISPRESENT(ub))
1221 __fort_abort("UBOUND: upper bound not present for specified dim");
1222 return (__INT8_T)(*lb <= *ub) ? *ub : 0;
1223 }
1224
1225 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<rank>, __INT_T *ub<rank> */
1226 __INT8_T
ENTF90(KUB,kub)1227 ENTF90(KUB, kub)(__INT4_T *rank, __INT4_T *dim, ...)
1228 {
1229
1230 /*
1231 * -i8 variant of UB
1232 */
1233
1234 va_list va;
1235 __INT_T *lb;
1236 __INT_T *ub;
1237 __INT_T d;
1238
1239 d = *dim;
1240 if (d < 1 || d > *rank)
1241 __fort_abort("UBOUND: invalid dim");
1242 va_start(va, dim);
1243 while (d-- > 0) {
1244 lb = va_arg(va, __INT_T *);
1245 ub = va_arg(va, __INT_T *);
1246 }
1247 va_end(va);
1248 if (!ISPRESENT(ub))
1249 __fort_abort("UBOUND: upper bound not present for specified dim");
1250 return (__INT8_T)(*lb <= *ub) ? *ub : 0;
1251 }
1252
1253 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
1254 void
ENTF90(LBA,lba)1255 ENTF90(LBA, lba)(__INT_T *arr, __INT4_T *size, ...)
1256 {
1257 va_list va;
1258 __INT_T *lb;
1259 __INT_T *ub;
1260 __INT_T s;
1261
1262 s = *size;
1263 va_start(va, size);
1264 while (s-- > 0) {
1265 lb = va_arg(va, __INT_T *);
1266 if (!ISPRESENT(lb))
1267 __fort_abort("LBOUND: lower bound not present");
1268 ub = va_arg(va, __INT_T *);
1269 if (!ISPRESENT(ub))
1270 /* presumably, it's the last dimension of an assumed array */
1271 *arr++ = *lb;
1272 else
1273 *arr++ = (*lb <= *ub) ? *lb : 1;
1274 }
1275 va_end(va);
1276 }
1277
1278 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBOUND(array) runtime, __INT1_T result elements: store the effective
   lower bound of each of the *size dimensions into arr.  Varargs are
   (lb, ub) pointer pairs; an empty extent (*lb > *ub) yields 1. */
void
ENTF90(LBA1, lba1)(__INT1_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1302
1303 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBOUND(array) runtime, __INT2_T result elements: store the effective
   lower bound of each of the *size dimensions into arr.  Varargs are
   (lb, ub) pointer pairs; an empty extent (*lb > *ub) yields 1. */
void
ENTF90(LBA2, lba2)(__INT2_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1327
1328 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBOUND(array) runtime, __INT4_T result elements: store the effective
   lower bound of each of the *size dimensions into arr.  Varargs are
   (lb, ub) pointer pairs; an empty extent (*lb > *ub) yields 1. */
void
ENTF90(LBA4, lba4)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1352
1353 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBOUND(array) runtime, __INT8_T result elements: store the effective
   lower bound of each of the *size dimensions into arr.  Varargs are
   (lb, ub) pointer pairs; an empty extent (*lb > *ub) yields 1. */
void
ENTF90(LBA8, lba8)(__INT8_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1377
1378 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* -i8 (64-bit default integer) variant of LBA: store the effective lower
   bound of each of the *size dimensions into arr as __INT8_T. */
void
ENTF90(KLBA, klba)(__INT8_T *arr, __INT4_T *size, ...)
{

  /*
   * -i8 variant of LBA
   */

  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1407
1408 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
1409 void
ENTF90(UBA,uba)1410 ENTF90(UBA, uba)(__INT_T *arr, __INT4_T *size, ...)
1411 {
1412 va_list va;
1413 __INT_T *lb;
1414 __INT_T *ub;
1415 __INT_T s;
1416
1417 s = *size;
1418 va_start(va, size);
1419 while (s-- > 0) {
1420 lb = va_arg(va, __INT_T *);
1421 ub = va_arg(va, __INT_T *);
1422 if (!ISPRESENT(ub))
1423 __fort_abort("UBOUND: upper bound not present");
1424 *arr++ = (*lb <= *ub) ? *ub : 0;
1425 }
1426 va_end(va);
1427 }
1428
1429 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBOUND(array) runtime, __INT1_T result elements: store the effective
   upper bound of each of the *size dimensions into arr.  Varargs are
   (lb, ub) pointer pairs; an empty extent (*lb > *ub) yields 0. */
void
ENTF90(UBA1, uba1)(__INT1_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1449
1450 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBOUND(array) runtime, __INT2_T result elements: store the effective
   upper bound of each of the *size dimensions into arr.  Varargs are
   (lb, ub) pointer pairs; an empty extent (*lb > *ub) yields 0. */
void
ENTF90(UBA2, uba2)(__INT2_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1470
1471 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBOUND(array) runtime, __INT4_T result elements: store the effective
   upper bound of each of the *size dimensions into arr.  Varargs are
   (lb, ub) pointer pairs; an empty extent (*lb > *ub) yields 0. */
void
ENTF90(UBA4, uba4)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1491
1492 /* Varargs; __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBOUND(array) runtime, __INT8_T result elements: store the effective
   upper bound of each of the *size dimensions into arr.  Varargs are
   (lb, ub) pointer pairs; an empty extent (*lb > *ub) yields 0. */
void
ENTF90(UBA8, uba8)(__INT8_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1512
1513 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* -i8 (64-bit default integer) variant of UBA: store the effective upper
   bound of each of the *size dimensions into arr as __INT8_T. */
void
ENTF90(KUBA, kuba)(__INT8_T *arr, __INT4_T *size, ...)
{

  /*
   * -i8 variant of UBA
   */

  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1538
1539 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBOUND(array) runtime, default-kind variant writing __INT4_T results
   (NOTE(review): body is identical to LBA4's; the distinction is
   presumably in which calls the compiler emits -- confirm against the
   front end).  Empty extents (*lb > *ub) yield 1. */
void
ENTF90(LBAZ, lbaz)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1563
1564 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBAZ variant with __INT1_T result elements: store the effective lower
   bound of each of the *size dimensions into arr; empty extents yield 1. */
void
ENTF90(LBAZ1, lbaz1)(__INT1_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1588
1589 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBAZ variant with __INT2_T result elements: store the effective lower
   bound of each of the *size dimensions into arr; empty extents yield 1. */
void
ENTF90(LBAZ2, lbaz2)(__INT2_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1613
1614 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBAZ variant with __INT4_T result elements: store the effective lower
   bound of each of the *size dimensions into arr; empty extents yield 1. */
void
ENTF90(LBAZ4, lbaz4)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1638
1639 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* LBAZ variant with __INT8_T result elements: store the effective lower
   bound of each of the *size dimensions into arr; empty extents yield 1. */
void
ENTF90(LBAZ8, lbaz8)(__INT8_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1663
1664 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* -i8 (64-bit default integer) variant of LBAZ: store the effective lower
   bound of each of the *size dimensions into arr as __INT8_T. */
void
ENTF90(KLBAZ, klbaz)(__INT8_T *arr, __INT4_T *size, ...)
{
  /*
   * -i8 variant of LBAZ
   */

  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    if (!ISPRESENT(lb))
      __fort_abort("LBOUND: lower bound not present");
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      /* presumably, it's the last dimension of an assumed array */
      *arr++ = *lb;
    else
      *arr++ = (*lb <= *ub) ? *lb : 1;
  }
  va_end(va);
}
1692
1693 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBOUND(array) runtime, default-kind variant writing __INT4_T results;
   empty extents (*lb > *ub) yield 0.
   NOTE(review): size is declared __INT_T * here while every sibling
   (UBAZ1..UBAZ8, KUBAZ, LBAZ) uses __INT4_T * -- likely a historical
   inconsistency; confirm __INT_T == __INT4_T before unifying. */
void
ENTF90(UBAZ, ubaz)(__INT4_T *arr, __INT_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1713
1714 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBAZ variant with __INT1_T result elements: store the effective upper
   bound of each of the *size dimensions into arr; empty extents yield 0. */
void
ENTF90(UBAZ1, ubaz1)(__INT1_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1734
1735 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBAZ variant with __INT2_T result elements: store the effective upper
   bound of each of the *size dimensions into arr; empty extents yield 0. */
void
ENTF90(UBAZ2, ubaz2)(__INT2_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1755
1756 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBAZ variant with __INT4_T result elements: store the effective upper
   bound of each of the *size dimensions into arr; empty extents yield 0. */
void
ENTF90(UBAZ4, ubaz4)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1776
1777 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* UBAZ variant with __INT8_T result elements: store the effective upper
   bound of each of the *size dimensions into arr; empty extents yield 0. */
void
ENTF90(UBAZ8, ubaz8)(__INT8_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1797
1798 /* Varargs: __INT_T *lb1, __INT_T *ub1, ... __INT_T *lb<size>, __INT_T *ub<size> */
/* -i8 (64-bit default integer) variant of UBAZ: store the effective upper
   bound of each of the *size dimensions into arr as __INT8_T. */
void
ENTF90(KUBAZ, kubaz)(__INT8_T *arr, __INT4_T *size, ...)
{

  /*
   * -i8 variant of UBAZ
   */

  va_list va;
  __INT_T *lb;
  __INT_T *ub;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    lb = va_arg(va, __INT_T *);
    ub = va_arg(va, __INT_T *);
    if (!ISPRESENT(ub))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = (*lb <= *ub) ? *ub : 0;
  }
  va_end(va);
}
1823
1824 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
1825 __INT_T
ENTF90(LBOUND,lbound)1826 ENTF90(LBOUND, lbound)(__INT4_T *rank, __INT4_T *dim, ...)
1827 {
1828 va_list va;
1829 __INT_T *bnd;
1830 __INT_T d;
1831
1832 d = *dim;
1833 if (d < 1 || d > *rank)
1834 __fort_abort("LBOUND: invalid dim");
1835 va_start(va, dim);
1836 while (d-- > 0)
1837 bnd = va_arg(va, __INT_T *);
1838 va_end(va);
1839 if (!ISPRESENT(bnd))
1840 __fort_abort("LBOUND: lower bound not present for specified dim");
1841 return *bnd;
1842 }
1843
1844 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* LBOUND(array, dim) runtime, __INT1_T result: return the declared lower
   bound of dimension *dim (1-based); aborts on invalid/absent dim. */
__INT1_T
ENTF90(LBOUND1, lbound1)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("LBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("LBOUND: lower bound not present for specified dim");
  return (__INT1_T)*bnd;
}
1863
1864 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* LBOUND(array, dim) runtime, __INT2_T result: return the declared lower
   bound of dimension *dim (1-based); aborts on invalid/absent dim. */
__INT2_T
ENTF90(LBOUND2, lbound2)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("LBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("LBOUND: lower bound not present for specified dim");
  return (__INT2_T)*bnd;
}
1883
1884 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* LBOUND(array, dim) runtime, __INT4_T result: return the declared lower
   bound of dimension *dim (1-based); aborts on invalid/absent dim. */
__INT4_T
ENTF90(LBOUND4, lbound4)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("LBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("LBOUND: lower bound not present for specified dim");
  return (__INT4_T)*bnd;
}
1903
1904 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* LBOUND(array, dim) runtime, __INT8_T result: return the declared lower
   bound of dimension *dim (1-based); aborts on invalid/absent dim. */
__INT8_T
ENTF90(LBOUND8, lbound8)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("LBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("LBOUND: lower bound not present for specified dim");
  return (__INT8_T)*bnd;
}
1923
1924 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* -i8 (64-bit default integer) variant of LBOUND: return the declared
   lower bound of dimension *dim as __INT8_T. */
__INT8_T
ENTF90(KLBOUND, klbound)(__INT4_T *rank, __INT4_T *dim, ...)
{

  /*
   * -i8 variant of lbound
   */

  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("LBOUND: invalid dim");
  va_start(va, dim);

  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("LBOUND: lower bound not present for specified dim");
  return (__INT8_T)*bnd;
}
1949
1950 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
1951 __INT_T
ENTF90(UBOUND,ubound)1952 ENTF90(UBOUND, ubound)(__INT4_T *rank, __INT4_T *dim, ...)
1953 {
1954 va_list va;
1955 __INT_T *bnd;
1956 __INT_T d;
1957
1958 d = *dim;
1959 if (d < 1 || d > *rank)
1960 __fort_abort("UBOUND: invalid dim");
1961 va_start(va, dim);
1962 while (d-- > 0)
1963 bnd = va_arg(va, __INT_T *);
1964 va_end(va);
1965 if (!ISPRESENT(bnd))
1966 __fort_abort("UBOUND: upper bound not present for specified dim");
1967 return *bnd;
1968 }
1969
1970 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* UBOUND(array, dim) runtime, __INT1_T result: return the declared upper
   bound of dimension *dim (1-based); aborts on invalid/absent dim. */
__INT1_T
ENTF90(UBOUND1, ubound1)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("UBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("UBOUND: upper bound not present for specified dim");
  return (__INT1_T)*bnd;
}
1989
1990 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* UBOUND(array, dim) runtime, __INT2_T result: return the declared upper
   bound of dimension *dim (1-based); aborts on invalid/absent dim. */
__INT2_T
ENTF90(UBOUND2, ubound2)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("UBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("UBOUND: upper bound not present for specified dim");
  return (__INT2_T)*bnd;
}
2009
2010 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* UBOUND(array, dim) runtime, __INT4_T result: return the declared upper
   bound of dimension *dim (1-based); aborts on invalid/absent dim. */
__INT4_T
ENTF90(UBOUND4, ubound4)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("UBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("UBOUND: upper bound not present for specified dim");
  return (__INT4_T)*bnd;
}
2029
2030 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* UBOUND(array, dim) runtime, __INT8_T result: return the declared upper
   bound of dimension *dim (1-based); aborts on invalid/absent dim. */
__INT8_T
ENTF90(UBOUND8, ubound8)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("UBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("UBOUND: upper bound not present for specified dim");
  return (__INT8_T)*bnd;
}
2049
2050 /* Varargs: __INT_T *ub1, ... __INT_T *ub<rank> */
/* -i8 (64-bit default integer) variant of UBOUND: return the declared
   upper bound of dimension *dim as __INT8_T. */
__INT8_T
ENTF90(KUBOUND, kubound)(__INT4_T *rank, __INT4_T *dim, ...)
{

  /*
   * -i8 variant of ubound
   */

  va_list va;
  __INT_T *bnd;
  __INT_T d;

  d = *dim;
  if (d < 1 || d > *rank)
    __fort_abort("UBOUND: invalid dim");
  va_start(va, dim);
  while (d-- > 0)
    bnd = va_arg(va, __INT_T *);
  va_end(va);
  if (!ISPRESENT(bnd))
    __fort_abort("UBOUND: upper bound not present for specified dim");
  return (__INT8_T)*bnd;
}
2074
2075 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUND(array) runtime (single bound pointer per dimension): copy the
   declared lower bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDA, lbounda)(__INT_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2093
2094 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDA variant with __INT1_T result elements: copy the declared lower
   bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDA1, lbounda1)(__INT1_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2112
2113 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDA variant with __INT2_T result elements: copy the declared lower
   bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDA2, lbounda2)(__INT2_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2131
2132 /*Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDA variant with __INT4_T result elements: copy the declared lower
   bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDA4, lbounda4)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2150
2151 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDA variant with __INT8_T result elements: copy the declared lower
   bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDA8, lbounda8)(__INT8_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2169
2170 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* -i8 (64-bit default integer) variant of LBOUNDA: copy the declared
   lower bound of each of the *size dimensions into arr as __INT8_T. */
void
ENTF90(KLBOUNDA, klbounda)(__INT8_T *arr, __INT4_T *size, ...)
{

  /*
   * -i8 variant of LBOUNDA
   */

  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2193
2194 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUND(array) runtime (single bound pointer per dimension): copy the
   declared upper bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDA, ubounda)(__INT_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2212
2213 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDA variant with __INT1_T result elements: copy the declared upper
   bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDA1, ubounda1)(__INT1_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2231
2232 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDA variant with __INT2_T result elements: copy the declared upper
   bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDA2, ubounda2)(__INT2_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2250
2251 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDA variant with __INT4_T result elements: copy the declared upper
   bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDA4, ubounda4)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2269
2270 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDA variant with __INT8_T result elements: copy the declared upper
   bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDA8, ubounda8)(__INT8_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2288
2289 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* -i8 (64-bit default integer) variant of UBOUNDA: copy the declared
   upper bound of each of the *size dimensions into arr as __INT8_T. */
void
ENTF90(KUBOUNDA, kubounda)(__INT8_T *arr, __INT4_T *size, ...)
{

  /*
   * -i8 variant of ubounda
   */

  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2312
2313 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDA, default-kind variant writing __INT4_T results (NOTE(review):
   body identical to LBOUNDA4's; distinction presumably in compiler
   call-site selection -- confirm against the front end). */
void
ENTF90(LBOUNDAZ, lboundaz)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2331
2332 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDAZ variant with __INT1_T result elements: copy the declared lower
   bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDAZ1, lboundaz1)(__INT1_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2350
2351 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDAZ variant with __INT2_T result elements: copy the declared lower
   bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDAZ2, lboundaz2)(__INT2_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2369
2370 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDAZ variant with __INT4_T result elements: copy the declared lower
   bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDAZ4, lboundaz4)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2388
2389 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* LBOUNDAZ variant with __INT8_T result elements: copy the declared lower
   bound of each of the *size dimensions into arr. */
void
ENTF90(LBOUNDAZ8, lboundaz8)(__INT8_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2407
2408 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* -i8 (64-bit default integer) variant of LBOUNDAZ: copy the declared
   lower bound of each of the *size dimensions into arr as __INT8_T. */
void
ENTF90(KLBOUNDAZ, klboundaz)(__INT8_T *arr, __INT4_T *size, ...)
{

  /*
   * -i8 variant of LBOUNDAZ
   */

  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("LBOUND: lower bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2431
2432 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDA, default-kind variant writing __INT4_T results (NOTE(review):
   body identical to UBOUNDA4's; distinction presumably in compiler
   call-site selection -- confirm against the front end). */
void
ENTF90(UBOUNDAZ, uboundaz)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2450
2451 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDAZ variant with __INT1_T result elements: copy the declared upper
   bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDAZ1, uboundaz1)(__INT1_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2469
2470 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDAZ variant with __INT2_T result elements: copy the declared upper
   bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDAZ2, uboundaz2)(__INT2_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2488
2489 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDAZ variant with __INT4_T result elements: copy the declared upper
   bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDAZ4, uboundaz4)(__INT4_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2507
2508 /* Varargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* UBOUNDAZ variant with __INT8_T result elements: copy the declared upper
   bound of each of the *size dimensions into arr. */
void
ENTF90(UBOUNDAZ8, uboundaz8)(__INT8_T *arr, __INT4_T *size, ...)
{
  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2526
2527 /* Vargs: __INT_T *lb1, ... __INT_T *lb<size> */
/* -i8 (64-bit default integer) variant of UBOUNDAZ: copy the declared
   upper bound of each of the *size dimensions into arr as __INT8_T. */
void
ENTF90(KUBOUNDAZ, kuboundaz)(__INT8_T *arr, __INT4_T *size, ...)
{

  /*
   * -i8 variant of uboundaz
   */

  va_list va;
  __INT_T *bnd;
  __INT_T s;

  s = *size;
  va_start(va, size);
  while (s-- > 0) {
    bnd = va_arg(va, __INT_T *);
    if (!ISPRESENT(bnd))
      __fort_abort("UBOUND: upper bound not present");
    *arr++ = *bnd;
  }
  va_end(va);
}
2550
2551 /* Vargs: { *lwb, *upb, *stride }* */
/* SIZE runtime: if dim is absent, return the total element count
   (product of all extents); otherwise return the extent of dimension
   *dim (1-based).  Varargs are (lwb, upb, stride) pointer triples per
   dimension; a negative computed extent is clamped to 0.
   NOTE(review): assumes *stride != 0 -- division by it is unguarded. */
__INT_T
ENTF90(SIZE, size)(__INT4_T *rank, __INT4_T *dim, ...)
{
  va_list va;
  __INT_T *lwb, *upb, *stride;
  int d;
  __INT_T extent;

  va_start(va, dim);

  if (!ISPRESENT(dim)) {

    /* size = product of all extents */

    extent = 1;
    d = *rank;
    while (d-- > 0) {
      lwb = va_arg(va, __INT_T *);
      upb = va_arg(va, __INT_T *);
      stride = va_arg(va, __INT_T *);
      if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
        __fort_abort("SIZE: bounds not present");
      extent *= (*upb - *lwb + *stride) / *stride;
      if (extent < 0)
        extent = 0;
    }
  } else {

    /* size = extent in dimension 'dim' */

    d = *dim;
    if (d < 1 || d > *rank)
      __fort_abort("SIZE: invalid dim");

    while (d-- > 0) {
      lwb = va_arg(va, __INT_T *);
      upb = va_arg(va, __INT_T *);
      stride = va_arg(va, __INT_T *);
    }
    if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
      __fort_abort("SIZE: bounds not present for specified dim");
    extent = (*upb - *lwb + *stride) / *stride;
    if (extent < 0)
      extent = 0;
  }
  va_end(va);
  return extent;
}
2600
2601 /* Vargs: { *lwb, *upb, *stride }* */
2602 __INT8_T
ENTF90(KSIZE,ksize)2603 ENTF90(KSIZE, ksize)(__INT4_T *rank, __INT4_T *dim, ...)
2604 {
2605
2606 /*
2607 * -i8 variant of SIZE
2608 */
2609
2610 va_list va;
2611 __INT_T *lwb, *upb, *stride;
2612 int d;
2613 __INT_T extent;
2614
2615 va_start(va, dim);
2616
2617 if (!ISPRESENT(dim)) {
2618
2619 /* size = product of all extents */
2620
2621 extent = 1;
2622 d = *rank;
2623 while (d-- > 0) {
2624 lwb = va_arg(va, __INT_T *);
2625 upb = va_arg(va, __INT_T *);
2626 stride = va_arg(va, __INT_T *);
2627 if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
2628 __fort_abort("SIZE: bounds not present");
2629 extent *= (*upb - *lwb + *stride) / *stride;
2630 if (extent < 0)
2631 extent = 0;
2632 }
2633 } else {
2634
2635 /* size = extent in dimension 'dim' */
2636
2637 d = *dim;
2638 if (d < 1 || d > *rank)
2639 __fort_abort("SIZE: invalid dim");
2640
2641 while (d-- > 0) {
2642 lwb = va_arg(va, __INT_T *);
2643 upb = va_arg(va, __INT_T *);
2644 stride = va_arg(va, __INT_T *);
2645 }
2646 if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
2647 __fort_abort("SIZE: bounds not present for specified dim");
2648 extent = (*upb - *lwb + *stride) / *stride;
2649 if (extent < 0)
2650 extent = 0;
2651 }
2652 va_end(va);
2653 return (__INT8_T)extent;
2654 }
2655
2656 #define BITS_PER_BYTE 8
2657 __INT8_T
ENTF90(CLASS_OBJ_SIZE,class_obj_size)2658 ENTF90(CLASS_OBJ_SIZE, class_obj_size)(F90_Desc *d)
2659 {
2660 __INT8_T storage_sz;
2661
2662 if (!d->tag)
2663 return 0;
2664
2665 storage_sz = ENTF90(KGET_OBJECT_SIZE, kget_object_size)(d);
2666 return storage_sz;
2667 }
2668
2669 /* Vargs: { *lwb, *upb, *stride }* */
2670 void
ENTF90(SHAPE,shape)2671 ENTF90(SHAPE, shape)(__INT4_T *arr, __INT_T *rank, ...)
2672 {
2673 va_list va;
2674 __INT_T *lwb, *upb, *stride;
2675 int d;
2676 __INT_T extent;
2677
2678 d = *rank;
2679 va_start(va, rank);
2680 while (d-- > 0) {
2681 lwb = va_arg(va, __INT_T *);
2682 upb = va_arg(va, __INT_T *);
2683 stride = va_arg(va, __INT_T *);
2684 if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
2685 __fort_abort("SHAPE: bounds not present");
2686 extent = (*upb - *lwb + *stride) / *stride;
2687 if (extent < 0)
2688 extent = 0;
2689 *arr++ = extent;
2690 }
2691 va_end(va);
2692 }
2693
2694 /* Vargs: { *lwb, *upb, *stride }* */
2695 void
ENTF90(SHAPE1,shape1)2696 ENTF90(SHAPE1, shape1)(__INT1_T *arr, __INT_T *rank, ...)
2697 {
2698 va_list va;
2699 __INT_T *lwb, *upb, *stride;
2700 int d;
2701 __INT_T extent;
2702
2703 d = *rank;
2704 va_start(va, rank);
2705 while (d-- > 0) {
2706 lwb = va_arg(va, __INT_T *);
2707 upb = va_arg(va, __INT_T *);
2708 stride = va_arg(va, __INT_T *);
2709 if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
2710 __fort_abort("SHAPE: bounds not present");
2711 extent = (*upb - *lwb + *stride) / *stride;
2712 if (extent < 0)
2713 extent = 0;
2714 *arr++ = extent;
2715 }
2716 va_end(va);
2717 }
2718
2719 /* Vargs: { *lwb, *upb, *stride }* */
2720 void
ENTF90(SHAPE2,shape2)2721 ENTF90(SHAPE2, shape2)(__INT2_T *arr, __INT_T *rank, ...)
2722 {
2723 va_list va;
2724 __INT_T *lwb, *upb, *stride;
2725 int d;
2726 __INT_T extent;
2727
2728 d = *rank;
2729 va_start(va, rank);
2730 while (d-- > 0) {
2731 lwb = va_arg(va, __INT_T *);
2732 upb = va_arg(va, __INT_T *);
2733 stride = va_arg(va, __INT_T *);
2734 if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
2735 __fort_abort("SHAPE: bounds not present");
2736 extent = (*upb - *lwb + *stride) / *stride;
2737 if (extent < 0)
2738 extent = 0;
2739 *arr++ = extent;
2740 }
2741 va_end(va);
2742 }
2743
2744 /* Vargs: { *lwb, *upb, *stride }* */
2745 void
ENTF90(SHAPE4,shape4)2746 ENTF90(SHAPE4, shape4)(__INT4_T *arr, __INT_T *rank, ...)
2747 {
2748 va_list va;
2749 __INT_T *lwb, *upb, *stride;
2750 int d;
2751 __INT_T extent;
2752
2753 d = *rank;
2754 va_start(va, rank);
2755 while (d-- > 0) {
2756 lwb = va_arg(va, __INT_T *);
2757 upb = va_arg(va, __INT_T *);
2758 stride = va_arg(va, __INT_T *);
2759 if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
2760 __fort_abort("SHAPE: bounds not present");
2761 extent = (*upb - *lwb + *stride) / *stride;
2762 if (extent < 0)
2763 extent = 0;
2764 *arr++ = extent;
2765 }
2766 va_end(va);
2767 }
2768
2769 /* Vargs: { *lwb, *upb, *stride }* */
2770 void
ENTF90(SHAPE8,shape8)2771 ENTF90(SHAPE8, shape8)(__INT8_T *arr, __INT_T *rank, ...)
2772 {
2773 va_list va;
2774 __INT_T *lwb, *upb, *stride;
2775 int d;
2776 __INT_T extent;
2777
2778 d = *rank;
2779 va_start(va, rank);
2780 while (d-- > 0) {
2781 lwb = va_arg(va, __INT_T *);
2782 upb = va_arg(va, __INT_T *);
2783 stride = va_arg(va, __INT_T *);
2784 if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
2785 __fort_abort("SHAPE: bounds not present");
2786 extent = (*upb - *lwb + *stride) / *stride;
2787 if (extent < 0)
2788 extent = 0;
2789 *arr++ = extent;
2790 }
2791 va_end(va);
2792 }
2793
2794 /** \brief Vargs: { *lwb, *upb, *stride }* */
2795 void
ENTF90(KSHAPE,kshape)2796 ENTF90(KSHAPE, kshape)(__INT8_T *arr, __INT_T *rank, ...)
2797 {
2798 va_list va;
2799 __INT_T *lwb, *upb, *stride;
2800 int d;
2801 __INT_T extent;
2802
2803 d = *rank;
2804 va_start(va, rank);
2805 while (d-- > 0) {
2806 lwb = va_arg(va, __INT_T *);
2807 upb = va_arg(va, __INT_T *);
2808 stride = va_arg(va, __INT_T *);
2809 if (!ISPRESENT(lwb) || !ISPRESENT(upb) || !ISPRESENT(stride))
2810 __fort_abort("SHAPE: bounds not present");
2811 extent = (*upb - *lwb + *stride) / *stride;
2812 if (extent < 0)
2813 extent = 0;
2814 *arr++ = extent;
2815 }
2816 va_end(va);
2817 }
2818
2819 /** \brief ACHAR function returns LEN;
2820 *
2821 * avoid confusion on returning character result
2822 */
2823 __INT_T
ENTF90(ACHARA,achara)2824 ENTF90(ACHARA, achara)
2825 (DCHAR(res), void *i, __INT_T *size DCLEN64(res))
2826 {
2827 *CADR(res) = I8(__fort_varying_int)(i, size);
2828 return 1;
2829 }
2830 /* 32 bit CLEN version */
2831 __INT_T
ENTF90(ACHAR,achar)2832 ENTF90(ACHAR, achar)
2833 (DCHAR(res), void *i, __INT_T *size DCLEN(res))
2834 {
2835 return ENTF90(ACHARA, achara) (CADR(res), i, size, (__CLEN_T)CLEN(res));
2836 }
2837
2838 __CLEN_T
ENTF90(REPEATA,repeata)2839 ENTF90(REPEATA, repeata)
2840 (DCHAR(res), DCHAR(expr), void *ncopies, __INT_T *size DCLEN64(res) DCLEN64(expr))
2841 {
2842 __CLEN_T i, len;
2843 int _ncopies;
2844
2845 len = CLEN(expr);
2846 _ncopies = I8(__fort_varying_int)(ncopies, size);
2847 for (i = 0; i < _ncopies; ++i) {
2848 strncpy(CADR(res) + i * len, CADR(expr), len);
2849 }
2850 return _ncopies * len;
2851 }
2852 /* 32 bit CLEN version */
2853 __INT_T
ENTF90(REPEAT,repeat)2854 ENTF90(REPEAT, repeat)
2855 (DCHAR(res), DCHAR(expr), void *ncopies, __INT_T *size DCLEN(res) DCLEN(expr))
2856 {
2857 return (__INT_T)ENTF90(REPEATA, repeata) (CADR(res), CADR(expr), ncopies,
2858 size, (__CLEN_T)CLEN(res), (__CLEN_T)CLEN(expr));
2859 }
2860
/** \brief TRIM: copy 'expr' with trailing blanks removed into 'res';
 * returns the trimmed length (0 when 'expr' is empty or all blanks).
 */
__INT_T
ENTF90(TRIMA, trima)
(DCHAR(res), DCHAR(expr) DCLEN64(res) DCLEN64(expr))
{
  int i, j;
  char *rcptr;
  char *ecptr;

  /*
   * The for loop below results in a call to _c_mcopy1.
   * Not the most efficient thing to do when len is around 4 or so.
   * If the target generates illegal alignment errors, need to not do
   * int copies. So, enable fast code for x8664 only for now.
   *
  i = CLEN(expr)-1;
  while (i >= 0 && CADR(expr)[i] == ' ')
    --i;
  if (i < 0)
    return 0;
  for (j = 0; j <= i; ++j)
    CADR(res)[j] = CADR(expr)[j];
  return i+1;
  */
  /* scan backward to the last non-blank character; 'i' is then the
     trimmed length to copy */
  i = CLEN(expr);
  while (i > 0) {
    if (CADR(expr)[i - 1] != ' ') {
#if defined(TARGET_X8664)
      /* short-copy fast path: move up to two whole 4-byte words, then
         the remaining 0-3 bytes (alignment is tolerated on x86-64) */
      if (i <= 11) {
        int *rptr = ((int *)CADR(res));
        int *eptr = ((int *)CADR(expr));
        if (i & 0xc) { /* at least one full word to move */
          *rptr = *eptr;
          if (i == 4)
            return i;
          rptr++;
          eptr++;
          if (i & 8) { /* a second full word */
            *rptr = *eptr;
            if (i == 8)
              return i;
            rptr++;
            eptr++;
          }
        }
        rcptr = (char *)rptr;
        ecptr = (char *)eptr;
#else
      /* no unaligned word copies: byte-copy the 0-3 remaining bytes */
      if (i <= 3) {
        rcptr = ((char *)CADR(res));
        ecptr = ((char *)CADR(expr));
#endif
        /* copy the trailing i mod 4 bytes one at a time */
        j = i & 3;
        if (j > 2)
          *rcptr++ = *ecptr++;
        if (j > 1)
          *rcptr++ = *ecptr++;
        if (j > 0)
          *rcptr = *ecptr;
      } else {
        /* general case: plain byte loop (compiles to _c_mcopy1) */
        for (j = 0; j < i; ++j)
          CADR(res)[j] = CADR(expr)[j];
      }
      return i;
    } else {
      --i;
    }
  }
  return 0;
}
2930 /* 32 bit CLEN version */
2931 __INT_T
2932 ENTF90(TRIM, trim)
2933 (DCHAR(res), DCHAR(expr) DCLEN(res) DCLEN(expr))
2934 {
2935 return ENTF90(TRIMA, trima) (CADR(res), CADR(expr), (__CLEN_T)CLEN(res), (__CLEN_T)CLEN(expr));
2936 }
2937
2938 __INT_T
2939 ENTF90(IACHARA, iachara)(DCHAR(c) DCLEN64(c)) { return *CADR(c); }
2940 /* 32 bit CLEN version */
2941 __INT_T
2942 ENTF90(IACHAR, iachar)(DCHAR(c) DCLEN(c))
2943 {
2944 return ENTF90(IACHARA, iachara)(CADR(c), (__CLEN_T)CLEN(c));
2945 }
2946
2947 /** \brief
2948 * -i8 variant of iachar
2949 */
2950 __INT8_T
2951 ENTF90(KIACHARA, kiachara)(DCHAR(c) DCLEN64(c))
2952 {
2953 return (__INT8_T)*CADR(c);
2954 }
2955 /* 32 bit CLEN version */
2956 __INT8_T
2957 ENTF90(KIACHAR, kiachar)(DCHAR(c) DCLEN(c))
2958 {
2959 return ENTF90(KIACHARA, kiachara)(CADR(c), (__CLEN_T)CLEN(c));
2960 }
2961
2962 void
2963 ENTF90(MERGECHA, mergecha)(DCHAR(result), DCHAR(tsource), DCHAR(fsource),
2964 void *mask, __INT_T *szmask DCLEN64(result)
2965 DCLEN64(tsource) DCLEN64(fsource))
2966 {
2967 if (I8(__fort_varying_log)(mask, szmask))
2968 fstrcpy(CADR(result), CADR(tsource), CLEN(result), CLEN(tsource));
2969 else
2970 fstrcpy(CADR(result), CADR(fsource), CLEN(result), CLEN(fsource));
2971 }
2972 /* 32 bit CLEN version */
2973 void
2974 ENTF90(MERGECH, mergech)(DCHAR(result), DCHAR(tsource), DCHAR(fsource),
2975 void *mask, __INT_T *szmask DCLEN(result)
2976 DCLEN(tsource) DCLEN(fsource))
2977 {
2978 ENTF90(MERGECHA, mergecha)(CADR(result), CADR(tsource), CADR(fsource), mask,
2979 szmask, (__CLEN_T)CLEN(result),
2980 (__CLEN_T)CLEN(tsource), (__CLEN_T)CLEN(fsource));
2981 }
2982
2983 void
2984 ENTF90(MERGEDT, mergedt)(void *result, void *tsource, void *fsource,
2985 __INT_T *size, void *mask, __INT_T *szmask)
2986 {
2987 __fort_bcopy(result, I8(__fort_varying_log)(mask, szmask) ? tsource : fsource,
2988 *size);
2989 }
2990
2991 __INT1_T
2992 ENTF90(MERGEI1, mergei1)
2993 (__INT1_T *tsource, __INT1_T *fsource, void *mask, __INT_T *size)
2994 {
2995 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
2996 }
2997
2998 __INT2_T
2999 ENTF90(MERGEI2, mergei2)
3000 (__INT2_T *tsource, __INT2_T *fsource, void *mask, __INT_T *size)
3001 {
3002 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3003 }
3004
3005 __INT4_T
3006 ENTF90(MERGEI, mergei)
3007 (__INT4_T *tsource, __INT4_T *fsource, void *mask, __INT_T *size)
3008 {
3009 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3010 }
3011
3012 __INT8_T
3013 ENTF90(MERGEI8, mergei8)
3014 (__INT8_T *tsource, __INT8_T *fsource, void *mask, __INT_T *size)
3015 {
3016 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3017 }
3018
3019 __LOG1_T
3020 ENTF90(MERGEL1, mergel1)
3021 (__LOG1_T *tsource, __LOG1_T *fsource, void *mask, __INT_T *size)
3022 {
3023 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3024 }
3025
3026 __LOG2_T
3027 ENTF90(MERGEL2, mergel2)
3028 (__LOG2_T *tsource, __LOG2_T *fsource, void *mask, __INT_T *size)
3029 {
3030 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3031 }
3032
3033 __LOG4_T
3034 ENTF90(MERGEL, mergel)
3035 (__LOG4_T *tsource, __LOG4_T *fsource, void *mask, __INT_T *size)
3036 {
3037 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3038 }
3039
3040 __LOG8_T
3041 ENTF90(MERGEL8, mergel8)
3042 (__LOG8_T *tsource, __LOG8_T *fsource, void *mask, __INT_T *size)
3043 {
3044 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3045 }
3046
3047 __REAL4_T
3048 ENTF90(MERGER, merger)
3049 (__REAL4_T *tsource, __REAL4_T *fsource, void *mask, __INT_T *size)
3050 {
3051 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3052 }
3053
3054 __REAL8_T
3055 ENTF90(MERGED, merged)
3056 (__REAL8_T *tsource, __REAL8_T *fsource, void *mask, __INT_T *size)
3057 {
3058 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3059 }
3060
3061 __REAL16_T
3062 ENTF90(MERGEQ, mergeq)
3063 (__REAL16_T *tsource, __REAL16_T *fsource, void *mask, __INT_T *size)
3064 {
3065 return I8(__fort_varying_log)(mask, size) ? *tsource : *fsource;
3066 }
3067
3068 __INT_T
3069 ENTF90(LENTRIMA, lentrima)(DCHAR(str) DCLEN64(str))
3070 {
3071 __INT_T i;
3072
3073 for (i = (__INT_T)CLEN(str); i-- > 0;)
3074 if (CADR(str)[i] != ' ')
3075 break;
3076 return i + 1;
3077 }
3078 /* 32 bit CLEN version */
3079 __INT_T
3080 ENTF90(LENTRIM, lentrim)(DCHAR(str) DCLEN(str))
3081 {
3082 return ENTF90(LENTRIMA, lentrima)(CADR(str), (__CLEN_T)CLEN(str));
3083 }
3084
3085 __INT8_T
3086 ENTF90(KLENTRIMA, klentrima)(DCHAR(str) DCLEN64(str))
3087 {
3088 /*
3089 * -i8 variant of lentrim
3090 */
3091
3092 __INT8_T i;
3093
3094 for (i = (__INT8_T)CLEN(str); i-- > 0;)
3095 if (CADR(str)[i] != ' ')
3096 break;
3097 return i + 1;
3098 }
3099 /* 32 bit CLEN version */
3100 __INT8_T
3101 ENTF90(KLENTRIM, klentrim)(DCHAR(str) DCLEN(str))
3102 {
3103 return (__INT8_T) ENTF90(KLENTRIMA, klentrima)(CADR(str), (__CLEN_T)CLEN(str));
3104 }
3105
3106 __INT_T
3107 ENTF90(SCANA, scana)
3108 (DCHAR(str), DCHAR(set), void *back, __INT_T *size DCLEN64(str) DCLEN64(set))
3109 {
3110 __INT_T i, j;
3111
3112 if (ISPRESENT(back) && I8(__fort_varying_log)(back, size)) {
3113 for (i = (__INT_T)CLEN(str); i-- > 0;)
3114 for (j = 0; j < (__INT_T)CLEN(set); ++j)
3115 if (CADR(set)[j] == CADR(str)[i])
3116 return i + 1;
3117 return 0;
3118 } else {
3119 for (i = 0; i < (__INT_T)CLEN(str); ++i)
3120 for (j = 0; j < (__INT_T)CLEN(set); ++j)
3121 if (CADR(set)[j] == CADR(str)[i])
3122 return i + 1;
3123 return 0;
3124 }
3125 }
3126 /* 32 bit CLEN version */
3127 __INT_T
3128 ENTF90(SCAN, scan)
3129 (DCHAR(str), DCHAR(set), void *back, __INT_T *size DCLEN(str) DCLEN(set))
3130 {
3131 return ENTF90(SCANA, scana) (CADR(str), CADR(set), back, size,
3132 (__CLEN_T)CLEN(str), (__CLEN_T)CLEN(set));
3133 }
3134
3135 /** \brief
3136 * -i8 variant of SCAN
3137 */
3138 __INT8_T
3139 ENTF90(KSCANA, kscana)
3140 (DCHAR(str), DCHAR(set), void *back, __INT_T *size DCLEN64(str) DCLEN64(set))
3141 {
3142 __INT8_T i, j;
3143
3144 if (ISPRESENT(back) && I8(__fort_varying_log)(back, size)) {
3145 for (i = (__INT8_T)CLEN(str); i-- > 0;)
3146 for (j = 0; j < (__INT8_T)CLEN(set); ++j)
3147 if (CADR(set)[j] == CADR(str)[i])
3148 return i + 1;
3149 return 0;
3150 } else {
3151 for (i = 0; i < (__INT8_T)CLEN(str); ++i)
3152 for (j = 0; j < (__INT8_T)CLEN(set); ++j)
3153 if (CADR(set)[j] == CADR(str)[i])
3154 return i + 1;
3155 return 0;
3156 }
3157 }
3158 /* 32 bit CLEN version */
3159 __INT8_T
3160 ENTF90(KSCAN, kscan)
3161 (DCHAR(str), DCHAR(set), void *back, __INT_T *size DCLEN(str) DCLEN(set))
3162 {
3163 return ENTF90(KSCANA, kscana) (CADR(str), CADR(set), back, size,
3164 (__CLEN_T)CLEN(str), (__CLEN_T)CLEN(set));
3165 }
3166
3167 __INT_T
3168 ENTF90(VERIFYA, verifya)
3169 (DCHAR(str), DCHAR(set), void *back, __INT_T *size DCLEN64(str) DCLEN64(set))
3170 {
3171 __INT_T i, j;
3172
3173 if (ISPRESENT(back) && I8(__fort_varying_log)(back, size)) {
3174 for (i = (__INT_T)CLEN(str); i-- > 0;) {
3175 for (j = 0; j < (__INT_T)CLEN(set); ++j)
3176 if (CADR(set)[j] == CADR(str)[i])
3177 goto contb;
3178 return i + 1;
3179 contb:;
3180 }
3181 return 0;
3182 } else {
3183 for (i = 0; i < (__INT_T)CLEN(str); ++i) {
3184 for (j = 0; j < (__INT_T)CLEN(set); ++j)
3185 if (CADR(set)[j] == CADR(str)[i])
3186 goto contf;
3187 return i + 1;
3188 contf:;
3189 }
3190 return 0;
3191 }
3192 }
3193 /* 32 bit CLEN version */
3194 __INT_T
3195 ENTF90(VERIFY, verify)
3196 (DCHAR(str), DCHAR(set), void *back, __INT_T *size DCLEN(str) DCLEN(set))
3197 {
3198 return ENTF90(VERIFYA, verifya) (CADR(str), CADR(set), back, size,
3199 (__CLEN_T)CLEN(str), (__CLEN_T)CLEN(set));
3200 }
3201
3202 /** \brief
3203 * -i8 variant of VERIFY
3204 */
3205 __INT8_T
3206 ENTF90(KVERIFYA, kverifya)
3207 (DCHAR(str), DCHAR(set), void *back, __INT_T *size DCLEN64(str) DCLEN64(set))
3208 {
3209 __INT8_T i, j;
3210
3211 if (ISPRESENT(back) && I8(__fort_varying_log)(back, size)) {
3212 for (i = (__INT8_T)CLEN(str); i-- > 0;) {
3213 for (j = 0; j < (__INT8_T)CLEN(set); ++j)
3214 if (CADR(set)[j] == CADR(str)[i])
3215 goto contb;
3216 return i + 1;
3217 contb:;
3218 }
3219 return 0;
3220 } else {
3221 for (i = 0; i < (__INT8_T)CLEN(str); ++i) {
3222 for (j = 0; j < (__INT8_T)CLEN(set); ++j)
3223 if (CADR(set)[j] == CADR(str)[i])
3224 goto contf;
3225 return i + 1;
3226 contf:;
3227 }
3228 return 0;
3229 }
3230 }
3231 /* 32 bit CLEN version */
3232 __INT8_T
3233 ENTF90(KVERIFY, kverify)
3234 (DCHAR(str), DCHAR(set), void *back, __INT_T *size DCLEN(str) DCLEN(set))
3235 {
3236 return ENTF90(KVERIFYA, kverifya) (CADR(str), CADR(set), back,
3237 size, (__CLEN_T)CLEN(str), (__CLEN_T)CLEN(set));
3238 }
3239
3240 /** \brief
3241 * -i8 variant of INDEX
3242 */
3243 __INT8_T
3244 ENTF90(KINDEXA, kindexa)
3245 (DCHAR(string), DCHAR(substring), void *back,
3246 __INT_T *size DCLEN64(string) DCLEN64(substring))
3247 {
3248 __INT8_T i, n;
3249
3250 n = (__INT8_T)CLEN(string) - (__INT8_T)CLEN(substring);
3251 if (n < 0)
3252 return 0;
3253 if (ISPRESENT(back) && I8(__fort_varying_log)(back, size)) {
3254 if (CLEN(substring) == 0)
3255 return (__INT8_T)CLEN(string) + 1;
3256 for (i = n; i >= 0; --i) {
3257 if (CADR(string)[i] == CADR(substring)[0] &&
3258 strncmp(CADR(string) + i, CADR(substring), CLEN(substring)) == 0)
3259 return i + 1;
3260 }
3261 } else {
3262 if (CLEN(substring) == 0)
3263 return 1;
3264 for (i = 0; i <= n; ++i) {
3265 if (CADR(string)[i] == CADR(substring)[0] &&
3266 strncmp(CADR(string) + i, CADR(substring), CLEN(substring)) == 0)
3267 return i + 1;
3268 }
3269 }
3270 return 0;
3271 }
3272 /* 32 bit CLEN version */
3273 __INT8_T
3274 ENTF90(KINDEX, kindex)
3275 (DCHAR(string), DCHAR(substring), void *back,
3276 __INT_T *size DCLEN(string) DCLEN(substring))
3277 {
3278 return ENTF90(KINDEXA, kindexa) (CADR(string), CADR(substring),
3279 back, size, (__CLEN_T)CLEN(string), (__CLEN_T)CLEN(substring));
3280 }
3281
3282 __INT_T
3283 ENTF90(INDEXA, indexa)
3284 (DCHAR(string), DCHAR(substring), void *back,
3285 __INT_T *size DCLEN64(string) DCLEN64(substring))
3286 {
3287 __INT_T i, n;
3288
3289 n = (__INT_T)CLEN(string) - (__INT_T)CLEN(substring);
3290 if (n < 0)
3291 return 0;
3292 if (ISPRESENT(back) && I8(__fort_varying_log)(back, size)) {
3293 if (CLEN(substring) == 0)
3294 return (__INT_T)CLEN(string) + 1;
3295 for (i = n; i >= 0; --i) {
3296 if (CADR(string)[i] == CADR(substring)[0] &&
3297 strncmp(CADR(string) + i, CADR(substring), CLEN(substring)) == 0)
3298 return i + 1;
3299 }
3300 } else {
3301 if (CLEN(substring) == 0)
3302 return 1;
3303 for (i = 0; i <= n; ++i) {
3304 if (CADR(string)[i] == CADR(substring)[0] &&
3305 strncmp(CADR(string) + i, CADR(substring), CLEN(substring)) == 0)
3306 return i + 1;
3307 }
3308 }
3309 return 0;
3310 }
3311 /* 32 bit CLEN version */
3312 __INT_T
3313 ENTF90(INDEX, index)
3314 (DCHAR(string), DCHAR(substring), void *back,
3315 __INT_T *size DCLEN(string) DCLEN(substring))
3316 {
3317 return ENTF90(INDEXA, indexa) (CADR(string), CADR(substring), back,
3318 size, (__CLEN_T)CLEN(string), (__CLEN_T)CLEN(substring));
3319 }
3320
__INT_T
ENTFTN(LEADZ, leadz)(void *i, __INT_T *size)
{
  /* Number of leading zero bits in the *size-byte integer at 'i',
     found by a binary search for the highest set bit: at each step,
     if the upper half is nonzero, discard the lower half. */
  unsigned ui; /* unsigned representation of 'i' */
  int nz; /* number of leading zero bits in 'i' */
  int k;

  ui = (unsigned)I8(__fort_varying_int)(i, size);
  nz = *size * 8; /* start assuming all bits are zero */
  k = nz >> 1;
  while (k) {
    if (ui >> k) {
      /* a set bit lies in the upper k bits; shift them down */
      ui >>= k;
      nz -= k;
    }
    k >>= 1;
  }
  if (ui)
    --nz; /* account for the final remaining set bit */
  return nz;
}
3342
__INT_T
ENTFTN(POPCNT, popcnt)(void *i, __INT_T *size)
{
  /* Number of set bits in the *size-byte integer at 'i', computed with
     the classic SWAR reduction: sum adjacent 1-bit, 2-bit, then 4-bit
     fields in parallel, then fold the per-byte counts together.
     (The 0x07 masks at the 4-bit stage suffice because each 4-bit
     field holds a count of at most 4.) */
  unsigned ui, uj; /* unsigned representation of 'i' */
  __INT8_T ll;

  switch (*size) {
  case 8:
    /* reduce the two 32-bit halves side by side, then combine */
    ll = *(__INT8_T *)i;
    ui = (unsigned)(ll & 0xFFFFFFFF);
    uj = (unsigned)((ll >> 32) & 0xFFFFFFFF);
    ui = (ui & 0x55555555) + (ui >> 1 & 0x55555555);
    uj = (uj & 0x55555555) + (uj >> 1 & 0x55555555);
    ui = (ui & 0x33333333) + (ui >> 2 & 0x33333333);
    uj = (uj & 0x33333333) + (uj >> 2 & 0x33333333);
    ui = (ui & 0x07070707) + (ui >> 4 & 0x07070707);
    ui += (uj & 0x07070707) + (uj >> 4 & 0x07070707);
    ui += ui >> 8;
    ui += ui >> 16;
    ui &= 0x7f; /* total count <= 64 */
    break;
  case 4:
    ui = (unsigned)(*(__INT4_T *)i);
    ui = (ui & 0x55555555) + (ui >> 1 & 0x55555555);
    ui = (ui & 0x33333333) + (ui >> 2 & 0x33333333);
    ui = (ui & 0x07070707) + (ui >> 4 & 0x07070707);
    ui += ui >> 8;
    ui += ui >> 16;
    ui &= 0x3f; /* total count <= 32 */
    break;
  case 2:
    ui = (unsigned)(*(__INT2_T *)i);
    ui = (ui & 0x5555) + (ui >> 1 & 0x5555);
    ui = (ui & 0x3333) + (ui >> 2 & 0x3333);
    ui = (ui & 0x0707) + (ui >> 4 & 0x0707);
    ui += ui >> 8;
    ui &= 0x1f; /* total count <= 16 */
    break;
  case 1:
    ui = (unsigned)(*(__INT1_T *)i);
    ui = (ui & 0x55) + (ui >> 1 & 0x55);
    ui = (ui & 0x33) + (ui >> 2 & 0x33);
    ui += ui >> 4;
    ui &= 0xf; /* total count <= 8 */
    break;
  default:
    __fort_abort("POPCNT: invalid size");
  }
  return ui;
}
3393
__INT_T
ENTFTN(POPPAR, poppar)(void *i, __INT_T *size)
{
  /* Parity (0 or 1) of the number of set bits in the *size-byte
     integer at 'i': XOR-fold the value down byte by byte inside the
     switch, then fold the final byte to one bit after it. */
  int ii;
  __INT8_T ll;

  switch (*size) {
  case 8:
    ll = *(__INT8_T *)i;
    ii = ll ^ ll >> 32; /* fold the 64-bit value to 32 bits */
    ii ^= ii >> 16;
    ii ^= ii >> 8;
    break;
  case 4:
    ii = *(__INT4_T *)i;
    ii ^= ii >> 16;
    ii ^= ii >> 8;
    break;
  case 2:
    ii = *(__INT2_T *)i;
    ii ^= ii >> 8;
    break;
  case 1:
    ii = *(__INT1_T *)i;
    break;
  default:
    __fort_abort("POPPAR: invalid size");
  }
  /* fold the low byte down to a single parity bit */
  ii ^= ii >> 4;
  ii ^= ii >> 2;
  ii ^= ii >> 1;
  return ii & 1;
}
3427
/*
 * The following functions are called when the compiler needs to invoke an
 * intrinsic which has lost its intrinsic property (i.e., the intrinsic
 * name has been redefined as a user-defined object).
 */
3433
3434 __INT4_T
3435 ENTF90(JMAX0, jmax0)(__INT4_T *a, __INT4_T *b) { return (*a > *b) ? *a : *b; }
3436
3437 __INT4_T
3438 ENTF90(MAX0, max0)(__INT4_T *a, __INT4_T *b) { return (*a > *b) ? *a : *b; }
3439
3440 __INT8_T
3441 ENTF90(KMAX, kmax)(__INT8_T *a, __INT8_T *b) { return (*a > *b) ? *a : *b; }
3442
3443 __INT4_T
3444 ENTF90(MIN0, min0)(__INT4_T *a, __INT4_T *b) { return (*a < *b) ? *a : *b; }
3445
3446 __INT4_T
3447 ENTF90(MOD, mod)(__INT4_T *a, __INT4_T *b) { return *a % *b; }
3448
__INT_T
ENTF90(INT, int)(void *a, __INT_T *ty)
{
  /* INT intrinsic fallback: convert the scalar at 'a', whose runtime
     type code is *ty, to default integer via C's usual conversions.
     For complex arguments the real part is converted. */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __REAL8:
    return *(__REAL8_T *)a;
  case __REAL16:
    return *(__REAL16_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r;
  case __CPLX32:
    return ((__CPLX32_T *)a)->r;
  default:
    __fort_abort("INT: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3486
__INT1_T
ENTF90(INT1, int1)(void *a, __INT_T *ty)
{
  /* INT1: convert the scalar at 'a' (runtime type code *ty) to
     INTEGER*1 via C's usual conversions; complex uses the real part. */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __REAL8:
    return *(__REAL8_T *)a;
  case __REAL16:
    return *(__REAL16_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r;
  case __CPLX32:
    return ((__CPLX32_T *)a)->r;
  default:
    __fort_abort("INT1: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3524
__INT2_T
ENTF90(INT2, int2)(void *a, __INT_T *ty)
{
  /* INT2: convert the scalar at 'a' (runtime type code *ty) to
     INTEGER*2 via C's usual conversions; complex uses the real part. */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __REAL8:
    return *(__REAL8_T *)a;
  case __REAL16:
    return *(__REAL16_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r;
  case __CPLX32:
    return ((__CPLX32_T *)a)->r;
  default:
    __fort_abort("INT2: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3562
__INT4_T
ENTF90(INT4, int4)(void *a, __INT_T *ty)
{
  /* INT4: convert the scalar at 'a' (runtime type code *ty) to
     INTEGER*4 via C's usual conversions; complex uses the real part. */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __REAL8:
    return *(__REAL8_T *)a;
  case __REAL16:
    return *(__REAL16_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r;
  case __CPLX32:
    return ((__CPLX32_T *)a)->r;
  default:
    __fort_abort("INT4: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3600
__INT8_T
ENTF90(INT8, int8)(void *a, __INT_T *ty)
{
  /* INT8: convert the scalar at 'a' (runtime type code *ty) to
     INTEGER*8 via C's usual conversions; complex uses the real part. */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __REAL8:
    return *(__REAL8_T *)a;
  case __REAL16:
    return *(__REAL16_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r;
  case __CPLX32:
    return ((__CPLX32_T *)a)->r;
  default:
    __fort_abort("INT8: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3638
__LOG1_T
ENTF90(LOG1, log1)(void *a, __INT_T *ty)
{
  /* LOG1: convert the integer/logical scalar at 'a' (runtime type code
     *ty) to LOGICAL*1 by testing it against the runtime's logical mask
     and returning the canonical LOG1 .TRUE. value or 0 (.FALSE.). */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a & GET_DIST_MASK_LOG1 ? GET_DIST_TRUE_LOG1 : 0;
  case __LOG1:
    return *(__LOG1_T *)a & GET_DIST_MASK_LOG1 ? GET_DIST_TRUE_LOG1 : 0;
  case __INT2:
    return *(__INT2_T *)a & GET_DIST_MASK_LOG2 ? GET_DIST_TRUE_LOG1 : 0;
  case __LOG2:
    return *(__LOG2_T *)a & GET_DIST_MASK_LOG2 ? GET_DIST_TRUE_LOG1 : 0;
  case __INT4:
    return *(__INT4_T *)a & GET_DIST_MASK_LOG4 ? GET_DIST_TRUE_LOG1 : 0;
  case __LOG4:
    return *(__LOG4_T *)a & GET_DIST_MASK_LOG4 ? GET_DIST_TRUE_LOG1 : 0;
  case __INT8:
    return *(__INT8_T *)a & GET_DIST_MASK_LOG8 ? GET_DIST_TRUE_LOG1 : 0;
  case __LOG8:
    return *(__LOG8_T *)a & GET_DIST_MASK_LOG8 ? GET_DIST_TRUE_LOG1 : 0;
  default:
    __fort_abort("LOG1: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3664
__LOG2_T
ENTF90(LOG2, log2)(void *a, __INT_T *ty)
{
  /* LOG2: convert the integer/logical scalar at 'a' (runtime type code
     *ty) to LOGICAL*2 by testing it against the runtime's logical mask
     and returning the canonical LOG2 .TRUE. value or 0 (.FALSE.). */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a & GET_DIST_MASK_LOG1 ? GET_DIST_TRUE_LOG2 : 0;
  case __LOG1:
    return *(__LOG1_T *)a & GET_DIST_MASK_LOG1 ? GET_DIST_TRUE_LOG2 : 0;
  case __INT2:
    return *(__INT2_T *)a & GET_DIST_MASK_LOG2 ? GET_DIST_TRUE_LOG2 : 0;
  case __LOG2:
    return *(__LOG2_T *)a & GET_DIST_MASK_LOG2 ? GET_DIST_TRUE_LOG2 : 0;
  case __INT4:
    return *(__INT4_T *)a & GET_DIST_MASK_LOG4 ? GET_DIST_TRUE_LOG2 : 0;
  case __LOG4:
    return *(__LOG4_T *)a & GET_DIST_MASK_LOG4 ? GET_DIST_TRUE_LOG2 : 0;
  case __INT8:
    return *(__INT8_T *)a & GET_DIST_MASK_LOG8 ? GET_DIST_TRUE_LOG2 : 0;
  case __LOG8:
    return *(__LOG8_T *)a & GET_DIST_MASK_LOG8 ? GET_DIST_TRUE_LOG2 : 0;
  default:
    __fort_abort("LOG2: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3690
__LOG4_T
ENTF90(LOG4, log4)(void *a, __INT_T *ty)
{
  /* LOG4: convert the integer/logical scalar at 'a' (runtime type code
     *ty) to LOGICAL*4 by testing it against the runtime's logical mask
     and returning the canonical LOG4 .TRUE. value or 0 (.FALSE.). */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a & GET_DIST_MASK_LOG1 ? GET_DIST_TRUE_LOG4 : 0;
  case __LOG1:
    return *(__LOG1_T *)a & GET_DIST_MASK_LOG1 ? GET_DIST_TRUE_LOG4 : 0;
  case __INT2:
    return *(__INT2_T *)a & GET_DIST_MASK_LOG2 ? GET_DIST_TRUE_LOG4 : 0;
  case __LOG2:
    return *(__LOG2_T *)a & GET_DIST_MASK_LOG2 ? GET_DIST_TRUE_LOG4 : 0;
  case __INT4:
    return *(__INT4_T *)a & GET_DIST_MASK_LOG4 ? GET_DIST_TRUE_LOG4 : 0;
  case __LOG4:
    return *(__LOG4_T *)a & GET_DIST_MASK_LOG4 ? GET_DIST_TRUE_LOG4 : 0;
  case __INT8:
    return *(__INT8_T *)a & GET_DIST_MASK_LOG8 ? GET_DIST_TRUE_LOG4 : 0;
  case __LOG8:
    return *(__LOG8_T *)a & GET_DIST_MASK_LOG8 ? GET_DIST_TRUE_LOG4 : 0;
  default:
    __fort_abort("LOG4: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3716
__LOG8_T
ENTF90(LOG8, log8)(void *a, __INT_T *ty)
{
  /* LOG8: convert the integer/logical scalar at 'a' (runtime type code
     *ty) to LOGICAL*8 by testing it against the runtime's logical mask
     and returning the canonical LOG8 .TRUE. value or 0 (.FALSE.). */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a & GET_DIST_MASK_LOG1 ? GET_DIST_TRUE_LOG8 : 0;
  case __LOG1:
    return *(__LOG1_T *)a & GET_DIST_MASK_LOG1 ? GET_DIST_TRUE_LOG8 : 0;
  case __INT2:
    return *(__INT2_T *)a & GET_DIST_MASK_LOG2 ? GET_DIST_TRUE_LOG8 : 0;
  case __LOG2:
    return *(__LOG2_T *)a & GET_DIST_MASK_LOG2 ? GET_DIST_TRUE_LOG8 : 0;
  case __INT4:
    return *(__INT4_T *)a & GET_DIST_MASK_LOG4 ? GET_DIST_TRUE_LOG8 : 0;
  case __LOG4:
    return *(__LOG4_T *)a & GET_DIST_MASK_LOG4 ? GET_DIST_TRUE_LOG8 : 0;
  case __INT8:
    return *(__INT8_T *)a & GET_DIST_MASK_LOG8 ? GET_DIST_TRUE_LOG8 : 0;
  case __LOG8:
    return *(__LOG8_T *)a & GET_DIST_MASK_LOG8 ? GET_DIST_TRUE_LOG8 : 0;
  default:
    __fort_abort("LOG8: invalid argument type");
  }
  return 0; /* sgi warning: gotta have a return! */
}
3742
__REAL_T
ENTF90(REAL, real)(void *a, __INT_T *ty)
{
  /* REAL intrinsic fallback: convert the scalar at 'a' (runtime type
     code *ty) to default real via C's usual conversions; complex uses
     the real part. */
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r;
  case __REAL8:
    return *(__REAL8_T *)a;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r;
  default:
    __fort_abort("REAL: invalid argument type");
  }
  return 0.0; /* sgi warning: gotta have a return! */
}
3776
/** \brief DBLE(a): convert a numeric/logical scalar of dynamic type
 * *ty to double precision; for complex arguments the real part is
 * taken. Aborts on unsupported argument types. */
__DBLE_T
ENTF90(DBLE, dble)(void *a, __INT_T *ty)
{
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r; /* real part */
  case __REAL8:
    return *(__REAL8_T *)a;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r; /* real part */
  default:
    __fort_abort("DBLE: invalid argument type");
  }
  return 0.0; /* sgi warning: gotta have a return! */
}
3810
/** \brief REAL(a, kind=4): convert a numeric/logical scalar of dynamic
 * type *ty to real(4); complex arguments yield their real part.
 * Aborts on unsupported argument types. */
__REAL4_T
ENTF90(REAL4, real4)(void *a, __INT_T *ty)
{
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r; /* real part */
  case __REAL8:
    return *(__REAL8_T *)a;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r; /* real part */
  default:
    __fort_abort("REAL4: invalid argument type");
  }
  return 0.0; /* sgi warning: gotta have a return! */
}
3844
/** \brief REAL(a, kind=8): convert a numeric/logical scalar of dynamic
 * type *ty to real(8); complex arguments yield their real part.
 * Aborts on unsupported argument types. */
__REAL8_T
ENTF90(REAL8, real8)(void *a, __INT_T *ty)
{
  switch (*ty) {
  case __INT1:
    return *(__INT1_T *)a;
  case __LOG1:
    return *(__LOG1_T *)a;
  case __INT2:
    return *(__INT2_T *)a;
  case __LOG2:
    return *(__LOG2_T *)a;
  case __INT4:
    return *(__INT4_T *)a;
  case __LOG4:
    return *(__LOG4_T *)a;
  case __INT8:
    return *(__INT8_T *)a;
  case __LOG8:
    return *(__LOG8_T *)a;
  case __REAL4:
    return *(__REAL4_T *)a;
  case __CPLX8:
    return ((__CPLX8_T *)a)->r; /* real part */
  case __REAL8:
    return *(__REAL8_T *)a;
  case __CPLX16:
    return ((__CPLX16_T *)a)->r; /* real part */
  default:
    __fort_abort("REAL8: invalid argument type");
  }
  return 0.0; /* sgi warning: gotta have a return! */
}
3878
3879 __REAL_T
3880 ENTF90(AMAX1, amax1)(__REAL_T *a, __REAL_T *b) { return (*a > *b) ? *a : *b; }
3881
3882 __DBLE_T
3883 ENTF90(DMAX1, dmax1)(__DBLE_T *a, __DBLE_T *b) { return (*a > *b) ? *a : *b; }
3884
3885 __REAL_T
3886 ENTF90(AMIN1, amin1)(__REAL_T *a, __REAL_T *b) { return (*a < *b) ? *a : *b; }
3887
3888 __DBLE_T
3889 ENTF90(DMIN1, dmin1)(__DBLE_T *a, __DBLE_T *b) { return (*a < *b) ? *a : *b; }
3890
3891 double fmod();
3892
3893 __REAL_T
3894 ENTF90(AMOD, amod)(__REAL_T *a, __REAL_T *b) { return fmod(*a, *b); }
3895
3896 __DBLE_T
3897 ENTF90(DMOD, dmod)(__DBLE_T *a, __DBLE_T *b)
3898 {
3899 return fmod(*a, *b);
3900 }
3901
3902 __INT4_T
3903 ENTF90(MODULO, modulo)(__INT4_T *a, __INT4_T *p)
3904 {
3905 __INT4_T q, r;
3906
3907 q = (*a) / (*p);
3908 r = (*a) - q * (*p);
3909 if (r != 0 && ((*a) ^ (*p)) < 0) { /* signs differ */
3910 r += (*p);
3911 }
3912 return r;
3913 }
3914
3915 __INT8_T
3916 ENTF90(I8MODULO, i8modulo)(__INT8_T *a, __INT8_T *p)
3917 {
3918 __INT8_T q, r;
3919
3920 q = (*a) / (*p);
3921 r = (*a) - q * (*p);
3922 if (r != 0 && ((*a) ^ (*p)) < 0) { /* signs differ */
3923 r += (*p);
3924 }
3925 return r;
3926 }
3927
3928 __INT2_T
3929 ENTF90(IMODULO, imodulo)(__INT2_T *a, __INT2_T *p)
3930 {
3931 __INT_T q, r;
3932
3933 q = (*a) / (*p);
3934 r = (*a) - q * (*p);
3935 if (r != 0 && ((*a) ^ (*p)) < 0) { /* signs differ */
3936 r += (*p);
3937 }
3938 return r;
3939 }
3940
3941 __REAL_T
3942 ENTF90(AMODULO, amodulo)(__REAL_T *x, __REAL_T *y)
3943 {
3944 double d;
3945 d = fmod(*x, *y);
3946 if (d != 0 && ((*x < 0 && *y > 0) || (*x > 0 && *y < 0)))
3947 d += *y;
3948 return d;
3949 }
3950
3951 __DBLE_T
3952 ENTF90(DMODULO, dmodulo)(__DBLE_T *x, __DBLE_T *y)
3953 {
3954 double d;
3955 d = fmod(*x, *y);
3956 if (d != 0 && ((*x < 0 && *y > 0) || (*x > 0 && *y < 0)))
3957 d += *y;
3958 return d;
3959 }
3960
3961 __INT4_T
3962 ENTF90(MODULOv, modulov)(__INT4_T a, __INT4_T p)
3963 {
3964 __INT4_T q, r;
3965
3966 q = (a) / (p);
3967 r = (a)-q * (p);
3968 if (r != 0 && ((a) ^ (p)) < 0) { /* signs differ */
3969 r += (p);
3970 }
3971 return r;
3972 }
3973
3974 __INT8_T
3975 ENTF90(I8MODULOv, i8modulov)(__INT8_T a, __INT8_T p)
3976 {
3977 __INT8_T q, r;
3978
3979 q = (a) / (p);
3980 r = (a)-q * (p);
3981 if (r != 0 && ((a) ^ (p)) < 0) { /* signs differ */
3982 r += (p);
3983 }
3984 return r;
3985 }
3986
3987 __INT2_T
3988 ENTF90(IMODULOv, imodulov)(__INT2_T a, __INT2_T p)
3989 {
3990 __INT_T q, r;
3991
3992 q = (a) / (p);
3993 r = (a)-q * (p);
3994 if (r != 0 && ((a) ^ (p)) < 0) { /* signs differ */
3995 r += (p);
3996 }
3997 return r;
3998 }
3999
4000 __REAL_T
4001 ENTF90(AMODULOv, amodulov)(__REAL_T x, __REAL_T y)
4002 {
4003 double d;
4004 d = fmod(x, y);
4005 if (d != 0 && ((x < 0 && y > 0) || (x > 0 && y < 0)))
4006 d += y;
4007 return d;
4008 }
4009
4010 __DBLE_T
4011 ENTF90(DMODULOv, dmodulov)(__DBLE_T x, __DBLE_T y)
4012 {
4013 double d;
4014 d = fmod(x, y);
4015 if (d != 0 && ((x < 0 && y > 0) || (x > 0 && y < 0)))
4016 d += y;
4017 return d;
4018 }
4019
4020 __INT_T
4021 ENTF90(CEILING, ceiling)(__REAL_T *r)
4022 {
4023 __DBLE_T x;
4024 int a;
4025
4026 x = *r;
4027 a = x; /* integer part */
4028 if (a == x)
4029 return x;
4030 if (x > 0)
4031 return a + 1;
4032 return a;
4033 }
4034
4035 __INT8_T
4036 ENTF90(KCEILING, kceiling)(__REAL_T *r)
4037 {
4038 __DBLE_T x;
4039 __INT8_T a;
4040
4041 x = *r;
4042 a = x; /* integer part */
4043 if (a == x)
4044 return x;
4045 if (x > 0)
4046 return a + 1;
4047 return a;
4048 }
4049
4050 __INT_T
4051 ENTF90(DCEILING, dceiling)(__DBLE_T *r)
4052 {
4053 __DBLE_T x;
4054 int a;
4055
4056 x = *r;
4057 a = x; /* integer part */
4058 if (a == x)
4059 return x;
4060 if (x > 0)
4061 return a + 1;
4062 return a;
4063 }
4064
4065 __INT8_T
4066 ENTF90(KDCEILING, kdceiling)(__DBLE_T *r)
4067 {
4068 __DBLE_T x;
4069 __INT8_T a;
4070
4071 x = *r;
4072 a = x; /* integer part */
4073 if (a == x)
4074 return x;
4075 if (x > 0)
4076 return a + 1;
4077 return a;
4078 }
4079
4080 __INT_T
4081 ENTF90(CEILINGv, ceilingv)(__REAL_T r)
4082 {
4083 __DBLE_T x;
4084 int a;
4085
4086 x = r;
4087 a = x; /* integer part */
4088 if (a == x)
4089 return x;
4090 if (x > 0)
4091 return a + 1;
4092 return a;
4093 }
4094
4095 __INT8_T
4096 ENTF90(KCEILINGv, kceilingv)(__REAL_T r)
4097 {
4098 __DBLE_T x;
4099 __INT8_T a;
4100
4101 x = r;
4102 a = x; /* integer part */
4103 if (a == x)
4104 return x;
4105 if (x > 0)
4106 return a + 1;
4107 return a;
4108 }
4109
4110 __INT_T
4111 ENTF90(DCEILINGv, dceilingv)(__DBLE_T r)
4112 {
4113 __DBLE_T x;
4114 int a;
4115
4116 x = r;
4117 a = x; /* integer part */
4118 if (a == x)
4119 return x;
4120 if (x > 0)
4121 return a + 1;
4122 return a;
4123 }
4124
4125 __INT8_T
4126 ENTF90(KDCEILINGv, kdceilingv)(__DBLE_T r)
4127 {
4128 __DBLE_T x;
4129 __INT8_T a;
4130
4131 x = r;
4132 a = x; /* integer part */
4133 if (a == x)
4134 return x;
4135 if (x > 0)
4136 return a + 1;
4137 return a;
4138 }
4139
4140 __INT_T
4141 ENTF90(FLOOR, floor)(__REAL_T *r)
4142 {
4143 __DBLE_T x;
4144 int a;
4145
4146 x = *r;
4147 a = x; /* integer part */
4148 if (a == x)
4149 return x;
4150 if (x < 0)
4151 return a - 1;
4152 return a;
4153 }
4154
4155 __INT_T
4156 ENTF90(KFLOOR, kfloor)(__REAL_T *r)
4157 {
4158 __DBLE_T x;
4159 __INT8_T a;
4160
4161 x = *r;
4162 a = x; /* integer part */
4163 if (a == x)
4164 return x;
4165 if (x < 0)
4166 return a - 1;
4167 return a;
4168 }
4169
4170 __INT_T
4171 ENTF90(DFLOOR, dfloor)(__DBLE_T *r)
4172 {
4173 __DBLE_T x;
4174 int a;
4175
4176 x = *r;
4177 a = x; /* integer part */
4178 if (a == x)
4179 return x;
4180 if (x < 0)
4181 return a - 1;
4182 return a;
4183 }
4184
4185 __INT8_T
4186 ENTF90(KDFLOOR, kdfloor)(__DBLE_T *r)
4187 {
4188 __DBLE_T x;
4189 __INT8_T a;
4190
4191 x = *r;
4192 a = x; /* integer part */
4193 if (a == x)
4194 return x;
4195 if (x < 0)
4196 return a - 1;
4197 return a;
4198 }
4199
4200 __INT_T
4201 ENTF90(FLOORv, floorv)(__REAL_T r)
4202 {
4203 __DBLE_T x;
4204 int a;
4205
4206 x = r;
4207 a = x; /* integer part */
4208 if (a == x)
4209 return x;
4210 if (x < 0)
4211 return a - 1;
4212 return a;
4213 }
4214
4215 __INT_T
4216 ENTF90(KFLOORv, kfloorv)(__REAL_T r)
4217 {
4218 __DBLE_T x;
4219 __INT8_T a;
4220
4221 x = r;
4222 a = x; /* integer part */
4223 if (a == x)
4224 return x;
4225 if (x < 0)
4226 return a - 1;
4227 return a;
4228 }
4229
4230 __INT_T
4231 ENTF90(DFLOORv, dfloorv)(__DBLE_T r)
4232 {
4233 __DBLE_T x;
4234 int a;
4235
4236 x = r;
4237 a = x; /* integer part */
4238 if (a == x)
4239 return x;
4240 if (x < 0)
4241 return a - 1;
4242 return a;
4243 }
4244
4245 __INT8_T
4246 ENTF90(KDFLOORv, kdfloorv)(__DBLE_T r)
4247 {
4248 __DBLE_T x;
4249 __INT8_T a;
4250
4251 x = r;
4252 a = x; /* integer part */
4253 if (a == x)
4254 return x;
4255 if (x < 0)
4256 return a - 1;
4257 return a;
4258 }
4259
4260 /** \brief selected_int_kind(r) */
4261 __INT_T
4262 ENTF90(SEL_INT_KIND, sel_int_kind)
4263 (char *rb, F90_Desc *rd)
4264 {
4265 int r;
4266
4267 r = I8(__fort_fetch_int)(rb, rd);
4268 if (r <= 2)
4269 return 1;
4270 if (r <= 4)
4271 return 2;
4272 if (r <= 9)
4273 return 4;
4274 if (r <= 18)
4275 return 8;
4276 return -1;
4277 }
4278
4279 /** \brief
4280 * -i8 variant of SEL_INT_KIND
4281 */
4282 __INT8_T
4283 ENTF90(KSEL_INT_KIND, ksel_int_kind)
4284 (char *rb, F90_Desc *rd)
4285 {
4286 int r;
4287
4288 r = I8(__fort_fetch_int)(rb, rd);
4289 if (r <= 2)
4290 return 1;
4291 if (r <= 4)
4292 return 2;
4293 if (r <= 9)
4294 return 4;
4295 if (r <= 18)
4296 return 8;
4297 return -1;
4298 }
4299
4300 /* selected_char_kind(r) */
4301
4302 /** \brief Check charset
4303 *
4304 * Make sure this routine is consistent with
4305 * - fe90: semfunc.c:_selected_char_kind()
4306 * - f90: dinit.c:_selected_char_kind()
4307 */
/* Map a character-set name to its kind constant: 1 for "ASCII" or
   "DEFAULT", -1 for anything unrecognized. */
static int
_selected_char_kind(char *p, __CLEN_T len)
{
  if (__fortio_eq_str(p, len, "ASCII"))
    return 1;
  else if (__fortio_eq_str(p, len, "DEFAULT"))
    return 1;
  return -1;
}
4317
4318 __INT_T
4319 ENTF90(SEL_CHAR_KINDA, sel_char_kinda)
4320 (DCHAR(p), F90_Desc *rd DCLEN64(p))
4321 {
4322 int r;
4323 r = _selected_char_kind(CADR(p), CLEN(p));
4324 return r;
4325 }
4326 /* 32 bit CLEN version */
4327 __INT_T
4328 ENTF90(SEL_CHAR_KIND, sel_char_kind)
4329 (DCHAR(p), F90_Desc *rd DCLEN(p))
4330 {
4331 return ENTF90(SEL_CHAR_KINDA, sel_char_kinda) (CADR(p), rd, (__CLEN_T)CLEN(p));
4332 }
4333
4334 __INT8_T
4335 ENTF90(KSEL_CHAR_KINDA, ksel_char_kinda)
4336 (DCHAR(p), F90_Desc *rd DCLEN(p))
4337 {
4338 int r;
4339 r = _selected_char_kind(CADR(p), CLEN(p));
4340 return r;
4341 }
4342 /* 32 bit CLEN version */
4343 __INT8_T
4344 ENTF90(KSEL_CHAR_KIND, ksel_char_kind)
4345 (DCHAR(p), F90_Desc *rd DCLEN(p))
4346 {
4347 return ENTF90(KSEL_CHAR_KINDA, ksel_char_kinda) (CADR(p), rd, (__CLEN_T)CLEN(p));
4348 }
4349
4350 /* Real model support functions */
4351
4352 /* IEEE floating point - 4 byte reals, 8 byte double precision */
4353
4354 /** \brief selected_real_kind(p,r) */
4355 __INT8_T
4356 ENTF90(KSEL_REAL_KIND, ksel_real_kind)
4357 (char *pb, char *rb, F90_Desc *pd, F90_Desc *rd)
4358 {
4359
4360 /*
4361 * -i8 variant of SEL_REAL_KIND
4362 */
4363
4364 int p, r, e, k;
4365
4366 e = 0;
4367 k = 0;
4368 if (ISPRESENT(pb)) {
4369 p = I8(__fort_fetch_int)(pb, pd);
4370 if (p <= 6)
4371 k = 4;
4372 else if (p <= 15)
4373 k = 8;
4374 else
4375 e -= 1;
4376 }
4377 if (ISPRESENT(rb)) {
4378 r = I8(__fort_fetch_int)(rb, rd);
4379 if (r <= 37) {
4380 if (k < 4)
4381 k = 4;
4382 }
4383 else if (r <= 307) {
4384 if (k < 8)
4385 k = 8;
4386 }
4387 else
4388 e -= 2;
4389 }
4390 return (__INT8_T)e ? e : k;
4391 }
4392
4393 __INT_T
4394 ENTF90(SEL_REAL_KIND, sel_real_kind)
4395 (char *pb, char *rb, F90_Desc *pd, F90_Desc *rd)
4396 {
4397 int p, r, e, k;
4398
4399 e = 0;
4400 k = 0;
4401 if (ISPRESENT(pb)) {
4402 p = I8(__fort_fetch_int)(pb, pd);
4403 if (p <= 6)
4404 k = 4;
4405 else if (p <= 15)
4406 k = 8;
4407 else
4408 e -= 1;
4409 }
4410 if (ISPRESENT(rb)) {
4411 r = I8(__fort_fetch_int)(rb, rd);
4412 if (r <= 37) {
4413 if (k < 4)
4414 k = 4;
4415 }
4416 else if (r <= 307) {
4417 if (k < 8)
4418 k = 8;
4419 }
4420
4421 else
4422 e -= 2;
4423 }
4424 return e ? e : k;
4425 }
4426
/** \brief EXPONENT(f) for real(4): the Fortran model exponent e such
 * that f = s * 2**e with 0.5 <= |s| < 1 (i.e. biased IEEE exponent
 * minus 126); returns 0 for +/-0.0. */
__INT_T
ENTF90(EXPONX, exponx)(__REAL4_T f)
{
  union {
    __INT4_T i;
    __REAL4_T r;
  } g;
  g.r = f;
  if ((g.i & ~0x80000000) == 0) /* +0.0 or -0.0 */
    return 0;
  else
    return ((g.i >> 23) & 0xFF) - 126; /* biased exponent - (127-1) */
}
4440
4441 __INT_T
4442 ENTF90(EXPON, expon)(__REAL4_T *f)
4443 {
4444 return ENTF90(EXPONX, exponx)(*f);
4445 }
4446
/** \brief EXPONENT(d) for real(8): the Fortran model exponent (biased
 * IEEE exponent minus 1022); returns 0 for +/-0.0. */
__INT_T
ENTF90(EXPONDX, expondx)(__REAL8_T d)
{
  union {
    __INT8_T i;
    __REAL8_T r;
  } g;
  g.r = d;
  /* zero test: all bits clear except possibly the sign bit */
  if ((((g.i >> 32) & ~0x80000000) | (g.i & 0xffffffff)) == 0)
    return 0;
  else
    return ((g.i >> 52) & 0x7FF) - 1022; /* biased exponent - (1023-1) */
}
4460
4461 __INT_T
4462 ENTF90(EXPOND, expond)(__REAL8_T *d)
4463 {
4464 return ENTF90(EXPONDX, expondx)(*d);
4465 }
4466
4467 __INT8_T
4468 ENTF90(KEXPONX, kexponx)(__REAL4_T f)
4469 {
4470 return ENTF90(EXPONX, exponx)(f);
4471 }
4472
4473 __INT8_T
4474 ENTF90(KEXPON, kexpon)(__REAL4_T *f)
4475 {
4476 return ENTF90(KEXPONX, kexponx)(*f);
4477 }
4478
4479 __INT8_T
4480 ENTF90(KEXPONDX, kexpondx)(__REAL8_T d)
4481 {
4482 return ENTF90(EXPONDX, expondx)(d);
4483 }
4484
4485 __INT8_T
4486 ENTF90(KEXPOND, kexpond)(__REAL8_T *d) {
4487 return ENTF90(KEXPONDX, kexpondx)(*d);
4488 }
4489
/** \brief FRACTION(f) for real(4): f with its exponent field replaced
 * by the bias-1 value, giving a result in [0.5, 1); zero is returned
 * unchanged. */
__REAL4_T
ENTF90(FRACX, fracx)(__REAL4_T f)
{
  union {
    __REAL4_T f;
    __INT4_T i;
  } x;

  x.f = f;
  if (x.f != 0.0) {
    x.i &= ~0x7F800000; /* clear the exponent field */
    x.i |= 0x3F000000;  /* set biased exponent 126 -> [0.5, 1) */
  }
  return x.f;
}
4505
4506 __REAL4_T
4507 ENTF90(FRAC, frac)(__REAL4_T *f) { return ENTF90(FRACX, fracx)(*f); }
4508
/** \brief FRACTION(d) for real(8): d with its exponent field replaced
 * by the bias-1 value, giving a result in [0.5, 1); zero is returned
 * unchanged. */
__REAL8_T
ENTF90(FRACDX, fracdx)(__REAL8_T d)
{
  __REAL8_SPLIT x;

  x.d = d;
  if (x.d != 0.0) {
    x.i.h &= ~0x7FF00000; /* clear the exponent field */
    x.i.h |= 0x3FE00000;  /* set biased exponent 1022 -> [0.5, 1) */
  }
  return x.d;
}
4521
4522 __REAL8_T
4523 ENTF90(FRACD, fracd)(__REAL8_T *d) { return ENTF90(FRACDX, fracdx)(*d); }
4524
4525 /** \brief NEAREST(X,S) has a value equal to the machine representable number
4526 * distinct from X and nearest to it in the direction of the infinity
4527 * with the same sign as S.
4528 *
4529 * The 'sign' argument is equal to the
4530 * fortran logical expression (S .ge. 0.0). */
__REAL4_T
ENTF90(NEARESTX, nearestx)(__REAL4_T f, __LOG_T sign)
{
  union {
    __REAL4_T f;
    __INT4_T i;
  } x;

  x.f = f;
  if (x.f == 0.0) {
    /* from zero, step to the smallest normal of the requested sign */
    x.i = (sign & GET_DIST_MASK_LOG) ? 0x00800000 : 0x80800000;
  } else if ((x.i & 0x7F800000) != 0x7F800000) { /* not nan or inf */
    /* incrementing/decrementing the bit pattern moves exactly one
       representable value toward or away from zero */
    if ((x.f < 0.0) ^ (sign & GET_DIST_MASK_LOG))
      ++x.i;
    else
      --x.i;
  }
  return x.f;
}
4550
4551 __REAL4_T
4552 ENTF90(NEAREST, nearest)(__REAL4_T *f, __LOG_T *sign)
4553 {
4554 return ENTF90(NEARESTX, nearestx)(*f, *sign);
4555 }
4556
4557 __REAL8_T
4558 ENTF90(NEARESTDX, nearestdx)(__REAL8_T d, __LOG_T sign)
4559 {
4560 __REAL8_SPLIT x;
4561
4562 x.d = d;
4563 if (x.d == 0.0) {
4564 x.i.h = (sign & 1) ? 0x00100000 : 0x80100000;
4565 x.i.l = 0;
4566 } else {
4567 if ((x.ll >> 52 & 0x7FF) != 0x7FF) { /* not nan or inf */
4568 if ((x.d < 0) ^ (sign & GET_DIST_MASK_LOG))
4569 ++x.ll;
4570 else
4571 --x.ll;
4572 }
4573 }
4574 return x.d;
4575 }
4576
4577 __REAL8_T
4578 ENTF90(NEARESTD, nearestd)(__REAL8_T *d, __LOG_T *sign)
4579 {
4580 return ENTF90(NEARESTDX, nearestdx)(*d, *sign);
4581 }
4582
/** \brief RRSPACING(f) for real(4): reciprocal of the relative spacing
 * near f.  The exponent field of f is XOR-complemented to build a
 * power of two that rescales |f| into [2,4), then a final multiply by
 * 2**22 yields |fraction(f)| * 2**24 ... /2 = the model result. */
__REAL4_T
ENTF90(RRSPACINGX, rrspacingx)(__REAL4_T f)
{
  union {
    __REAL4_T f;
    __INT4_T i;
  } x, y;

  x.f = f;
  if (x.f == 0)
    return 0;
  /* y = 2**(128 - E): cancels x's exponent, leaving x.f in [2,4) */
  y.i = (x.i & 0xFF << 23) ^ 0xFF << 23;
  x.f *= y.f;
  if (x.f < 0)
    x.f = -x.f;
  y.i = (22 + 127) << 23; /* y = 2**22 */
  x.f *= y.f;
  return x.f;
}
4602
4603 __REAL4_T
4604 ENTF90(RRSPACING, rrspacing)(__REAL4_T *f)
4605 {
4606 return ENTF90(RRSPACINGX, rrspacingx)(*f);
4607 }
4608
/** \brief RRSPACING(d) for real(8): reciprocal of the relative spacing
 * near d, by the same exponent-complement rescaling as the real(4)
 * version, with a final multiply by 2**51. */
__REAL8_T
ENTF90(RRSPACINGDX, rrspacingdx)(__REAL8_T d)
{
  __REAL8_SPLIT x, y;

  x.d = d;
  if (x.d == 0)
    return 0;
  /* y = 2**(2048-bias-E ...): cancels x's exponent, x.d lands in [2,4) */
  y.i.h = (x.i.h & 0x7FF << 20) ^ 0x7FF << 20;
  y.i.l = 0;
  x.d *= y.d;
  if (x.d < 0)
    x.d = -x.d;
  y.i.h = (51 + 1023) << 20; /* y = 2**51 */
  y.i.l = 0;
  x.d *= y.d;
  return x.d;
}
4627
4628 __REAL8_T
4629 ENTF90(RRSPACINGD, rrspacingd)(__REAL8_T *d)
4630 {
4631 return ENTF90(RRSPACINGDX, rrspacingdx)(*d);
4632 }
4633
4634 __REAL4_T
4635 ENTF90(SCALEX, scalex)(__REAL4_T f, __INT_T i)
4636 {
4637 int e;
4638 union {
4639 __REAL4_T f;
4640 __INT4_T i;
4641 } x;
4642
4643 e = 127 + i;
4644 if (e < 0)
4645 e = 0;
4646 else if (e > 255)
4647 e = 255;
4648 x.i = e << 23;
4649 return f * x.f;
4650 }
4651
4652 __REAL4_T
4653 ENTF90(SCALE, scale)(__REAL4_T *f, void *i, __INT_T *size)
4654 {
4655 int e;
4656 union {
4657 __REAL4_T f;
4658 __INT4_T i;
4659 } x;
4660
4661 e = 127 + I8(__fort_varying_int)(i, size);
4662 if (e < 0)
4663 e = 0;
4664 else if (e > 255)
4665 e = 255;
4666 x.i = e << 23;
4667 return *f * x.f;
4668 }
4669
4670 __REAL8_T
4671 ENTF90(SCALEDX, scaledx)(__REAL8_T d, __INT_T i)
4672 {
4673 int e;
4674 __REAL8_SPLIT x;
4675
4676 e = 1023 + i;
4677 if (e < 0)
4678 e = 0;
4679 else if (e > 2047)
4680 e = 2047;
4681 x.i.h = e << 20;
4682 x.i.l = 0;
4683 return d * x.d;
4684 }
4685
4686 __REAL8_T
4687 ENTF90(SCALED, scaled)(__REAL8_T *d, void *i, __INT_T *size)
4688 {
4689 int e;
4690 __REAL8_SPLIT x;
4691
4692 e = 1023 + I8(__fort_varying_int)(i, size);
4693 if (e < 0)
4694 e = 0;
4695 else if (e > 2047)
4696 e = 2047;
4697 x.i.h = e << 20;
4698 x.i.l = 0;
4699 return *d * x.d;
4700 }
4701
/** \brief SET_EXPONENT(f,i) for real(4): value whose fraction is that
 * of f and whose model exponent is i; zero is returned unchanged. */
__REAL4_T
ENTF90(SETEXPX, setexpx)(__REAL4_T f, __INT_T i)
{
  int e;
  union {
    __REAL4_T f;
    __INT4_T i;
  } x, y;

  y.f = f;
  if (y.f == 0.0)
    return y.f;
  /* normalize y into [1,2) by forcing its biased exponent to 127 */
  y.i &= ~0x7F800000;
  y.i |= 0x3F800000;
  e = 126 + i;
  if (e < 0)
    e = 0;
  else if (e > 255)
    e = 255;
  x.i = e << 23; /* x = 2**(i-1), clamped */
  return x.f * y.f;
}
4724
/** \brief SET_EXPONENT for real(4) with a varying-kind integer
 * exponent; zero is returned unchanged. */
__REAL4_T
ENTF90(SETEXP, setexp)(__REAL4_T *f, void *i, __INT_T *size)
{
  int e;
  union {
    __REAL4_T f;
    __INT4_T i;
  } x, y;

  y.f = *f;
  if (y.f == 0.0)
    return y.f;
  /* normalize y into [1,2) by forcing its biased exponent to 127 */
  y.i &= ~0x7F800000;
  y.i |= 0x3F800000;
  e = 126 + I8(__fort_varying_int)(i, size);
  if (e < 0)
    e = 0;
  else if (e > 255)
    e = 255;
  x.i = e << 23; /* x = 2**(i-1), clamped */
  return x.f * y.f;
}
4747
/** \brief SET_EXPONENT(d,i) for real(8): value whose fraction is that
 * of d and whose model exponent is i; zero is returned unchanged. */
__REAL8_T
ENTF90(SETEXPDX, setexpdx)(__REAL8_T d, __INT_T i)
{
  int e;
  __REAL8_SPLIT x, y;

  y.d = d;
  if (y.d == 0.0)
    return y.d;
  /* normalize y into [1,2) by forcing its biased exponent to 1023 */
  y.i.h &= ~0x7FF00000;
  y.i.h |= 0x3FF00000;
  e = 1022 + i;
  if (e < 0)
    e = 0;
  else if (e > 2047)
    e = 2047;
  x.i.h = e << 20; /* x = 2**(i-1), clamped */
  x.i.l = 0;
  return x.d * y.d;
}
4768
/** \brief SET_EXPONENT for real(8) with a varying-kind integer
 * exponent; zero is returned unchanged. */
__REAL8_T
ENTF90(SETEXPD, setexpd)(__REAL8_T *d, void *i, __INT_T *size)
{
  int e;
  __REAL8_SPLIT x, y;

  y.d = *d;
  if (y.d == 0.0)
    return y.d;
  /* normalize y into [1,2) by forcing its biased exponent to 1023 */
  y.i.h &= ~0x7FF00000;
  y.i.h |= 0x3FF00000;
  e = 1022 + I8(__fort_varying_int)(i, size);
  if (e < 0)
    e = 0;
  else if (e > 2047)
    e = 2047;
  x.i.h = e << 20; /* x = 2**(i-1), clamped */
  x.i.l = 0;
  return x.d * y.d;
}
4789
/** \brief SPACING(f) for real(4): absolute spacing of model numbers
 * near f, i.e. 2**(exponent(f) - digits); the biased exponent is
 * clamped to at least 1 so small arguments yield the smallest normal
 * spacing rather than zero. */
__REAL4_T
ENTF90(SPACINGX, spacingx)(__REAL4_T f)
{
  int e;
  union {
    __REAL4_T f;
    __INT4_T i;
  } x;

  x.f = f;
  e = ((x.i >> 23) & 0xFF) - 23; /* biased exponent minus mantissa bits */
  if (e < 1)
    e = 1;
  x.i = e << 23; /* build 2**(e-127) directly */
  return x.f;
}
4806
4807 __REAL4_T
4808 ENTF90(SPACING, spacing)(__REAL4_T *f)
4809 {
4810 return ENTF90(SPACINGX, spacingx)(*f);
4811 }
4812
/** \brief SPACING(d) for real(8): absolute spacing of model numbers
 * near d, 2**(exponent(d) - digits), with the biased exponent clamped
 * to at least 1. */
__REAL8_T
ENTF90(SPACINGDX, spacingdx)(__REAL8_T d)
{
  int e;
  __REAL8_SPLIT x;

  x.d = d;
  e = ((x.i.h >> 20) & 0x7FF) - 52; /* biased exponent minus mantissa bits */
  if (e < 1)
    e = 1;
  x.i.h = e << 20; /* build 2**(e-1023) directly */
  x.i.l = 0;
  return x.d;
}
4827
4828 __REAL8_T
4829 ENTF90(SPACINGD, spacingd)(__REAL8_T *d)
4830 {
4831 return ENTF90(SPACINGDX, spacingdx)(*d);
4832 }
4833
4834 #ifndef DESC_I8
4835
4836 typedef __INT8_T SZ_T;
4837
#undef _MZERO
/* Zero-fill helpers: MZERO<n> clears 'size' elements of n bytes each.
   The uppercase entry name must mirror the lowercase one so ENTF90
   emits a matched symbol pair; the previous MZEROERO##n spelling
   produced a mismatched external name (MZEROERO4 vs mzero4). */
#define _MZERO(n, t) \
  void \
  ENTF90(MZERO##n, mzero##n)(void *d, SZ_T size) \
  { \
    if (d && size > 0) \
      __c_mzero##n(d, size); \
  }
4846
4847 _MZERO(1, char)
4848
4849 _MZERO(2, short)
4850
4851 _MZERO(4, int)
4852
4853 _MZERO(8, long long)
4854
4855 void
4856 ENTF90(MZEROZ8, mzeroz8)(void *d, SZ_T size)
4857 {
4858 if (d && size > 0) {
4859 __c_mzero4(d, size * 2);
4860 }
4861 }
4862
4863 void
4864 ENTF90(MZEROZ16, mzeroz16)(void *d, SZ_T size)
4865 {
4866 if (d && size > 0) {
4867 __c_mzero8(d, size * 2);
4868 }
4869 }
4870
#undef _MSET
/* Broadcast helpers: MSET<n> stores the n-byte value *v into each of
   'size' elements of d. */
#define _MSET(n, t) \
  void ENTF90(MSET##n, mset##n)(void *d, void *v, SZ_T size) \
  { \
    if (d && size > 0) \
      __c_mset##n(d, *((t *)v), size); \
  }
4878
4879 _MSET(1, char)
4880
4881 _MSET(2, short)
4882
4883 _MSET(4, int)
4884
4885 _MSET(8, long long)
4886
4887 void
4888 ENTF90(MSETZ8, msetz8)(void *d, void *v, SZ_T size)
4889 {
4890 if (d) {
4891 SZ_T i;
4892 int *pd;
4893 int v0, v1;
4894 pd = (int *)d;
4895 v0 = ((int *)v)[0];
4896 v1 = ((int *)v)[1];
4897 for (i = 0; i < size; i++) {
4898 pd[0] = v0;
4899 pd[1] = v1;
4900 pd += 2;
4901 }
4902 }
4903 }
4904
4905 void
4906 ENTF90(MSETZ16, msetz16)(void *d, void *v, SZ_T size)
4907 {
4908 if (d) {
4909 SZ_T i;
4910 long long *pd;
4911 long long v0, v1;
4912 pd = (long long *)d;
4913 v0 = ((long long *)v)[0];
4914 v1 = ((long long *)v)[1];
4915 for (i = 0; i < size; i++) {
4916 pd[0] = v0;
4917 pd[1] = v1;
4918 pd += 2;
4919 }
4920 }
4921 }
4922
#undef _MCOPY
/* Copy helpers: MCOPY<n> copies 'size' n-byte elements from v to d. */
#define _MCOPY(n, t) \
  void ENTF90(MCOPY##n, mcopy##n)(void *d, void *v, SZ_T size) \
  { \
    if (d && v && size > 0) \
      __c_mcopy##n(d, v, size); \
  }
4930
4931 _MCOPY(1, char)
4932
4933 _MCOPY(2, short)
4934
4935 _MCOPY(4, int)
4936
4937 _MCOPY(8, long long)
4938
4939 void
4940 ENTF90(MCOPYZ8, mcopyz8)(void *d, void *v, SZ_T size)
4941 {
4942 if (d && v && size) {
4943 __c_mcopy4(d, v, size * 2);
4944 }
4945 }
4946
4947 void
4948 ENTF90(MCOPYZ16, mcopyz16)(void *d, void *v, SZ_T size)
4949 {
4950 if (d && v && size) {
4951 __c_mcopy8(d, v, size * 2);
4952 }
4953 }
4954
4955 #endif /* #if !defined(DESC_I8) */
4956
4957 /** \brief
4958 * helper function to store the MXINT_T value into a simple numerical type
4959 */
static void
store_mxint_t(void *b, F90_Desc *bd, MXINT_T v)
{
  /* Narrow v to the numeric type named by the descriptor's type kind
     and store it at b; unrecognized kinds are treated as __STAT_T. */
  switch (TYPEKIND(bd)) {
  case __INT1:
  case __LOG1:
    *(__INT1_T *)b = (__INT1_T)v;
    break;
  case __INT2:
  case __LOG2:
    *(__INT2_T *)b = (__INT2_T)v;
    break;
  case __INT4:
  case __LOG4:
    *(__INT4_T *)b = (__INT4_T)v;
    break;
  case __INT8:
  case __LOG8:
    *(__INT8_T *)b = (__INT8_T)v;
    break;
  case __REAL4:
    *(__REAL4_T *)b = (__REAL4_T)v;
    break;
  case __REAL8:
    *(__REAL8_T *)b = (__REAL8_T)v;
    break;
  case __REAL16:
    *(__REAL16_T *)b = (__REAL16_T)v;
    break;
  default:
    *(__STAT_T *)b = (__STAT_T)v;
  }
}
4993
/** \brief
 * helper function returning the largest positive value representable
 * in the integer/logical type described by the descriptor
 */
static MXINT_T
mxint(F90_Desc *bd)
{
  MXINT_T v;

  /* ~(1 << (bits-1)) is the all-ones-except-sign pattern, i.e. the
     maximum positive value of the descriptor's integer type; kinds
     other than INT/LOG 1,2,8 default to the 4-byte maximum. */
  switch (TYPEKIND(bd)) {
  case __INT1:
  case __LOG1:
    v = ~((__INT1_T)1 << (8 * sizeof(__INT1_T) - 1));
    break;
  case __INT2:
  case __LOG2:
    v = ~((__INT2_T)1 << (8 * sizeof(__INT2_T) - 1));
    break;
  case __INT8:
  case __LOG8:
    v = ~((__INT8_T)1 << (8 * sizeof(__INT8_T) - 1));
    break;
  case __INT4:
  case __LOG4:
  default:
    v = ~((__INT4_T)1 << (8 * sizeof(__INT4_T) - 1));
    break;
  }
  return v;
}
5023