1 /*
2 * Copyright (c) 2005 - 2010, Nils R. Weller
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 *
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 * 2. Redistributions in binary form must reproduce the above copyright
12 * notice, this list of conditions and the following disclaimer in the
13 * documentation and/or other materials provided with the distribution.
14 *
15 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
16 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
18 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
19 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
20 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
21 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
22 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
24 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
25 * POSSIBILITY OF SUCH DAMAGE.
26 */
27 #include "icode.h"
28 #include <stdio.h>
29 #include <stdlib.h>
30 #include <stdarg.h>
31 #include <string.h>
32 #include <assert.h>
33 #include <ctype.h>
34 #include "misc.h"
35 #include "token.h"
36 #include "control.h"
37 #include "decl.h"
38 #include "type.h"
39 #include "debug.h"
40 #include "defs.h"
41 #include "attribute.h"
42 #include "expr.h"
43 #include "error.h"
44 #include "subexpr.h"
45 #include "symlist.h"
46 #include "zalloc.h"
47 #include "reg.h"
48 #include "fcatalog.h"
49 #include "cc1_main.h"
50 #include "typemap.h"
51 #include "functions.h"
52 #include "backend.h" /* for get_sizeof() */
53 #include "scope.h"
54 #include "x87_nonsense.h"
55 #include "inlineasm.h"
56 #include "n_libc.h"
57
/*
 * Nonzero while optimization is enabled; presumably set from the -O
 * driver option (cf. Oflag checks below) -- TODO confirm against cc1_main.
 */
int optimizing;
/* NOTE(review): flag for statement-expression translation; no use visible in this chunk */
static int doing_stmtexpr;
60
61 #if 0
62 sparc
63 mov arg1, %o0
64 mov arg2, %o1
65 ...
66 page 297
67 #endif
68
69
70 struct vreg *
fcall_to_icode(struct fcall_data * fcall,struct icode_list * il,struct token * t,int eval)71 fcall_to_icode(
72 struct fcall_data *fcall,
73 struct icode_list *il,
74 struct token *t,
75 int eval) {
76
77 struct expr *ex;
78 struct vreg *args[128];
79 struct token *from_consts[128];
80 struct vreg **argp = NULL;
81 struct vreg *ret = NULL;
82 struct type *fty;
83 struct ty_func *fdecl;
84 struct sym_entry *se;
85 int i = 0;
86 int j;
87 int nstack = sizeof args / sizeof args[0];
88 int alloc = 0;
89
90
91 if (picflag) {
92 if (!curfunc->pic_initialized && backend->need_pic_init && eval) {
93 backend->icode_initialize_pic(curfunc, il);
94 curfunc->pic_initialized = 1;
95 }
96 }
97
98 /* XXX bad */
99 if (fcall->callto != NULL) {
100 fty = fcall->callto->dtype;
101 } else {
102 fty = fcall->calltovr->type;
103 }
104 fdecl = fcall->functype;
105
106
107 for (ex = fcall->args; ex != NULL; ex = ex->next) {
108 if (i == nstack - 1) {
109 alloc = nstack * 2;
110 argp = n_xmalloc(alloc * sizeof *argp);
111 memcpy(argp, args, nstack * sizeof *argp);
112 memset(from_consts, 0, sizeof from_consts);
113 } else if (i >= nstack) {
114 if (i == alloc - 1) {
115 alloc *= 2;
116 argp = n_xrealloc(argp, alloc * sizeof *argp);
117 }
118 } else {
119 if (ex->op == 0
120 && ex->data->meat
121 && ex->data->meat->type == TOK_STRING_LITERAL) {
122 from_consts[i] = ex->data->meat;
123 } else {
124 from_consts[i] = NULL;
125 }
126 }
127
128 if (ex->op == 0
129 && !ex->data->is_expr
130 && ex->data->only_load) {
131 /*
132 * Don't need to generate anything
133 */
134 if (argp != NULL) {
135 argp[i++] = ex->data->res;
136 } else {
137 args[i++] = ex->data->res;
138 }
139
140 if (!eval) {
141 /*
142 * 07/15/08: For constant expressions, reset
143 * the vreg and load flags. Otherwise a
144 * subsequent non-constant evaluation will
145 * get an unbacked vreg because it relied
146 * on the constant evaluation, which does
147 * not load registers with values, etc
148 * XXX Not sure this is fully correct
149 */
150 ex->data->res = NULL;
151 ex->data->only_load = 0;
152 ex->res = NULL;
153 }
154 continue;
155 }
156
157 if ((ex->res = expr_to_icode(ex, NULL, il, 0, 0, eval)) == NULL) {
158 /* XXX free stuff */
159 if (i > nstack) free(argp);
160 return NULL;
161 }
162
163 if (argp != NULL) {
164 argp[i++] = ex->res;
165 } else {
166 args[i++] = ex->res;
167 }
168
169 if (!eval) {
170 /*
171 * 07/15/08: For constant expressions, reset
172 * the vreg and load flags. Otherwise a
173 * subsequent non-constant evaluation will
174 * get an unbacked vreg because it relied
175 * on the constant evaluation, which does
176 * not load registers with values, etc
177 * XXX Not sure this is fully correct
178 */
179 if (ex->data) {
180 ex->data->res = NULL;
181 ex->data->only_load = 0;
182 }
183 ex->res = NULL;
184 }
185 }
186 if (argp != NULL) argp[i] = NULL;
187 else args[i] = NULL;
188
189 /* Check correctness of arguments */
190 if (fdecl->nargs != -1 && fdecl->nargs != fcall->nargs) {
191 if (!fdecl->variadic || fdecl->nargs - 1 > fcall->nargs) {
192 if (fdecl->was_just_declared || fdecl->type == FDTYPE_KR) {
193 warningfl(t,
194 "Call to function `%s' with wrong number of aguments",
195 fty->name? fty->name: "");
196 } else {
197 errorfl(t,
198 "Call to function `%s' with wrong number of aguments",
199 fty->name? fty->name: "");
200 return NULL;
201 }
202 }
203 } else if (fdecl->nargs == -1 && fcall->nargs > 0 && fty->is_def) {
204 /*
205 * 02/21/09: Warn about calls with arguments to functions which
206 * are DEFINED without a prototype declaration (void foo() {}
207 */
208 warningfl(t,
209 "Call to function `%s' with wrong number of aguments",
210 fty->name? fty->name: "");
211 }
212
213 if (fdecl->scope != NULL) {
214 se = fdecl->scope->slist;
215 } else {
216 se = NULL;
217 }
218
219 /*if (fcall->calltovr == NULL)*/ /* XXX */
220 for (j = 0; j < i && fdecl->nargs != -1; ++j) {
221 struct vreg *arg = argp? argp[j]: args[j];
222
223 /*
224 * 02/15/09: XXXXXXXXXXXXXXXXXXX This does not do typechecking
225 * for arguments outside of the ellpisis!!! But if we just remove
226 * this part, we're getting bogus warnings, so fix it properly
227 * later
228 * 02/21/09: OK, added. j >= fdecl->nargs is true if we're inside
229 * of the ellipsis
230 */
231 if (fdecl->variadic && j >= fdecl->nargs) {
232 #if 0
233 if (se != NULL && se->dec == fdecl->lastarg) {
234 se = NULL;
235 }
236 #endif
237 se = NULL;
238 } else if (se != NULL) {
239 struct type *param_type = NULL;
240
241 if (is_transparent_union(se->dec->dtype)) {
242 param_type = get_transparent_union_type(t, se->dec->dtype, arg);
243 if (param_type == NULL) {
244 return NULL;
245 }
246 }
247
248 if (param_type == NULL) {
249 /*
250 * Not transparent union - check type
251 */
252 param_type = se->dec->dtype;
253 if (fdecl->type == FDTYPE_KR) {
254 if (check_types_assign(t, se->dec->dtype,
255 arg, 1, 1) != 0) {
256 /*
257 * There is a mismatch here, but
258 * we only warn instead of
259 * erroring because most other
260 * compilers probably accept this
261 * code without warning because
262 * it is a K&R function (e.g. Ruby
263 * declares a ``void *'' parameter
264 * but passes a ``long''
265 */
266 warningfl(t, "Incompatible "
267 "argument type. This "
268 "may be an undetected "
269 "bug due to a K&R "
270 "declaration instead "
271 "of an ANSI prototype.");
272 }
273 } else {
274 if (check_types_assign(t, se->dec->dtype,
275 arg, 1, 0) != 0) {
276 return NULL;
277 }
278 }
279 }
280 if (is_arithmetic_type(param_type)
281 && eval) {
282 /*
283 * Make parameter agree with argument
284 */
285 arg = backend->
286 icode_make_cast(arg,param_type,il);
287
288 if (argp) {
289 argp[j] = arg;
290 } else {
291 args[j] = arg;
292 }
293 }
294
295 se = se->next;
296 } else {
297 /* Must be implicitly declared */
298 ;
299 }
300 }
301
302 if (fdecl->nargs == -1) {
303 /* XXX finish this .................... */
304 }
305
306 if (fty->fastattr & CATTR_FORMAT) {
307 check_format_string(t, fty, fdecl, args, from_consts, i);
308 }
309
310 if (eval) {
311 ret = backend->icode_make_fcall(fcall, argp? argp: args, i, il);
312 } else {
313 ret = vreg_alloc(NULL,NULL,NULL,NULL);
314 ret->type = n_xmemdup(fcall->calltovr->type, sizeof(struct type));
315 functype_to_rettype(ret->type);
316 if (ret->type->code != TY_VOID || ret->type->tlist != NULL) {
317 ret->size = backend->get_sizeof_type(ret->type,NULL);
318 } else {
319 ret->size = 0;
320 }
321 }
322
323 if (i > nstack) free(argp);
324 return ret;
325 }
326
327
/*
 * Promote vr if necessary
 *
 * Applies the C integral promotions to *vr: bitfields are widened to
 * their promoted type, char/short values are converted to int. Types
 * with a type node list (pointer/array/function) are left alone.
 *
 * il/eval: if eval is set and an icode list is given, the conversion
 * code is actually emitted; otherwise only type/size are recorded.
 *
 * Returns 1 if a char/short -> int promotion took place, 0 otherwise.
 */
int
pro_mote(struct vreg **vr, struct icode_list *il, int eval) {
	struct type	*ty = (*vr)->type;
	int		is_constant = 0;

	if (il != NULL && eval) {
		if (is_x87_trash(*vr)) {
			/* x87 operands need dedicated anonymification */
			*vr = x87_anonymify(*vr, il);
		} else {
			/*
			 * 04/13/08: Don't anonymify constants!
			 */
			if ((*vr)->from_const != NULL
				&& (*vr)->from_const->type != TOK_STRING_LITERAL
				&& !IS_FLOATING((*vr)->from_const->type)
				&& Oflag != -1) {
				*vr = dup_vreg(*vr);
				is_constant = 1; /* 07/13/08: Whoops, forgot this! */
			} else {

				vreg_anonymify(vr, NULL, NULL, il);
			}
		}
	}
	if (ty->tlist != NULL) {
		/* Pointer/array/function - no promotion applies */
		return 0;
	}

	/*
	 * 08/09/08: Handle bitfields
	 */
	if (ty->tbit != NULL) {
		if (eval) {
			/* Widen the bitfield, then cast to its promoted type */
			*vr = promote_bitfield(*vr, il);
			*vr = backend->icode_make_cast(*vr,
				cross_get_bitfield_promoted_type((*vr)->type),
				il);
		} else {
			/* Not evaluated: only record promoted type and size */
			(*vr)->type = cross_get_bitfield_promoted_type((*vr)->type); /*make_basic_type(TY_INT);*/
			(*vr)->size = backend->get_sizeof_type((*vr)->type, NULL);
		}
	} else if (IS_CHAR(ty->code)
		|| IS_SHORT(ty->code)) {
		/* Need promotion! */
		ty = make_basic_type(TY_INT);
		if (il != NULL && eval) {
			if (is_constant) {
				/*
				 * 04/12/08: Don't anonymify constants -
				 * convert the constant token itself instead
				 */
				struct token	*t;

				t = cross_convert_const_token((*vr)->from_const,
					TY_INT);
				(*vr)->from_const = t;
			} else {
				*vr = backend->icode_make_cast(*vr, ty, il);
			}
		}
		(*vr)->type = ty;
		(*vr)->size = backend->get_sizeof_type(ty, NULL);
		return 1;
	}
	return 0;
}
396
397
/*
 * Perform usual arithmetic conversions
 *
 * Promotes *left and *right (pro_mote()) and, for non-shift operators,
 * converts the lower-ranked operand to the higher-ranked type so both
 * sides agree. Shift operands are promoted but not brought to a common
 * type (the result type of a shift is that of the left operand).
 *
 * left_il/right_il: icode lists for each side; they differ only when
 * converting the two branches of a conditional operator.
 *
 * Returns 0 on success; 1 if a diagnostic was emitted (struct/union
 * operand) or if a pointer/array operand is involved (no conversion
 * is performed here in that case).
 */
static int
convert_operands(
	struct vreg **left,
	struct vreg **right,
	struct icode_list *left_il,
	struct icode_list *right_il,
	int op0,
	struct token *optok,
	int eval) {

	struct type	*lt = (*left)->type;
	struct type	*rt = (*right)->type;
	struct operator	*op;
	int		larit;
	int		rarit;
	int		is_cond_op;


	/*
	 * 08/07/09: There were STILL bugs with the conditional
	 * operator! Usual arithmetic conversion between condop
	 * operands operates in a very restricted environment,
	 * because registers allocated and temporarily saved
	 * items on the one side are not valid on the other. So
	 * we have to invalidate GPRs properly after using them
	 *
	 * Now we add LOTS of invalidations just to make sure
	 * that no stale data can be used. This is costly
	 */
	is_cond_op = left_il != right_il;

	larit = is_arithmetic_type(lt);
	rarit = is_arithmetic_type(rt);
	op = &operators[LOOKUP_OP2(op0)];

	if (larit && rarit) {
		/*
		 * Both are arithmetic types and may need usual
		 * arithmetic conversions
		 */
		pro_mote(left, left_il, eval);
		if (is_cond_op) {
			backend->invalidate_gprs(left_il, 1, 0);
		}

		pro_mote(right, right_il, eval);
		if (is_cond_op) {
			backend->invalidate_gprs(right_il, 1, 0);
		}

		/* Promotion may have changed the types - re-read them */
		lt = (*left)->type;
		rt = (*right)->type;
		if ((op0 != TOK_OP_BSHL && op0 != TOK_OP_BSHR)
			/*|| backend->arch == ARCH_X868 */ /* :-( */) {
			if (rt->code > lt->code) {
				/*
				 * 06/01/08: In long vs unsigned int, the
				 * result type is only long if long can
				 * represent all unsigned int values;
				 * Otherwise both are converted to unsigned
				 * long. This was not handled correctly
				 */
				if (eval) {
					/*
					 * XXX 07/13/08: We should convert
					 * constants at compile time!
					 */
					if (!(rt->code == TY_LONG
						&& lt->code == TY_UINT)
						|| cross_get_target_arch_properties()->long_can_store_uint) {
						*left = backend->
							icode_make_cast(*left,
								rt,
								left_il);
					} else {
						/* long vs uint */
						*left = backend->
							icode_make_cast(*left,
								make_basic_type(TY_ULONG),
								left_il);
						*right = backend->
							icode_make_cast(*right,
								make_basic_type(TY_ULONG),
								right_il);
					}
				} else {
					/* Not evaluated: record type only */
					(*left)->type = rt;
				}
			} else if (lt->code > rt->code) {
				if (eval) {
					/*
					 * XXX 07/13/08: We should convert
					 * constants at compile time!
					 */
					if (!(lt->code == TY_LONG
						&& rt->code == TY_UINT)
						|| cross_get_target_arch_properties()->long_can_store_uint) {
						*right = backend->
							icode_make_cast(*right,
								lt,
								right_il);
					} else {
						/* long vs uint */
						*right = backend->
							icode_make_cast(*right,
								make_basic_type(TY_ULONG),
								right_il);
						*left = backend->
							icode_make_cast(*left,
								make_basic_type(TY_ULONG),
								left_il);
					}
#if 0
					*right = backend->
						icode_make_cast(*right, lt,
							right_il);
#endif
				} else {
					/* Not evaluated: record type only */
					(*right)->type = lt;
				}
			}
		}
		return 0;
	} else if (lt->tlist == NULL &&
		(lt->code == TY_STRUCT || lt->code == TY_UNION)) {
		errorfl(optok, "Cannot use %s type with `%s' operator",
			lt->code == TY_STRUCT? "structure": "union",
			op->name);
		return 1;
	} else if (rt->tlist == NULL &&
		(rt->code == TY_STRUCT || rt->code == TY_UNION)) {
		errorfl(optok, "Cannot use %s type with `%s' operator",
			rt->code == TY_STRUCT? "structure": "union",
			op->name);
		return 1;
	} else {
		/* At least one side is a pointer or array */
		/*
		 * NOTE(review): both sub-branches are intentionally(?) empty;
		 * pointer operand checking presumably happens in promote() -
		 * confirm
		 */
		if (lt->tlist == NULL || rt->tlist == NULL) {
		} else {
		}
	}
	return 1;
}
544
/*
 * XXX There is some duplicated stuff in promote() and expr_to_icode(),
 * which should be cleaned up. And this is improperly named because it
 * doesn't only do promotions but also usual arithmetic conversion. And
 * in some cases it does it wrongly
 *
 * Should be fully replaced with convert_operands() and pro_mote()
 *
 * Promotes/converts the operand(s) of operator op0. With right == NULL
 * only *left is promoted (unary operator / condop result); otherwise
 * both operands are checked for compatibility and converted. Returns
 * the resulting type of the operation, or NULL on a type error (a
 * diagnostic is emitted via errorfl()).
 */
struct type *
promote(struct vreg **left, struct vreg **right, int op0, struct token *optok,
	struct icode_list *il, int eval) {
	struct type	*lt = (*left)->type;
	struct type	*rt = right?
		(void *)(*right)->type: (void *)NULL;
	struct type	*ret;
	struct type	*towhat = NULL;
	struct type_node	*tnl;
	struct type_node	*tnr;
	struct operator	*op;
	int		is_void = 0;

	if (il != NULL && eval) {
		if (is_x87_trash(*left)) {
			/* x87 operands need dedicated anonymification */
			*left = x87_anonymify(*left, il);
		} else {
			/*
			 * 04/13/08: Don't anonymify constants!
			 */
			if ((*left)->from_const != NULL
				&& (*left)->from_const->type != TOK_STRING_LITERAL
				&& !IS_FLOATING((*left)->from_const->type)
				&& Oflag != -1) {
				*left = dup_vreg(*left);
			} else {
				vreg_anonymify(left, NULL, NULL, il);
			}
		}
	}


	/* Bitfield operands are promoted up front (loads must be emitted) */
	if ((*left)->type->tbit != NULL) {
		if (eval) {
			if (right) {
				vreg_faultin_protected(*right, NULL, NULL, *left, il, 0);
			} else {
				vreg_faultin(NULL, NULL, *left, il, 0);
			}
		}
		(void) pro_mote(left, il, eval);
	}
	if (right != NULL && (*right)->type->tbit != NULL) {
		if (eval) {
			vreg_faultin_protected(*left, NULL, NULL, *right, il, 0);
		}
		(void) pro_mote(right, il, eval);
	}

	if (op0 == 0) {
		op = NULL; /* XXX */
	} else {
		op = &operators[LOOKUP_OP2(op0)];
	}
	if (right == NULL) {
		/*
		 * Promoting argument to unary operator or result of
		 * conditional operator
		 */
		towhat = (*left)->type;
		if (lt->tlist != NULL) {
			if (op0 == TOK_OP_LNEG) {
			}
		} else if (IS_CHAR(lt->code)
			|| IS_SHORT(lt->code)) {
			/* char/short promote to int */
			towhat = make_basic_type(TY_INT);
			if (il != NULL) {
				if (eval) {
					*left = backend->icode_make_cast(*left,
						towhat, il);
				} else {
					(*left)->type = towhat;
				}
			}
		}
		return towhat;
	}

	if (lt->tlist == NULL || rt->tlist == NULL) {
		if (lt->tlist != rt->tlist) {
			/*
			 * Basic type used with pointer type -
			 * only valid for some operations
			 */
			struct type	*pointer;
			struct vreg	*basic_vreg;
			struct type	*basic;

			if (lt->tlist) {
				pointer = lt;
				basic = rt;
				basic_vreg = *right;
			} else {
				pointer = rt;
				basic = lt;
				basic_vreg = *left;
			}
			if (basic->code == TY_STRUCT
				|| basic->code == TY_UNION) {
				errorfl(optok, "Cannot use "
					"%s type with `%s' "
					"operator", basic->code ==
					TY_STRUCT? "structure":
					"union", op? op->name: "<unknown>");
				return NULL;
			} else if (IS_FLOATING(basic->code)) {
				errorfl(optok, "Cannot use "
					"pointer types with floating "
					"point types");
				return NULL;
			} else if (op0 == TOK_OP_PLUS
				|| op0 == TOK_OP_MINUS) {
				/* Probably OK - pointer arithmetic */
				if (pointer->code == TY_VOID
					&& pointer->tlist->next == NULL) {
				/*	if (std != GNU) {
						errorfl(optok, "Cannot "
						"perform pointer arithmetic "
						"on void pointers (cast to "
						"`(char *)' instead!)");
					} else { */
					warningfl(optok, "Pointer arithmetic "
						"on void pointers is a GNU C "
						"extension (you should cast "
						"to `(char *)' instead!)");
#if 0
					return NULL;
#endif
				}
			} else if ((op0 == TOK_OP_LEQU
				|| op0 == TOK_OP_LNEQU)
				&& basic_vreg->is_nullptr_const) {
				/* ptr ==/!= null pointer constant is fine */
				;
			} else if (op0 == TOK_OP_COMMA) {
				;
			} else {
				errorfl(optok, "Cannot use "
					"pointer type with `%s' operator "
					"and basic type",
					op? op->name: "<unknown>");
				return NULL;
			}

			/* Arrays decay to pointers in the result type */
			if (pointer->tlist->type == TN_ARRAY_OF
				|| pointer->tlist->type == TN_VARARRAY_OF) {
				pointer = n_xmemdup(pointer, sizeof *pointer);
				copy_tlist(&pointer->tlist, pointer->tlist);
				pointer->tlist->type = TN_POINTER_TO;

				/* XXX 12/07/24: Is this needed? */
				(void) backend->get_sizeof_type(pointer, NULL);
			}
			return pointer;
		} else {
			/* Both are basic types */
			if (convert_operands(left, right, il, il, op0,
				optok, eval) != 0) {
				return NULL;
			} else {
				return (*left)->type;
			}
		}
	}

	tnl = lt->tlist;
	tnr = rt->tlist;

	/* Treat function designators like function pointers */
	if (tnl->type == TN_FUNCTION
		&& tnr->type == TN_POINTER_TO) {
		tnr = tnr->next;
	} else if (tnr->type == TN_FUNCTION
		&& tnl->type == TN_POINTER_TO) {
		tnl = tnl->next;
	}

	/* Dealing with two pointer types */
	if ((lt->code == TY_VOID && tnl->next == NULL)
		|| (rt->code == TY_VOID && tnr->next == NULL)) {
		/*
		 * 08/03/07: Avoid error for
		 *
		 * char *p;
		 * if (&p == (void *)bla) {
		 *
		 * May not be 100% correct
		 */
		is_void = 1;
	} else {
		/* Compare the two type node chains member by member */
		for (;
			tnl != NULL && tnr != NULL;
			tnl = tnl->next, tnr = tnr->next) {
			if (tnl->type != tnr->type
				|| (tnl->type == TN_ARRAY_OF
				&& tnl->arrarg_const
				!= tnr->arrarg_const)) {
				if (tnl != lt->tlist
					|| (tnl->type != TN_ARRAY_OF
					&& tnr->type != TN_ARRAY_OF
					&& tnl->type != TN_VARARRAY_OF
					&& tnr->type != TN_VARARRAY_OF)) {
					errorfl(optok,
						"Incompatible pointer types in "
						"expression");
					return NULL;
				}
			}
		}
	}

	if (!is_void) {
		if (tnl != tnr
			&& !(*right)->is_nullptr_const
			&& !(*left)->is_nullptr_const) {
			/*
			 * XXX this fails for function vs void pointers!!!!
			 * !!!!!!!!!!!!!!!!!!!!!
			 */
			errorfl(optok,
				"Incompatible types in expression");
			return NULL;
		}
	}

	/* XXX this is complete nonsense */
	if (op0 == TOK_OP_LEQU
		|| op0 == TOK_OP_LNEQU
		|| op0 == TOK_OP_GREAT
		|| op0 == TOK_OP_SMALL
		|| op0 == TOK_OP_GREATEQ
		|| op0 == TOK_OP_SMALLEQ) {
		/*
		 * The resulting type of relational operators applied to
		 * two pointer values is of type ``int''
		 */
		ret = make_basic_type(TY_INT);
		ret = n_xmemdup(ret, sizeof *ret);
		return ret;
	} else if (op0 == TOK_OP_MINUS) {
		/* ptr - ptr; NOTE(review): ptrdiff_t would be expected here */
		ret = make_basic_type(TY_INT);
		ret = n_xmemdup(ret, sizeof *ret);
		return ret;
	}

	return lt;
}
798
799
/*
 * Perform pointer arithmetic on lres or rres (result is returned.) For add
 * this means: mul n, elemsize; add ptr, n
 * For sub this needs to do one of two separate things: Either subtract a
 * pointer from another pointer and calculate the number of elements between
 * them (sub ptr1, ptr2; div result, elemsize), or just subtract an element
 * count (mul n, elemsize; sub ptr, n)
 *
 * As a simple optimization, the scaling is done using shift left/shift right
 * rather than mul/div instructions if the element size is a power of two
 */
static void
ptrarit(
	struct vreg **lres0,
	struct vreg **rres0,
	struct icode_list *ilp,
	int op,
	int eval) {

	struct vreg	*toscale;	/* integer operand to be scaled */
	struct vreg	*addto;		/* pointer operand (receives result) */
	struct vreg	*lres = *lres0;
	struct vreg	*rres = *rres0;
	struct type	*ty;
	struct icode_instr	*ii;
	int		factor;		/* element size (0 for VLA) */
	int		both_ptr;	/* ptr - ptr case */
	int		is_vla;
	struct vreg	*tmpvr;

	both_ptr = lres->type->tlist != NULL && rres->type->tlist != NULL;

	/*
	 * 04/13/08: Additional faultins needed if either side is a now
	 * non-anonymified constant! ptr + 123
	 */
	vreg_faultin_protected(rres, NULL, NULL, lres, ilp, 0);
	vreg_faultin_protected(lres, NULL, NULL, rres, ilp, 0);
	/* Pin both operand registers while anonymifying the pointer side */
	reg_set_unallocatable(rres->pregs[0]);
	reg_set_unallocatable(lres->pregs[0]);
	if (lres->type->tlist) {
		toscale = rres;
		addto = lres;
		if (eval) {
			vreg_anonymify(&addto, NULL, NULL, ilp);
		}
		*lres0 = lres = addto;
	} else {
		toscale = lres;
		addto = rres;
		if (eval) {
			vreg_anonymify(&addto, NULL, NULL, ilp);
		}
		*rres0 = rres = addto;
	}
	reg_set_allocatable(rres->pregs[0]);
	reg_set_allocatable(lres->pregs[0]);

	ty = addto->type;

	/*
	 * 05/22/11: Handle VLA elements: If the pointer we're working
	 * with points to a VLA, then we need to compute its size at
	 * runtime
	 */
	if (IS_VLA(ty->flags)) {
		is_vla = 1;
		factor = 0;
	} else {
		is_vla = 0;
		factor = backend->
			get_sizeof_elem_type(ty);
	}

	if (factor > 1 || is_vla) {
		/*
		 * Scaling something that may be from a variable -
		 * register is not cached value anymore afterwards
		 */
		tmpvr = vreg_alloc(NULL, NULL, NULL, make_basic_type(TY_INT));

		if (toscale == lres) {
			vreg_anonymify(&toscale, NULL, NULL, ilp);
			pro_mote(&toscale, ilp, eval);
			*lres0 = lres = toscale;
		} else {
			vreg_anonymify(&toscale, NULL, NULL, ilp);
			pro_mote(&toscale, ilp, eval);
			*rres0 = rres = toscale;
		}

		/*
		 * 12/23/08: Convert index to size_t. This is necessary on 64bit
		 * architectures like PPC64. For example, if we have
		 *
		 *    p[-1]    (where p is ``int *'')
		 *
		 * ... then the shift or multiplication to scale -1 for an int
		 * must be performed using 64bit instructions to ensure that it
		 * remains negative (i.e. sign-extended to the upper word)
		 *
		 * size_t is a GPR on all supported architectures so it is a
		 * suitable type
		 */
		toscale = backend->icode_make_cast(toscale, backend->get_size_t(), ilp);

		if (!is_vla && (factor & (factor - 1)) == 0) {
			/*
			 * Scaling by (constant - not VLA) power of two - we can shift!
			 */
			int	shift_by = 0;

			while (factor /= 2) {
				++shift_by;
			}

			tmpvr->from_const = const_from_value(&shift_by, NULL);

			/*
			 * NOTE(review): the two consecutive identical
			 * PLUS checks look odd but are deliberate in
			 * structure: first scale (shl), then add
			 */
			if (op == TOK_OP_PLUS) {
				ii = icode_make_shl(toscale, tmpvr);
				append_icode_list(ilp, ii);
			}
			if (op == TOK_OP_PLUS) {
				ii = icode_make_add(addto, toscale);
				append_icode_list(ilp, ii);
			} else {
				if (!both_ptr) {
					/* Second sub operand is scaled */
					ii = icode_make_shl(toscale, tmpvr);
					append_icode_list(ilp, ii);
				}
				/*
				 * 06/14/09: This was missing - the
				 * icode_prepare_op() may trash our target
				 * register
				 */
				vreg_faultin(NULL, NULL, addto, ilp, 0);
				ii = icode_make_sub(addto, toscale);
				append_icode_list(ilp, ii);
				if (both_ptr) {
					/* Result of p1 - p is scaled */
					ii = icode_make_shr(addto, tmpvr);
					append_icode_list(ilp, ii);
				}
			}
		} else {
			if (is_vla) {
				/*
				 * 05/22/11: VLA - size must be determined at
				 * runtime
				 */
				tmpvr = get_sizeof_elem_vla_type(ty, ilp);
			} else {
				tmpvr->from_const = const_from_value(&factor, NULL);
			}

			if (op == TOK_OP_PLUS) {
				backend->
					icode_prepare_op(&toscale, &tmpvr,
						TOK_OP_MULTI, ilp);
				ii = icode_make_mul(toscale, tmpvr);
				append_icode_list(ilp, ii);
			}
			if (op == TOK_OP_PLUS) {
				vreg_faultin_protected(toscale, NULL, NULL,
					addto,
					ilp, 0);
				ii = icode_make_add(addto, toscale);
				append_icode_list(ilp, ii);
			} else {
				if (!both_ptr) {
					/* Second sub operand is scaled */
					backend->icode_prepare_op(&toscale,
						&tmpvr,
						TOK_OP_MULTI, ilp);
					ii = icode_make_mul(toscale, tmpvr);
					append_icode_list(ilp, ii);
				}

				/*
				 * 06/14/09: This was missing - the
				 * icode_prepare_op() may trash our target
				 * register
				 */
				vreg_faultin(NULL, NULL, addto, ilp, 0);
				ii = icode_make_sub(addto, toscale);
				append_icode_list(ilp, ii);
				if (both_ptr) {
					/* Result of p1 - p is scaled (div) */
					backend->icode_prepare_op
						(&addto, &tmpvr, TOK_OP_DIVIDE, ilp);
					ii = icode_make_div(addto, tmpvr);
					append_icode_list(ilp, ii);
				}
			}
		}
	} else {
		/* Element size 1 (e.g. char *) - no scaling needed */
		pro_mote(&toscale, ilp, eval);
		if (op == TOK_OP_PLUS) {
			ii = icode_make_add(addto, toscale);
		} else {
			ii = icode_make_sub(addto, toscale);
		}
		append_icode_list(ilp, ii);
	}

	if (op == TOK_OP_PLUS && toscale == lres) {
		/*
		 * This is an uncommon operand ordering, such as ``123 + buf''.
		 * The result is stored in buf's register, but 123's register
		 * is returned!
		 */
		icode_make_copyreg(toscale->pregs[0], addto->pregs[0],
			addto->type, addto->type, ilp); /* XXX long long?? */
	}

	free_pregs_vreg(toscale, ilp, 0, 0);
}
1016
1017
1018 void
do_add_sub(struct vreg ** lres,struct vreg ** rres,int op,struct token * optok,struct icode_list * il,int eval)1019 do_add_sub(struct vreg **lres, struct vreg **rres,
1020 int op, struct token *optok, struct icode_list *il,
1021 int eval) {
1022 struct icode_instr *ii = NULL;
1023
1024 if ((*lres)->type->tlist != NULL
1025 || (*rres)->type->tlist != NULL) {
1026 /* Need to scale first */
1027 struct type *newty = NULL;
1028
1029 if (is_floating_type((*lres)->type)
1030 || is_floating_type((*rres)->type)) {
1031 errorfl(optok,
1032 "Cannot do pointer arithmetic with floating "
1033 "point values");
1034 return;
1035 }
1036 if ((*lres)->type->tlist != NULL
1037 && (*rres)->type->tlist != NULL) {
1038 /* ptr - ptr2 */
1039 if (op == TOK_OP_PLUS) {
1040 errorfl(optok,
1041 "Cannot add pointers to pointers");
1042 return;
1043 } else {
1044 /* Result of p - p2 is of type ptrdiff_t */
1045 newty = make_basic_type(TY_LONG); /* XXX */
1046 }
1047 } else if ((*lres)->type->tlist != NULL) {
1048 /* ptr +/- integer */
1049 if ((*lres)->type->tlist->type == TN_ARRAY_OF
1050 || (*lres)->type->tlist->type == TN_VARARRAY_OF) {
1051 /* Becomes pointer */
1052 newty = n_xmemdup((*lres)->type,
1053 sizeof *(*lres)->type);
1054 copy_tlist(&newty->tlist, newty->tlist);
1055 newty->tlist->type = TN_POINTER_TO;
1056 }
1057 } else { /* if ((*rres)->type->tlist != NULL) { */
1058 /* integer +/- ptr */
1059 if ((*rres)->type->tlist->type == TN_ARRAY_OF
1060 || (*rres)->type->tlist->type == TN_VARARRAY_OF) {
1061 /* Becomes pointer */
1062 newty = n_xmemdup((*rres)->type,
1063 sizeof *(*rres)->type);
1064 copy_tlist(&newty->tlist, newty->tlist);
1065 newty->tlist->type = TN_POINTER_TO;
1066 } else {
1067 /* Is pointer */
1068 newty = n_xmemdup((*rres)->type,
1069 sizeof(struct type));
1070 }
1071 }
1072
1073 if (eval) {
1074 ptrarit(lres, rres, il, op, eval);
1075 }
1076 if (newty != NULL) {
1077 /*
1078 * The result still has type ``pointer'' and
1079 * may be stored in a 64bit register (e.g. on
1080 * AMD64) that is bigger than int. Hence,
1081 * we need to change type and register
1082 */
1083 if (eval) {
1084 *lres = backend->
1085 icode_make_cast(*lres, newty, il);
1086 } else {
1087 vreg_set_new_type(*lres, newty);
1088 }
1089 }
1090 return;
1091 } else {
1092 if (eval) {
1093 if (is_x87_trash(*lres)) {
1094 *lres = x87_do_binop(*lres, *rres, op, il);
1095 } else {
1096 vreg_anonymify(lres, NULL, NULL, il);
1097
1098 if (op == TOK_OP_PLUS) {
1099 ii = icode_make_add(*lres, *rres);
1100 } else {
1101 ii = icode_make_sub(*lres, *rres);
1102 }
1103 }
1104 }
1105 }
1106 if (eval && ii != NULL) {
1107 append_icode_list(il, ii);
1108 }
1109 }
1110
1111
/*
 * Strength reduction: try to turn *, / or % (and the compound-assign
 * variants) with a power-of-two constant operand into the equivalent
 * bitwise operation (<<, >> or &).
 *
 * Only applies when optimization is on (Oflag != -1), both operands
 * are integral, and exactly one operand is a constant. Division and
 * modulo are additionally restricted to unsigned left operands (the
 * shift/mask forms are wrong for negative values).
 *
 * On success, rewrites *operator (and the constant in *left/*right,
 * swapping them for const * nonconst) and returns 1; otherwise
 * returns 0 and leaves everything untouched.
 */
static int
can_transform_to_bitwise(struct vreg **left, struct vreg **right,
	int *operator, struct icode_list *il) {

	struct vreg	**dest = NULL;	/* non-constant operand */
	struct vreg	**src = NULL;	/* constant operand */
	struct token	*t;
	int		op = *operator;
	int		transformed_op = 0;
	int		reorder_operands = 0;


	if (op != TOK_OP_MOD
		&& op != TOK_OP_DIVIDE
		&& op != TOK_OP_MULTI
		&& op != TOK_OP_COMOD
		&& op != TOK_OP_CODIVIDE
		&& op != TOK_OP_COMULTI) {
		return 0;
	}

	if (Oflag == -1
		|| !is_integral_type((*left)->type)
		|| !is_integral_type((*right)->type)) {
		return 0;
	}

	if ((*left)->from_const) {
		/*
		 * Left is constant, so right isn't. This is only good
		 * for multiplication because division is not commutative
		 */
		if (op != TOK_OP_MULTI && op != TOK_OP_COMULTI) {
			return 0;
		}

		/*
		 * 16 * foo
		 *
		 * Original left operand becomes right operand
		 */
		dest = right;
		src = left;

		/*
		 * 07/14/08: This was missing - const * nonconst is changed to
		 * nonconst << const, so we have to reverse the order of
		 * operands
		 */
		reorder_operands = 1;
	} else if ((*right)->from_const) {
		/*
		 * foo <op> 16
		 */
		dest = left;
		src = right;
		if (op == TOK_OP_DIVIDE) {
			/*
			 * 07/14/08: XXX: Turned shift transformation off for
			 * signed values! Because negative values are not
			 * handled correctly, the result is off by one
			 */
			if ((*left)->type->sign != TOK_KEY_UNSIGNED) {
				return 0;
			}
		}
	} else {
		/* Neither operand is constant - nothing to do */
		return 0;
	}

	switch (op) {
	case TOK_OP_MOD:
	case TOK_OP_COMOD:
		/*
		 * % pow2 can be transformed to & (pow2 - 1)
		 *
		 * 01/28/10: Wow, this was wrong for signed values!
		 * (Tcl bug)
		 */
		if ((*dest)->type->sign != TOK_KEY_UNSIGNED) {
			return 0;
		}
		t = cross_get_pow2_minus_1((*src)->from_const);
		if (t == NULL) {
			/* Not a power of two */
			return 0;
		}
		transformed_op = op == TOK_OP_MOD? TOK_OP_BAND: TOK_OP_COBAND;
		break;
	case TOK_OP_MULTI:
	case TOK_OP_DIVIDE:
	case TOK_OP_COMULTI:
	case TOK_OP_CODIVIDE:
		/* * pow2 and / pow2 can be transformed to <</>> bits */
		t = cross_get_pow2_shiftbits((*src)->from_const);
		if (t == NULL) {
			/* Not a power of two */
			return 0;
		}
		if (op == TOK_OP_MULTI || op == TOK_OP_COMULTI) {
			transformed_op = op == TOK_OP_MULTI?
				TOK_OP_BSHL: TOK_OP_COBSHL;
		} else {
			transformed_op = op == TOK_OP_DIVIDE?
				TOK_OP_BSHR: TOK_OP_COBSHR;
		}
		break;
	default:
		unimpl();
		break;
	}
	/* Replace the constant with the mask/shift-count token */
	*src = dup_vreg(*src);
	(*src)->from_const = t;

	if (!backend->have_immediate_op((*dest)->type, transformed_op)) {
		/* Target needs the constant in a register */
		vreg_set_unallocatable(*dest);
	/*	vreg_faultin_protected(*src, NULL, NULL, *dest, il, 0);*/
		vreg_anonymify(src, NULL, NULL, il);
		vreg_set_allocatable(*dest);
	}

	*operator = transformed_op;

	if (reorder_operands) {
		/* 07/14/08: Reorder operands */
		struct vreg	*temp = *left;
		*left = *right;
		*right = temp;
	}
	return 1;
}
1241
1242 static int
1243 do_bitwise(struct vreg **lres0, struct vreg *rres,
1244 struct operator *operator, struct token *optok, struct icode_list *il,
1245 int eval);
1246
1247 static int
do_mul(struct vreg ** lres0,struct vreg * rres,struct operator * operator,struct token * op,struct icode_list * il,int eval)1248 do_mul(struct vreg **lres0, struct vreg *rres,
1249 struct operator *operator, struct token *op, struct icode_list *il,
1250 int eval) {
1251
1252 struct icode_instr *ii;
1253 struct vreg *lres;
1254 struct reg *lres_preg1 = NULL;
1255 struct reg *lres_preg2 = NULL;
1256
1257 /*
1258 * 04/12/08: Moved type-checking up
1259 */
1260 lres = *lres0;
1261 if (operator->value == TOK_OP_MOD) {
1262 if (!is_integral_type(lres->type)
1263 || !is_integral_type(rres->type)) {
1264 errorfl(op /* XXX */,
1265 "Operands of `%%' operator must have integral"
1266 " type");
1267 return -1;
1268 }
1269 } else if (!is_arithmetic_type(lres->type)
1270 || !is_arithmetic_type(rres->type)) {
1271 errorfl(op /* XXX */,
1272 "Operands of `%s' operator must have arithmetic "
1273 "(integral or floating point) type",
1274 operator->name);
1275 return -1;
1276 }
1277
1278 if (!eval) {
1279 /* We're already done */
1280 return 0;
1281 }
1282
1283 if (eval) {
1284 /*
1285 * 04/12/08: Optimize divisions and multiplications by
1286 * power-of-two values by using bitwise opertors
1287 */
1288 #if 0
1289 if (Oflag != -1) {
1290 struct vreg *left_tmp = *lres0;
1291 struct vreg *right_tmp = rres;
1292
1293 if (can_transform_to_bitwise(&left_tmp,
1294 &right_tmp, &operator, il)) {
1295 int rc;
1296
1297 rc = do_bitwise(&left_tmp, right_tmp,operator,
1298 op, il, eval);
1299 *lres0 = left_tmp;
1300 return rc;
1301 }
1302 }
1303 #endif
1304
1305 if (!is_x87_trash(*lres0)) {
1306 vreg_anonymify(lres0, NULL, NULL, il);
1307 } else {
1308 *lres0 = x87_anonymify(*lres0, il);
1309 }
1310 }
1311 lres = *lres0;
1312
1313
1314 if (lres->is_multi_reg_obj) {
1315 /*
1316 * long long ... so this operation is carried out using
1317 * a function call and we need to invalidate GPRs
1318 * XXX hm this belongs into icode_prepare_op ?!??!
1319 */
1320 lres->pregs[0]->used = lres->pregs[1]->used = 0;
1321 rres->pregs[0]->used = rres->pregs[1]->used = 0;
1322 lres_preg1 = lres->pregs[0];
1323 lres_preg2 = lres->pregs[1];
1324 backend->invalidate_gprs(il, 1, INV_FOR_FCALL);
1325 }
1326
1327 if (is_x87_trash(lres)) {
1328 *lres0 = x87_do_binop(*lres0, rres, operator->value, il);
1329 } else {
1330 if (operator->value == TOK_OP_MULTI) {
1331 ii = icode_make_mul(lres, rres);
1332 } else if (operator->value == TOK_OP_DIVIDE) {
1333 ii = icode_make_div(lres, rres);
1334 } else {
1335 /* MOD */
1336 ii = icode_make_mod(lres, rres);
1337 }
1338 append_icode_list(il, ii);
1339 }
1340
1341
1342 if (lres->is_multi_reg_obj) {
1343 if (backend->arch == ARCH_X86) {
1344 /* long long results are returned in eax:edx */
1345 vreg_map_preg(lres, &x86_gprs[0]);
1346 vreg_map_preg2(lres, &x86_gprs[3]);
1347 } else if (backend->arch == ARCH_POWER) {
1348 /* Results are returned in dest pregs */
1349 vreg_map_preg(lres, lres_preg1);
1350 vreg_map_preg2(lres, lres_preg2);
1351 } else {
1352 unimpl();
1353 }
1354 }
1355 return 0;
1356 }
1357
1358 static int
do_bitwise(struct vreg ** lres0,struct vreg * rres,struct operator * operator,struct token * optok,struct icode_list * il,int eval)1359 do_bitwise(struct vreg **lres0, struct vreg *rres,
1360 struct operator *operator, struct token *optok, struct icode_list *il,
1361 int eval) {
1362
1363 struct icode_instr *ii = NULL;
1364 struct vreg *lres;
1365 int op = operator->value;
1366
1367 if (eval) {
1368 vreg_anonymify(lres0, NULL, NULL, il);
1369 }
1370 lres = *lres0;
1371
1372 if (!is_integral_type(lres->type)
1373 || !is_integral_type(rres->type)) {
1374 errorfl(optok,
1375 "Both operands of the `%s' operator have to be "
1376 "of integral type", operator->name);
1377 return -1;
1378 }
1379 if (!eval) {
1380 /* We're already done */
1381 return 0;
1382 }
1383
1384 /* XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX ... anonymify broken */
1385 #if 0
1386 lres = n_xmemdup(lres, sizeof *lres);
1387 #endif
1388 lres = copy_vreg(lres);
1389
1390 if (op == TOK_OP_BSHL) {
1391 ii = icode_make_shl(lres, rres);
1392 } else if (op == TOK_OP_BSHR) {
1393 ii = icode_make_shr(lres, rres);
1394 } else if (op == TOK_OP_BAND) {
1395 ii = icode_make_and(lres, rres);
1396 } else if (op == TOK_OP_BOR) {
1397 ii = icode_make_or(lres, rres);
1398 } else if (op == TOK_OP_BXOR) {
1399 ii = icode_make_xor(lres, rres);
1400 } else {
1401 unimpl();
1402 }
1403
1404 append_icode_list(il, ii);
1405 return 0;
1406 }
1407
1408 void
boolify_result(struct vreg * vr,struct icode_list * il)1409 boolify_result(struct vreg *vr, struct icode_list *il) {
1410 struct icode_instr *ii;
1411 struct icode_instr *label = icode_make_label(NULL);
1412
1413 ii = icode_make_cmp(vr, NULL);
1414 append_icode_list(il, ii);
1415 ii = icode_make_branch(label, INSTR_BR_EQUAL, vr);
1416 append_icode_list(il, ii);
1417
1418 /* At this point, the value is nonzero so it always becomes 1 */
1419 ii = icode_make_setreg(vr->pregs[0], 1);
1420 append_icode_list(il, ii);
1421 append_icode_list(il, label);
1422 }
1423
1424
1425
1426 void
mask_source_for_bitfield(struct type * ltype,struct vreg * rres,struct icode_list * il,int for_reading)1427 mask_source_for_bitfield(struct type *ltype, struct vreg *rres,
1428 struct icode_list *il, int for_reading) {
1429
1430 struct vreg *maskvr;
1431 struct token *tok;
1432 struct icode_instr *ii;
1433
1434 (void) for_reading;
1435 /* tok = make_bitfield_mask(ltype, for_reading, &shift_bits);*/
1436 tok = ltype->tbit->bitmask_tok;
1437
1438 maskvr = vreg_alloc(NULL,tok,NULL,NULL);
1439
1440 vreg_set_unallocatable(rres);
1441 vreg_faultin_protected(rres, NULL, NULL, maskvr, il, 0);
1442
1443 /*
1444 * 10/12/08: Hmm, ltype doesn't correspond to the input type
1445 * storage unit type, so cast to make them match
1446 */
1447 maskvr = backend->icode_make_cast(maskvr, /*ltype*/rres->type, il);
1448 vreg_set_allocatable(rres);
1449
1450 ii = icode_make_and(rres, maskvr);
1451 append_icode_list(il, ii);
1452 }
1453
/*
 * Shift a bitfield value into (encode = 1) or out of (encode = 0) its
 * position inside the storage unit described by ty->tbit.
 *
 * Encoding shifts left by the precomputed shift token. Decoding masks
 * off unrelated bits and then either sign-extends via a left/right
 * shift pair (signed bitfields) or shifts right (unsigned ones).
 * Shift tokens are NULL when the shift count is zero, in which case the
 * corresponding shift is skipped entirely.
 */
static void
shift_bitfield(struct type *ty, struct vreg *vr, int encode, struct icode_list *il) {
	struct token		*shiftbits;
	struct icode_instr	*ii = NULL;
	struct vreg		*shiftvr = NULL;

	shiftbits = ty->tbit->shifttok;

	vreg_faultin(NULL, NULL, vr, il, 0);


	if (shiftbits != NULL) {
		/* Only shift if shift count not 0 */
		shiftvr = vreg_alloc(NULL,shiftbits,NULL,NULL);
		vreg_faultin_protected(vr, NULL, NULL, shiftvr, il, 0);
	}

	if (encode) {
		if (shiftvr != NULL) {
			backend->icode_prepare_op(&vr, &shiftvr, TOK_OP_BSHL, il);
			ii = icode_make_shl(vr, shiftvr);
		}
	} else {
		/*
		 * Decode. First decide whether we need to sign-extend
		 */
		struct vreg *andvr;

		/*
		 * Mask off all unrelated bits. This must be done at the
		 * beginning because if we do it after sign-extension, then
		 * we will lose sign bits
		 */
		andvr = vreg_alloc(NULL,ty->tbit->bitmask_tok_with_shiftbits,NULL,NULL);
		vreg_faultin_protected(vr, NULL, NULL, andvr, il, 0);
		vreg_faultin_protected(andvr, NULL, NULL, vr, il, 0);
		backend->icode_prepare_op(&vr, &andvr, TOK_OP_BAND, il);
		ii = icode_make_and(vr, andvr);
		append_icode_list(il, ii);
		ii = NULL;

		if (ty->sign != TOK_KEY_UNSIGNED) {
			/* Sign-extend: shift left to the MSB, then arithmetic
			 * shift right back down */
			struct vreg *sign_ext_left;
			struct vreg *sign_ext_right;

			if (/*vr->type*/ty->tbit->shifttok_signext_left != NULL) {
				sign_ext_left = vreg_alloc(NULL,
					/*vr->type*/ty->tbit->shifttok_signext_left,NULL,NULL);
				vreg_faultin_protected(vr, NULL, NULL, sign_ext_left, il, 0);
				backend->icode_prepare_op(&vr, &sign_ext_left, TOK_OP_BSHL, il);
				ii = icode_make_shl(vr, sign_ext_left);
				append_icode_list(il, ii);
			}

			if (/*vr->type*/ty->tbit->shifttok_signext_right != NULL) {
				sign_ext_right = vreg_alloc(NULL,
					/*vr->type*/ty->tbit->shifttok_signext_right,NULL,NULL);
				vreg_faultin_protected(vr, NULL, NULL, sign_ext_right, il, 0);
				backend->icode_prepare_op(&vr, &sign_ext_right, TOK_OP_BSHR, il);
				ii = icode_make_shr(vr, sign_ext_right);
			}
		} else {
			if (shiftvr != NULL) {
				/* shift right to get to the value */
				backend->icode_prepare_op(&vr, &shiftvr, TOK_OP_BSHR, il);
				ii = icode_make_shr(vr, shiftvr);
			}
		}

	}
	/* Append any pending final shift instruction */
	if (ii != NULL) {
		append_icode_list(il, ii);
	}
}
1529
1530
1531
/*
 * Shift a value left into its bitfield position for storage
 * (wrapper around shift_bitfield() with encode = 1).
 */
static void
encode_bitfield(struct type *ty, struct vreg *vr, struct icode_list *il) {
	shift_bitfield(ty, vr, 1, il);
}
1536
/*
 * Extract a bitfield value from its storage unit: mask, shift down and
 * (for signed bitfields) sign-extend (wrapper around shift_bitfield()
 * with encode = 0).
 */
void
decode_bitfield(struct type *ty, struct vreg *vr, struct icode_list *il) {
	shift_bitfield(ty, vr, 0, il);
}
1541
1542 void
load_and_decode_bitfield(struct vreg ** lres,struct icode_list * il)1543 load_and_decode_bitfield(struct vreg **lres, struct icode_list *il) {
1544 *lres = promote_bitfield(*lres, il);
1545 *lres = backend->icode_make_cast(*lres,
1546 cross_get_bitfield_promoted_type((*lres)->type), il);
1547 }
1548
1549
#if 0 /* OBSOLETE!!! */
/*
 * Old bitfield write-back that ORed the encoded value into the storage
 * unit without clearing the old bits first. Superseded by
 * write_back_bitfield_by_assignment(); kept only for reference.
 */
void
write_back_bitfield_with_or(struct vreg *destvr, struct vreg *lres,
	struct type *desttype,
	struct icode_list *il) {

	struct icode_instr	*ii;
	struct vreg		*temp;

	/*
	 * Limit source value to bitfield range and
	 * encode it
	 */
	lres = backend->icode_make_cast(lres, desttype, il);
	mask_source_for_bitfield(desttype, lres, il, 0);
	encode_bitfield(desttype, lres, il);

	vreg_faultin_protected(lres, NULL, NULL, destvr, il, 0);

	backend->icode_prepare_op(&destvr, &lres, TOK_OP_BOR, il);
	ii = icode_make_or(destvr, lres);
	append_icode_list(il, ii);

	icode_make_store(/*NULL*/curfunc, destvr, destvr, il);
}
#endif
1576
1577
/*
 * Store the value rres into the bitfield lvalue lres.
 *
 * Sequence: mask rres down to the bitfield width and encode (shift) it
 * into position; AND the destination with the inverted bitmask to clear
 * the old bitfield bits; OR the encoded value in; then store the merged
 * storage unit back. Finally rres is cast back to its original type and
 * its result register remapped, because callers still use rres after
 * the write-back.
 */
void
write_back_bitfield_by_assignment(struct vreg *lres, struct vreg *rres,
	struct icode_list *ilp) {

	struct vreg		*and_vr;
	struct icode_instr	*ii;
	struct type		*orig_rres_type = rres->type;
	struct vreg		*orig_rres = rres;


	/*
	 * 07/20/08: Bitfield assignment!
	 */
	mask_source_for_bitfield(lres->type, rres, ilp, 0);
	encode_bitfield(lres->type, rres, ilp);

	/* Load the inverted bitmask, keeping lres and and_vr resident */
	and_vr = vreg_alloc(NULL,lres->type->tbit->bitmask_inv_tok,NULL,NULL);
	vreg_faultin_protected(lres, NULL, NULL, /*rres*/and_vr, ilp, 0);
	vreg_faultin_protected(and_vr, NULL, NULL, lres, ilp, 0);

	rres = backend->icode_make_cast(rres, lres->type, ilp);
	/*
	 * Mask off old bitfield value by ANDing with the inverted
	 * bitmask
	 */
	backend->icode_prepare_op(&lres, &and_vr, TOK_OP_BAND, ilp);
	ii = icode_make_and(lres, and_vr);
	append_icode_list(ilp, ii);

	/* OR the encoded new value into the cleared storage unit */
	vreg_faultin_protected(rres, NULL, NULL, lres, ilp, 0);
	vreg_faultin_protected(lres, NULL, NULL, rres, ilp, 0);
	backend->icode_prepare_op(&lres, &rres, TOK_OP_BOR, ilp);
	ii = icode_make_or(lres, rres);
	append_icode_list(ilp, ii);


	/*
	 * 03/04/09: Added vreg_faultin_ptr() (and vreg_set_unallocatable()
	 * to ensure that the actual bitfield value will not be trashed by
	 * it).
	 *
	 * This is needed because the ORing and ANDing above may trash a
	 * pointer preg if lres comes from a pointer, so it must be
	 * reloaded
	 */
	vreg_set_unallocatable(lres);
	vreg_faultin_ptr(lres, ilp);

	icode_make_store(NULL, lres, lres, ilp);

	vreg_set_allocatable(lres);

	/*
	 * 10/12/08: Seems that rres is used after writing it
	 * back! XXX Find out where and why!
	 * This means that the icode_make_cast() above will trash
	 * the caller's vreg because it will break the register
	 * mapping. So convert the value back, and map the result
	 * register to the caller's vr
	 */
	rres = backend->icode_make_cast(rres, orig_rres_type, ilp);
	vreg_map_preg(orig_rres, rres->pregs[0]);
}
1641
/*
 * If the backend emulates 128-bit long double and both operands are of
 * type long double (rres may be NULL for unary use), cast both to plain
 * double so arithmetic can be carried out natively. *temp_lres and
 * *temp_rres receive the converted vregs; otherwise they are simply set
 * to the original lres/rres.
 *
 * Returns 1 if a conversion took place, 0 otherwise. No conversion is
 * done when eval is 0.
 */
int
emul_conv_ldouble_to_double(struct vreg **temp_lres,
		struct vreg **temp_rres,
		struct vreg *lres,
		struct vreg *rres,
		struct icode_list *ilp,
		int eval) {

	int	changed_vrs = 0;

	if (lres->type->code == TY_LDOUBLE
		&& (rres == NULL || rres->type->code == TY_LDOUBLE)
		&& backend->emulate_long_double
		&& eval) {
		/*
		 * 11/20/08: For now we emulate 128
		 * bit long double by converting it
		 * to double and back whenever we
		 * need to carry out arithmetic
		 * operations
		 *
		 * XXX Perhaps we should use icode_
		 * prepare_op() here...
		 */
		if (rres != NULL) {
			vreg_set_unallocatable(rres);
		}
		*temp_lres = backend->icode_make_cast(
			lres,
			make_basic_type(TY_DOUBLE),
			ilp);
		vreg_set_unallocatable(lres);
		if (rres != NULL) {
			*temp_rres = backend->icode_make_cast(
				rres,
				make_basic_type(TY_DOUBLE),
				ilp);
		}
		changed_vrs = 1;
		/* Ensure both converted values are register-resident */
		vreg_faultin(NULL, NULL, *temp_lres, ilp, 0);
		if (rres != NULL) {
			vreg_faultin_protected(*temp_lres, NULL, NULL, *temp_rres, ilp, 0);
		}
		vreg_set_allocatable(lres);
		if (rres != NULL) {
			vreg_set_allocatable(rres);
		}
	} else {
		/* No emulation needed - pass operands through unchanged */
		changed_vrs = 0;
		*temp_lres = lres;
		if (rres != NULL) {
			*temp_rres = rres;
		}
	}
	return changed_vrs;
}
1698
1699 /*
1700 * XXX not ``eval-clean'' (sizeof)
1701 */
1702 static struct vreg *
do_comp_assign(struct vreg * lres,struct vreg * rres,int op,struct token * optok,struct icode_list * il,int eval)1703 do_comp_assign(struct vreg *lres, struct vreg *rres,
1704 int op, struct token *optok, struct icode_list *il, int eval) {
1705 struct operator *operator;
1706 struct vreg *destvr = copy_vreg(lres);
1707 struct type *desttype = lres->type;
1708 int op2 = op;
1709 int needprep = 0;
1710 struct vreg *temp_lres;
1711 struct vreg *temp_rres;
1712
1713 if (op == TOK_OP_CODIVIDE
1714 || op == TOK_OP_COMULTI
1715 || op == TOK_OP_COMOD
1716 || op == TOK_OP_COBSHL
1717 || op == TOK_OP_COBSHR
1718 || op == TOK_OP_COBAND
1719 || op == TOK_OP_COBXOR
1720 || op == TOK_OP_COBOR) {
1721 if (op == TOK_OP_CODIVIDE) op2 = TOK_OP_DIVIDE;
1722 else if (op == TOK_OP_COMULTI) op2 = TOK_OP_MULTI;
1723 else if (op == TOK_OP_COMOD) op2 = TOK_OP_MOD;
1724 else if (op == TOK_OP_COBSHL) op2 = TOK_OP_BSHL;
1725 else if (op == TOK_OP_COBSHR) op2 = TOK_OP_BSHR;
1726 else if (op == TOK_OP_COBAND) op2 = TOK_OP_BAND;
1727 else if (op == TOK_OP_COBXOR) op2 = TOK_OP_BXOR;
1728 else if (op == TOK_OP_COBOR) op2 = TOK_OP_BOR;
1729 needprep = 1;
1730 } else if (op == TOK_OP_COPLUS) {
1731 op2 = TOK_OP_PLUS;
1732 } else if (op == TOK_OP_COMINUS) {
1733 op2 = TOK_OP_MINUS;
1734 }
1735
1736
1737 if (eval) {
1738 if (lres->type->tbit != NULL) {
1739 /*
1740 * 07/20/08: Bitfield assignment! Load and decode target value
1741 * so that we can perform the requested operation and write it
1742 * back
1743 */
1744 load_and_decode_bitfield(&lres, il);
1745 (void) promote(&lres, &rres, op2, optok, il, eval);
1746 } else {
1747 (void) promote(&lres, &rres, op2, optok, il, eval);
1748 /* XXX .... as promote may move stuff :( */
1749 if (is_x87_trash(lres)) {
1750 /*
1751 * The target may not have been loaded yet
1752 */
1753 #if 0
1754 vreg_faultin_x87(NULL, NULL, lres, il, 0);
1755 vreg_map_preg(lres, &x86_fprs[1]);
1756 vreg_faultin_x87(NULL, NULL, rres, il, 0);
1757 #endif
1758 } else {
1759 vreg_faultin(NULL, NULL, lres, il, 0);
1760 vreg_faultin_protected(lres, NULL, NULL,
1761 rres, il, 0);
1762 }
1763 }
1764 if (needprep) {
1765 backend->icode_prepare_op(&lres, &rres, op2, il);
1766 }
1767 }
1768
1769 if (lres->type->code == TY_LDOUBLE && rres->type->code == TY_LDOUBLE) {
1770 emul_conv_ldouble_to_double(&temp_lres,
1771 &temp_rres, lres, rres, il, eval);
1772 } else {
1773 temp_rres = rres;
1774 temp_lres = lres;
1775 }
1776
1777 switch (op) {
1778 case TOK_OP_COPLUS:
1779 do_add_sub(&temp_lres, &temp_rres, TOK_OP_PLUS, optok, il, eval); /* XXX */
1780 break;
1781 case TOK_OP_COMINUS:
1782 do_add_sub(&temp_lres, &temp_rres, TOK_OP_MINUS, optok, il, eval); /* XXX */
1783 break;
1784 case TOK_OP_CODIVIDE:
1785 operator = &operators[LOOKUP_OP2(TOK_OP_DIVIDE)];
1786 do_mul(&temp_lres, temp_rres, operator, optok, il, eval);
1787 break;
1788 case TOK_OP_COMULTI:
1789 operator = &operators[LOOKUP_OP2(TOK_OP_MULTI)];
1790 do_mul(&temp_lres, temp_rres, operator, optok, il, eval);
1791 break;
1792 case TOK_OP_COMOD:
1793 operator = &operators[LOOKUP_OP2(TOK_OP_MOD)];
1794 do_mul(&temp_lres, temp_rres, operator, optok, il, eval);
1795 break;
1796 case TOK_OP_COBAND:
1797 operator = &operators[LOOKUP_OP2(TOK_OP_BAND)];
1798 do_bitwise(&temp_lres, temp_rres, operator, optok, il, eval);
1799 break;
1800 case TOK_OP_COBOR:
1801 operator = &operators[LOOKUP_OP2(TOK_OP_BOR)];
1802 do_bitwise(&lres, rres, operator, optok, il, eval);
1803 break;
1804 case TOK_OP_COBXOR:
1805 operator = &operators[LOOKUP_OP2(TOK_OP_BXOR)];
1806 do_bitwise(&temp_lres, temp_rres, operator, optok, il, eval);
1807 break;
1808 case TOK_OP_COBSHL:
1809 operator = &operators[LOOKUP_OP2(TOK_OP_BSHL)];
1810 do_bitwise(&temp_lres, temp_rres, operator, optok, il, eval);
1811 break;
1812 case TOK_OP_COBSHR:
1813 operator = &operators[LOOKUP_OP2(TOK_OP_BSHR)];
1814 do_bitwise(&temp_lres, temp_rres, operator, optok, il, eval);
1815 break;
1816 default:
1817 printf("%d is not compound assignment operator\n", op);
1818 abort();
1819 }
1820
1821 lres = temp_lres;
1822 rres = temp_rres;
1823
1824 /*
1825 * Given ``char *p;'', ``*p += 4'' has type ``char'', so
1826 * promotions to int need to be reverted here
1827 */
1828 if (eval) {
1829 if (desttype->tbit != NULL) {
1830 write_back_bitfield_by_assignment(destvr, lres, il);
1831 } else {
1832 lres = backend->
1833 icode_make_cast(lres, desttype, il);
1834 /*
1835 * With x87 the item may not be in a register even
1836 * after icode_make_cast
1837 */
1838 vreg_faultin_x87(NULL, NULL, lres, il, 0);
1839 destvr->pregs[0] = lres->pregs[0];
1840 if (lres->is_multi_reg_obj) {
1841 destvr->pregs[1] = lres->pregs[1];
1842 }
1843
1844 /*
1845 * 070802: This code only did:
1846 *
1847 * if (destvr->from_ptr) { faultin(destvr->from_ptr);
1848 *
1849 * That ignored parent structs which come from pointers.
1850 * Multi-register assignmenst were also ignored
1851 */
1852 reg_set_unallocatable(lres->pregs[0]);
1853 if (lres->is_multi_reg_obj) {
1854 reg_set_unallocatable(lres->pregs[1]);
1855 }
1856 vreg_faultin_ptr(destvr, il);
1857 reg_set_allocatable(lres->pregs[0]);
1858 if (lres->is_multi_reg_obj) {
1859 reg_set_allocatable(lres->pregs[1]);
1860 }
1861
1862 icode_make_store(NULL, lres, destvr, il);
1863 }
1864 } else {
1865 vreg_set_new_type(lres, desttype);
1866 }
1867
1868 return destvr;
1869 }
1870
1871 /*
1872 * XXX not ``eval-clean'' (sizeof)
1873 * XXX I think we need to be more careful not to trash the right/left operand..
1874 * Have to use reg_set_unallocatable() before faulting in the left operand
1875 * pointer if we are assigning through a pointer more carefully?!?!?
1876 */
1877
/*
 * Emit icode for an assignment expression. Plain `=' is handled here;
 * compound operators are delegated to do_comp_assign(). Handles struct
 * copies, x87 floating point, bitfield targets, and stores both to
 * named variables and through pointers/parent structs.
 *
 * lres/rres - left and right operand vregs
 * ex        - the assignment expression (ex->op distinguishes `=' from
 *             compound forms; ex->left->data->var_lvalue names a direct
 *             variable target if non-NULL)
 * ilp       - icode list to append to
 * level     - expression nesting level (1 = top of conditional, used
 *             for the `= vs ==' warning)
 * purpose   - enclosing statement kind (TOK_KEY_IF etc.)
 * eval      - 0 = typecheck/type propagation only, no code emitted
 *
 * Returns the right-hand vreg (the value of the assignment expression),
 * or NULL on a type error.
 */
static struct vreg *
do_assign(
	struct vreg *lres,
	struct vreg *rres,
	struct expr *ex,
	struct icode_list *ilp,
	int level,
	int purpose,
	int eval) {

	int			is_struct = 0;
	int			is_x87 = 0;
	struct icode_instr	*ii = NULL;
	struct decl		*d;
	struct vreg		*vr2;

	/* Need to do typechecking */
	if (ex->op == TOK_OP_ASSIGN
		&& check_types_assign(ex->tok, lres->type, rres, 0, 0) != 0) {
		return NULL;
	}

	if ((rres->type->code == TY_STRUCT
		|| rres->type->code == TY_UNION)
		&& rres->type->tlist == NULL) {
		/* Struct/union by-value assignment - done via copystruct */
		is_struct = 1;
		if ((lres->type->code != TY_STRUCT
			&& lres->type->code != TY_UNION)
			|| lres->type->tlist != NULL) {
			errorfl(ex->tok,
				"Incompatible types in assignment");
			return NULL;
		}
	} else {
		if (eval) {
			/* x87 operands stay on the fp stack; others are
			 * loaded into registers now */
			if (is_x87_trash(rres)
				|| is_x87_trash(lres)) {
				is_x87 = 1;
			} else {
				vreg_faultin(NULL, NULL, rres, ilp, 0);
			}
		}
	}

	if (ex->op != TOK_OP_ASSIGN) {
		/* Compound assignment operator */
		if (eval && !is_x87) {
			vreg_faultin(NULL, NULL, rres, ilp, 0);
			vreg_faultin_protected(rres, NULL, NULL, lres, ilp, 0);
		}
		if (!eval) { /*rres->type->tbit != NULL || is_bitfield) {*/
			/* Unevaluated bitfield source: just record the
			 * promoted type */
			if (rres->type->tbit != NULL) {
				vreg_set_new_type(rres, cross_get_bitfield_promoted_type(
					rres->type));
			}
		}
		return do_comp_assign(lres, rres,
			ex->op, ex->tok, ilp, eval);
	} else {
		if (level == 1
			&& (purpose == TOK_KEY_IF
			|| purpose == TOK_KEY_DO
			|| purpose == TOK_KEY_WHILE
			|| purpose == TOK_KEY_SWITCH)) {
			warningfl(ex->tok,
				"`=' operator used at top-"
				"level in conditional "
				"expression - perhaps you "
				"meant `=='?");
		}
	}

	if (!eval) {
		/* Not evaluated - just set new type */
		vreg_set_new_type(rres, lres->type);
		return rres;
	}

	/* vr2 is the parent struct vreg if lres is a member access */
	if (lres->parent) {
		vr2 = get_parent_struct(lres);
	} else {
		vr2 = NULL;
	}


	if (ex->left->data->var_lvalue != NULL) {
		/* Store to variable */

		d = ex->left->data->var_lvalue;
		if (is_struct) {
			struct vreg *vr3;

			if (rres->parent) {
				vr3 = get_parent_struct(rres);
			} else {
				vr3 = NULL;
			}

			/* Free source pointer pregs, invalidate GPRs for
			 * the memcpy-like copystruct call, then remap */
			if (rres->from_ptr) {
				free_preg(rres->from_ptr->pregs[0],
					ilp, 0, 0);
			}
			if (vr3 && vr3->from_ptr) {
				free_preg(vr3->from_ptr->pregs[0],
					ilp, 0, 0);
			}

			backend->invalidate_gprs(ilp, /*level==*/1, INV_FOR_FCALL);

			if (rres->from_ptr) {
#if 0
				rres->from_ptr->pregs[0]->used = 1;
#endif
				vreg_map_preg(rres->from_ptr, rres->from_ptr->pregs[0]);
			}
			if (vr3 && vr3->from_ptr) {
#if 0
				vr3->from_ptr->pregs[0]->used = 1;
#endif
				vreg_map_preg(vr3->from_ptr, vr3->from_ptr->pregs[0]);
			}

			if (lres->from_ptr) {
				vreg_faultin_protected(rres, NULL, NULL,
					lres->from_ptr, ilp, 0);
			} else if (vr2 && vr2->from_ptr) {
				vreg_faultin_protected(rres, NULL, NULL,
					vr2->from_ptr, ilp, 0);
			}

			icode_make_copystruct(lres, rres, ilp);
			if (rres->from_ptr) {
				free_preg(rres->from_ptr->pregs[0],
					ilp, 0, 0);
			}
			if (vr3 && vr3->from_ptr) {
				free_preg(vr3->from_ptr->pregs[0],
					ilp, 0, 0);
			}
			if (lres->from_ptr) {
				free_preg(lres->from_ptr->pregs[0],
					ilp, 0, 0);
			}
			if (vr2 && vr2->from_ptr) {
				free_preg(vr2->from_ptr->pregs[0],
					ilp, 0, 0);
			}
		} else {
			struct reg *r = NULL;

			rres = backend->
				icode_make_cast(rres, lres->type, ilp);



			/*
			 * When casting to x87 fp, the result is not register
			 * resident anymore!
			 */
			vreg_faultin_x87(NULL, NULL, rres, ilp, 0);

			if (eval && lres->type->tbit == NULL) {
				/*
				 * 070802: This was apparently done the wrong way
				 * around; Possible left-handed pointers were loaded,
				 * then the right side was cast to the left type,
				 * then it was assigned.
				 *
				 * This caused an x86 sign-extension from int to
				 * long long to trash eax (this must be done with eax
				 * and edx), so the pointer was also trashed
				 */
				vreg_set_unallocatable(rres);
				r = vreg_faultin_ptr(lres, ilp);
				vreg_set_allocatable(rres);
			}

			if (!eval) {
				/* 09/09/08: Don't perform icode operations! */
				;
			} else if (lres->type->tbit != NULL) {
				write_back_bitfield_by_assignment(lres, rres, ilp);
				/*
				 * Reset ii pointer because it is appended below if
				 * non-null
				 */
				ii = NULL;
			} else if (lres->parent) {
				/* Member of a struct - store through lres */
				if (ex->op == TOK_OP_ASSIGN) {
					lres->pregs[0] = rres->pregs[0];
					if (rres->is_multi_reg_obj) {
						lres->pregs[1] = rres->pregs[1];
					}
				}

				icode_make_store(NULL,
					rres, lres, ilp);
				ii = NULL;
				if (is_x87_trash(rres)) {
					lres->pregs[0] = NULL;
				}
			} else {
				/* Plain variable - build a fresh vreg for the
				 * declaration and store to it */
				struct vreg *vr;

#if 0
				/* CANOFWORMS :( */
				vr = n_xmemdup(d->vreg, sizeof *d->vreg);
#endif
				vr = vreg_alloc(d, NULL, NULL, NULL);
				vreg_set_new_type(vr, d->dtype);

				if (ex->op == TOK_OP_ASSIGN) {
					vr->pregs[0] =
						rres->pregs[0];
					if (rres->is_multi_reg_obj) {
						vr->is_multi_reg_obj = 2;
						vr->pregs[1] =
							rres->pregs[1];
					}
				} else {
					vr->pregs[0] =
						lres->pregs[0];
					if (lres->is_multi_reg_obj) {
						vr->is_multi_reg_obj = 2;
						vr->pregs[1] =
							lres->pregs[1];
					}
				}
				icode_make_store(NULL,
					rres, /*d->vreg*/vr, ilp);
				ii = NULL;
				if (is_x87_trash(rres)) {
					vr->pregs[0] = NULL;
				}
			}
			if (r != NULL) {
				free_preg(r, ilp, 1, 0);
			}
		}
	} else {
		/* Store through a pointer (no named lvalue) */
		struct reg *r = NULL;

		if (!is_struct) {
			rres = backend->
				icode_make_cast(rres, lres->type, ilp);
			/*
			 * When casting to x87 fp, the result is not register
			 * resident anymore!
			 */
			vreg_faultin_x87(NULL, NULL, rres, ilp, 0);
		}

		if (lres->from_ptr || (vr2 && vr2->from_ptr)) {
			r = vreg_faultin_protected(rres,
				NULL, NULL,
				lres->from_ptr? lres->from_ptr:
				vr2->from_ptr,
				ilp, 0);
		}

		if (is_struct) {
			/* XXXXXXXXXXXXXXXXXXXXXXXX need to faultin
			 * rres if from ptr?!?! */
			struct reg *r2;

			if (r) reg_set_unallocatable(r);
			r2 = vreg_faultin_ptr(rres, ilp);
			if (r) reg_set_allocatable(r);

			backend->invalidate_except(ilp,
				/*level==1*/1, INV_FOR_FCALL, r, r2,
				(struct reg *)NULL);
			if (r != NULL) {
				free_preg(r, ilp, 0, 1);
			}
			if (r2 != NULL) {
				free_preg(r2, ilp, 0, 1);
			}
			icode_make_copystruct(lres, rres, ilp);

			/*
			 * 08/02/07: That invlidate_except() is only used
			 * above because the registers shall not needlessly
			 * be saved! However, the free_preg()s below were
			 * missing the invalidate flag, which is still
			 * necessary because copystruct may call memcpy()
			 *
			 * 08/03/07: Hmm ... without SAVING them too, some
			 * pointers become unbacked. E.g. in
			 *
			 * foo->bar = foo->baz = foo->bam;
			 *
			 * ... where the members are all structures. Here
			 * one of the frees below made a pointer invalid.
			 * Presumably because the return value of the
			 * assignment is the left vreg, not a new one!
			 * XXX fix this!
			 *
			 * 08/03/07: Phew...Now the test structassbug.c
			 * works. Saving pointers below is bogus because
			 * they are already invalidated by the copystruct.
			 * So now we have the FIRST CASE EVER where we can
			 * save but not invalidate with free_preg()
			 * legally... That is done above before the
			 * copstruct. Maybe the invalidate_except() should
			 * be changed instead. Not sure whether what we
			 * have now is really correct
			 */
			if (r != NULL) {
				free_preg(r, ilp, 1, 0);
			}
			if (r2 != NULL) {
				free_preg(r2, ilp, 1, 0);
			}
		} else {
			if (lres->type->tbit != NULL) {
				/*
				 * 05/25/09: Unbelievable, this wasn't handling
				 * bitfields!
				 */
				struct reg *r2;

				if (r) reg_set_unallocatable(r);
				r2 = vreg_faultin_ptr(rres, ilp);
				if (r) reg_set_allocatable(r);

				write_back_bitfield_by_assignment(lres, rres, ilp);
				if (r != NULL) {
					free_preg(r, ilp, 1, 0);
				}
				if (r2 != NULL) {
					free_preg(r2, ilp, 1, 0);
				}
				ii = NULL;
			} else {

				lres->pregs[0] = rres->pregs[0];
				lres->pregs[1] = rres->pregs[1];
#if 0
				ii = icode_make_store_indir(rres, lres);
#endif
				icode_make_store(curfunc, rres, lres, ilp);
				if (r != NULL) {
					free_preg(r, ilp, 0, 0);
				}
				if (is_x87_trash(rres)) {
					lres->pregs[0] = NULL;
				}
				backend->invalidate_except(ilp,
					/*level==1*/1, INV_FOR_FCALL, rres->pregs[0],
					(struct reg *)NULL);
			}
		}
	}
	if (ii) append_icode_list(ilp, ii);

	if (lres->pregs[0] && lres->pregs[0] != rres->pregs[0]) {
		free_pregs_vreg(lres, ilp, 0, 0);
	}

	return rres;
}
2240
2241
2242 struct icode_instr *
compare_vreg_with_zero(struct vreg * vr,struct icode_list * ilp)2243 compare_vreg_with_zero(struct vreg *vr, struct icode_list *ilp) {
2244 struct token *ztok;
2245 struct vreg *zvr;
2246 struct icode_instr *ii;
2247
2248 if (IS_FLOATING(vr->type->code) && vr->type->tlist == NULL) {
2249 int is_x87 = 0;
2250 /*
2251 * On some or many or most architectures, we
2252 * cannot compare fp values with an immediate
2253 * zero. Therefore we explicitly construct an
2254 * fp zero token, load it into a register, and
2255 * compare with that
2256 */
2257 /* if (!backend->has_zero_cmp) { */
2258 ztok = fp_const_from_ascii("0.0", vr->type->code);
2259 zvr = vreg_alloc(NULL, ztok, NULL, NULL);
2260 vreg_faultin_x87(NULL, NULL, vr, ilp, 0);
2261
2262 /*
2263 * 07/08/03: Was missing a faultin for vr. This
2264 * broke when x87 support was rewritten I guess
2265 */
2266 reg_set_unallocatable(vr->pregs[0]);
2267 vreg_faultin_x87(NULL, NULL, zvr, ilp, 0);
2268 reg_set_unallocatable(vr->pregs[0]);
2269 if (is_x87_trash(vr)) {
2270 vreg_map_preg(vr, &x86_fprs[1]);
2271 is_x87 = 1;
2272 }
2273 ii = icode_make_cmp(vr, zvr);
2274 if (!is_x87) {
2275 free_pregs_vreg(zvr, ilp, 0, 0);
2276 }
2277 /* } */
2278 return ii;
2279 }
2280 return icode_make_cmp(vr, NULL);
2281 }
2282
/*
 * Emit a compare of vr against zero followed by a conditional branch of
 * branch_type to a label. If label0 is non-NULL it is used as the
 * branch target; otherwise a fresh label is created. The target label
 * is returned either way (the caller appends it where control should
 * resume).
 *
 * Multi-register objects (e.g. long long on 32-bit targets) need two
 * compares: the branch is taken only if both halves are zero, so the
 * first compare branches over the second one on inequality via the
 * local not_equal_zero label.
 */
static struct icode_instr *
branch_if_zero(
	struct vreg *vr,
	int branch_type,
	struct icode_instr *label0,
	struct icode_list *ilp) {

	struct icode_instr *ii;
	struct icode_instr *label;
	struct icode_instr *not_equal_zero;

	if (vr->is_multi_reg_obj) {
		not_equal_zero = icode_make_label(NULL);
	} else {
		not_equal_zero = NULL;
	}

	if (vr->is_multi_reg_obj && IS_FLOATING(vr->type->code)) {
		/* SPARC long double ?!?!??! */
		unimpl();
	}

	ii = compare_vreg_with_zero(vr, ilp);

	append_icode_list(ilp, ii);
	free_pregs_vreg(vr, ilp, 0, 0);

	if (label0 != NULL) {
		label = label0;
	} else {
		label = icode_make_label(NULL);
	}

	if (vr->is_multi_reg_obj) {
		/* First half nonzero - skip the second compare */
		ii = icode_make_branch(not_equal_zero,
			INSTR_BR_NEQUAL, vr);
	} else {
		ii = icode_make_branch(label, /*INSTR_BR_EQUAL*/branch_type, vr);
	}
	append_icode_list(ilp, ii);


	if (vr->is_multi_reg_obj) {
		/* Compare and branch on the second half */
		ii = icode_make_cmp(vr, NULL);
		append_icode_list(ilp, ii);
		ii = icode_make_branch(label, /*INSTR_BR_EQUAL*/branch_type, vr);
		append_icode_list(ilp, ii);
		append_icode_list(ilp, not_equal_zero);
	}
	return label;
}
2334
2335 /*
2336 * 04/12/08: XXXXXXXX We could use resval_not_used here to optimize the
2337 * result handling away if this is a top-level conditional operator
2338 */
2339 static struct vreg *
do_cond_op(struct expr * ex,struct type ** restype,struct vreg * lvalue,struct icode_list * ilp,int eval)2340 do_cond_op(struct expr *ex, struct type **restype,
2341 struct vreg *lvalue, /* for structs */
2342 struct icode_list *ilp, int eval) {
2343
2344 struct vreg *ret = NULL;
2345 struct vreg *lres;
2346 struct vreg *rres;
2347 struct reg *r = NULL;
2348 struct type *lt;
2349 struct type *rt;
2350 struct icode_instr *label = NULL;
2351 struct icode_instr *left_end_jump = NULL;
2352 struct icode_instr *end_label = NULL;
2353 struct icode_list *left_list = NULL;
2354 struct icode_list *right_list = NULL;
2355 int is_void;
2356 int is_struct = 0;
2357 int is_void_botched = 0;
2358
2359
2360 if (ex->right->op != TOK_OP_COND2) {
2361 errorfl(ex->right->tok,
2362 "Parse error - expected second part of conditional operator");
2363 return NULL;
2364 }
2365
2366 lres = expr_to_icode(ex->left, NULL, ilp, 0, 0, eval);
2367
2368 if (lres == NULL) {
2369 return NULL;
2370 }
2371
2372 if (!is_scalar_type(lres->type)) {
2373 errorfl(ex->left->tok,
2374 "First operand of conditional operator"
2375 " does not have scalar type");
2376 return NULL;
2377 }
2378
2379 if (eval) {
2380 left_list = alloc_icode_list();
2381 right_list = alloc_icode_list();
2382
2383 backend->invalidate_gprs(ilp, 1, 0);
2384 if (!is_x87_trash(lres)) {
2385 vreg_faultin(NULL, NULL, lres, ilp, 0);
2386 }
2387 label = branch_if_zero(lres, INSTR_BR_EQUAL, NULL, ilp);
2388 }
2389
2390 /*
2391 * Now comes the part for (cond) != 0 ...
2392 *
2393 * 08/18/07: As per GNU C, this part may be empty, in which case
2394 * it is replaced with the condition itself
2395 */
2396 if (ex->right->left != NULL) {
2397 lres = expr_to_icode(ex->right->left, NULL,
2398 left_list, 0, 0, eval);
2399 } else {
2400 ; /* lres already is value of condition */
2401 }
2402 if (lres == NULL) {
2403 return NULL;
2404 }
2405
2406 if (lres->type->code == TY_VOID
2407 && lres->type->tlist == NULL) {
2408 is_void = 1;
2409 } else {
2410 is_void = 0;
2411 if ((lres->type->code == TY_STRUCT
2412 || lres->type->code == TY_UNION)
2413 && lres->type->tlist == NULL) {
2414 is_struct = 1;
2415 if (eval) {
2416 if (lvalue != NULL) {
2417 /* Result is being assigned */
2418 icode_make_copystruct(lvalue, lres,
2419 left_list);
2420 } else {
2421 /*
2422 * Result is anonymous struct, e.g
2423 * in
2424 * (foo? bar: baz).xyz
2425 *
2426 * or
2427 *
2428 * func(foo? bar: baz)
2429 *
2430 * ... there is no lvalue to assign
2431 * to
2432 */
2433 ret = vreg_stack_alloc(lres->type, ilp, 1,
2434 NULL);
2435
2436 icode_make_copystruct(ret, lres,
2437 left_list);
2438 }
2439 }
2440 }
2441 }
2442
2443 if (!is_void && !is_struct) {
2444 pro_mote(&lres, left_list, eval);
2445 }
2446
2447 if (ret == NULL) {
2448 /* Not anonymous struct - vreg must still be allocated */
2449 ret = vreg_alloc(NULL, NULL, NULL, lres->type);
2450 }
2451
2452 if (eval) {
2453 end_label = icode_make_label(NULL);
2454 left_end_jump = icode_make_jump(end_label);
2455
2456 backend->invalidate_gprs(left_list, 1, 0); /* saves lres too */
2457 }
2458
2459 /* Now comes the part for (cond) == 0 ... */
2460 rres = expr_to_icode(ex->right->right, NULL,
2461 right_list, 0, 0, eval);
2462
2463 if (rres == NULL) {
2464 return NULL;
2465 }
2466
2467
2468 if (!is_void && !is_struct) {
2469 pro_mote(&rres, right_list, eval);
2470 } else if (is_struct) {
2471 if (eval) {
2472 if (lvalue != NULL) {
2473 /* Result is assigned */
2474 icode_make_copystruct(lvalue, rres, right_list);
2475 } else {
2476 /* Anonymous struct */
2477 icode_make_copystruct(ret, rres, right_list);
2478 }
2479 }
2480 }
2481
2482 /*
2483 * 07/10/09: We have to invalidate all registers used by the right
2484 * expression as well, because the left expression may be converted
2485 * (as per usual arithmetic conversion) in a convert_operands() call
2486 * below. That can mix up the two ``worlds'' of left and right side,
2487 * and may trash registers (i.e. we may end up freeing registers
2488 * that were only used in the right expression in the left icode
2489 * list)
2490 */
2491 if (eval) {
2492 backend->invalidate_gprs(right_list, 1, 0);
2493 }
2494
2495 /* Now it is FINALLY possible to determine the result type! */
2496 lt = lres->type;
2497 rt = rres->type;
2498 if (lt->tlist == NULL && rt->tlist == NULL) {
2499 if (lt->code == TY_STRUCT
2500 || lt->code == TY_UNION
2501 || rt->code == TY_STRUCT
2502 || rt->code == TY_UNION) {
2503 if (rt->code != lt->code
2504 || rt->tstruc != lt->tstruc) {
2505 errorfl(ex->tok,
2506 "Result of conditional operator has "
2507 "variable type");
2508 return NULL;
2509 }
2510 } else if (rt->code != lt->code) {
2511 if (rt->code == TY_VOID || lt->code == TY_VOID) {
2512 /*
2513 * 02/28/09: Allow void on one side because gcc does so too,
2514 * and some programs rely on it (PostgreSQL). gcc only warns
2515 * (but doesn't error) about it with -ansi
2516 */
2517 warningfl(ex->tok, "ISO C does not allow one "
2518 "conditional operator operand being "
2519 "of type `void' when the other one isn't");
2520 is_void = is_void_botched = 1;
2521 if (rt->code != TY_VOID) {
2522 rt = make_basic_type(TY_VOID);
2523 } else {
2524 lt = make_basic_type(TY_VOID);
2525 }
2526 } else if (!is_arithmetic_type(rt)
2527 || !is_arithmetic_type(lt)) {
2528 errorfl(ex->tok,
2529 "Result of conditional operator has "
2530 "variable type");
2531 return NULL;
2532 } else {
2533 if (convert_operands(&lres, &rres,
2534 left_list, right_list, TOK_OP_COND,
2535 ex->tok, eval) != 0) {
2536 return NULL;
2537 }
2538 }
2539 }
2540 } else {
2541 int bad = 0;
2542
2543 if (lt->tlist == NULL || rt->tlist == NULL) {
2544 /* Either one must be a null pointer constant */
2545 if (lt->tlist == NULL
2546 && !lres->is_nullptr_const) {
2547 bad = 1;
2548 } else if (rt->tlist == NULL
2549 && !rres->is_nullptr_const) {
2550 bad = 1;
2551 } else {
2552 if (lt->tlist == NULL) {
2553 if (eval) {
2554 lres = backend->
2555 icode_make_cast(lres,rt,
2556 left_list);
2557 } else {
2558 vreg_set_new_type(lres, rt);
2559 }
2560 lt = lres->type;
2561 } else {
2562 if (eval) {
2563 rres = backend->
2564 icode_make_cast(rres,lt,
2565 right_list);
2566 } else {
2567 vreg_set_new_type(rres, lt);
2568 }
2569 rt = rres->type;
2570 }
2571 }
2572 } else {
2573 if (rres->is_nullptr_const) {
2574 if (eval) {
2575 rres = backend->
2576 icode_make_cast(rres, lt,
2577 right_list);
2578 } else {
2579 vreg_set_new_type(rres, lt);
2580 }
2581 rt = rres->type;
2582 } else if (lres->is_nullptr_const) {
2583 if (eval) {
2584 lres = backend->
2585 icode_make_cast(lres, rt,
2586 left_list);
2587 } else {
2588 vreg_set_new_type(lres, rt);
2589 }
2590 lt = lres->type;
2591 } else {
2592 /* Both are pointers */
2593 if (compare_types(lres->type, rres->type,
2594 CMPTY_ALL|
2595 CMPTY_ARRAYPTR) != 0) {
2596 bad = 1;
2597 }
2598 }
2599 }
2600 if (bad) {
2601 errorfl(ex->tok,
2602 "Result of conditional operator has "
2603 "variable type");
2604 return NULL;
2605 }
2606 }
2607 *restype = lres->type;
2608
2609 if (!is_struct && !is_void) {
2610 ret->type = lres->type;
2611 ret->size = lres->size;
2612 ret->is_multi_reg_obj = lres->is_multi_reg_obj;
2613
2614 if (eval) {
2615 vreg_faultin_x87(NULL, NULL, lres, left_list, 0); /* !!! */
2616 r = lres->pregs[0];
2617 vreg_map_preg(ret, r);
2618 if (lres->is_multi_reg_obj) {
2619 struct reg *r2;
2620
2621 r2 = lres->pregs[1];
2622 vreg_map_preg2(ret, r2);
2623 }
2624 }
2625 } else {
2626 r = NULL;
2627 ret->pregs[0] = NULL;
2628 }
2629
2630 if (eval) {
2631 append_icode_list(left_list, left_end_jump); /* XXX */
2632 append_icode_list(left_list, label);
2633
2634 /*
2635 * As the type is known now, the code lists can be merged and
2636 * the unified ilp is used for finishing the processing
2637 */
2638 merge_icode_lists(left_list, right_list);
2639 merge_icode_lists(ilp, left_list);
2640 }
2641
2642 if (!is_void_botched) {
2643 if (compare_types(lres->type, rres->type, CMPTY_ALL|
2644 CMPTY_ARRAYPTR) != 0) {
2645 errorfl(ex->tok,
2646 "Result of conditional operator has variable type");
2647 return NULL;
2648 }
2649 }
2650 if (!is_void && !is_struct && eval) {
2651 if (!is_x87_trash(rres)) {
2652 vreg_faultin(NULL, NULL, rres, ilp, 0);
2653 if (rres->pregs[0] != ret->pregs[0]) {
2654 icode_make_copyreg(ret->pregs[0], rres->pregs[0],
2655 lres->type, lres->type, ilp);
2656 free_pregs_vreg(rres, ilp, 0, 0);
2657 }
2658 } else {
2659 vreg_faultin_x87(ret->pregs[0], NULL, rres, ilp, 0);
2660 }
2661 if (rres->is_multi_reg_obj) {
2662 if (rres->pregs[1] != ret->pregs[1]) {
2663 icode_make_copyreg(ret->pregs[1], rres->pregs[1],
2664 lres->type, lres->type, ilp);
2665 }
2666 }
2667 }
2668
2669 if (eval) {
2670 append_icode_list(ilp, end_label);
2671 backend->invalidate_except(ilp, 1,
2672 0, r, (struct reg *)NULL);
2673
2674 if (r != NULL) {
2675 vreg_map_preg(ret, r);
2676 }
2677 }
2678
2679 if (is_struct) {
2680 ret->struct_ret = 1;
2681 } else if (is_x87_trash(ret)) {
2682 /*
2683 * Don't keep stuff in x87 registers, ever!!!
2684 */
2685 free_preg(ret->pregs[0], ilp, 1, 1);
2686 }
2687 return ret;
2688 }
2689
2690
2691
2692 static int
2693 do_cond(
2694 struct expr *cond,
2695 struct icode_list *il,
2696 struct control *ctrl,
2697 struct vreg *have_cmp);
2698
2699 struct vreg *
expr_to_icode(struct expr * ex,struct vreg * lvalue,struct icode_list * ilp,int purpose,int resval_not_used,int eval)2700 expr_to_icode(
2701 struct expr *ex,
2702 struct vreg *lvalue,
2703 struct icode_list *ilp,
2704 int purpose,
2705 int resval_not_used,
2706 int eval) {
2707
2708 struct icode_instr *ii = NULL;
2709 struct icode_instr *label;
2710 struct icode_instr *label2;
2711 struct vreg *lres = NULL;
2712 struct vreg *rres = NULL;
2713 struct vreg *ret = NULL;
2714 struct type *restype = NULL;
2715 struct type *ltold;
2716 struct type *rtold;
2717 static int level;
2718 struct reg *r;
2719 int tmpop;
2720 struct vreg *temp_rres = NULL;
2721 struct vreg *temp_lres = NULL;
2722 int changed_vrs = 0;
2723
2724 if (level++ == 0 && eval) {
2725 /* Initialize allocator */
2726 /*
2727 * XXX July 2007: This SUCKS! I wasn't aware it's still
2728 * here, but it broke inline asm because registers were
2729 * not saved but just marked unused. Saving them also
2730 * caused problems with multi-gpr long longs converted
2731 * to floating point variables... Maybe we should keep
2732 * this for some time to debug ``register leak''
2733 * problems (i.e. assume something is wrong if the call
2734 * below ever does save anything to the stack), and
2735 * then get rid of it
2736 */
2737 #if FEAT_DEBUG_DUMP_BOGUS_STORES
2738 backend_warn_inv = 1;
2739 #endif
2740 backend->invalidate_gprs(ilp, /*0*/ 1, 0);
2741 #if FEAT_DEBUG_DUMP_BOGUS_STORES
2742 backend_warn_inv = 0;
2743 #endif
2744 }
2745
2746 if (ex->op != 0) {
2747 struct operator *operator;
2748
2749 operator = &operators[LOOKUP_OP2(ex->op)];
2750
2751 if (ex->op != TOK_OP_LAND
2752 && ex->op != TOK_OP_LOR
2753 && !IS_ASSIGN_OP(ex->op)
2754 && ex->op != TOK_OP_COND) {
2755
2756 /* 06/14/09: Don't pass through ``purpose'' */
2757 lres = expr_to_icode(ex->left, NULL, ilp,
2758 /*purpose*/0, 0, eval);
2759 #if 0
2760 hmm this is ABSOLUE nonsense!?!?
2761 if (is_x87_trash(lres)) {
2762 free_pregs_vreg(lres, ilp, 1, 1);
2763 }
2764 #endif
2765
2766 /* 06/14/09: Don't pass through ``purpose'' */
2767 rres = expr_to_icode(ex->right, NULL,
2768 ilp, /*purpose*/0, 0, eval);
2769 if (is_x87_trash(rres)) {
2770 free_pregs_vreg(rres, ilp, 1, 1);
2771 }
2772 if (lres == NULL || rres == NULL) {
2773 if (ilp) {
2774 /* XXX free */
2775 }
2776 --level;
2777 return NULL;
2778 }
2779
2780 /* A promotion may be in order */
2781 if (ex->op != TOK_OP_COMMA) {
2782 /*
2783 * Check whether we have a struct or union -
2784 * those cannot be promoted, or decay into
2785 * pointers
2786 */
2787 ltold = lres->type;
2788 rtold = rres->type;
2789
2790 if (((ltold->code == TY_STRUCT
2791 || ltold->code == TY_UNION)
2792 && ltold->tlist == NULL)
2793 ||
2794 ((rtold->code == TY_STRUCT
2795 || rtold->code == TY_UNION)
2796 && rtold->tlist == NULL)) {
2797 errorfl(ex->tok,
2798 "Operator `%s' does not work "
2799 "with union/struct types!",
2800 ex->tok->ascii);
2801 return NULL;
2802 }
2803
2804 if ((restype = promote(&lres, &rres,
2805 ex->op, ex->tok, ilp, eval)) == NULL) {
2806 --level;
2807 return NULL;
2808 }
2809
2810 debug_print_conv(ltold, rtold, ex->op,restype);
2811 } else {
2812 restype = rres->type;
2813 }
2814 }
2815
2816 switch (ex->op) {
2817 case TOK_OP_COMMA:
2818 /* Comma operator */
2819
2820 if (lres->pregs[0]) {
2821 free_pregs_vreg(lres, ilp, 0, 0);
2822 }
2823 ret = rres; /* XXX */
2824 break;
2825 case TOK_OP_ASSIGN:
2826 case TOK_OP_COPLUS:
2827 case TOK_OP_COMINUS:
2828 case TOK_OP_CODIVIDE:
2829 case TOK_OP_COMULTI:
2830 case TOK_OP_COMOD:
2831 case TOK_OP_COBAND:
2832 case TOK_OP_COBOR:
2833 case TOK_OP_COBXOR:
2834 case TOK_OP_COBSHL:
2835 case TOK_OP_COBSHR:
2836 /* Assignment & compound assignment operators */
2837
2838 if (ex->left->op != 0) {
2839 errorfl(ex->left->tok,
2840 "Bad lvalue in assignment");
2841 --level;
2842 return NULL;
2843 }
2844
2845 /*
2846 * 08/11/08: Pass intent to assign, so that bitfield
2847 * lvalues are not promoted
2848 */
2849 if ((lres = expr_to_icode(ex->left, NULL, ilp, TOK_OP_ASSIGN, 0, eval))
2850 == NULL) {
2851 --level;
2852 return NULL;
2853 }
2854 if (!ex->left->data->is_lvalue) {
2855 errorfl(ex->tok,
2856 "Left operand in assignment is not an lvalue");
2857 --level;
2858 return NULL;
2859 }
2860 lres = ex->left->data->res;
2861
2862 /* 06/14/09: Don't pass through ``purpose'' */
2863 rres = expr_to_icode(ex->right, lres, ilp,
2864 /*purpose*/0, 0, eval);
2865 if (rres == NULL) {
2866 --level;
2867 return NULL;
2868 }
2869
2870
2871 if (rres->struct_ret) {
2872 /*
2873 * Was returned by function call or
2874 * conditional operator - has already
2875 * been assigned
2876 */
2877 rres->struct_ret = 0;
2878
2879 /*
2880 * 08/02/07: This wronly returned rres
2881 * instead of lres as result!!! Thus when
2882 * the left side indirects through a pointer,
2883 * any vreg_faultins() working on the right
2884 * result will not load the pointer, and
2885 * a stale pointer may be used!!!
2886 *
2887 * foo = bar[0] = baz;
2888 *
2889 * ... if we wrongly do foo = baz, &bar[0]
2890 * may not be loaded correctly. Bombed in
2891 * GNU tar code
2892 */
2893 ret = lres;
2894 } else {
2895 int savedop = ex->op;
2896
2897 tmpop = ex->op;
2898
2899 if (!can_transform_to_bitwise(&lres, &rres,
2900 &tmpop, ilp)) {
2901 /* 07/03/08: Eval */
2902 if (rres->from_const && eval) {
2903 vreg_anonymify(&rres, NULL,
2904 NULL, ilp);
2905 }
2906 }
2907 ex->op = tmpop;
2908
2909 if ((ret = do_assign(lres, rres, ex, ilp,
2910 level, purpose, eval)) == NULL) {
2911 --level;
2912 return NULL;
2913 }
2914 ex->op = savedop;
2915 }
2916 restype = ret->type;
2917 break;
2918 case TOK_OP_LAND:
2919 case TOK_OP_LOR:
2920 /* Short circuit operators */
2921
2922 /*
2923 * foo && bar
2924 * generates instruction lists for foo and bar,
2925 * creates a label at the end of the bar list
2926 * and connects both lists through a conditional
2927 * jump to that label
2928 *
2929 * 06/14/09: Don't pass through the ``purpose''!
2930 * Otherwise e.g.
2931 *
2932 * (s->bitfield && s->bitfield2)
2933 *
2934 * breaks because with purpose beging TOK_PAREN_OPEN
2935 * we assume this is a parenthesized sub-expression
2936 * which cannot be decoded yet
2937 */
2938 lres = expr_to_icode(ex->left, NULL, ilp,
2939 /*purpose*/0, 0, eval);
2940 if (lres == NULL) {
2941 --level;
2942 return NULL;
2943 }
2944
2945 if (eval) {
2946 /*
2947 * 03/03/09: Invalidate items before executing
2948 * the second part! This is needed in cases like
2949 * this:
2950 *
2951 * - Assume an expression like
2952 * printf("%d\n", var && func());
2953 *
2954 * - This will first load the format string
2955 * - Then it will evaluate var
2956 * - Then it may or may not evaluate func()
2957 * since && short-circuits if the first
2958 * operand is 0
2959 *
2960 * What may happen, then, is that the second
2961 * part of the operator - the part that may or
2962 * may not be executed - causes a register
2963 * invalidation. This would then save the format
2964 * string which was loaded prior to evaluating
2965 * the && expression. That causes the format
2966 * string vreg to be associated with a stack
2967 * save location ___WHICH MAY NOT ACTUALLY HOLD
2968 * THE VALUE___ because the code that saves it
2969 * is conditional.
2970 *
2971 * SO we perform a general GPR invalidation
2972 * prior to doing the conditional jump to ensure
2973 * that external items (to this expression) can
2974 * only be associated with real save locations
2975 *
2976 * XXX Don't invalidate lres
2977 */
2978 backend->invalidate_gprs(ilp, 1, 0);
2979
2980 label2 = icode_make_label(NULL);
2981 if (!is_x87_trash(lres) && eval) {
2982 vreg_faultin(NULL, NULL, lres, ilp, 0);
2983 }
2984 if (ex->op == TOK_OP_LAND) {
2985 label = branch_if_zero(lres, INSTR_BR_EQUAL,
2986 NULL, ilp);
2987 } else {
2988 label = branch_if_zero(lres, INSTR_BR_NEQUAL,
2989 NULL, ilp);
2990 }
2991 free_pregs_vreg(lres, ilp, 0, 0);
2992 }
2993
2994 /*
2995 * 06/14/09: Don't pass through the ``purpose''!
2996 * Otherwise e.g.
2997 *
2998 * (s->bitfield && s->bitfield2)
2999 *
3000 * breaks because with purpose beging TOK_PAREN_OPEN
3001 * we assume this is a parenthesized sub-expression
3002 * which cannot be decoded yet
3003 */
3004 rres = expr_to_icode(ex->right, NULL, ilp,
3005 /*purpose*/ 0, 0, eval);
3006 if (rres == NULL) {
3007 --level;
3008 return NULL;
3009 }
3010
3011
3012 /* The result of these operators has type int */
3013 ret = vreg_alloc(NULL, NULL, NULL, NULL);
3014 ret->type = make_basic_type(TY_INT);
3015 ret->size = backend->get_sizeof_type(ret->type, NULL);
3016 if (eval) {
3017 r = ALLOC_GPR(curfunc, ret->size, ilp, NULL);
3018 vreg_map_preg(ret, r);
3019 reg_set_unallocatable(r);
3020
3021 if (!is_x87_trash(rres)) {
3022 vreg_faultin(NULL, NULL, rres, ilp, 0);
3023 }
3024
3025 reg_set_allocatable(r);
3026 if (ex->op == TOK_OP_LAND) {
3027 branch_if_zero(rres, INSTR_BR_EQUAL,
3028 label, ilp);
3029 ii = icode_make_setreg(r, 1);
3030 } else {
3031 branch_if_zero(rres, INSTR_BR_NEQUAL,
3032 label, ilp);
3033 ii = icode_make_setreg(r, 0);
3034 }
3035 append_icode_list(ilp, ii);
3036 ii = icode_make_jump(label2);
3037 append_icode_list(ilp, ii);
3038
3039 append_icode_list(ilp, label);
3040 if (ex->op == TOK_OP_LAND) {
3041 ii = icode_make_setreg(r, 0);
3042 } else {
3043 ii = icode_make_setreg(r, 1);
3044 }
3045 append_icode_list(ilp, ii);
3046 append_icode_list(ilp, label2);
3047 backend->invalidate_except(ilp, /*level == 1*/1, 0, r,
3048 (struct reg *)NULL);
3049 vreg_map_preg(ret, r);
3050 }
3051 break;
3052 case TOK_OP_MINUS:
3053 case TOK_OP_PLUS:
3054 case TOK_OP_MULTI:
3055 case TOK_OP_DIVIDE:
3056 case TOK_OP_MOD:
3057 case TOK_OP_BSHL:
3058 case TOK_OP_BSHR:
3059 case TOK_OP_BAND:
3060 case TOK_OP_BXOR:
3061 case TOK_OP_BOR:
3062 /* Arithmetic and bitwise operators */
3063
3064 tmpop = ex->op;
3065 /* WARNING: Order of faultins below matters */
3066
3067 /* 07/03/08: Eval */
3068 if (eval) {
3069 if (is_x87_trash(lres)) {
3070 ;
3071 } else if (can_transform_to_bitwise(&lres, &rres,
3072 &tmpop, ilp)) {
3073 /*
3074 * 04/13/08: Transform operations if possible.
3075 * Note that faultins are already handled by
3076 * the transformation function
3077 */
3078 operator = &operators[LOOKUP_OP2(tmpop)];
3079 backend->icode_prepare_op(&lres, &rres,
3080 tmpop, ilp);
3081 } else {
3082 if (rres->from_const) {
3083 vreg_anonymify(&rres, NULL,
3084 NULL, ilp);
3085 } else if (lres->from_const) {
3086 vreg_anonymify(&lres, NULL,
3087 NULL, ilp);
3088 }
3089
3090 vreg_faultin(NULL, NULL, lres, ilp, tmpop);
3091 vreg_faultin_protected(lres, NULL, NULL,
3092 rres, ilp, 0);
3093
3094 backend->icode_prepare_op(&lres, &rres,
3095 tmpop, ilp);
3096 }
3097 }
3098
3099 changed_vrs = emul_conv_ldouble_to_double(&temp_lres, &temp_rres,
3100 lres, rres, ilp, eval);
3101
3102 if (tmpop == TOK_OP_PLUS
3103 || tmpop == TOK_OP_MINUS) {
3104 do_add_sub(&temp_lres, &temp_rres, tmpop, ex->tok, ilp,
3105 eval);
3106 ii = NULL;
3107 } else if (tmpop == TOK_OP_MULTI
3108 || tmpop == TOK_OP_DIVIDE
3109 || tmpop == TOK_OP_MOD) {
3110 if (do_mul(&temp_lres, temp_rres, operator,
3111 ex->tok, ilp, eval)) {
3112 --level;
3113 return NULL;
3114 }
3115
3116 ii = NULL;
3117 } else if (tmpop == TOK_OP_BSHL
3118 || tmpop == TOK_OP_BSHR
3119 || tmpop == TOK_OP_BAND
3120 || tmpop == TOK_OP_BOR
3121 || tmpop == TOK_OP_BXOR) {
3122 if (do_bitwise(&temp_lres, temp_rres, operator, ex->tok,
3123 ilp, eval)) {
3124 --level;
3125 return NULL;
3126 }
3127 ii = NULL;
3128 }
3129
3130 /* 07/03/08: Eval */
3131 if (eval) {
3132 if (ii != NULL) {
3133 append_icode_list(ilp, ii);
3134 }
3135
3136 if (changed_vrs) {
3137 /*
3138 * 11/24/08: Convert long double value back
3139 * to original type (it was emulated using
3140 * double0
3141 */
3142 lres = backend->icode_make_cast(temp_lres,
3143 make_basic_type(TY_LDOUBLE),
3144 ilp);
3145 rres = temp_rres;
3146 } else {
3147 lres = temp_lres;
3148 rres = temp_rres;
3149 }
3150
3151 if (rres->pregs[0]) {
3152 /*
3153 * XXX free_pregs_vreg() causes unbacked
3154 * register problems with cpu_mips.c and
3155 * I do not understand why. The problem
3156 * is probably elsewhere
3157 */
3158 free_pregs_vreg(rres, ilp, 1, 0);
3159 }
3160 } else {
3161 lres = temp_lres;
3162 rres = temp_rres;
3163 }
3164
3165 ret = lres;
3166
3167 /* 07/03/08: Eval */
3168 if (eval) {
3169 if (is_x87_trash(ret)) {
3170 ;
3171 } else {
3172 vreg_faultin(NULL, NULL, ret, ilp, 0);
3173 vreg_map_preg(ret, ret->pregs[0]);
3174 if (lres->pregs[1]) {
3175 vreg_map_preg2(ret, ret->pregs[1]);
3176 }
3177 }
3178 }
3179 restype = ret->type;
3180 break;
3181 case TOK_OP_COND:
3182 /* Conditional operator */
3183 ret = do_cond_op(ex, &restype, lvalue, ilp, eval);
3184 if (ret == NULL) {
3185 --level;
3186 return NULL;
3187 }
3188 break;
3189 case TOK_OP_LEQU:
3190 case TOK_OP_LNEQU:
3191 case TOK_OP_GREAT:
3192 case TOK_OP_SMALL:
3193 case TOK_OP_GREATEQ:
3194 case TOK_OP_SMALLEQ:
3195 /* Equality and relational operators */
3196 r = NULL;
3197 ret = vreg_alloc(NULL,NULL,NULL,NULL);
3198 ret->type = make_basic_type(TY_INT);
3199 ret->size = backend->get_sizeof_type(ret->type, NULL);
3200
3201 /* 07/03/08: Eval */
3202 if (eval) {
3203 if (purpose != TOK_KEY_IF || level != 1) {
3204 /*
3205 * Need to allocate gpr so it isn't wiped out
3206 * by faultins below
3207 */
3208 r = ALLOC_GPR(curfunc, ret->size, ilp, NULL);
3209 reg_set_unallocatable(r);
3210 }
3211 if (is_x87_trash(lres)) {
3212 vreg_faultin_x87(NULL, NULL, lres, ilp, 0);
3213 vreg_map_preg(lres, &x86_fprs[1]);
3214 vreg_faultin_x87(NULL, NULL, rres, ilp, 0);
3215 } else {
3216 vreg_faultin(NULL, NULL, lres, ilp, 0);
3217 vreg_faultin_protected(lres, NULL, NULL, rres, ilp, 0);
3218 }
3219
3220
3221 /*
3222 * 12/29/08: On PPC, where long double is
3223 * emulated using double, we want to compare
3224 * as double - so convert both operands to
3225 * it
3226 *
3227 * 07/15/09: This applies to MIPS as well now
3228 */
3229 if (backend->emulate_long_double
3230 && lres->type->code == TY_LDOUBLE
3231 && lres->type->tlist == NULL) {
3232 lres = backend->icode_make_cast(lres,
3233 make_basic_type(TY_DOUBLE), ilp);
3234 rres = backend->icode_make_cast(rres,
3235 make_basic_type(TY_DOUBLE), ilp);
3236 }
3237 }
3238
3239
3240 if (r != NULL && eval) {
3241 reg_set_allocatable(r);
3242 }
3243
3244 /* 07/03/08: Eval */
3245 if (eval) {
3246 if (purpose == TOK_KEY_IF && level == 1) {
3247 /*
3248 * We want to generate the expected cmp + je
3249 * for ``if (stuff == stuff)'' so the caller
3250 * has to check for this logical operator and
3251 * generate the branch himself
3252 */
3253 ii = icode_make_cmp(lres, rres);
3254 append_icode_list(ilp, ii);
3255 free_pregs_vreg(rres, ilp, 0, 0);
3256 } else {
3257 static struct control dummy;
3258 /*
3259 * Generate kludgy
3260 * cmp dest, src
3261 * mov res, 0
3262 * jXX label
3263 * mov res, 1
3264 * label:
3265 * ... for expressions where the
3266 * result of the operator is used by
3267 * subsequently applied operators
3268 * XXX Again, conditional mov would be
3269 * much better ...
3270 */
3271 vreg_map_preg(ret, r);
3272
3273 label = icode_make_label(NULL);
3274
3275 /*
3276 * 06/29/08: This code path still had manual
3277 * multi-register handling, and wasn't corrected
3278 * during all of those long long bug fixes for
3279 * the other case. Therefore, we call do_cond()
3280 * here now as well
3281 */
3282 dummy.type = TOK_KEY_IF;
3283 dummy.endlabel = label;
3284
3285 ii = icode_make_setreg(r, 0);
3286 append_icode_list(ilp, ii);
3287 ii = icode_make_cmp(lres, rres);
3288 append_icode_list(ilp, ii);
3289 do_cond(ex, ilp, &dummy, ret);
3290 ii = icode_make_setreg(r, 1);
3291 append_icode_list(ilp, ii);
3292 append_icode_list(ilp, label);
3293
3294 free_pregs_vreg(lres, ilp, 0, 0);
3295 free_pregs_vreg(rres, ilp, 0, 0);
3296 }
3297 }
3298 restype = NULL;
3299 break;
3300 }
3301 } else if (ex->data != NULL) {
3302 int standalone_subexpr;
3303
3304 /*
3305 * 04/12/08: Tell s_expr_to_icode() whether this is a
3306 * ``standalone'' expression whose value is not used.
3307 * That allows us to optimize ``i--;'' to ``--i;''
3308 */
3309 if (resval_not_used && level == 1) {
3310 standalone_subexpr = 1;
3311 } else {
3312 standalone_subexpr = 0;
3313 }
3314
3315 ex->data->code = alloc_icode_list();
3316 ret = s_expr_to_icode(ex->data, lvalue,
3317 ex->data->code, standalone_subexpr, eval);
3318 if (eval) {
3319 if ((ilp->res = ret) == NULL) {
3320 --level;
3321 return NULL;
3322 }
3323 }
3324 if (eval) merge_icode_lists(ilp, ex->data->code);
3325 if (ret != NULL) {
3326 /*
3327 * 07/03/08: This restype assignment was missing, so
3328 * the array/function decay below never happened. But
3329 * now we sometimes have to do it even for something
3330 * that looks like a sub-expression
3331 */
3332 if (ex->data->flags & SEXPR_FROM_CONST_EXPR) {
3333 /* restype = ret->type;*/
3334 }
3335 }
3336 } else if (ex->stmt_as_expr != NULL) {
3337 /*
3338 * GNU statement-as-expression
3339 */
3340 struct icode_list *tmp;
3341
3342 ++doing_stmtexpr;
3343 if (!eval) {
3344 /*
3345 * 07/22/08: Implemented this case
3346 */
3347 struct statement *s;
3348 struct scope *sc;
3349
3350 /*
3351 * stmt_as_expr is a scope containg the compound
3352 * statement as body
3353 */
3354 s = ex->stmt_as_expr->code;
3355 sc = s->data;
3356
3357 /*
3358 * Iterate statement list
3359 *
3360 * XXX we should check the components for correctness
3361 * here!
3362 */
3363 for (s = sc->code; s->next != NULL; s = s->next) {
3364 ;
3365 }
3366 if (s->type == ST_CODE) {
3367 /* Expression statement - return type */
3368 return expr_to_icode(s->data, NULL, NULL, 0, 0, eval);
3369 } else {
3370 /*
3371 * The last part of this statement-as-expression
3372 * is something other than an expression, e.g. a
3373 * declaration
3374 */
3375 return NULL;
3376 }
3377 } else {
3378 /*
3379 * 07/25/09: We need to invalidate and save GPRs!
3380 * Otherwise things like
3381 *
3382 * ptr->m = ({ .... });
3383 *
3384 * ... may trash the loaded pointer value before
3385 * writing through it. It's not quite clear yet
3386 * why this happens, but may have something to
3387 * do with the statement getting compiled as
3388 * ``top level'' statement with no regard for
3389 * the surrounding expression
3390 */
3391 backend->invalidate_gprs(ilp, 1, 0); /* save */
3392 tmp = xlate_to_icode(ex->stmt_as_expr->code, 0);
3393 if (tmp == NULL) {
3394 --level;
3395 return NULL;
3396 }
3397 merge_icode_lists(ilp, tmp);
3398 ret = ilp->res;
3399 }
3400 if (ret == NULL) {
3401 /*
3402 * Last statement in compound statement wasn't an
3403 * expression; treat like void
3404 */
3405 ret = vreg_alloc(NULL, NULL, NULL, NULL);
3406 ret->type = make_basic_type(TY_VOID);
3407 }
3408 --doing_stmtexpr;
3409 } else {
3410 puts("BUG: Empty expression passed to expr_to_icode() :(");
3411 abort();
3412 }
3413 if (eval && ret) {
3414 ilp->res = ret;
3415 }
3416 if (--level == 0 && eval) {
3417 if (ilp->res->pregs[0] != NULL
3418 && ilp->res->pregs[0]->vreg == ilp->res) {
3419 vreg_map_preg(ilp->res, ilp->res->pregs[0]);
3420 }
3421 }
3422
3423 #if 0
3424 } else if (ex->data != NULL) {
3425 #endif
3426 if (restype != NULL) {
3427 size_t newsize;
3428
3429 /* XXX is check for only ``void'' sufficient!??! */
3430 if (restype->code != TY_VOID
3431 || restype->tlist != NULL) {
3432 if (IS_VLA(restype->flags) && is_immediate_vla_type(restype)) {
3433 newsize = 0;
3434 } else {
3435 newsize = backend->get_sizeof_type(restype, NULL);
3436 }
3437 } else {
3438 newsize = 0;
3439 }
3440
3441 ex->type = restype;
3442
3443 /*
3444 * 07/03/08: Do this for some sub-expressions as well
3445 */
3446 if (ilp && ilp->res) {
3447 if (newsize != 0
3448 && ((ilp->res->type->code != TY_STRUCT
3449 && ilp->res->type->code != TY_UNION)
3450 || ilp->res->type->tlist != NULL)) {
3451 if (is_x87_trash(ilp->res)) {
3452 #if 0
3453 ilp->res = vreg_disconnect(ilp->res);
3454 #endif
3455 ilp->res = x87_anonymify(ilp->res, ilp);
3456 } else {
3457 /*
3458 * 04/12/08: Only anonymify if we
3459 * don't have a constant
3460 */
3461 if (ilp->res->from_const == NULL
3462 || ilp->res->from_const->type
3463 == TOK_STRING_LITERAL
3464 || Oflag == -1) {
3465 vreg_anonymify(&ilp->res, NULL, NULL,
3466 ilp);
3467 }
3468 }
3469 }
3470 ilp->res->type = restype;
3471 ilp->res->size = newsize;
3472 } else if (eval) {
3473 puts("BUG!!!");
3474 printf("doing operator %d!\n", ex->op);
3475 abort();
3476 }
3477 }
3478
3479
3480 /*
3481 * 01/13/08: Perform array/function decay. Now we don't check ``eval''
3482 * anymore, because it is nonsense, since the decay also happens in
3483 * unevaluated expressions, such as
3484 *
3485 * char buf[128]; foo? buf: buf;
3486 *
3487 * purpose = TOK_PAREN_OPEN means this call is the top-level call for
3488 * a parenthesized sub-expressions, e.g. sizeof(buf)
3489 *
3490 * 07/03/08: Extended this for sub-expressions which came from
3491 * partially constant larger expressions, currently just to make
3492 *
3493 * sizeof (0? buf: buf) return sizeof(char[128]) and
3494 * sizeof (buf) return sizeof(char *)
3495 *
3496 * 05/13/09: We have to do this for the comma operator too!
3497 * XXX It's botched in various ways, ``ret = rres;'' seems very
3498 * nonsense (and gave us these array/function decay problems if
3499 * purpose = TOK_PAREN_OPEN... Not sure why ``(foo, bar)'' was
3500 * skipped with TOK_PAREN_OPEN
3501 *
3502 * 06/14/09: This wasn't decoding parenthesized bitfields, so
3503 * added check for tbit != NULL.
3504 *
3505 * printf("%d\n", (sp->bf && sp->bf2)); <--- failed
3506 */
3507 if ( (purpose != TOK_PAREN_OPEN
3508 /*|| ret->type->tbit != NULL*/)
3509 || ex->op == TOK_OP_COMMA
3510 || (ex->data != NULL && ex->data->flags & SEXPR_FROM_CONST_EXPR)) {
3511
3512 if (ret->type->tlist != NULL
3513 && (ret->type->tlist->type == TN_ARRAY_OF
3514 || ret->type->tlist->type == TN_FUNCTION
3515 || ret->type->tlist->type == TN_VARARRAY_OF)) {
3516 struct type *ty;
3517 int is_array;
3518
3519 if (ret->type->tlist->type == TN_ARRAY_OF
3520 || ret->type->tlist->type == TN_VARARRAY_OF) {
3521 is_array = 1;
3522 } else {
3523 is_array = 0;
3524 }
3525
3526 /*
3527 * Arrays and functions (01/13/08) decay into
3528 * pointers
3529 */
3530 if (eval) {
3531 /*
3532 * 01/13/08: XXX hmmm seems vreg_anonymify
3533 * also alters the type already?? This
3534 * sucks
3535 */
3536 vreg_anonymify(&ret, NULL, NULL, ilp);
3537 }
3538 ty = n_xmemdup(ret->type, sizeof *ret->type);
3539 copy_tlist(&ty->tlist, ret->type->tlist);
3540 if (is_array) {
3541 ty->tlist->type = TN_POINTER_TO;
3542 } else {
3543 static struct type dummy;
3544
3545 /* Append new pointer node */
3546 append_typelist(&dummy, TN_POINTER_TO, 0, NULL, NULL);
3547 dummy.tlist->next = ty->tlist;
3548 ty->tlist = dummy.tlist;
3549 }
3550
3551 ret->type = ty;
3552 ret->size = backend->get_sizeof_type(ret->type, NULL);
3553 if (eval) {
3554 ilp->res = ret;
3555 }
3556
3557 /*
3558 * 08/24/08: This was missing; Array decay does not
3559 * yield an lvalue, so without resetting the flag,
3560 * XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX It's not clear
3561 * whether altering the s_expr flag here could
3562 * yield problems! If the expression is evaluated
3563 * twice (for example once to try it as a constant
3564 * expression, and a second time if that failed),
3565 * then the second evaluation will start out with
3566 * a different is_lvalue, which may or may not be
3567 * a problem
3568 */
3569 if (ex->data != NULL && ex->data->is_lvalue) {
3570 ex->data->is_lvalue = 0;
3571 }
3572 } else if (ret->type->tbit != NULL) {
3573 /*
3574 * 08/11/08: Handle bitfield promotion
3575 * (only if it isn't the lvalue in an assignment)
3576 */
3577 if (purpose != TOK_OP_ASSIGN && !resval_not_used) {
3578 if (eval) {
3579 ret = promote_bitfield(ret, ilp);
3580 ret = backend->icode_make_cast(ret,
3581 cross_get_bitfield_promoted_type(ret->type), ilp);
3582 ilp->res = ret;
3583 } else {
3584 vreg_set_new_type(ret, cross_get_bitfield_promoted_type(ret->type));
3585 }
3586 }
3587 }
3588 }
3589
3590 return eval? ilp->res: ret;
3591 }
3592
/*
 * Generate the compare-and-branch icode for the controlling expression
 * of a control statement.
 *
 * For if/while/for the branch is taken when the condition is FALSE
 * (jump past the body to the end label); for do-while it is taken when
 * the condition is TRUE (jump back to the top of the loop).
 *
 * If ``have_cmp'' is non-null, the caller has already evaluated the
 * condition and passes its result vreg; otherwise the condition
 * expression is translated here.
 *
 * Returns 0 on success, -1 if the condition expression could not be
 * translated.
 */
static int
do_cond(
	struct expr *cond,
	struct icode_list *il,
	struct control *ctrl,
	struct vreg *have_cmp) {

	struct icode_instr *ii;
	struct icode_instr *dest_label;
	struct icode_instr *lastinstr;
	struct icode_instr *multi_reg_label = NULL;
	struct vreg *res;
	struct vreg *lower_vreg = NULL;
	int positive;
	int btype = 0;
	int first_btype = 0;
	int is_multi_reg_obj;
	int saved_btype;
	int have_multi_reg_cmp;
	int second_is_greater_than = 0;


	if (have_cmp != NULL) {
		res = have_cmp;
	} else {
		if ((res = expr_to_icode(cond, NULL, il, TOK_KEY_IF, 0, 1)) == NULL) {
			return -1;
		}
	}
	/*
	 * Remember the last emitted instruction so we can detect whether
	 * expr_to_icode() already ended with a compare we can branch on
	 */
	lastinstr = il->tail;


	if (lastinstr != NULL
		&& lastinstr->type == INSTR_CMP
		&& lastinstr->dest_vreg->is_multi_reg_obj) {
		have_multi_reg_cmp = 1;
	} else {
		have_multi_reg_cmp = 0;
	}

	if (res->is_multi_reg_obj || have_multi_reg_cmp) {
		/*
		 * This complicates things greatly. If we compare
		 * the registers in the right order (XXX big vs
		 * little endian), relational
		 * operators+branches in a row still work as
		 * expected. However, with equality and inequality,
		 * the first comparison cannot branch to the
		 * target already, because the second register may
		 * still differ.
		 *
		 * Thus instead of
		 *    cmp foo, bar
		 *    je label
		 * ... we do
		 *    cmp foo[0], bar[0]
		 *    jne end
		 *    cmp foo[1], bar[1]
		 *    je label
		 * end:
		 */
		is_multi_reg_obj = 1;
	} else {
		is_multi_reg_obj = 0;
	}

	if (ctrl->type == TOK_KEY_IF
		|| ctrl->type == TOK_KEY_WHILE
		|| ctrl->type == TOK_KEY_FOR) {
		/*
		 * for/while loop or if statement - branch
		 * if condition negative
		 */
		positive = 0;
	} else {
		/*
		 * do-while loop - branch if condition
		 * positive
		 */
		positive = 1;
	}

	/*
	 * When dealing with relational operators that
	 * occur at the top-level in an expression,
	 * expr_to_icode() already does the branch-
	 * determining cmp such that we can already
	 * branch and get say for ``stuff == stuff'':
	 * ``cmp stuff, stuff; jne end;'' rather than
	 * computing 1 (true) or 0 (false) first and
	 * comparing that against 0
	 */
	if (cond->op == 0) {
		goto cmp_zero;
	} else if (0 /*doing_stmtexpr*/) {
		/*
		 * expr_to_icode() uses a static variable ``level''
		 * to record the recursive call depth. That doesn't
		 * work with GNU C statement-as-expression, where in
		 *    ({ if (x == 0) ; })
		 * the if controlling expression is parsed starting
		 * with level = 1.
		 * As a temporary workaround, we explicitly compare
		 * with zero
		 *
		 * 06/29/08: Removed this part again. I'm not quite sure
		 * what this means, and whether it's still current with
		 * the latest change. The latest change - i.e. to call
		 * do_cond() for nested equality evaluation (creating
		 * 1 or 0 instead of cmp + jump) made this bit wrong,
		 * and hopefully solved the other issues which this
		 * part was supposed to fix before that
		 */
		goto cmp_zero;
	} else if (cond->op == TOK_OP_LEQU) {
		if (positive) btype = INSTR_BR_EQUAL;
		else btype = INSTR_BR_NEQUAL;
	} else if (cond->op == TOK_OP_LNEQU) {
		if (positive) btype = INSTR_BR_NEQUAL;
		else btype = INSTR_BR_EQUAL;
	} else if (cond->op == TOK_OP_GREAT) {
		if (positive) btype = INSTR_BR_GREATER;
		else btype = INSTR_BR_SMALLEREQ;
	} else if (cond->op == TOK_OP_SMALL) {
		if (positive) btype = INSTR_BR_SMALLER;
		else btype = INSTR_BR_GREATEREQ;
	} else if (cond->op == TOK_OP_GREATEQ) {
		if (positive) btype = INSTR_BR_GREATEREQ;
		else btype = INSTR_BR_SMALLER;
	} else if (cond->op == TOK_OP_SMALLEQ) {
		if (positive) btype = INSTR_BR_SMALLEREQ;
		else btype = INSTR_BR_GREATER;
	} else {
cmp_zero:
		/*
		 * Branch if condition false (0) and we're
		 * doing an if. Otherwise branch if condition
		 * true in case of do-while(to top of loop.)
		 */
#if 0
		vreg_faultin_protected(res, NULL, NULL, res, il, 0);
#endif
#if 0
		vreg_faultin(NULL, NULL, res, il, 0);
		ii = icode_make_cmp(res, NULL);
		lastinstr = ii;
#endif
		vreg_faultin_x87(NULL, NULL, res, il, 0);
		lastinstr = ii = compare_vreg_with_zero(res, il);

		append_icode_list(il, ii);
		if (positive) btype = INSTR_BR_NEQUAL;
		else btype = INSTR_BR_EQUAL;
	}

	if (ctrl->type == TOK_KEY_DO) {
		dest_label = ctrl->startlabel;
	} else {
		dest_label = ctrl->endlabel;
	}


	first_btype = btype;

	/*
	 *
	 * negative:
	 * for equality
	 *
	 *    if (foo == bar) {
	 *       body;
	 *    }
	 *
	 * becomes
	 *
	 *   if (foo[0] != bar[0]) {
	 *      goto no;
	 *   }
	 *   if (foo[1] != bar[1]) {
	 *      goto no;
	 *   }
	 *   body
	 *   no:
	 *
	 *
	 *
	 * inequality:
	 *
	 *    if (foo != bar) {
	 *       body;
	 *    }
	 *
	 * becomes
	 *
	 *    if (foo[0] != bar[0]) {
	 *       goto yes;
	 *    }
	 *    if (foo[1] != bar[1]) {
	 *       goto no;
	 *    }
	 *    yes:
	 *    body;
	 *    no:
	 */
	if (is_multi_reg_obj) {
		struct vreg *res_vr;
		int need_another_cmp = 0;

		if (lastinstr) {
			res_vr = lastinstr->dest_vreg;
		} else {
			res_vr = res;
		}
		if (IS_LLONG(res_vr->type->code)) {
			/*
			 * Comparisons of the lower word have to be done
			 * unsigned! Otherwise stuff like
			 *    if (1LL > 0xffffffff) {
			 * goes wrong
			 */

#if 0
			lower_vreg = n_xmemdup(res_vr, sizeof *res_vr);
#endif
			lower_vreg = copy_vreg(res_vr);

			lower_vreg->type = n_xmemdup(res_vr->type,
				sizeof *res_vr->type);
			lower_vreg->type->sign = TOK_KEY_UNSIGNED;
			lower_vreg->type->code = TY_ULLONG;
		}
		if (btype == INSTR_BR_EQUAL
			|| btype == INSTR_BR_NEQUAL) {
			/*
			 * The first true comparison does not allow us to
			 * jump to the target yet; Instead the first one
			 * only determines whether the branch is already
			 * known not to be taken (branch to label after
			 * second cmp), or may be taken (fall through to
			 * next cmp.) Thus the jump condition is reversed
			 * if the ``positive'' flag is set, otherwise not.
			 *
			 *   if (llong_value == 123ll) {
			 *       ...
			 *   }
			 *
			 * ... here ``positive'' is 0, meaning the branch
			 * is NOT taken if the condition is true (the
			 * flow of control ``falls through'' into the
			 * statement body. Here both cmp+branch
			 * instructions of a ``long long'' are allowed to
			 * branch if they do not yield equality
			 *
			 * Yes, this is confusing.
			 */
			if (positive) {
				if (btype == INSTR_BR_EQUAL) {
					/*btype = INSTR_BR_NEQUAL;*/
					btype = INSTR_BR_NEQUAL;
					multi_reg_label = icode_make_label(NULL);
				} else { /* is != */
					/*btype = INSTR_BR_EQUAL;*/
				}
			} else {
				/*
				 * Branch if condition false. Note that the
				 * branch type is already reversed, i.e. if
				 * it's NEQUAL we really had a ``==''
				 */
				if (btype == INSTR_BR_NEQUAL) {
				} else { /* equal, i.e. ``!='' */
					/*btype = INSTR_BR_NEQUAL;*/
					btype = INSTR_BR_NEQUAL;
					multi_reg_label =
						icode_make_label(NULL);
				}
			}
		} else if (btype == INSTR_BR_GREATEREQ
			|| btype == INSTR_BR_SMALLEREQ) {
			/*
			 * If we're looking for a greater/smaller-or-equal
			 * relation, the first comparison has to omit the
			 * ``equal'' part, because otherwise we'd get false
			 * hits. Consider comparing the values 123 and 456;
			 * The higher word is 0 in both cases, so the equal
			 * part would trigger
			 */
#if 0
			if (btype == INSTR_BR_GREATEREQ) {
				btype = INSTR_BR_GREATER;
			} else { /* smalleq */
				btype = INSTR_BR_SMALLER;
			}
#endif
			multi_reg_label =
				icode_make_label(NULL);

			/*
			 * 07/20/08: This was missing! At least for < on signed
			 * long long
			 */
			if (IS_LLONG(res_vr->type->code) && res_vr->type->tlist == NULL
				&& (cond->op == TOK_OP_GREAT
				|| cond->op == TOK_OP_SMALL)) {
				need_another_cmp = 1;
			}
		} else if (btype == INSTR_BR_GREATER
			|| btype == INSTR_BR_SMALLER) {
			multi_reg_label =
				icode_make_label(NULL);
			/*
			 * This was MISSING;
			 * 06/02/08: We have to generate for
			 * greater or equal signed:
			 *
			 *    jump below signed (for upper)    <-- was missing!
			 *    jump greater signed (for upper)
			 *    jump below unsigned (for lower)
			 *
			 * and for smaller or equal signed:
			 *
			 *    jump greater signed (for upper)  <-- was missing!
			 *    jump below unsigned (for upper)
			 *    jump above unsigned (for lower)
			 *
			 * 06/29/08: This was wrongly only done for signed
			 * long long, not unsigned!
			 */
			if (IS_LLONG(res_vr->type->code) && res_vr->type->tlist == NULL
				&& (cond->op == TOK_OP_GREATEQ
				|| cond->op == TOK_OP_SMALLEQ)) {
				need_another_cmp = 1;
			}
		}

		if (need_another_cmp) {
			/*ii = copy_icode_instr(lastinstr);*/
			if (lastinstr && lastinstr->type == INSTR_CMP) {
				ii = icode_make_branch(dest_label,
					btype, lastinstr->dest_vreg);
			} else {
				ii = icode_make_branch(dest_label,
					btype, res);
			}

			if (cond->op == TOK_OP_SMALL && ii->type == INSTR_BR_GREATEREQ) {
				/*
				 * 07/20/08: For <, this gives us greater-or-
				 * equal for the first comparison. But we want
				 * greater!
				 */
				ii->type = INSTR_BR_GREATER;
			} else if (cond->op == TOK_OP_GREAT && ii->type == INSTR_BR_SMALLEREQ) {
#if 0
				/*
				 * 07/20/08: For >, this gives us less-or-
				 * equal for the first comparison. But we want
				 * less!
				 */
				ii->type = INSTR_BR_GREATER;
#endif
				ii->type = INSTR_BR_SMALLER;
				second_is_greater_than = 1;
			}

			append_icode_list(il, ii);

			/*
			 * Now create another compare instruction for
			 * the next branch
			 */
			ii = copy_icode_instr(lastinstr);
			append_icode_list(il, ii);

			/*
			 * Make sure that both the original instruction
			 * and the copied one refer to the UPPER dword
			 * of the long long! That's necessary because we
			 * want to jump-if-less, then jump-if-greater
			 * both based on the same dword.
			 *
			 * We have to set a hint so that the backend
			 * does not automatically take the next
			 * compare to mean ``comapare second dword of
			 * long long''
			 */
			lastinstr->hints |=
				HINT_INSTR_NEXT_NOT_SECOND_LLONG_WORD;
		}
	}

	saved_btype = btype;

	/*
	 * For multi-register objects the branch emitted for the upper
	 * word below uses a reversed condition (jump away on mismatch)
	 */
	if (is_multi_reg_obj) {
		if (btype == INSTR_BR_GREATER) {
			btype = INSTR_BR_SMALLER /*EQ*/;
		} else if (btype == INSTR_BR_SMALLER) {
			btype = INSTR_BR_GREATER /*EQ*/;
		} else if (btype == INSTR_BR_SMALLEREQ) {
#if 0
			btype = INSTR_BR_GREATER;
#endif
			btype = INSTR_BR_SMALLER;
			/*
			 * 07/20/08: Setting the multi-reg label below seems
			 * to be wrong. The reason why it was there in the
			 * first place is not clear
			 */
			/* multi_reg_label = NULL;*/
		} else if (btype == INSTR_BR_GREATEREQ) {
			btype = INSTR_BR_SMALLER;
			/* XXX hmm is this right? */
#if 0
			btype = INSTR_BR_GREATER;
			multi_reg_label = NULL;
#endif

		}
	}

	if (cond->op != 0) {
		/*
		 * If the comaparison has already been made, it is an
		 * error to pass the expression's result type to
		 * icode_make_branch. For instance, when comparing two
		 * unsigned integers, the result has type ``signed
		 * int''. This botch, which I'm not sure works in all
		 * cases, fixes it for now. In general it would be
		 * better to always implicitly use the type of the
		 * last comparison, as is already done on e.g. MIPS
		 */
		if (lastinstr && lastinstr->type == INSTR_CMP) {
			ii = icode_make_branch(
				multi_reg_label? multi_reg_label: dest_label,
				btype, lastinstr->dest_vreg);
		} else {
			/*
			 * 10/27/08: This used to just do the branch. This
			 * is wrong - possibly for all cases - if there is
			 * no directly preceding compare instruction.
			 *
			 * In that case we'll probably have something like
			 *
			 *    if ( (x < y) && (y < z) )
			 *
			 * ... so the result vreg contains a value which has
			 * not been compared with 0 yet, so that's what we
			 * have to do here
			 */
			lastinstr = compare_vreg_with_zero(res, il);
			append_icode_list(il, lastinstr);
			ii = icode_make_branch(
				multi_reg_label? multi_reg_label: dest_label,
				/*btype*/INSTR_BR_EQUAL, lastinstr->dest_vreg);
#if 0
			ii = icode_make_branch(
				multi_reg_label? multi_reg_label: dest_label,
				btype, res);
#endif
		}
	} else {
		ii = icode_make_branch(
			multi_reg_label? multi_reg_label: dest_label,
			btype, res);
	}

	if (second_is_greater_than) {
		/*
		 * 07/20/08: For multi-reg ``>''
		 */
		ii->type = INSTR_BR_GREATER;
	}

	btype = saved_btype;
	append_icode_list(il, ii);

	btype = first_btype;

	/*
	 * Multi-register objects need a second compare + branch for the
	 * less significant word
	 */
	if (res->is_multi_reg_obj || have_multi_reg_cmp) {
		ii = copy_icode_instr(lastinstr);

		/*
		 * 01/29/08: This less significant word comparison
		 * must be done unsigned. The hint is needed for
		 * PPC32 where the distinction isn't done for the
		 * branch but during comparison already. The vregs
		 * are not unsigned
		 * XXX Does this problem occur elsewhere? Is there
		 * a better way to fix it? (Changing vreg types)
		 */
		ii->hints |= HINT_INSTR_UNSIGNED;

		append_icode_list(il, ii);
		if (multi_reg_label) {
			/* We have to reverse the test again */
#if 0
			if (btype == INSTR_BR_EQUAL) {
				btype = INSTR_BR_NEQUAL;
			} else if (btype == INSTR_BR_NEQUAL) {
				btype = INSTR_BR_EQUAL;
			} else {
				unimpl();
			}
#endif
		}
		ii = icode_make_branch(dest_label, btype,
			lower_vreg? lower_vreg: res);
		append_icode_list(il, ii);

		/*
		 * Now append label to which to jump if the first
		 * comparison was false
		 */
		if (multi_reg_label != NULL) {
			append_icode_list(il, multi_reg_label);
		}
	}

	return 0;
}
4114
4115 static void
do_body_labels(struct control * ctrl,struct icode_list * il)4116 do_body_labels(struct control *ctrl, struct icode_list *il) {
4117 if (ctrl->body_labels != NULL) {
4118 struct icode_instr *ii;
4119 for (ii = ctrl->body_labels->head; ii != NULL;) {
4120 struct icode_instr *botch = ii->next;
4121 ii->next = NULL;
4122 append_icode_list(il, ii);
4123 ii = botch;
4124 }
4125 }
4126 }
4127
4128
4129 /*
4130 * 07/20/09: New function to align a pointer to a multiple of N by adding
4131 * M if it is not aligned yet.
4132 */
void
icode_align_ptr_up_to(struct vreg *ptr,
	int target_alignment,
	int addend,
	struct icode_list *il) {

	struct vreg *andvr;
	struct vreg *addvr;
	struct vreg *tempptr;
	struct token *andtok;
	struct token *addtok;
	struct icode_instr *ii;
	struct icode_instr *label;
	struct reg *r;

	vreg_faultin(NULL, NULL, ptr, il, 0);

	/*
	 * Reinterpret pointer as an unsigned integer so we can perform
	 * arithmetic on it. We make a copy of the pointer value because
	 * we will change it
	 */
	r = ALLOC_GPR(curfunc, ptr->size, il, NULL);
	icode_make_copyreg(r, ptr->pregs[0], ptr->type, ptr->type, il);

	tempptr = dup_vreg(ptr);
	vreg_set_new_type(tempptr, backend->get_size_t());
	vreg_map_preg(tempptr, r);


	/*
	 * AND pointer with desired alignment - 1 (must be power of 2)
	 */
	if (target_alignment & (target_alignment - 1)) {
		(void) fprintf(stderr, "BUG: icode_align_ptr_up_to() with "
			"alignment that is not a power of 2\n");
		abort();
	}
	/* Build the low-bits mask: alignment - 1 */
	--target_alignment;

	andtok = const_from_value(&target_alignment, NULL);
	andvr = vreg_alloc(NULL, andtok, NULL, NULL);
	vreg_faultin_protected(tempptr, NULL, NULL, andvr, il, 0);

	/* 07/26/12: Need to avoid type mismatch on AMD64 */
	andvr = backend->icode_make_cast(andvr, backend->get_size_t(), il);

	backend->icode_prepare_op(&tempptr, &andvr, TOK_OP_BAND, il);
	ii = icode_make_and(tempptr, andvr);
	append_icode_list(il, ii);

	/*
	 * If 0 (alignment correct), skip pointer addition
	 */
	ii = icode_make_cmp(tempptr, NULL);
	append_icode_list(il, ii);
	label = icode_make_label(NULL);
	ii = icode_make_branch(label, INSTR_BR_EQUAL, tempptr);
	append_icode_list(il, ii);

	/*
	 * Misaligned: add ``addend'' to the original pointer vreg
	 * (faulted in again because the AND above may have clobbered
	 * register state)
	 */
	addtok = const_from_value(&addend, NULL);
	addvr = vreg_alloc(NULL, addtok, NULL, NULL);
	vreg_faultin(NULL, NULL, ptr, il, 0);
	vreg_faultin_protected(ptr, NULL, NULL, addvr, il, 0);
	ii = icode_make_add(ptr, addvr);
	append_icode_list(il, ii);

	/*
	 * Branch target: the pointer was either already aligned, or has
	 * just been bumped by ``addend'' above
	 */
	append_icode_list(il, label);
}
4206
4207
4208 void
4209 xlate_decl(struct decl *d, struct icode_list *il);
4210
/*
 * Translate one control structure (if/else, while, do-while, for,
 * switch, case/default, return, break, continue, goto) into a fresh
 * intermediate code list.
 *
 * Returns the new icode list, or NULL if translating a contained
 * expression failed (an error has then already been reported).
 */
struct icode_list *
ctrl_to_icode(struct control *ctrl) {
	struct icode_list *il;
	struct icode_list *il2;
	struct icode_instr *ii;
	struct icode_instr *ii2;
	struct label *label;


	il = alloc_icode_list();
	/*
	 * Loop heads are join points for the flow of control, so cached
	 * register contents cannot be trusted across them
	 */
	if (ctrl->type == TOK_KEY_DO
		|| ctrl->type == TOK_KEY_WHILE
		|| ctrl->type == TOK_KEY_FOR) {
		backend->invalidate_gprs(il, 1, 0);
	}

	if (ctrl->type == TOK_KEY_IF) {
		/*
		 * Generate
		 * cmp res, 0; je label;
		 * ... where label is returned (but not inserted
		 * into the icode list.)
		 */
		if (do_cond(ctrl->cond, il, ctrl, NULL) != 0) {
			return NULL;
		}
		do_body_labels(ctrl, il);

		il2 = xlate_to_icode(ctrl->stmt, 0);
		if (il2 != NULL) {
			merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
			free(il2);
#endif
		}

		if (ctrl->next != NULL) {
			/*
			 * End of if branch - jump across else
			 * branch, then append else body
			 */
			ii2 = icode_make_jump(ctrl->next->endlabel);
			append_icode_list(il, ii2);
			append_icode_list(il, ctrl->endlabel);

			do_body_labels(ctrl->next, il);

			il2 = xlate_to_icode(ctrl->next->stmt, 0);
			if (il2 != NULL) {
				merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
				free(il2);
#endif
			}
			/* elsepart: ... code ...  end: */
			append_icode_list(il, ctrl->next->endlabel);
		} else {
			append_icode_list(il, ctrl->endlabel);
		}
	} else if (ctrl->type == TOK_KEY_WHILE) {
		/* start: cond-branch-to-end; body; jump start; end: */
		append_icode_list(il, ctrl->startlabel);
		if (do_cond(ctrl->cond, il, ctrl, NULL) == -1) {
			return NULL;
		}

		do_body_labels(ctrl, il);

		il2 = xlate_to_icode(ctrl->stmt, 0);
		if (il2 != NULL) {
			merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
			free(il2);
#endif
		}
		ii = icode_make_jump(ctrl->startlabel);
		append_icode_list(il, ii);
		append_icode_list(il, ctrl->endlabel);
	} else if (ctrl->type == TOK_KEY_DO) {
		/* do-while loop */
		append_icode_list(il, ctrl->startlabel);
		do_body_labels(ctrl, il);
		il2 = xlate_to_icode(ctrl->stmt, 0);
		if (il2 != NULL) {
			merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
			free(il2);
#endif
		}
		/* Condition label (target of ``continue''), then the test */
		append_icode_list(il, ctrl->do_cond);
		(void) do_cond(ctrl->cond, il, ctrl, NULL);
		append_icode_list(il, ctrl->endlabel);
	} else if (ctrl->type == TOK_KEY_FOR) {
		/* Wrap init/continue expressions in a temporary statement */
		struct statement tmpst;
		tmpst.type = ST_CODE;
		tmpst.next = NULL;

		if (ctrl->finit != NULL
			/* crude stuff to rule out empty ex - necessary? */
			&& (ctrl->finit->op || ctrl->finit->data)) {
			tmpst.data = ctrl->finit;
			il2 = xlate_to_icode(&tmpst, 0);
			if (il2 != NULL) {
				merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
				free(il2);
#endif
			}
		} else if (ctrl->dfinit != NULL) {
			/* C99-style declaration in the first clause */
			int i;

			for (i = 0; ctrl->dfinit[i] != NULL; ++i) {
				xlate_decl(ctrl->dfinit[i], il);
			}
		}
		append_icode_list(il, ctrl->startlabel);

		/* Second clause may be empty (infinite loop) */
		if (ctrl->cond != NULL
			&& (ctrl->cond->op || ctrl->cond->data)) {
			if (do_cond(ctrl->cond, il, ctrl, NULL) != 0) {
				return NULL;
			}
		}

		do_body_labels(ctrl, il);
		il2 = xlate_to_icode(ctrl->stmt, 0);
		if (il2 != NULL) {
			merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
			free(il2);
#endif
		}

		if (ctrl->fcont != NULL
			/* crude stuff to rule out empty ex - necessary? */
			&& (ctrl->fcont->op || ctrl->fcont->data)) {
			/* Third clause; fcont_label is the continue target */
			append_icode_list(il, ctrl->fcont_label);
			tmpst.data = ctrl->fcont;
			il2 = xlate_to_icode(&tmpst, 0);
			if (il2 != NULL) {
				merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
				free(il2);
#endif
			}
		}
		ii = icode_make_jump(ctrl->startlabel);
		append_icode_list(il, ii);
		if (ctrl->endlabel != NULL) {
			append_icode_list(il, ctrl->endlabel);
		}
	} else if (ctrl->type == TOK_KEY_SWITCH) {
		struct token *cond;
		struct vreg *vr_cond;
		struct vreg *vr_case;
		struct label *default_case = NULL;

		vr_cond = expr_to_icode(ctrl->cond, NULL, il, 0, 0, 1);

		if (vr_cond == NULL) {
			return NULL;
		}

		if (!is_integral_type(vr_cond->type)) {
			errorfl(ctrl->cond->tok,
				"Controlling switch expression doesn't have "
				"integral type");
			return NULL;
		}

		do_body_labels(ctrl, il);
		/*
		 * Emit a cmp + je pair for every case label; the labels
		 * themselves are placed by the CASE/DEFAULT branch below
		 */
		for (label = ctrl->labels;
			label != NULL;
			label = label->next) {
			if (label->value == NULL) {
				if (label->is_switch_label) {
					default_case = label;
				}
				continue;
			} else if (!label->is_switch_label) {
				continue;
			}

			/*
			 * 08/22/07: This did usual arithmetic conversion
			 * betwen condition and case, instead of converting
			 * case to condition. Also, const_from_value was
			 * called on the original case value, such that
			 *
			 *    case ((char)1):
			 *
			 * would instruct the backend to load an ``immediate
			 * char'', which if bogus. Now the condition is
			 * instead promoted, and then the case is converted
			 * to it.
			 *
			 * Another problem with that:
			 *
			 *    switch (enum_type) {
			 *    case value:
			 *
			 * ... would convert value to an enum type, which is
			 * also not handled by the backends. Thus the TY_INT
			 * workaround below.
			 */

			(void) promote(&vr_cond, NULL, 0, NULL, il, 1);
			cross_do_conv(label->value->const_value,
				vr_cond->type->code, 1);
			label->value->const_value->type->code =
				vr_cond->type->code == TY_ENUM? TY_INT:
				vr_cond->type->code;
			cond = const_from_value(
				label->value->const_value->value,
				label->value->const_value->type);
			vr_case = vreg_alloc(NULL, cond, NULL, NULL);
			vreg_faultin_protected(vr_cond, NULL, NULL,
				vr_case, il, 0);
			vreg_faultin_protected(vr_case, NULL, NULL,
				vr_cond, il, 0);
#if 0
			cond = const_from_value(
				label->value->const_value->value,
				label->value->const_value->type);
			vr_case = vreg_alloc(NULL, cond, NULL, NULL);
			vreg_faultin(NULL, NULL, vr_case, il, 0);
			vreg_faultin_protected(vr_case, NULL, NULL,
				vr_cond, il, 0);
			(void) promote(&vr_cond, &vr_case,
				TOK_OP_LEQU, NULL, il, 1);

			/* XXX .... as promote may move stuff :( */
			vreg_faultin(NULL, NULL, vr_case, il, 0);
			vreg_faultin_protected(vr_case, NULL, NULL,
				vr_cond, il, 0);
#endif

			ii = icode_make_cmp(vr_cond, vr_case);
			append_icode_list(il, ii);
			free_pregs_vreg(vr_case, il, 0, 0);
			ii = icode_make_branch(label->instr, INSTR_BR_EQUAL,
				vr_cond);
			append_icode_list(il, ii);
			/* Multi-register condition: compare second word too */
			if (vr_cond->is_multi_reg_obj) {
				ii = icode_make_cmp(vr_cond, vr_case);
				append_icode_list(il, ii);
				ii = icode_make_branch(label->instr,
					INSTR_BR_EQUAL, vr_cond);
				append_icode_list(il, ii);
			}
		}
		free_pregs_vreg(vr_cond, il, 0, 0);
		/* No case matched - go to default, or past the switch */
		if (default_case != NULL) {
			ii = icode_make_jump(default_case->instr);
		} else {
			ii = icode_make_jump(ctrl->endlabel);
		}
		append_icode_list(il, ii);

		il2 = xlate_to_icode(ctrl->stmt, 0);
		if (il2 != NULL) {
			merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
			free(il2);
#endif
		}
		append_icode_list(il, ctrl->endlabel);
	} else if (ctrl->type == TOK_KEY_CASE
		|| ctrl->type == TOK_KEY_DEFAULT) {
		/* Just place the label the switch dispatch branches to */
		label = ctrl->stmt->data;
		append_icode_list(il, label->instr);
	} else if (ctrl->type == TOK_KEY_RETURN) {
		struct vreg *vr = NULL;
		struct type *ret_type = NULL;
		struct type_node *rettn;

		if (ctrl->cond == NULL) {
			/*
			 * ``return;'' without a value - warn unless the
			 * function really returns void (skip past the
			 * TN_FUNCTION node to find the return type's tlist)
			 */
			for (rettn = curfunc->proto->dtype->tlist;
				rettn != NULL;
				rettn = rettn->next) {
				if (rettn->type == TN_FUNCTION) {
					rettn = rettn->next;
					break;
				}
			}

			if (curfunc->proto->dtype->code != TY_VOID
				|| rettn != NULL) {
				warningfl(ctrl->tok,
					"Return statement without a value in function not returning `void'");
			}
		} else {
			if ((vr = expr_to_icode(ctrl->cond, NULL, il,
				TOK_KEY_RETURN, 0, 1)) == NULL) {
				return NULL;
			}
			/* XXX warn if types incompatible */
		}

		if (vr != NULL) {
#if 0
			ret_type = curfunc->proto->dtype;
			rettn = ret_type->tlist;
			ret_type->tlist = ret_type->tlist->next;
#endif
			/* 06/17/08: Stop the tlist kludgery for return type */
			ret_type = curfunc->rettype;
			if (check_types_assign(ctrl->tok, ret_type, vr, 1, 0)
				!= 0) {
				return NULL;
			}
			if ((ret_type->code != TY_STRUCT
				&& ret_type->code != TY_UNION)
				|| ret_type->tlist != NULL) {
				/* Scalar/pointer: convert to return type */
				vr = backend->icode_make_cast(vr, ret_type, il);
			} else {
				/* Struct/union: types must match exactly */
				if (vr->type->code != ret_type->code
					|| vr->type->tlist != NULL
					|| vr->type->tstruc
					!= ret_type->tstruc) {
					errorfl(ctrl->tok,
						"Returned expression incompatible with function return type");
					return NULL;
				}
			}
			if (ret_type->code == TY_VOID
				&& ret_type->tlist == NULL) {
				warningfl(ctrl->tok,
					"void expressions as argument to "
					"`return' are only allowed in GNU C "
					"and C++");
			}
#if 0
			ret_type->tlist = rettn;
#endif
		}

		if (backend->icode_make_return(vr, il) != 0) {
			return NULL;
		}
	} else if (ctrl->type == TOK_KEY_BREAK) {
		ii = icode_make_jump(ctrl->endlabel);
		append_icode_list(il, ii);
	} else if (ctrl->type == TOK_KEY_CONTINUE) {
		ii = icode_make_jump(ctrl->startlabel);
		append_icode_list(il, ii);
	} else if (ctrl->type == TOK_KEY_GOTO) {
		struct label *l;
		struct token *dest = (struct token *)ctrl->stmt;

		ii = NULL;
		if (dest != NULL) {
			/* goto label; */
			if ((l = lookup_label(curfunc, dest->data)) != NULL) {
				ii = icode_make_jump(l->instr);
				append_icode_list(il, ii);
			} else {
				errorfl(dest, "Undefined label `%s'", dest->ascii);
			}
		} else {
			struct vreg *vr;

			/* goto *expr; (GNU C computed goto) */
			if ((vr = expr_to_icode(ctrl->cond, NULL, il,
				TOK_KEY_RETURN, 0, 1)) == NULL) {
				return NULL;
			}

			/*
			 * The type almost doesn't matter a bit! The only thing
			 * to make sure is that the vreg comes from a pointer,
			 * as in ``goto *foo;''
			 */
			if (vr->type->tlist == NULL
				&& (vr->type->code == TY_VOID
				|| vr->type->code == TY_UNION
				|| vr->type->code == TY_STRUCT)) {
				errorfl(ctrl->tok, "Invalid type for computed "
					"goto expression");
				return NULL;
			} else if ((signed)backend->get_sizeof_type(vr->type, NULL)
				!= backend->get_ptr_size()) {
				warningfl(ctrl->tok, "Computed goto expression "
					"value does not have pointer size");
			}
			vreg_faultin(NULL, NULL, vr, il, 0);
			icode_make_comp_goto(vr->pregs[0], il);
		}
	} else {
		printf("UNKNOWN CONTROL STRUCTURE %d\n", ctrl->type);
		abort();
	}
	return il;
}
4604
4605
4606 /*
4607 * XXX this should only penalize initializers which really are not
4608 * constant!!!!!!!!
4609 */
4610 static void
varinit_to_icode(struct decl * dest,struct vreg * destvr,struct initializer * init,unsigned long * offset,struct icode_list * il)4611 varinit_to_icode(struct decl *dest,
4612 struct vreg *destvr,
4613 struct initializer *init,
4614 unsigned long *offset,
4615 struct icode_list *il) {
4616
4617 unsigned long offset0 = 0;
4618
4619 if (offset == NULL) {
4620 offset = &offset0;
4621 }
4622
4623 for (; init != NULL; init = init->next) {
4624 size_t type_size;
4625 int remainder;
4626 int type_alignment;
4627
4628 switch (init->type) {
4629 case INIT_EXPR:
4630 case INIT_STRUCTEXPR:
4631 /* Nothing to do */
4632 break;
4633 case INIT_NESTED:
4634 varinit_to_icode(dest, destvr, init->data, offset, il);
4635 break;
4636 case INIT_NULL:
4637 if (init->varinit != NULL) {
4638 struct vreg *res;
4639 struct vreg *leftvr;
4640 struct vreg *tmpvr;
4641 struct token *tok;
4642 struct icode_instr *ii;
4643 struct vreg *indirvr;
4644 struct vreg *addrvr;
4645 int i;
4646 int is_struct = 0;
4647
4648
4649 res = expr_to_icode(init->varinit, NULL,
4650 il, 0, 0, 1);
4651 if (res == NULL) {
4652 break;
4653 }
4654
4655 if (!is_basic_agg_type(init->left_type)) {
4656 res = backend->icode_make_cast(res,
4657 init->left_type, il);
4658 } else {
4659 is_struct = 1;
4660 }
4661
4662 /*
4663 * 08/09/08: Handle bitfield range
4664 */
4665 if (init->left_type->tbit != NULL) {
4666 /* Mask source value to max range */
4667 mask_source_for_bitfield(
4668 init->left_type,
4669 res, il, 0);
4670 }
4671
4672 leftvr = vreg_alloc(NULL, NULL, NULL,
4673 init->left_type);
4674 {
4675 struct reg *r;
4676 /*ii =*/ r = icode_make_addrof(NULL, /*dest->vreg*/
4677 destvr,
4678 il);
4679 /*append_icode_list(il, ii);*/
4680
4681 addrvr = vreg_alloc(NULL, NULL, NULL,
4682 addrofify_type(leftvr->type));
4683 vreg_map_preg(addrvr, r /*ii->dat*/);
4684 }
4685
4686 /*
4687 * Now leftvr is the address of the left
4688 * hand struct or array. Now perform some
4689 * pointer arithmetic, then indirectly
4690 * assign the result. Yes this is kludged,
4691 * we need a more general way for offsets
4692 * in the long run
4693 */
4694 reg_set_unallocatable(addrvr->pregs[0]);
4695 if (init->left_type->tbit == NULL) {
4696 i = (int)*offset;
4697 } else {
4698 /*
4699 * Get offset from storage unit base
4700 * offset plus offset within it. This
4701 * is needed because the current offset
4702 * passed to this function already
4703 * includes a full bitfield initializer
4704 * (since we have to create one IN
4705 * ADDITION to this null initializer)
4706 */
4707 #if 0
4708 i = init->left_type->tbit->bitfield_storage_unit->
4709 offset + init->left_type->tbit->
4710 byte_offset;
4711 #endif
4712 int rel_off = init->left_type->tbit->absolute_byte_offset
4713 - init->left_type->tbit->bitfield_storage_unit->offset;
4714 i = *offset - backend->get_sizeof_type(
4715 init->left_type->tbit->bitfield_storage_unit->dtype, NULL)
4716 + rel_off;
4717 }
4718 tok = const_from_value(&i,
4719 make_basic_type(TY_INT));
4720 tmpvr = vreg_alloc(NULL, tok, NULL,
4721 make_basic_type(TY_INT));
4722 vreg_faultin(NULL, NULL, tmpvr, il, 0);
4723 ii = icode_make_add(addrvr, tmpvr);
4724 append_icode_list(il, ii);
4725 reg_set_allocatable(addrvr->pregs[0]);
4726
4727 indirvr = vreg_alloc(NULL, NULL,
4728 addrvr,
4729 init->left_type);
4730 reg_set_unallocatable(indirvr->from_ptr->pregs[0]);
4731
4732 if (is_struct) {
4733 /*
4734 * 04/03/08: Non-constant struct-by-
4735 * value initializer... was missing!
4736 */
4737 icode_make_copystruct(indirvr, res, il);
4738 } else {
4739 /*
4740 * 06/01/08: Use x87 faultin for
4741 * floating point. This is save because
4742 * the store will pop the reg
4743 */
4744 if (init->left_type->tbit != NULL) {
4745 /* Bitfield */
4746 write_back_bitfield_by_assignment(indirvr, res, il);
4747 } else {
4748 /* Not bitfield */
4749 vreg_faultin_x87(NULL, NULL, res, il, 0);
4750 reg_set_allocatable(indirvr->from_ptr->pregs[0]);
4751 indirvr->pregs[0] = res->pregs[0];
4752 icode_make_store(curfunc, res, indirvr, il);
4753 }
4754 }
4755 }
4756 break;
4757 default:
4758 unimpl();
4759 }
4760
4761 if (init->left_type != NULL) {
4762 if (init->type == INIT_NESTED) {
4763 /*
4764 * Don't add up sizes for nested initializers -
4765 * that has already been done
4766 */
4767 type_size = 0;
4768 } else if (init->type == INIT_NULL
4769 && init->left_type->tbit != NULL) {
4770 /*
4771 * 10/13/08: Don't add sizes for variable bitfield
4772 * initializers (struct foo { int x:8; } = { rand() })
4773 * either
4774 */
4775 type_size = 0;
4776 } else if (init->type == INIT_NULL) {
4777 /*
4778 * 02/16/10: We incorrectly used the struct
4779 * type size (in the else branch below) rather
4780 * than the explicitly set 0 data size field
4781 */
4782 type_size = *(size_t *)init->data;
4783 } else {
4784 type_size = backend->get_sizeof_type(init->left_type,
4785 NULL);
4786 }
4787 } else {
4788 assert(init->type == INIT_NULL);
4789 type_size = *(size_t *)init->data;
4790 }
4791
4792 *offset += type_size;
4793
4794 /*
4795 * Align for next initializer, unless it is a genuine null
4796 * initializer (as opposed to a placeholder for a variable
4797 * initializer), in which case alignment is already handled
4798 */
4799 if (init->next != NULL
4800 && (init->next->type != INIT_NULL
4801 || init->next->left_type != NULL)) {
4802 struct initializer *tmp = init->next;
4803
4804 /*
4805 * 10/13/08: Don't align for bitfield types
4806 */
4807 if (tmp->left_type->tbit == NULL) {
4808 type_alignment = backend->get_align_type(tmp->left_type);
4809 remainder = *offset % type_alignment;
4810 if (remainder) {
4811 *offset += type_alignment - remainder;
4812 }
4813 }
4814 }
4815 }
4816 }
4817
4818 /*
4819 * Generate initializations for automatic variables
4820 */
4821 void
init_to_icode(struct decl * d,struct icode_list * il)4822 init_to_icode(struct decl *d, struct icode_list *il) {
4823 struct initializer *init;
4824 struct vreg *decvr;
4825
4826 decvr = vreg_alloc(d, NULL, NULL, NULL);
4827 vreg_set_new_type(decvr, d->dtype);
4828
4829 if (is_basic_agg_type(d->dtype)
4830 && d->init->type != INIT_STRUCTEXPR) {
4831 /* Fill remaining elements/members with 0 */
4832 d->init_name = backend->make_init_name(d->init);
4833 d->init_name->dec = d;
4834 #if XLATE_IMMEDIATELY
4835 emit->struct_inits(d->init_name);
4836 #endif
4837 backend->invalidate_gprs(il, 1, INV_FOR_FCALL);
4838 icode_make_copyinit(d, il);
4839
4840 if (d->dtype->storage != TOK_KEY_STATIC
4841 && d->dtype->storage != TOK_KEY_EXTERN) {
4842 varinit_to_icode(d, decvr, d->init, NULL, il);
4843 #if 0
4844 varinit_to_icode(d, d->init, 0, il);
4845 #endif
4846 }
4847 } else {
4848 struct vreg *vr;
4849 struct vreg *left
4850 = vreg_alloc(NULL, NULL, NULL, d->dtype);
4851
4852 left->var_backed = d;
4853 init = d->init;
4854 if (init->next != NULL
4855 || (init->type != INIT_EXPR
4856 && init->type != INIT_STRUCTEXPR)) {
4857 puts("BAD INITIALIZER");
4858 abort();
4859 }
4860
4861 /*
4862 * 08/16/07: This didn't pass the variable to be
4863 * initialized (left) to expr_to_icode(). Thus
4864 * functions returning structures by values didn't
4865 * work as initializers
4866 */
4867 if ((vr = expr_to_icode(init->data, left,
4868 il, 0, 0, 1)) == NULL) {
4869 return;
4870 }
4871 if (check_types_assign(d->tok, d->dtype,
4872 vr, 1, 0) == -1) {
4873 return;
4874 }
4875
4876 if (init->type != INIT_STRUCTEXPR) {
4877 vr = backend->icode_make_cast(vr, d->dtype, il);
4878 vreg_faultin_x87(NULL, NULL, vr, il, 0);
4879 vreg_map_preg(/*d->vreg*/ decvr, vr->pregs[0]);
4880 if (vr->is_multi_reg_obj) {
4881 vreg_map_preg2(/*d->vreg*/ decvr, vr->pregs[1]);
4882 }
4883 icode_make_store(NULL,
4884 /*d->vreg, d->vreg*/ decvr, decvr, il);
4885 if (STUPID_X87(vr->pregs[0])) {
4886 #if 0
4887 backend->free_preg(vr->pregs[0], il);
4888 #endif
4889 vr->pregs[0]->vreg = NULL;
4890 vr->pregs[0] = NULL;
4891 }
4892 } else {
4893 /*
4894 * 08/16/07: This generated a bad struct copy if the
4895 * initializer was a function returning a structure
4896 * by value, in which case the callee does the copy,
4897 * not the caller
4898 */
4899 if (!vr->struct_ret) {
4900 icode_make_copystruct(left, vr, il);
4901 }
4902 }
4903 }
4904 }
4905
4906 /*
4907 * XXX Some constraints dealt with here are architecture-specific. That stuff
4908 * should be outsourced to the backend. o and i are missing :-(
4909 */
4910 static void
asm_to_icode(struct inline_asm_stmt * stmt,struct icode_list * il)4911 asm_to_icode(struct inline_asm_stmt *stmt, struct icode_list *il) {
4912 struct clobbered_reg *clob;
4913 struct inline_asm_io *io;
4914 char *p;
4915 struct reg *r;
4916 int i;
4917
4918 for (clob = stmt->clobbered; clob; clob = clob->next) {
4919 if (clob->reg == NULL) {
4920 /*
4921 * Reg = NULL means "memory" is clobbered -
4922 * no values should be cached anymore
4923 * XXX really need invalidate_fprs() :(
4924 */
4925 backend->invalidate_gprs(il, 1, 0);
4926 break;
4927 } if (clob->reg->type == REG_GPR) {
4928 free_preg(clob->reg, il, 1, 1);
4929 /*reg_set_unallocatable(clob->reg);*/
4930 clob->reg->used = 1;
4931 } else {
4932 puts("ERROR: Non-GPRS may not occur "
4933 "in the asm clobber list yet\n");
4934 unimpl();
4935 }
4936 }
4937
4938 for (io = stmt->output, i = 1; io != NULL; io = io->next, ++i) {
4939 io->vreg = expr_to_icode(io->expr, NULL, il, 0, 0, 1);
4940 io->outreg = NULL;
4941 if (io->vreg == NULL) {
4942 return;
4943 } else if (io->expr->op != 0
4944 || !io->expr->data->is_lvalue) {
4945 errorfl(io->expr->tok,
4946 "Output operand #%d isn't an lvalue", i);
4947 return;
4948 }
4949 for (p = io->constraints; *p != 0; ++p) {
4950 if (*p == '=') {
4951 ;
4952 } else if (*p == '+') {
4953 ;
4954 } else if (*p == '&') {
4955 ; /* is early clobber */
4956 } else if (strchr("rqQabcdSD", *p) != NULL) {
4957 /* XXX hmm Q is amd64?! */
4958 r = backend->asmvreg_to_reg(&io->vreg,
4959 *p, io, il, 0);
4960 if (r == NULL) {
4961 return;
4962 }
4963 io->outreg = r;
4964 }
4965 }
4966 }
4967
4968 /* XXX for some reason stuff below uses clobber registers :( */
4969 for (io = stmt->input; io != NULL; io = io->next) {
4970 io->vreg = expr_to_icode(io->expr, NULL, il, 0, 0, 1);
4971 if (io->vreg == NULL) {
4972 return;
4973 }
4974 }
4975
4976 for (io = stmt->input, i = 1; io != NULL; io = io->next, ++i) {
4977 for (p = io->constraints; *p != 0; ++p) {
4978 r = NULL;
4979 if (strchr("rqabcdSD", *p) != NULL) {
4980 r = backend->asmvreg_to_reg(&io->vreg, *p, io,
4981 il, 1);
4982 if (r == NULL) {
4983 return;
4984 }
4985 reg_set_unallocatable(r);
4986 } else if (*p == 'm') {
4987 /*
4988 * XXX faultin below assumes it can always get
4989 * a register
4990 */
4991 r = vreg_faultin_ptr(io->vreg, il);
4992 if (r != NULL) {
4993 /* Output is done through pointer */
4994 reg_set_unallocatable(r);
4995 }
4996 } else if (isdigit((unsigned char)*p)) {
4997 int num = *p - '0';
4998 struct inline_asm_io *tmp;
4999
5000 if (num >= stmt->n_outputs) {
5001 errorfl(io->expr->tok,
5002 "Output operand %d doesn't exist",
5003 num+1);
5004 return;
5005 }
5006 for (tmp = stmt->output; num > 0; --num) {
5007 tmp = tmp->next;
5008 }
5009
5010 if (tmp->outreg != NULL) {
5011 /* Must use same register */
5012 static char kludge[2];
5013
5014 kludge[0] = tmp->outreg->name[1];
5015 kludge[1] = 0;
5016 if (strcmp(tmp->outreg->name, "esi")
5017 == 0 ||
5018 strcmp(tmp->outreg->name,"edi")
5019 == 0) {
5020 kludge[0] = toupper(kludge[0]);
5021 }
5022 p = kludge; /* :-( */
5023 } else {
5024 /* XXX */
5025 p = tmp->constraints;
5026 }
5027 }
5028
5029 if (r != NULL){
5030 /* 03/25/08: Wow, this was missing */
5031 io->inreg = r;
5032 }
5033 }
5034 }
5035
5036 /*
5037 * At this point, input registers are setup and marked unallocatable,
5038 * so now the output regs can be assigned. If the first (write-)
5039 * access to an output register comes from a register, the
5040 * destination is that same register. That seems sort of bogus, but
5041 * then so does the idea of using "r" for output at all
5042 */
5043 for (io = stmt->output; io != NULL; io = io->next) {
5044 for (p = io->constraints; *p != 0; ++p) {
5045 if (*p == '=') {
5046 ;
5047 } else if (*p == '+') {
5048 /* Used for both input and output */
5049 if (io->outreg != NULL) {
5050 vreg_faultin(io->outreg, NULL,
5051 io->vreg, il, 0);
5052 }
5053 } else if (*p == 'm') {
5054 /*
5055 * XXX faultin below assumes it can always get
5056 * a register
5057 */
5058 r = vreg_faultin_ptr(io->vreg, il);
5059 if (r != NULL) {
5060 /* Output is done through pointer */
5061 reg_set_unallocatable(r);
5062 assert(io->outreg == NULL);
5063 io->outreg = r;
5064 }
5065 }
5066 }
5067 }
5068
5069 icode_make_asm(stmt, il);
5070
5071 /* Write back output registers (memory operands need no writeback) */
5072 for (io = stmt->output; io != NULL; io = io->next) {
5073 for (p = io->constraints; *p != 0; ++p) {
5074 if (*p == '=') {
5075 continue;
5076 } else if (strchr("qrabcdSD", *p) != NULL) {
5077 vreg_map_preg(io->vreg, io->outreg);
5078 icode_make_store(curfunc, /* XXX ?!*/
5079 io->vreg, io->vreg, il);
5080 free_pregs_vreg(io->vreg, il, 0, 0);
5081 }
5082 }
5083 }
5084
5085 for (io = stmt->input; io != NULL; io = io->next) {
5086 if (io->vreg->pregs[0] != NULL) {
5087 /*eg_set_allocatable(io->vreg->preg);*/
5088 free_pregs_vreg(io->vreg, il, 0, 0);
5089 }
5090 }
5091
5092 for (clob = stmt->clobbered; clob; clob = clob->next) {
5093 if (clob->reg && clob->reg->type == REG_GPR) {
5094 reg_set_allocatable(clob->reg);
5095 }
5096 }
5097 }
5098
/*
 * Allocate the per-declaration VLA metadata block for the given type and
 * emit icode that stores every variable dimension size into it. Returns
 * the metadata stack block, or NULL if a dimension expression failed to
 * translate.
 */
struct stack_block *
vla_decl_to_icode(struct type *ty, struct icode_list *il) {
	struct type_node	*tn;
	int			vlas_done = 0;
	int			total_vla_dims = 0;
	struct stack_block	*sb;

	/*
	 * Counts every type node, not just TN_VARARRAY_OF ones, so the
	 * block may be larger than strictly needed - harmless
	 */
	for (tn = ty->tlist; tn != NULL; tn = tn->next) {
		++total_vla_dims;
	}

	/*
	 * Create block to store the VLA info in:
	 *
	 *   struct vlainfo {
	 *       void *addr;
	 *       unsigned long total_size;
	 *       unsigned long var_dim_sizes[1];
	 *   };
	 *
	 * XXX for now we assume alignof(unsigned long)
	 * = alignof(void *)
	 */
	sb = make_stack_block(0,
		backend->get_ptr_size()
		+ (1 + total_vla_dims) *
		backend->get_sizeof_type(
			make_basic_type(TY_ULONG), NULL));

	/* Link into the current function's list of VLA blocks */
	if (curfunc->vla_head == NULL) {
		curfunc->vla_head = curfunc->vla_tail =
			sb;
	} else {
		curfunc->vla_tail->next = sb;
		curfunc->vla_tail = sb;
	}

	ty->vla_addr = sb;

	vlas_done = 0;
	for (tn = ty->tlist; tn != NULL; tn = tn->next) {
		struct vreg	*vexpr;

		if (tn->type != TN_VARARRAY_OF) {
			continue;
		}
		/*
		 * Variable - execute variable
		 * expression
		 */

		vexpr = expr_to_icode(
#if REMOVE_ARRARG
			tn->variable_arrarg, NULL,
#else
			tn->arrarg, NULL,
#endif
			il, 0, 0, 1);
		if (vexpr == NULL) {
			return NULL;
		}
		/* Dimension sizes are kept as unsigned long */
		vexpr = backend->icode_make_cast(
			vexpr, make_basic_type(TY_ULONG), il);

		/*
		 * Store dimension size in VLA block
		 */
		vreg_faultin(NULL, NULL, vexpr, il, 0);
		icode_make_put_vla_size(vexpr->pregs[0], sb, vlas_done, il);
		/* Remember this dimension's slot index for later lookups */
		tn->vla_block_no = vlas_done;

		++vlas_done;
	}
	return sb;
}
5174
5175 static int
stmt_ends_with_ret(struct statement * stmt)5176 stmt_ends_with_ret(struct statement *stmt) {
5177 if (stmt->type == ST_CTRL
5178 && ((struct control *)stmt->data)->type == TOK_KEY_RETURN) {
5179 return 1;
5180 } else if (stmt->type == ST_COMP) {
5181 struct scope *s = stmt->data;
5182
5183 if (s->code_tail != NULL) {
5184 if (s->code_tail->type == ST_CTRL
5185 && ((struct control *)s->code_tail->data)->type
5186 == TOK_KEY_RETURN) {
5187 return 1;
5188 }
5189 }
5190 }
5191 return 0;
5192 }
5193
5194
5195 /*
5196 * 11/26/07: Moved out of analyze() into separate function, added missing
5197 * return checking
5198 */
5199 void
xlate_func_to_icode(struct function * func)5200 xlate_func_to_icode(struct function *func) {
5201 debug_print_function(func);
5202 curfunc = func;
5203 curscope = func->scope;
5204
5205
5206 func->icode = xlate_to_icode(func->scope->code, 1);
5207
5208 /*
5209 * 11/26/07: Check whether last statement is a return;
5210 * otherwise warn and generate a return. This used to be
5211 * done in the backend
5212 */
5213 if (func->scope->code_tail
5214 && func->scope->code_tail->type == ST_CTRL
5215 && ((struct control *)func->scope->code_tail->data)->type
5216 == TOK_KEY_RETURN) {
5217 ; /* OK there is a return */
5218 } else {
5219 /* No return */
5220 int warn = 1;
5221 int needret = 1;
5222
5223 /*
5224 * 04/09/08: This assumed that code_tail is non-NULL! That
5225 * usually works because we always store a declaration
5226 * statement for __func__. But that could change - don't
5227 * depend on it
5228 */
5229 if (!func_returns_void(func)
5230 && func->scope->code_tail != NULL) {
5231 struct control *ctrl = func->scope->code_tail->data;
5232
5233 if (ctrl->type == TOK_KEY_GOTO) {
5234 warn = 0; /* goto - don't warn */
5235 } else if (ctrl->type == TOK_KEY_IF) {
5236 if (ctrl->next != NULL) {
5237 /*
5238 * Looks like we have an
5239 * if (foo) { } else { }
5240 *
5241 * as last part. If both statement
5242 * bodies end with a return, this
5243 * part of the function is never
5244 * reached
5245 */
5246 if (stmt_ends_with_ret(ctrl->stmt)
5247 && stmt_ends_with_ret(
5248 ctrl->next->stmt)) {
5249 warn = 0;
5250 needret = 0;
5251 }
5252 }
5253 }
5254 if (warn) {
5255 #if 0
5256 /* This warning is too verbose for now since
5257 * the if-else check above does not cover
5258 * stuff like switch statements and exit()/
5259 * abort() calls
5260 */
5261 warningfl(func->proto->tok,
5262 "Falling off non-void function `%s' "
5263 "without a return",
5264 func->proto->dtype->name);
5265 #endif
5266 }
5267 }
5268 if (needret) {
5269 struct icode_instr *ii;
5270
5271 if (func->icode) {
5272 ii = icode_make_ret(NULL);
5273 append_icode_list(func->icode, ii);
5274 }
5275 }
5276 }
5277
5278 /*
5279 * 10/31/07: Added this to make sure that all registers are
5280 * completely thrown away when a function ends. Anything else
5281 * is just nonsense and causes subsequent function definitions
5282 * to perform unnecessary saves of these registers
5283 */
5284 backend->invalidate_gprs(NULL, 0, 0);
5285
5286 /*
5287 * 02/05/08: Added backend function to generate function outro
5288 * if necessary. This currently just marks PIC gprs (ebx on x86)
5289 * allocatable again. It may make sense to combien this with
5290 * emit_func_outro() somehow
5291 */
5292 if (backend->icode_complete_func != NULL) {
5293 backend->icode_complete_func(func, func->icode);
5294 }
5295 }
5296
5297 void
xlate_decl(struct decl * d,struct icode_list * il)5298 xlate_decl(struct decl *d, struct icode_list *il) {
5299 #if 0
5300 d->vreg = vreg_alloc(d, NULL, NULL, NULL);
5301 vreg_set_new_type(d->vreg, d->dtype);
5302 #endif
5303
5304 if (d->init != NULL
5305 && d->dtype->storage != TOK_KEY_STATIC
5306 && d->dtype->storage != TOK_KEY_EXTERN) {
5307 /*icode_make_dbginfo_line(st, il);*/
5308 init_to_icode(d, il);
5309 } else if (IS_VLA(d->dtype->flags)) {
5310 /*
5311 * This is a VLA declaration in some way.
5312 * It may e.g. be a one- or multi-
5313 * dimensional array, or a pointer to such
5314 * a thing
5315 */
5316 struct type_node *tn;
5317 struct stack_block *sb;
5318
5319 sb = vla_decl_to_icode(d->dtype, il);
5320
5321 if ( /*!err*/ sb != NULL) {
5322 for (tn = d->dtype->tlist;
5323 tn != NULL;
5324 tn = tn->next) {
5325 if (d->dtype->tlist->type
5326 == TN_POINTER_TO) {
5327 break;
5328 } else if (d->dtype->tlist->type
5329 == TN_VARARRAY_OF) {
5330 break;
5331 }
5332 }
5333
5334 if (tn->type == TN_VARARRAY_OF) {
5335 /*
5336 * This really is an array, not
5337 * just e.g. a pointer to one -
5338 * allocate storage!
5339 */
5340 struct vreg *size;
5341
5342 size = backend->get_sizeof_vla_type(d->dtype, il);
5343 icode_make_put_vla_whole_size(
5344 size->pregs[0],
5345 sb,
5346 il);
5347 icode_make_alloc_vla(sb, il);
5348 }
5349 }
5350 }
5351 }
5352
/*
 * Translate a statement list to a new icode list. If inv_gprs_first is
 * set (used for the top of a function body), all GPRs are invalidated
 * first and, for PIC builds, the PIC register is initialized.
 *
 * Returns the icode list, or NULL if an expression statement failed to
 * translate. il->res tracks the result vreg of the last value-yielding
 * statement (needed for GNU statement expressions).
 */
struct icode_list *
xlate_to_icode(struct statement *st, int inv_gprs_first) {
	struct icode_list	*il;
	struct icode_list	*il2;

	il = alloc_icode_list();
	if (inv_gprs_first) {
		backend->invalidate_gprs(il, 0, 0);
		/*
		 * XXXXXXXXXXXXXXXXXXXX 02/08/09: The PIC register was
		 * initialized wrongly; Any static variable access which
		 * did this set the ``initialized'' flag. This did not
		 * take into account whether it was a conditional access,
		 * e.g. expr? x: y or if (expr) x; else y;
		 *
		 * To ensure the PIC register is unambiguously loaded
		 * correctly, we now do it at the beginning of every
		 * function.
		 *
		 * XXXXXXXX: Obviously we shouldn't do it for functions
		 * which do not access static variables. Proposed
		 * solution: Where icode_initialize_pic() was called
		 * (prepare_loadstore, etc), set a flag in the function
		 * structure indicating that static variables were
		 * accessed. Then, in the backend, perform initialization
		 * if the flag is set
		 */
		if (backend->need_pic_init && picflag) {
			backend->icode_initialize_pic(curfunc, il);
			curfunc->pic_initialized = 1;
		}
	}

	/* Dispatch on statement kind, appending to il as we go */
	for (; st != NULL; st = st->next) {
		if (st->type == ST_CTRL) {
			/* Control statement (if/while/return/goto/...) */
			struct control	*ctrl = st->data;

			il2 = ctrl_to_icode(ctrl);
			if (il2 != NULL) {
				merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
				free(il2);
#endif
			}

			/*
			 * Save flag added.
			 * 11/26/07: Hmm, this yields nonsense saves for
			 * ``return'' and possibly break/continue?!
			 */
			if (ctrl->type == TOK_KEY_RETURN) {
				backend->invalidate_gprs(il, 0, 0);
			} else {
				backend->invalidate_gprs(il, 1, 0);
			}
			il->res = NULL;
		} else if (st->type == ST_LABEL) {
			/* Label definition - control may enter here */
			struct label	*l = st->data;

			append_icode_list(il, l->instr);

			/* Missed save flag */
			backend->invalidate_gprs(il, 1, 0);
			il->res = NULL;
		} else if (st->type == ST_CODE) {
			/* Expression statement */
			struct expr	*ex = st->data;
			struct vreg	*res;

			if (ex->op == 0 && ex->data == NULL) {
				/* Empty expression */
				;
			} else {
				icode_make_dbginfo_line(st, il);
				/*
				 * 04/12/08: Top-level expression, so the result
				 * is not used - set corresponding flag!
				 */
				if ((res = expr_to_icode(ex, NULL, il, 0, 1, 1))
					== NULL) {
					/* XXX free stuff */
					return NULL;
				}

				il->res = res;
				free_pregs_vreg(res, il, 0, 0);
			}
		} else if (st->type == ST_COMP || st->type == ST_EXPRSTMT) {
			/* Compound statement or GNU statement expression */
			struct scope	*s = st->data;

			curscope = s;
			il2 = xlate_to_icode(s->code, 0);

			if (il2 != NULL) {
				merge_icode_lists(il, il2);
#if ! USE_ZONE_ALLOCATOR
				free(il2);
#endif
			}
			if (st->type != ST_EXPRSTMT) {
				il->res = NULL;
			} else {
				if (il->res && il->res->pregs[0]) {
					/*
					 * There's a free_pregs_vreg()
					 * in xlate_to_icode(). That's
					 * bad because the value may
					 * still be used. So we have to
					 * ensure that the item remains
					 * associated with a register
					 * if it's a scalar type
					 */
					if (is_arithmetic_type(il->res->type)
						|| il->res->type->tlist) {
						if (il->res->pregs[0]->vreg == il->res) {
							vreg_map_preg(il->res, il->res->pregs[0]);
							if (il->res->is_multi_reg_obj) {
								vreg_map_preg(
									il->res, il->res->pregs[1]);
							}
						}
					}
				}
			}
			curscope = s->parent; /* restore scope */
		} else if (st->type == ST_DECL) {
			/* Declaration (possibly with initializer or VLA) */
			struct decl	*d = st->data;

			xlate_decl(d, il);
			if (!d->is_unrequested_decl) {
				/*
				 * 08/09/08: In
				 *
				 *    ({ expr; expr; int foo; })
				 *
				 * ... we correctly set the result to NULL.
				 *
				 * However in
				 *
				 *    struct s foo();
				 *    ({ foo(); expr; })
				 *
				 * ... there will be a declaration appended
				 * after expr. That declaration allocates the
				 * anonymous struct to which the return value
				 * of foo() is assigned. If we have such an
				 * unrequested declaration, we do not want to
				 * consider it a value, and keep the result
				 * vreg of the last precedeing statement!
				 * Hence the surrounding check above now
				 */
				il->res = NULL;
			}
		} else if (st->type == ST_ASM) {
			/* Inline assembly statement */
			asm_to_icode(st->data, il);
			il->res = NULL;
		} else {
			printf("UNKNOWN STATEMENT TYPE\n");
			printf("%d\n", st->type);
			abort();
		}
	}

	return il;
}
5517
5518