1 /* Process expressions for the GNU compiler for the Java(TM) language.
2 Copyright (C) 1996-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>.
19
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc. */
23
24 /* Hacked by Per Bothner <bothner@cygnus.com> February 1996. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "diagnostic-core.h"
33 #include "fold-const.h"
34 #include "stor-layout.h"
35 #include "java-tree.h"
36 #include "java-opcodes.h"
37 #include "jcf.h"
38 #include "parse.h"
39 #include "tree-iterator.h"
40
/* Forward declarations for helpers local to this file.  */

static void flush_quick_stack (void);
static void push_value (tree);
static tree pop_value (tree);
static void java_stack_swap (void);
static void java_stack_dup (int, int);
static void build_java_athrow (tree);
static void build_java_jsr (int, int);
static void build_java_ret (tree);
static void expand_java_multianewarray (tree, int);
static void expand_java_arraystore (tree);
static void expand_java_arrayload (tree);
static void expand_java_array_length (void);
static tree build_java_monitor (tree, tree);
static void expand_java_pushc (int, tree);
static void expand_java_return (tree);
static void expand_load_internal (int, tree, int);
static void expand_java_NEW (tree);
static void expand_java_INSTANCEOF (tree);
static void expand_java_CHECKCAST (tree);
static void expand_iinc (unsigned int, int, int);
static void expand_java_binop (tree, enum tree_code);
static void note_label (int, int);
static void expand_compare (enum tree_code, tree, tree, int);
static void expand_test (enum tree_code, tree, int);
static void expand_cond (enum tree_code, tree, int);
static void expand_java_goto (int);
static tree expand_java_switch (tree, int);
static void expand_java_add_case (tree, int, int);
static vec<tree, va_gc> *pop_arguments (tree);
static void expand_invoke (int, int, int);
static void expand_java_field_op (int, int, int);
static void java_push_constant_from_pool (struct JCF *, int);
static void java_stack_pop (int);
static tree build_java_throw_out_of_bounds_exception (tree);
static tree build_java_check_indexed_type (tree, tree);
static unsigned char peek_opcode_at_pc (struct JCF *, int, int);
static void promote_arguments (void);
static void cache_cpool_data_ref (void);
79
/* Maps a JVM load/store opcode number to the tree type it operates on;
   filled in by init_expr_processing.  Only the load/store opcode slots
   (21-25 and 54-58) are ever set -- TODO confirm no other opcodes index
   this table.  */
static GTY(()) tree operand_type[59];

/* Cached identifiers used when building field references.  */
static GTY(()) tree methods_ident;
static GTY(()) tree ncode_ident;
tree dtable_ident = NULL_TREE;

/* Set to nonzero value in order to emit class initialization code
   before static field references.  */
int always_initialize_class_p = 0;

/* We store the stack state in two places:
   Within a basic block, we use the quick_stack, which is a vec of expression
   nodes.
   This is the top part of the stack;  below that we use find_stack_slot.
   At the end of a basic block, the quick_stack must be flushed
   to the stack slot array (as handled by find_stack_slot).
   Using quick_stack generates better code (especially when
   compiled without optimization), because we do not have to
   explicitly store and load trees to temporary variables.

   If a variable is on the quick stack, it means the value of variable
   when the quick stack was last flushed.  Conceptually, flush_quick_stack
   saves all the quick_stack elements in parallel.  However, that is
   complicated, so it actually saves them (i.e. copies each stack value
   to its home virtual register) from low indexes.  This allows a quick_stack
   element at index i (counting from the bottom of the stack) to reference
   slot virtuals for register that are >= i, but not those that are deeper.
   This convention makes most operations easier.  For example iadd works
   even when the stack contains (reg[0], reg[1]):  It results in the
   stack containing (reg[0]+reg[1]), which is OK.  However, some stack
   operations are more complicated.  For example dup given a stack
   containing (reg[0]) would yield (reg[0], reg[0]), which would violate
   the convention, since stack value 1 would refer to a register with
   lower index (reg[0]), which flush_quick_stack does not safely handle.
   So dup cannot just add an extra element to the quick_stack, but iadd can.
*/

static GTY(()) vec<tree, va_gc> *quick_stack;

/* The physical memory page size used in this computer.  See
   build_field_ref().  */
static GTY(()) tree page_size;

/* The stack pointer of the Java virtual machine.
   This does include the size of the quick_stack.  */

int stack_pointer;

/* Raw LineNumberTable data for the method being expanded.  */
const unsigned char *linenumber_table;
int linenumber_count;

/* Largest pc so far in this method that has been passed to lookup_label. */
int highest_label_pc_this_method = -1;

/* Base value for this method to add to pc to get generated label. */
int start_label_pc_this_method = 0;
136
void
init_expr_processing (void)
{
  /* Indices 21-25 are the JVM iload/lload/fload/dload/aload opcodes,
     and 54-58 the matching istore/lstore/fstore/dstore/astore opcodes;
     record the operand type each one works on.  */
  operand_type[21] = operand_type[54] = int_type_node;
  operand_type[22] = operand_type[55] = long_type_node;
  operand_type[23] = operand_type[56] = float_type_node;
  operand_type[24] = operand_type[57] = double_type_node;
  operand_type[25] = operand_type[58] = ptr_type_node;
}
146
/* Convert EXPR to a tree usable as a boolean truth value.  */

tree
java_truthvalue_conversion (tree expr)
{
  /* It is simpler and generates better code to have only TRUTH_*_EXPR
     or comparison expressions as truth values at this level.

     This function should normally be identity for Java.  */

  switch (TREE_CODE (expr))
    {
    case EQ_EXPR:   case NE_EXPR:   case UNEQ_EXPR: case LTGT_EXPR:
    case LE_EXPR:   case GE_EXPR:   case LT_EXPR:   case GT_EXPR:
    case UNLE_EXPR: case UNGE_EXPR: case UNLT_EXPR: case UNGT_EXPR:
    case ORDERED_EXPR: case UNORDERED_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:
    case ERROR_MARK:
      /* Already a valid truth value (or an error); pass through.  */
      return expr;

    case INTEGER_CST:
      return integer_zerop (expr) ? boolean_false_node : boolean_true_node;

    case REAL_CST:
      return real_zerop (expr) ? boolean_false_node : boolean_true_node;

    /* are these legal? XXX JH */
    case NEGATE_EXPR:
    case ABS_EXPR:
    case FLOAT_EXPR:
      /* These don't change whether an object is nonzero or zero.  */
      return java_truthvalue_conversion (TREE_OPERAND (expr, 0));

    case COND_EXPR:
      /* Distribute the conversion into the arms of a COND_EXPR.  */
      return fold_build3 (COND_EXPR, boolean_type_node, TREE_OPERAND (expr, 0),
			  java_truthvalue_conversion (TREE_OPERAND (expr, 1)),
			  java_truthvalue_conversion (TREE_OPERAND (expr, 2)));

    case NOP_EXPR:
      /* If this is widening the argument, we can ignore it.  */
      if (TYPE_PRECISION (TREE_TYPE (expr))
          >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
        return java_truthvalue_conversion (TREE_OPERAND (expr, 0));
      /* fall through to default */

    default:
      /* Anything else: compare against false explicitly.  */
      return fold_build2 (NE_EXPR, boolean_type_node,
			  expr, boolean_false_node);
    }
}
201
/* Save any stack slots that happen to be in the quick_stack into their
   home virtual register slots.

   The copy order is from low stack index to high, to support the invariant
   that the expression for a slot may contain decls for stack slots with
   higher (or the same) index, but not lower.  */

static void
flush_quick_stack (void)
{
  int stack_index = stack_pointer;
  unsigned ix;
  tree t;

  /* Count the number of slots the quick stack is holding.  */
  for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
    stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));

  /* STACK_INDEX now addresses the deepest quick-stack element; store
     each element to its home slot, working upward.  */
  for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
    {
      tree decl, type = TREE_TYPE (t);

      decl = find_stack_slot (stack_index, type);
      if (decl != t)
	java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (t), decl, t));
      stack_index += 1 + TYPE_IS_WIDE (type);
    }

  /* Everything is now in its home slot; empty the quick stack.  */
  vec_safe_truncate (quick_stack, 0);
}
232
233 /* Push TYPE on the type stack.
234 Return true on success, 0 on overflow. */
235
236 int
push_type_0(tree type)237 push_type_0 (tree type)
238 {
239 int n_words;
240 type = promote_type (type);
241 n_words = 1 + TYPE_IS_WIDE (type);
242 if (stack_pointer + n_words > DECL_MAX_STACK (current_function_decl))
243 return 0;
244 /* Allocate decl for this variable now, so we get a temporary that
245 survives the whole method. */
246 find_stack_slot (stack_pointer, type);
247 stack_type_map[stack_pointer++] = type;
248 n_words--;
249 while (--n_words >= 0)
250 stack_type_map[stack_pointer++] = TYPE_SECOND;
251 return 1;
252 }
253
254 void
push_type(tree type)255 push_type (tree type)
256 {
257 int r = push_type_0 (type);
258 gcc_assert (r);
259 }
260
261 static void
push_value(tree value)262 push_value (tree value)
263 {
264 tree type = TREE_TYPE (value);
265 if (TYPE_PRECISION (type) < 32 && INTEGRAL_TYPE_P (type))
266 {
267 type = promote_type (type);
268 value = convert (type, value);
269 }
270 push_type (type);
271 vec_safe_push (quick_stack, value);
272
273 /* If the value has a side effect, then we need to evaluate it
274 whether or not the result is used. If the value ends up on the
275 quick stack and is then popped, this won't happen -- so we flush
276 the quick stack. It is safest to simply always flush, though,
277 since TREE_SIDE_EFFECTS doesn't capture COMPONENT_REF, and for
278 the latter we may need to strip conversions. */
279 flush_quick_stack ();
280 }
281
/* Pop a type from the type stack.
   TYPE is the expected type.   Return the actual type, which must be
   convertible to TYPE.
   On an error, *MESSAGEP is set to a freshly malloc'd error message.  */

tree
pop_type_0 (tree type, char **messagep)
{
  int n_words;
  tree t;
  *messagep = NULL;
  if (TREE_CODE (type) == RECORD_TYPE)
    type = promote_type (type);
  n_words = 1 + TYPE_IS_WIDE (type);
  if (stack_pointer < n_words)
    {
      *messagep = xstrdup ("stack underflow");
      return type;
    }
  /* The upper word(s) of a wide value must be the TYPE_SECOND filler
     (void_type_node); pop them before reaching the real type word.  */
  while (--n_words > 0)
    {
      if (stack_type_map[--stack_pointer] != void_type_node)
	{
	  *messagep = xstrdup ("Invalid multi-word value on type stack");
	  return type;
	}
    }
  t = stack_type_map[--stack_pointer];
  if (type == NULL_TREE || t == type)
    return t;
  if (TREE_CODE (t) == TREE_LIST)
    {
      /* T is a list of candidate reference types; each one must widen
	 to the expected TYPE.  */
      do
	{
	  tree tt = TREE_PURPOSE (t);
	  if (! can_widen_reference_to (tt, type))
	    {
	      t = tt;
	      goto fail;
	    }
	  t = TREE_CHAIN (t);
	}
      while (t);
      return t;
    }
  /* Any two small (<= 32 bit) integral types are interchangeable here.  */
  if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (t)
      && TYPE_PRECISION (type) <= 32 && TYPE_PRECISION (t) <= 32)
    return t;
  if (TREE_CODE (type) == POINTER_TYPE && TREE_CODE (t) == POINTER_TYPE)
    {
      /* If the expected type we've been passed is object or ptr
	 (i.e. void*), the caller needs to know the real type.  */
      if (type == ptr_type_node || type == object_ptr_type_node)
	return t;

      /* Since the verifier has already run, we know that any
	 types we see will be compatible.  In BC mode, this fact
	 may be checked at runtime, but if that is so then we can
	 assume its truth here as well.  So, we always succeed
	 here, with the expected type.  */
      return type;
    }

  if (! flag_verify_invocations && flag_indirect_dispatch
      && t == object_ptr_type_node)
    {
      if (type != ptr_type_node)
	warning (0, "need to insert runtime check for %s",
		 xstrdup (lang_printable_name (type, 0)));
      return type;
    }

  /* lang_printable_name uses a static buffer, so we must save the result
     from calling it the first time.  */
 fail:
  {
    char *temp = xstrdup (lang_printable_name (type, 0));
    /* If the stack contains a multi-word type, keep popping the stack until 
       the real type is found.  */
    while (t == void_type_node)
      t = stack_type_map[--stack_pointer];
    *messagep = concat ("expected type '", temp,
			"' but stack contains '", lang_printable_name (t, 0),
			"'", NULL);
    free (temp);
  }
  return type;
}
370
371 /* Pop a type from the type stack.
372 TYPE is the expected type. Return the actual type, which must be
373 convertible to TYPE, otherwise call error. */
374
375 tree
pop_type(tree type)376 pop_type (tree type)
377 {
378 char *message = NULL;
379 type = pop_type_0 (type, &message);
380 if (message != NULL)
381 {
382 error ("%s", message);
383 free (message);
384 }
385 return type;
386 }
387
388
389 /* Return true if two type assertions are equal. */
390
391 bool
equal(type_assertion * k1,type_assertion * k2)392 type_assertion_hasher::equal (type_assertion *k1, type_assertion *k2)
393 {
394 return (k1->assertion_code == k2->assertion_code
395 && k1->op1 == k2->op1
396 && k1->op2 == k2->op2);
397 }
398
/* Hash a type assertion.  */

hashval_t
type_assertion_hasher::hash (type_assertion *k_p)
{
  hashval_t hash = iterative_hash (&k_p->assertion_code, sizeof
				   k_p->assertion_code, 0);

  /* Mix in only the operands that a given assertion code actually
     uses: TYPES_COMPATIBLE uses OP1 and OP2, IS_INSTANTIABLE only
     OP1, END_OF_TABLE neither -- hence the deliberate fall-throughs.  */
  switch (k_p->assertion_code)
    {
    case JV_ASSERT_TYPES_COMPATIBLE:
      hash = iterative_hash (&TYPE_UID (k_p->op2), sizeof TYPE_UID (k_p->op2),
			     hash);
      /* Fall through.  */

    case JV_ASSERT_IS_INSTANTIABLE:
      hash = iterative_hash (&TYPE_UID (k_p->op1), sizeof TYPE_UID (k_p->op1),
			     hash);
      /* Fall through.  */

    case JV_ASSERT_END_OF_TABLE:
      break;

    default:
      gcc_unreachable ();
    }

  return hash;
}
428
429 /* Add an entry to the type assertion table for the given class.
430 KLASS is the class for which this assertion will be evaluated by the
431 runtime during loading/initialization.
432 ASSERTION_CODE is the 'opcode' or type of this assertion: see java-tree.h.
433 OP1 and OP2 are the operands. The tree type of these arguments may be
434 specific to each assertion_code. */
435
436 void
add_type_assertion(tree klass,int assertion_code,tree op1,tree op2)437 add_type_assertion (tree klass, int assertion_code, tree op1, tree op2)
438 {
439 hash_table<type_assertion_hasher> *assertions_htab;
440 type_assertion as;
441 type_assertion **as_pp;
442
443 assertions_htab = TYPE_ASSERTIONS (klass);
444 if (assertions_htab == NULL)
445 {
446 assertions_htab = hash_table<type_assertion_hasher>::create_ggc (7);
447 TYPE_ASSERTIONS (current_class) = assertions_htab;
448 }
449
450 as.assertion_code = assertion_code;
451 as.op1 = op1;
452 as.op2 = op2;
453
454 as_pp = assertions_htab->find_slot (&as, INSERT);
455
456 /* Don't add the same assertion twice. */
457 if (*as_pp)
458 return;
459
460 *as_pp = ggc_alloc<type_assertion> ();
461 **as_pp = as;
462 }
463
464
/* Return 1 if SOURCE_TYPE can be safely widened to TARGET_TYPE.
   Handles array types and interfaces.  */

int
can_widen_reference_to (tree source_type, tree target_type)
{
  /* Anything widens to Object, and an untyped pointer widens to
     anything.  */
  if (source_type == ptr_type_node || target_type == object_ptr_type_node)
    return 1;

  /* Get rid of pointers  */
  if (TREE_CODE (source_type) == POINTER_TYPE)
    source_type = TREE_TYPE (source_type);
  if (TREE_CODE (target_type) == POINTER_TYPE)
    target_type = TREE_TYPE (target_type);

  if (source_type == target_type)
    return 1;

  /* FIXME: This is very pessimistic, in that it checks everything,
     even if we already know that the types are compatible.  If we're
     to support full Java class loader semantics, we need this.
     However, we could do something more optimal.  */
  if (! flag_verify_invocations)
    {
      add_type_assertion (current_class, JV_ASSERT_TYPES_COMPATIBLE,
			  source_type, target_type);

      if (!quiet_flag)
       warning (0, "assert: %s is assign compatible with %s",
		xstrdup (lang_printable_name (target_type, 0)),
		xstrdup (lang_printable_name (source_type, 0)));
      /* Punt everything to runtime.  */
      return 1;
    }

  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
    {
      return 1;
    }
  else
    {
      if (TYPE_ARRAY_P (source_type) || TYPE_ARRAY_P (target_type))
	{
	  HOST_WIDE_INT source_length, target_length;
	  if (TYPE_ARRAY_P (source_type) != TYPE_ARRAY_P (target_type))
	    {
	      /* An array implements Cloneable and Serializable.  */
	      tree name = DECL_NAME (TYPE_NAME (target_type));
	      return (name == java_lang_cloneable_identifier_node
		      || name == java_io_serializable_identifier_node);
	    }
	  /* Both are arrays: lengths (when known) must match, then
	     recurse on the element types.  */
	  target_length = java_array_type_length (target_type);
	  if (target_length >= 0)
	    {
	      source_length = java_array_type_length (source_type);
	      if (source_length != target_length)
		return 0;
	    }
	  source_type = TYPE_ARRAY_ELEMENT (source_type);
	  target_type = TYPE_ARRAY_ELEMENT (target_type);
	  if (source_type == target_type)
	    return 1;
	  if (TREE_CODE (source_type) != POINTER_TYPE
	      || TREE_CODE (target_type) != POINTER_TYPE)
	    return 0;
	  return can_widen_reference_to (source_type, target_type);
	}
      else
	{
	  int source_depth = class_depth (source_type);
	  int target_depth = class_depth (target_type);

	  if (TYPE_DUMMY (source_type) || TYPE_DUMMY (target_type))
	    {
	      if (! quiet_flag)
		warning (0, "assert: %s is assign compatible with %s",
			 xstrdup (lang_printable_name (target_type, 0)),
			 xstrdup (lang_printable_name (source_type, 0)));
	      return 1;
	    }

 	  /* class_depth can return a negative depth if an error occurred */
	  if (source_depth < 0 || target_depth < 0)
	    return 0;

	  if (CLASS_INTERFACE (TYPE_NAME (target_type)))
	    {
	      /* target_type is OK if source_type or source_type ancestors
		 implement target_type.  We handle multiple sub-interfaces  */
	      tree binfo, base_binfo;
	      int i;

	      for (binfo = TYPE_BINFO (source_type), i = 0;
		   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
		if (can_widen_reference_to
		    (BINFO_TYPE (base_binfo), target_type))
		  return 1;
	      
	      if (!i)
		return 0;
	    }

	  /* Walk up SOURCE_TYPE's superclass chain until the depths
	     match; a pointer comparison then decides.  */
	  for ( ; source_depth > target_depth;  source_depth--) 
	    {
	      source_type
		= BINFO_TYPE (BINFO_BASE_BINFO (TYPE_BINFO (source_type), 0));
	    }
	  return source_type == target_type;
	}
    }
}
576
577 static tree
pop_value(tree type)578 pop_value (tree type)
579 {
580 type = pop_type (type);
581 if (vec_safe_length (quick_stack) != 0)
582 return quick_stack->pop ();
583 else
584 return find_stack_slot (stack_pointer, promote_type (type));
585 }
586
587
/* Pop and discard the top COUNT stack slots.  */

static void
java_stack_pop (int count)
{
  while (count > 0)
    {
      tree type;

      gcc_assert (stack_pointer != 0);

      type = stack_type_map[stack_pointer - 1];
      if (type == TYPE_SECOND)
	{
	  /* The top slot is the upper half of a two-word value; the
	     whole value must be discarded as a unit, so COUNT must
	     cover both words.  */
	  count--;
	  gcc_assert (stack_pointer != 1 && count > 0);

	  type = stack_type_map[stack_pointer - 2];
	}
      pop_value (type);
      count--;
    }
}
611
/* Implement the 'swap' operator (to swap two top stack slots).  */

static void
java_stack_swap (void)
{
  tree type1, type2;
  tree temp;
  tree decl1, decl2;

  /* Both of the top two slots must hold single-word, non-wide values;
     anything else cannot legally be swapped.  */
  if (stack_pointer < 2
      || (type1 = stack_type_map[stack_pointer - 1]) == TYPE_SECOND
      || (type2 = stack_type_map[stack_pointer - 2]) == TYPE_SECOND
      || TYPE_IS_WIDE (type1) || TYPE_IS_WIDE (type2))
    /* Bad stack swap.  */
    abort ();

  /* Exchange the two home slots through a fresh temporary, then swap
     the recorded types to match.  */
  flush_quick_stack ();
  decl1 = find_stack_slot (stack_pointer - 1, type1);
  decl2 = find_stack_slot (stack_pointer - 2, type2);
  temp = build_decl (input_location, VAR_DECL, NULL_TREE, type1);
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, type1, temp, decl1));
  java_add_stmt (build2 (MODIFY_EXPR, type2, 
			 find_stack_slot (stack_pointer - 1, type2),
			 decl2));
  java_add_stmt (build2 (MODIFY_EXPR, type1,
			 find_stack_slot (stack_pointer - 2, type1),
			 temp));
  stack_type_map[stack_pointer - 1] = type2;
  stack_type_map[stack_pointer - 2] = type1;
}
644
645 static void
java_stack_dup(int size,int offset)646 java_stack_dup (int size, int offset)
647 {
648 int low_index = stack_pointer - size - offset;
649 int dst_index;
650 if (low_index < 0)
651 error ("stack underflow - dup* operation");
652
653 flush_quick_stack ();
654
655 stack_pointer += size;
656 dst_index = stack_pointer;
657
658 for (dst_index = stack_pointer; --dst_index >= low_index; )
659 {
660 tree type;
661 int src_index = dst_index - size;
662 if (src_index < low_index)
663 src_index = dst_index + size + offset;
664 type = stack_type_map [src_index];
665 if (type == TYPE_SECOND)
666 {
667 /* Dup operation splits 64-bit number. */
668 gcc_assert (src_index > low_index);
669
670 stack_type_map[dst_index] = type;
671 src_index--; dst_index--;
672 type = stack_type_map[src_index];
673 gcc_assert (TYPE_IS_WIDE (type));
674 }
675 else
676 gcc_assert (! TYPE_IS_WIDE (type));
677
678 if (src_index != dst_index)
679 {
680 tree src_decl = find_stack_slot (src_index, type);
681 tree dst_decl = find_stack_slot (dst_index, type);
682
683 java_add_stmt
684 (build2 (MODIFY_EXPR, TREE_TYPE (dst_decl), dst_decl, src_decl));
685 stack_type_map[dst_index] = type;
686 }
687 }
688 }
689
690 /* Calls _Jv_Throw or _Jv_Sjlj_Throw. Discard the contents of the
691 value stack. */
692
693 static void
build_java_athrow(tree node)694 build_java_athrow (tree node)
695 {
696 tree call;
697
698 call = build_call_nary (void_type_node,
699 build_address_of (throw_node),
700 1, node);
701 TREE_SIDE_EFFECTS (call) = 1;
702 java_add_stmt (call);
703 java_stack_pop (stack_pointer);
704 }
705
706 /* Implementation for jsr/ret */
707
708 static void
build_java_jsr(int target_pc,int return_pc)709 build_java_jsr (int target_pc, int return_pc)
710 {
711 tree where = lookup_label (target_pc);
712 tree ret = lookup_label (return_pc);
713 tree ret_label = fold_build1 (ADDR_EXPR, return_address_type_node, ret);
714 push_value (ret_label);
715 flush_quick_stack ();
716 java_add_stmt (build1 (GOTO_EXPR, void_type_node, where));
717
718 /* Do not need to emit the label here. We noted the existence of the
719 label as a jump target in note_instructions; we'll emit the label
720 for real at the beginning of the expand_byte_code loop. */
721 }
722
723 static void
build_java_ret(tree location)724 build_java_ret (tree location)
725 {
726 java_add_stmt (build1 (GOTO_EXPR, void_type_node, location));
727 }
728
729 /* Implementation of operations on array: new, load, store, length */
730
731 tree
decode_newarray_type(int atype)732 decode_newarray_type (int atype)
733 {
734 switch (atype)
735 {
736 case 4: return boolean_type_node;
737 case 5: return char_type_node;
738 case 6: return float_type_node;
739 case 7: return double_type_node;
740 case 8: return byte_type_node;
741 case 9: return short_type_node;
742 case 10: return int_type_node;
743 case 11: return long_type_node;
744 default: return NULL_TREE;
745 }
746 }
747
748 /* Map primitive type to the code used by OPCODE_newarray. */
749
750 int
encode_newarray_type(tree type)751 encode_newarray_type (tree type)
752 {
753 if (type == boolean_type_node)
754 return 4;
755 else if (type == char_type_node)
756 return 5;
757 else if (type == float_type_node)
758 return 6;
759 else if (type == double_type_node)
760 return 7;
761 else if (type == byte_type_node)
762 return 8;
763 else if (type == short_type_node)
764 return 9;
765 else if (type == int_type_node)
766 return 10;
767 else if (type == long_type_node)
768 return 11;
769 else
770 gcc_unreachable ();
771 }
772
773 /* Build a call to _Jv_ThrowBadArrayIndex(), the
774 ArrayIndexOfBoundsException exception handler. */
775
776 static tree
build_java_throw_out_of_bounds_exception(tree index)777 build_java_throw_out_of_bounds_exception (tree index)
778 {
779 tree node;
780
781 /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
782 has void return type. We cannot just set the type of the CALL_EXPR below
783 to int_type_node because we would lose it during gimplification. */
784 gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
785 node = build_call_nary (void_type_node,
786 build_address_of (soft_badarrayindex_node),
787 1, index);
788 TREE_SIDE_EFFECTS (node) = 1;
789
790 node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
791 TREE_SIDE_EFFECTS (node) = 1; /* Allows expansion within ANDIF */
792
793 return (node);
794 }
795
/* Return the length of an array. Doesn't perform any checking on the nature
   or value of the array NODE. May be used to implement some bytecodes.  */

tree
build_java_array_length_access (tree node)
{
  tree type = TREE_TYPE (node);
  tree array_type = TREE_TYPE (type);
  HOST_WIDE_INT length;

  if (!is_array_type_p (type))
    {
      /* With the new verifier, we will see an ordinary pointer type
	 here.  In this case, we just use an arbitrary array type.  */
      array_type = build_java_array_type (object_ptr_type_node, -1);
      type = promote_type (array_type);
    }

  /* If the length is known at compile time, fold it to a constant.  */
  length = java_array_type_length (type);
  if (length >= 0)
    return build_int_cst (NULL_TREE, length);

  /* Otherwise read the 'length' field of the array object, optionally
     null-checking the reference first.  */
  node = build3 (COMPONENT_REF, int_type_node,
		 build_java_indirect_ref (array_type, node,
					  flag_check_references),
		 lookup_field (&array_type, get_identifier ("length")),
		 NULL_TREE);
  IS_ARRAY_LENGTH_ACCESS (node) = 1;
  return node;
}
826
827 /* Optionally checks a reference against the NULL pointer. ARG1: the
828 expr, ARG2: we should check the reference. Don't generate extra
829 checks if we're not generating code. */
830
831 tree
java_check_reference(tree expr,int check)832 java_check_reference (tree expr, int check)
833 {
834 if (!flag_syntax_only && check)
835 {
836 expr = save_expr (expr);
837 expr = build3 (COND_EXPR, TREE_TYPE (expr),
838 build2 (EQ_EXPR, boolean_type_node,
839 expr, null_pointer_node),
840 build_call_nary (void_type_node,
841 build_address_of (soft_nullpointer_node),
842 0),
843 expr);
844 }
845
846 return expr;
847 }
848
849 /* Reference an object: just like an INDIRECT_REF, but with checking. */
850
851 tree
build_java_indirect_ref(tree type,tree expr,int check)852 build_java_indirect_ref (tree type, tree expr, int check)
853 {
854 tree t;
855 t = java_check_reference (expr, check);
856 t = convert (build_pointer_type (type), t);
857 return build1 (INDIRECT_REF, type, t);
858 }
859
/* Implement array indexing (either as l-value or r-value).
   Returns a tree for ARRAY[INDEX], assume TYPE is the element type.
   Optionally performs bounds checking and/or test to NULL.
   At this point, ARRAY should have been verified as an array.  */

tree
build_java_arrayaccess (tree array, tree type, tree index)
{
  tree node, throw_expr = NULL_TREE;
  tree data_field;
  tree ref;
  tree array_type = TREE_TYPE (TREE_TYPE (array));
  tree size_exp = fold_convert (sizetype, size_in_bytes (type));

  if (!is_array_type_p (TREE_TYPE (array)))
    {
      /* With the new verifier, we will see an ordinary pointer type
	 here.  In this case, we just use the correct array type.  */
      array_type = build_java_array_type (type, -1);
    }

  if (flag_bounds_check)
    {
      /* Generate:
       * (unsigned jint) INDEX >= (unsigned jint) LEN
       *    && throw ArrayIndexOutOfBoundsException.
       * Note this is equivalent to and more efficient than:
       * INDEX < 0 || INDEX >= LEN && throw ... */
      tree test;
      tree len = convert (unsigned_int_type_node,
			  build_java_array_length_access (array));
      test = fold_build2 (GE_EXPR, boolean_type_node, 
			  convert (unsigned_int_type_node, index),
			  len);
      /* A test that folds to constant false needs no throw code.  */
      if (! integer_zerop (test))
	{
	  throw_expr
	    = build2 (TRUTH_ANDIF_EXPR, int_type_node, test,
		      build_java_throw_out_of_bounds_exception (index));
	  /* allows expansion within COMPOUND */
	  TREE_SIDE_EFFECTS( throw_expr ) = 1;
	}
    }

  /* If checking bounds, wrap the index expr with a COMPOUND_EXPR in order
     to have the bounds check evaluated first. */
  if (throw_expr != NULL_TREE)
    index = build2 (COMPOUND_EXPR, int_type_node, throw_expr, index);

  data_field = lookup_field (&array_type, get_identifier ("data"));

  ref = build3 (COMPONENT_REF, TREE_TYPE (data_field),    
		build_java_indirect_ref (array_type, array, 
					 flag_check_references),
		data_field, NULL_TREE);

  /* Take the address of the data field and convert it to a pointer to
     the element type.  */
  node = build1 (NOP_EXPR, build_pointer_type (type), build_address_of (ref));

  /* Multiply the index by the size of an element to obtain a byte
     offset.  Convert the result to a pointer to the element type.  */
  index = build2 (MULT_EXPR, sizetype, 
		  fold_convert (sizetype, index), 
		  size_exp);

  /* Sum the byte offset and the address of the data field.  */
  node = fold_build_pointer_plus (node, index);

  /* Finally, return

    *((&array->data) + index*size_exp)

  */
  return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (node)), node);
}
936
/* Generate code to throw an ArrayStoreException if OBJECT is not assignable
   (at runtime) to an element of ARRAY.  A NOP_EXPR is returned if it can
   determine that no check is required.  */

tree
build_java_arraystore_check (tree array, tree object)
{
  tree check, element_type, source;
  tree array_type_p = TREE_TYPE (array);
  tree object_type = TYPE_NAME (TREE_TYPE (TREE_TYPE (object)));

  if (! flag_verify_invocations)
    {
      /* With the new verifier, we don't track precise types.  FIXME:
	 performance regression here.  */
      element_type = TYPE_NAME (object_type_node);
    }
  else
    {
      gcc_assert (is_array_type_p (array_type_p));

      /* Get the TYPE_DECL for ARRAY's element type. */
      element_type
	= TYPE_NAME (TREE_TYPE (TREE_TYPE (TREE_TYPE (array_type_p))));
    }

  gcc_assert (TREE_CODE (element_type) == TYPE_DECL
	      && TREE_CODE (object_type) == TYPE_DECL);

  if (!flag_store_check)
    return build1 (NOP_EXPR, array_type_p, array);

  /* No check is needed if the element type is final.  Also check that
     element_type matches object_type, since in the bytecode
     compilation case element_type may be the actual element type of
     the array rather than its declared type.  However, if we're doing
     indirect dispatch, we can't do the `final' optimization.  */
  if (element_type == object_type
      && ! flag_indirect_dispatch
      && CLASS_FINAL (element_type))
    return build1 (NOP_EXPR, array_type_p, array);
  
  /* OBJECT might be wrapped by a SAVE_EXPR. */
  if (TREE_CODE (object) == SAVE_EXPR)
    source = TREE_OPERAND (object, 0);
  else
    source = object;
  
  /* Avoid the check if OBJECT was just loaded from the same array. */
  if (TREE_CODE (source) == ARRAY_REF)
    {
      tree target;
      source = TREE_OPERAND (source, 0); /* COMPONENT_REF. */
      source = TREE_OPERAND (source, 0); /* INDIRECT_REF. */
      source = TREE_OPERAND (source, 0); /* Source array's DECL or SAVE_EXPR. */
      if (TREE_CODE (source) == SAVE_EXPR)
	source = TREE_OPERAND (source, 0);
      
      target = array;
      if (TREE_CODE (target) == SAVE_EXPR)
	target = TREE_OPERAND (target, 0);
      
      /* Same underlying array object: the element was loaded from
	 ARRAY, so storing it back cannot violate the element type.  */
      if (source == target)
        return build1 (NOP_EXPR, array_type_p, array);
    }

  /* Build an invocation of _Jv_CheckArrayStore */
  check = build_call_nary (void_type_node,
			   build_address_of (soft_checkarraystore_node),
			   2, array, object);
  TREE_SIDE_EFFECTS (check) = 1;

  return check;
}
1011
/* Makes sure that INDEXED_TYPE is appropriate.  If not, make it from
   ARRAY_NODE.  This function is used to retrieve something less vague than
   a pointer type when indexing the first dimension of something like [[<t>.
   May return a corrected type, if necessary, otherwise INDEXED_TYPE is
   returned unchanged.  */
1017
1018 static tree
build_java_check_indexed_type(tree array_node ATTRIBUTE_UNUSED,tree indexed_type)1019 build_java_check_indexed_type (tree array_node ATTRIBUTE_UNUSED,
1020 tree indexed_type)
1021 {
1022 /* We used to check to see if ARRAY_NODE really had array type.
1023 However, with the new verifier, this is not necessary, as we know
1024 that the object will be an array of the appropriate type. */
1025
1026 return indexed_type;
1027 }
1028
1029 /* newarray triggers a call to _Jv_NewPrimArray. This function should be
1030 called with an integer code (the type of array to create), and the length
1031 of the array to create. */
1032
1033 tree
build_newarray(int atype_value,tree length)1034 build_newarray (int atype_value, tree length)
1035 {
1036 tree type_arg;
1037
1038 tree prim_type = decode_newarray_type (atype_value);
1039 tree type
1040 = build_java_array_type (prim_type,
1041 tree_fits_shwi_p (length)
1042 ? tree_to_shwi (length) : -1);
1043
1044 /* Pass a reference to the primitive type class and save the runtime
1045 some work. */
1046 type_arg = build_class_ref (prim_type);
1047
1048 return build_call_nary (promote_type (type),
1049 build_address_of (soft_newarray_node),
1050 2, type_arg, length);
1051 }
1052
1053 /* Generates anewarray from a given CLASS_TYPE. Gets from the stack the size
1054 of the dimension. */
1055
1056 tree
build_anewarray(tree class_type,tree length)1057 build_anewarray (tree class_type, tree length)
1058 {
1059 tree type
1060 = build_java_array_type (class_type,
1061 tree_fits_shwi_p (length)
1062 ? tree_to_shwi (length) : -1);
1063
1064 return build_call_nary (promote_type (type),
1065 build_address_of (soft_anewarray_node),
1066 3,
1067 length,
1068 build_class_ref (class_type),
1069 null_pointer_node);
1070 }
1071
/* Return a node that evaluates 'new TYPE[LENGTH]'.  */
1073
1074 tree
build_new_array(tree type,tree length)1075 build_new_array (tree type, tree length)
1076 {
1077 if (JPRIMITIVE_TYPE_P (type))
1078 return build_newarray (encode_newarray_type (type), length);
1079 else
1080 return build_anewarray (TREE_TYPE (type), length);
1081 }
1082
1083 /* Generates a call to _Jv_NewMultiArray. multianewarray expects a
1084 class pointer, a number of dimensions and the matching number of
1085 dimensions. The argument list is NULL terminated. */
1086
static void
expand_java_multianewarray (tree class_type, int ndim)
{
  int i;
  vec<tree, va_gc> *args = NULL;

  /* Room for the class reference, the dimension count, NDIM dimension
     sizes and the terminating NULL: 3 + ndim slots.  */
  vec_safe_grow (args, 3 + ndim);

  (*args)[0] = build_class_ref (class_type);
  (*args)[1] = build_int_cst (NULL_TREE, ndim);

  /* The dimension sizes are on the operand stack with the last
     dimension on top; pop them in reverse so (*args)[2] holds the
     outermost dimension.  */
  for (i = ndim - 1; i >= 0; i--)
    (*args)[(unsigned)(2 + i)] = pop_value (int_type_node);

  /* The runtime argument list is NULL terminated.  */
  (*args)[2 + ndim] = null_pointer_node;

  push_value (build_call_vec (promote_type (class_type),
			      build_address_of (soft_multianewarray_node),
			      args));
}
1107
/* ARRAY[INDEX] <- RHS.  build_java_check_indexed_type makes sure that
   ARRAY is an array type.  May expand some bounds checking and NULL
   pointer checking.  RHS_TYPE_NODE is the type of the value we are
   going to store.  In the case of CHAR/BYTE/BOOLEAN/SHORT, the type
   popped off the stack is an INT.  In those cases, we make the
   conversion.

   If ARRAY is a reference type, the assignment is checked at run-time
   to make sure that the RHS can be assigned to the array element
   type.  It is not necessary to generate this code if ARRAY is final.  */
1117
static void
expand_java_arraystore (tree rhs_type_node)
{
  /* Sub-int integral values live on the JVM stack as int, so pop them
     with int type and let the final store narrow them.  */
  tree rhs_node = pop_value ((INTEGRAL_TYPE_P (rhs_type_node)
			      && TYPE_PRECISION (rhs_type_node) <= 32) ?
			     int_type_node : rhs_type_node);
  tree index = pop_value (int_type_node);
  tree array_type, array, temp, access;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      rhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (rhs_type_node, -1);

  array = pop_value (array_type);
  array = build1 (NOP_EXPR, promote_type (array_type), array);

  rhs_type_node = build_java_check_indexed_type (array, rhs_type_node);

  flush_quick_stack ();

  /* INDEX and ARRAY are each used more than once below; evaluate them
     only once.  */
  index = save_expr (index);
  array = save_expr (array);

  /* We want to perform the bounds check (done by
     build_java_arrayaccess) before the type check (done by
     build_java_arraystore_check).  So, we call build_java_arrayaccess
     -- which returns an ARRAY_REF lvalue -- and we then generate code
     to stash the address of that lvalue in a temp.  Then we call
     build_java_arraystore_check, and finally we generate a
     MODIFY_EXPR to set the array element.  */

  access = build_java_arrayaccess (array, rhs_type_node, index);
  temp = build_decl (input_location, VAR_DECL, NULL_TREE,
		     build_pointer_type (TREE_TYPE (access)));
  java_add_local_var (temp);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (temp),
			 temp,
			 build_fold_addr_expr (access)));

  /* Reference-typed stores additionally need the runtime
     assignability check (_Jv_CheckArrayStore).  */
  if (TREE_CODE (rhs_type_node) == POINTER_TYPE)
    {
      tree check = build_java_arraystore_check (array, rhs_node);
      java_add_stmt (check);
    }

  /* *temp = rhs;  */
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (access),
			 build1 (INDIRECT_REF, TREE_TYPE (access), temp),
			 rhs_node));
}
1173
1174 /* Expand the evaluation of ARRAY[INDEX]. build_java_check_indexed_type makes
1175 sure that LHS is an array type. May expand some bound checking and NULL
1176 pointer checking.
1177 LHS_TYPE_NODE is the type of ARRAY[INDEX]. But in the case of CHAR/BYTE/
1178 BOOLEAN/SHORT, we push a promoted type back to the stack.
1179 */
1180
static void
expand_java_arrayload (tree lhs_type_node)
{
  tree load_node;
  tree index_node = pop_value (int_type_node);
  tree array_type;
  tree array_node;

  /* If we're processing an `aaload' we might as well just pick
     `Object'.  */
  if (TREE_CODE (lhs_type_node) == POINTER_TYPE)
    {
      array_type = build_java_array_type (object_ptr_type_node, -1);
      lhs_type_node = object_ptr_type_node;
    }
  else
    array_type = build_java_array_type (lhs_type_node, -1);
  array_node = pop_value (array_type);
  array_node = build1 (NOP_EXPR, promote_type (array_type), array_node);

  /* Both operands are referenced more than once by the access below;
     evaluate them only once.  */
  index_node = save_expr (index_node);
  array_node = save_expr (array_node);

  lhs_type_node = build_java_check_indexed_type (array_node,
						 lhs_type_node);
  load_node = build_java_arrayaccess (array_node,
				      lhs_type_node,
				      index_node);
  /* Sub-int integral loads are pushed back as int, matching the JVM
     stack representation of CHAR/BYTE/BOOLEAN/SHORT.  */
  if (INTEGRAL_TYPE_P (lhs_type_node) && TYPE_PRECISION (lhs_type_node) <= 32)
    load_node = fold_build1 (NOP_EXPR, int_type_node, load_node);
  push_value (load_node);
}
1213
/* Expands .length.  Makes sure that we deal with an array and may expand
   a NULL check on the array object.  */
1216
1217 static void
expand_java_array_length(void)1218 expand_java_array_length (void)
1219 {
1220 tree array = pop_value (ptr_type_node);
1221 tree length = build_java_array_length_access (array);
1222
1223 push_value (length);
1224 }
1225
1226 /* Emit code for the call to _Jv_Monitor{Enter,Exit}. CALL can be
1227 either soft_monitorenter_node or soft_monitorexit_node. */
1228
1229 static tree
build_java_monitor(tree call,tree object)1230 build_java_monitor (tree call, tree object)
1231 {
1232 return build_call_nary (void_type_node,
1233 build_address_of (call),
1234 1, object);
1235 }
1236
1237 /* Emit code for one of the PUSHC instructions. */
1238
static void
expand_java_pushc (int ival, tree type)
{
  tree value;
  /* aconst_null: an IVAL of 0 with pointer type pushes `null'.  */
  if (type == ptr_type_node && ival == 0)
    value = null_pointer_node;
  /* iconst_*, lconst_*, bipush, sipush: plain integer constants.  */
  else if (type == int_type_node || type == long_type_node)
    value = build_int_cst (type, ival);
  /* fconst_*, dconst_*: convert the small integer to a real value.  */
  else if (type == float_type_node || type == double_type_node)
    {
      REAL_VALUE_TYPE x;
      real_from_integer (&x, TYPE_MODE (type), ival, SIGNED);
      value = build_real (type, x);
    }
  else
    gcc_unreachable ();

  push_value (value);
}
1258
/* Expand a `return' of TYPE from the current method: pop the return
   value (unless TYPE is void), assign it to DECL_RESULT and emit a
   RETURN_EXPR.  */

static void
expand_java_return (tree type)
{
  if (type == void_type_node)
    java_add_stmt (build1 (RETURN_EXPR, void_type_node, NULL));
  else
    {
      tree retval = pop_value (type);
      tree res = DECL_RESULT (current_function_decl);
      retval = build2 (MODIFY_EXPR, TREE_TYPE (res), res, retval);

      /* Handle the situation where the native integer type is smaller
	 than the JVM integer.  It can happen for many cross compilers.
	 The whole if expression just goes away if INT_TYPE_SIZE < 32
	 is false.  */
      if (INT_TYPE_SIZE < 32
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (res)))
	      < GET_MODE_SIZE (TYPE_MODE (type))))
	retval = build1 (NOP_EXPR, TREE_TYPE (res), retval);

      TREE_SIDE_EFFECTS (retval) = 1;
      java_add_stmt (build1 (RETURN_EXPR, void_type_node, retval));
    }
}
1283
/* Push the local variable or parameter at slot INDEX, of type TYPE,
   valid at bytecode position PC, onto the quick stack.  */

static void
expand_load_internal (int index, tree type, int pc)
{
  tree copy;
  tree var = find_local_variable (index, type, pc);

  /* Now VAR is the VAR_DECL (or PARM_DECL) that we are going to push
     on the stack.  If there is an assignment to this VAR_DECL between
     the stack push and the use, then the wrong code could be
     generated.  To avoid this we create a new local and copy our
     value into it.  Then we push this new local on the stack.
     Hopefully this all gets optimized out.  */
  copy = build_decl (input_location, VAR_DECL, NULL_TREE, type);
  if ((INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
      && TREE_TYPE (copy) != TREE_TYPE (var))
    var = convert (type, var);
  java_add_local_var (copy);
  java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (var), copy, var));

  push_value (copy);
}
1305
1306 tree
build_address_of(tree value)1307 build_address_of (tree value)
1308 {
1309 return build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (value)), value);
1310 }
1311
1312 bool
class_has_finalize_method(tree type)1313 class_has_finalize_method (tree type)
1314 {
1315 tree super = CLASSTYPE_SUPER (type);
1316
1317 if (super == NULL_TREE)
1318 return false; /* Every class with a real finalizer inherits */
1319 /* from java.lang.Object. */
1320 else
1321 return HAS_FINALIZER_P (type) || class_has_finalize_method (super);
1322 }
1323
1324 tree
java_create_object(tree type)1325 java_create_object (tree type)
1326 {
1327 tree alloc_node = (class_has_finalize_method (type)
1328 ? alloc_object_node
1329 : alloc_no_finalizer_node);
1330
1331 return build_call_nary (promote_type (type),
1332 build_address_of (alloc_node),
1333 1, build_class_ref (type));
1334 }
1335
1336 static void
expand_java_NEW(tree type)1337 expand_java_NEW (tree type)
1338 {
1339 tree alloc_node;
1340
1341 alloc_node = (class_has_finalize_method (type) ? alloc_object_node
1342 : alloc_no_finalizer_node);
1343 if (! CLASS_LOADED_P (type))
1344 load_class (type, 1);
1345 safe_layout_class (type);
1346 push_value (build_call_nary (promote_type (type),
1347 build_address_of (alloc_node),
1348 1, build_class_ref (type)));
1349 }
1350
1351 /* This returns an expression which will extract the class of an
1352 object. */
1353
tree
build_get_class (tree value)
{
  /* FIELD_DECLs for `class' in the vtable (struct dtable) and
     `vtable' in java.lang.Object.  */
  tree class_field = lookup_field (&dtable_type, get_identifier ("class"));
  tree vtable_field = lookup_field (&object_type_node,
				    get_identifier ("vtable"));
  /* value->vtable, with an optional null-reference check.  */
  tree tmp = build3 (COMPONENT_REF, dtable_ptr_type,
		     build_java_indirect_ref (object_type_node, value,
					      flag_check_references),
		     vtable_field, NULL_TREE);
  /* value->vtable->class.  */
  return build3 (COMPONENT_REF, class_ptr_type,
		 build1 (INDIRECT_REF, dtable_type, tmp),
		 class_field, NULL_TREE);
}
1368
1369 /* This builds the tree representation of the `instanceof' operator.
1370 It tries various tricks to optimize this in cases where types are
1371 known. */
1372
tree
build_instanceof (tree value, tree type)
{
  tree expr;
  /* Result type of the runtime helper; the expression we build has
     the same type regardless of which branch is taken.  */
  tree itype = TREE_TYPE (TREE_TYPE (soft_instanceof_node));
  tree valtype = TREE_TYPE (TREE_TYPE (value));
  tree valclass = TYPE_NAME (valtype);
  tree klass;

  /* When compiling from bytecode, we need to ensure that TYPE has
     been loaded.  */
  if (CLASS_P (type) && ! CLASS_LOADED_P (type))
    {
      load_class (type, 1);
      safe_layout_class (type);
      if (! TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) == ERROR_MARK)
	return error_mark_node;
    }
  klass = TYPE_NAME (type);

  if (type == object_type_node || inherits_from_p (valtype, type))
    {
      /* Anything except `null' is an instance of Object.  Likewise,
	 if the object is known to be an instance of the class, then
	 we only need to check for `null'.  */
      expr = build2 (NE_EXPR, itype, value, null_pointer_node);
    }
  else if (flag_verify_invocations
	   && ! TYPE_ARRAY_P (type)
	   && ! TYPE_ARRAY_P (valtype)
	   && DECL_P (klass) && DECL_P (valclass)
	   && ! CLASS_INTERFACE (valclass)
	   && ! CLASS_INTERFACE (klass)
	   && ! inherits_from_p (type, valtype)
	   && (CLASS_FINAL (klass)
	       || ! inherits_from_p (valtype, type)))
    {
      /* The classes are from different branches of the derivation
	 tree, so we immediately know the answer.  */
      expr = boolean_false_node;
    }
  else if (DECL_P (klass) && CLASS_FINAL (klass))
    {
      /* TYPE is final, so a direct vtable-class comparison suffices,
	 guarded against `null'.  */
      tree save = save_expr (value);
      expr = build3 (COND_EXPR, itype,
		     build2 (NE_EXPR, boolean_type_node,
			     save, null_pointer_node),
		     build2 (EQ_EXPR, itype,
			     build_get_class (save),
			     build_class_ref (type)),
		     boolean_false_node);
    }
  else
    {
      /* General case: defer to the runtime helper _Jv_IsInstanceOf.  */
      expr = build_call_nary (itype,
			      build_address_of (soft_instanceof_node),
			      2, value, build_class_ref (type));
    }
  TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (value);
  return expr;
}
1434
1435 static void
expand_java_INSTANCEOF(tree type)1436 expand_java_INSTANCEOF (tree type)
1437 {
1438 tree value = pop_value (object_ptr_type_node);
1439 value = build_instanceof (value, type);
1440 push_value (value);
1441 }
1442
1443 static void
expand_java_CHECKCAST(tree type)1444 expand_java_CHECKCAST (tree type)
1445 {
1446 tree value = pop_value (ptr_type_node);
1447 value = build_call_nary (promote_type (type),
1448 build_address_of (soft_checkcast_node),
1449 2, build_class_ref (type), value);
1450 push_value (value);
1451 }
1452
1453 static void
expand_iinc(unsigned int local_var_index,int ival,int pc)1454 expand_iinc (unsigned int local_var_index, int ival, int pc)
1455 {
1456 tree local_var, res;
1457 tree constant_value;
1458
1459 flush_quick_stack ();
1460 local_var = find_local_variable (local_var_index, int_type_node, pc);
1461 constant_value = build_int_cst (NULL_TREE, ival);
1462 res = fold_build2 (PLUS_EXPR, int_type_node, local_var, constant_value);
1463 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (local_var), local_var, res));
1464 }
1465
1466
1467 tree
build_java_soft_divmod(enum tree_code op,tree type,tree op1,tree op2)1468 build_java_soft_divmod (enum tree_code op, tree type, tree op1, tree op2)
1469 {
1470 tree call = NULL;
1471 tree arg1 = convert (type, op1);
1472 tree arg2 = convert (type, op2);
1473
1474 if (type == int_type_node)
1475 {
1476 switch (op)
1477 {
1478 case TRUNC_DIV_EXPR:
1479 call = soft_idiv_node;
1480 break;
1481 case TRUNC_MOD_EXPR:
1482 call = soft_irem_node;
1483 break;
1484 default:
1485 break;
1486 }
1487 }
1488 else if (type == long_type_node)
1489 {
1490 switch (op)
1491 {
1492 case TRUNC_DIV_EXPR:
1493 call = soft_ldiv_node;
1494 break;
1495 case TRUNC_MOD_EXPR:
1496 call = soft_lrem_node;
1497 break;
1498 default:
1499 break;
1500 }
1501 }
1502
1503 gcc_assert (call);
1504 call = build_call_nary (type, build_address_of (call), 2, arg1, arg2);
1505 return call;
1506 }
1507
tree
build_java_binop (enum tree_code op, tree type, tree arg1, tree arg2)
{
  tree mask;
  switch (op)
    {
    case URSHIFT_EXPR:
      {
	/* Logical shift right: shift in an unsigned type, then cast
	   back to the signed result type.  */
	tree u_type = unsigned_type_for (type);
	arg1 = convert (u_type, arg1);
	arg1 = build_java_binop (RSHIFT_EXPR, u_type, arg1, arg2);
	return convert (type, arg1);
      }
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* The JVM masks shift counts to the width of the shifted
	 operand (5 bits for int, 6 for long).  */
      mask = build_int_cst (int_type_node,
			    TYPE_PRECISION (TREE_TYPE (arg1)) - 1);
      arg2 = fold_build2 (BIT_AND_EXPR, int_type_node, arg2, mask);
      break;

    case COMPARE_L_EXPR:  /* arg1 > arg2 ?  1 : arg1 == arg2 ? 0 : -1 */
    case COMPARE_G_EXPR:  /* arg1 < arg2 ? -1 : arg1 == arg2 ? 0 :  1 */
      /* fcmpl/fcmpg, dcmpl/dcmpg: the L/G variants differ only in the
	 result produced for NaN operands.  Operands are evaluated
	 twice, hence the save_exprs.  */
      arg1 = save_expr (arg1); arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (op == COMPARE_L_EXPR ? GT_EXPR : LT_EXPR,
				   boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (EQ_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_zero_node,
					   op == COMPARE_L_EXPR
					   ? integer_minus_one_node
					   : integer_one_node);
	return fold_build3 (COND_EXPR, int_type_node, ifexp1,
			    op == COMPARE_L_EXPR ? integer_one_node
			    : integer_minus_one_node,
			    second_compare);
      }
    case COMPARE_EXPR:
      /* lcmp: arg1 < arg2 ? -1 : arg1 > arg2 ? 1 : 0.  */
      arg1 = save_expr (arg1); arg2 = save_expr (arg2);
      {
	tree ifexp1 = fold_build2 (LT_EXPR, boolean_type_node, arg1, arg2);
	tree ifexp2 = fold_build2 (GT_EXPR, boolean_type_node, arg1, arg2);
	tree second_compare = fold_build3 (COND_EXPR, int_type_node,
					   ifexp2, integer_one_node,
					   integer_zero_node);
	return fold_build3 (COND_EXPR, int_type_node,
			    ifexp1, integer_minus_one_node, second_compare);
      }
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
      /* Floating-point remainder is implemented by the runtime's fmod
	 routine, computed in double and narrowed if necessary.  */
      if (TREE_CODE (type) == REAL_TYPE
	  && op == TRUNC_MOD_EXPR)
	{
	  tree call;
	  if (type != double_type_node)
	    {
	      arg1 = convert (double_type_node, arg1);
	      arg2 = convert (double_type_node, arg2);
	    }
	  call = build_call_nary (double_type_node,
				  build_address_of (soft_fmod_node),
				  2, arg1, arg2);
	  if (type != double_type_node)
	    call = convert (type, call);
	  return call;
	}

      /* Integer div/rem may go through a runtime routine so that
	 division by zero raises the proper exception.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && flag_use_divide_subroutine
	  && ! flag_syntax_only)
	return build_java_soft_divmod (op, type, arg1, arg2);

      break;
    default:  ;
    }
  /* Everything else maps directly onto a GENERIC binary operator.  */
  return fold_build2 (op, type, arg1, arg2);
}
1585
1586 static void
expand_java_binop(tree type,enum tree_code op)1587 expand_java_binop (tree type, enum tree_code op)
1588 {
1589 tree larg, rarg;
1590 tree ltype = type;
1591 tree rtype = type;
1592 switch (op)
1593 {
1594 case LSHIFT_EXPR:
1595 case RSHIFT_EXPR:
1596 case URSHIFT_EXPR:
1597 rtype = int_type_node;
1598 rarg = pop_value (rtype);
1599 break;
1600 default:
1601 rarg = pop_value (rtype);
1602 }
1603 larg = pop_value (ltype);
1604 push_value (build_java_binop (op, type, larg, rarg));
1605 }
1606
1607 /* Lookup the field named NAME in *TYPEP or its super classes.
1608 If not found, return NULL_TREE.
1609 (If the *TYPEP is not found, or if the field reference is
1610 ambiguous, return error_mark_node.)
1611 If found, return the FIELD_DECL, and set *TYPEP to the
1612 class containing the field. */
1613
tree
lookup_field (tree *typep, tree name)
{
  /* Make sure the class is loaded and laid out before walking its
     fields.  */
  if (CLASS_P (*typep) && !CLASS_LOADED_P (*typep))
    {
      load_class (*typep, 1);
      safe_layout_class (*typep);
      if (!TYPE_SIZE (*typep) || TREE_CODE (TYPE_SIZE (*typep)) == ERROR_MARK)
	return error_mark_node;
    }
  /* Walk the class, then each superclass in turn.  */
  do
    {
      tree field, binfo, base_binfo;
      tree save_field;
      int i;

      /* A field declared directly in this class wins outright.  */
      for (field = TYPE_FIELDS (*typep); field; field = DECL_CHAIN (field))
	if (DECL_NAME (field) == name)
	  return field;

      /* Process implemented interfaces.  */
      save_field = NULL_TREE;
      for (binfo = TYPE_BINFO (*typep), i = 0;
	   BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	{
	  tree t = BINFO_TYPE (base_binfo);
	  if ((field = lookup_field (&t, name)))
	    {
	      /* The same interface field may be reachable through
		 several paths; that is not ambiguous.  */
	      if (save_field == field)
		continue;
	      if (save_field == NULL_TREE)
		save_field = field;
	      else
		{
		  /* Two distinct interface fields with the same name:
		     the reference is ambiguous.  */
		  tree i1 = DECL_CONTEXT (save_field);
		  tree i2 = DECL_CONTEXT (field);
		  error ("reference %qs is ambiguous: appears in interface %qs and interface %qs",
			 IDENTIFIER_POINTER (name),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i1))),
			 IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (i2))));
		  return error_mark_node;
		}
	    }
	}

      if (save_field != NULL_TREE)
	return save_field;

      *typep = CLASSTYPE_SUPER (*typep);
    } while (*typep);
  return NULL_TREE;
}
1666
1667 /* Look up the field named NAME in object SELF_VALUE,
1668 which has class SELF_CLASS (a non-handle RECORD_TYPE).
1669 SELF_VALUE is NULL_TREE if looking for a static field. */
1670
tree
build_field_ref (tree self_value, tree self_class, tree name)
{
  tree base_class = self_class;
  tree field_decl = lookup_field (&base_class, name);
  if (field_decl == NULL_TREE)
    {
      error ("field %qs not found", IDENTIFIER_POINTER (name));
      return error_mark_node;
    }
  if (self_value == NULL_TREE)
    {
      /* No object: this is a static field reference.  */
      return build_static_field_ref (field_decl);
    }
  else
    {
      tree base_type = promote_type (base_class);

      /* CHECK is true if self_value is not the this pointer.  */
      int check = (! (DECL_P (self_value)
		      && DECL_NAME (self_value) == this_identifier_node));

      /* Determine whether a field offset from NULL will lie within
	 Page 0: this is necessary on those GNU/Linux/BSD systems that
	 trap SEGV to generate NullPointerExceptions.

	 We assume that Page 0 will be mapped with NOPERM, and that
	 memory may be allocated from any other page, so only field
	 offsets < pagesize are guaranteed to trap.  We also assume
	 the smallest page size we'll encounter is 4k bytes.  */
      if (! flag_syntax_only && check && ! flag_check_references
	  && ! flag_indirect_dispatch)
	{
	  tree field_offset = byte_position (field_decl);
	  if (! page_size)
	    page_size = size_int (4096);
	  check = !tree_int_cst_lt (field_offset, page_size);
	}

      if (base_type != TREE_TYPE (self_value))
	self_value = fold_build1 (NOP_EXPR, base_type, self_value);
      if (! flag_syntax_only && flag_indirect_dispatch)
	{
	  /* Indirect dispatch: the field offset is read at run time
	     from the offset table (otable).  */
	  tree otable_index
	    = build_int_cst (NULL_TREE, get_symbol_table_index
			     (field_decl, NULL_TREE,
			      &TYPE_OTABLE_METHODS (output_class)));
	  tree field_offset
	    = build4 (ARRAY_REF, integer_type_node,
		      TYPE_OTABLE_DECL (output_class), otable_index,
		      NULL_TREE, NULL_TREE);
	  tree address;

	  /* For fields of other classes, a zero otable entry means
	     the field was not found at link time; call the runtime
	     error routine in that case.  */
	  if (DECL_CONTEXT (field_decl) != output_class)
	    field_offset
	      = build3 (COND_EXPR, TREE_TYPE (field_offset),
			build2 (EQ_EXPR, boolean_type_node,
				field_offset, integer_zero_node),
			build_call_nary (void_type_node,
					 build_address_of (soft_nosuchfield_node),
					 1, otable_index),
			field_offset);

	  self_value = java_check_reference (self_value, check);
	  /* *(fieldtype *)((char *)self + offset)  */
	  address = fold_build_pointer_plus (self_value, field_offset);
	  address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
				  address);
	  return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
	}

      /* Direct dispatch: an ordinary COMPONENT_REF, with an optional
	 null-reference check.  */
      self_value = build_java_indirect_ref (TREE_TYPE (TREE_TYPE (self_value)),
					    self_value, check);
      return fold_build3 (COMPONENT_REF, TREE_TYPE (field_decl),
			  self_value, field_decl, NULL_TREE);
    }
}
1747
1748 tree
lookup_label(int pc)1749 lookup_label (int pc)
1750 {
1751 tree name;
1752 char buf[32];
1753 if (pc > highest_label_pc_this_method)
1754 highest_label_pc_this_method = pc;
1755 targetm.asm_out.generate_internal_label (buf, "LJpc=",
1756 start_label_pc_this_method + pc);
1757 name = get_identifier (buf);
1758 if (IDENTIFIER_LOCAL_VALUE (name))
1759 return IDENTIFIER_LOCAL_VALUE (name);
1760 else
1761 {
1762 /* The type of the address of a label is return_address_type_node. */
1763 tree decl = create_label_decl (name);
1764 return pushdecl (decl);
1765 }
1766 }
1767
/* Generate a unique name for loop and switch labels, for
   try-catch-finally block labels, and for temporary variables.  */
1770
1771 tree
generate_name(void)1772 generate_name (void)
1773 {
1774 static int l_number = 0;
1775 char buff [32];
1776 targetm.asm_out.generate_internal_label (buff, "LJv", l_number);
1777 l_number++;
1778 return get_identifier (buff);
1779 }
1780
1781 tree
create_label_decl(tree name)1782 create_label_decl (tree name)
1783 {
1784 tree decl;
1785 decl = build_decl (input_location, LABEL_DECL, name,
1786 TREE_TYPE (return_address_type_node));
1787 DECL_CONTEXT (decl) = current_function_decl;
1788 DECL_IGNORED_P (decl) = 1;
1789 return decl;
1790 }
1791
1792 /* This maps a bytecode offset (PC) to various flags. */
1793 char *instruction_bits;
1794
1795 /* This is a vector of type states for the current method. It is
1796 indexed by PC. Each element is a tree vector holding the type
1797 state at that PC. We only note type states at basic block
1798 boundaries. */
1799 vec<tree, va_gc> *type_states;
1800
1801 static void
note_label(int current_pc ATTRIBUTE_UNUSED,int target_pc)1802 note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
1803 {
1804 lookup_label (target_pc);
1805 instruction_bits [target_pc] |= BCODE_JUMP_TARGET;
1806 }
1807
/* Emit code to jump to TARGET_PC if VALUE1 CONDITION VALUE2,
   where CONDITION is one of the compare operators.  */
1810
1811 static void
expand_compare(enum tree_code condition,tree value1,tree value2,int target_pc)1812 expand_compare (enum tree_code condition, tree value1, tree value2,
1813 int target_pc)
1814 {
1815 tree target = lookup_label (target_pc);
1816 tree cond = fold_build2 (condition, boolean_type_node, value1, value2);
1817 java_add_stmt
1818 (build3 (COND_EXPR, void_type_node, java_truthvalue_conversion (cond),
1819 build1 (GOTO_EXPR, void_type_node, target),
1820 build_java_empty_stmt ()));
1821 }
1822
1823 /* Emit code for a TEST-type opcode. */
1824
1825 static void
expand_test(enum tree_code condition,tree type,int target_pc)1826 expand_test (enum tree_code condition, tree type, int target_pc)
1827 {
1828 tree value1, value2;
1829 flush_quick_stack ();
1830 value1 = pop_value (type);
1831 value2 = (type == ptr_type_node) ? null_pointer_node : integer_zero_node;
1832 expand_compare (condition, value1, value2, target_pc);
1833 }
1834
1835 /* Emit code for a COND-type opcode. */
1836
1837 static void
expand_cond(enum tree_code condition,tree type,int target_pc)1838 expand_cond (enum tree_code condition, tree type, int target_pc)
1839 {
1840 tree value1, value2;
1841 flush_quick_stack ();
1842 /* note: pop values in opposite order */
1843 value2 = pop_value (type);
1844 value1 = pop_value (type);
1845 /* Maybe should check value1 and value2 for type compatibility ??? */
1846 expand_compare (condition, value1, value2, target_pc);
1847 }
1848
1849 static void
expand_java_goto(int target_pc)1850 expand_java_goto (int target_pc)
1851 {
1852 tree target_label = lookup_label (target_pc);
1853 flush_quick_stack ();
1854 java_add_stmt (build1 (GOTO_EXPR, void_type_node, target_label));
1855 }
1856
/* Begin expanding a tableswitch/lookupswitch: emit a SWITCH_EXPR on
   SELECTOR whose body initially holds only the default case, a goto
   to DEFAULT_PC.  The SWITCH_EXPR is returned so callers can append
   cases with expand_java_add_case.  */

static tree
expand_java_switch (tree selector, int default_pc)
{
  tree switch_expr, x;

  flush_quick_stack ();
  switch_expr = build3 (SWITCH_EXPR, TREE_TYPE (selector), selector,
			NULL_TREE, NULL_TREE);
  java_add_stmt (switch_expr);

  /* default: */
  x = build_case_label (NULL_TREE, NULL_TREE,
			create_artificial_label (input_location));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  /* goto <label for DEFAULT_PC>; */
  x = build1 (GOTO_EXPR, void_type_node, lookup_label (default_pc));
  append_to_statement_list (x, &SWITCH_BODY (switch_expr));

  return switch_expr;
}
1876
1877 static void
expand_java_add_case(tree switch_expr,int match,int target_pc)1878 expand_java_add_case (tree switch_expr, int match, int target_pc)
1879 {
1880 tree value, x;
1881
1882 value = build_int_cst (TREE_TYPE (switch_expr), match);
1883
1884 x = build_case_label (value, NULL_TREE,
1885 create_artificial_label (input_location));
1886 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1887
1888 x = build1 (GOTO_EXPR, void_type_node, lookup_label (target_pc));
1889 append_to_statement_list (x, &SWITCH_BODY (switch_expr));
1890 }
1891
1892 static vec<tree, va_gc> *
pop_arguments(tree method_type)1893 pop_arguments (tree method_type)
1894 {
1895 function_args_iterator fnai;
1896 tree type;
1897 vec<tree, va_gc> *args = NULL;
1898 int arity;
1899
1900 FOREACH_FUNCTION_ARGS (method_type, type, fnai)
1901 {
1902 /* XXX: leaky abstraction. */
1903 if (type == void_type_node)
1904 break;
1905
1906 vec_safe_push (args, type);
1907 }
1908
1909 arity = vec_safe_length (args);
1910
1911 while (arity--)
1912 {
1913 tree arg = pop_value ((*args)[arity]);
1914
1915 /* We simply cast each argument to its proper type. This is
1916 needed since we lose type information coming out of the
1917 verifier. We also have to do this when we pop an integer
1918 type that must be promoted for the function call. */
1919 if (TREE_CODE (type) == POINTER_TYPE)
1920 arg = build1 (NOP_EXPR, type, arg);
1921 else if (targetm.calls.promote_prototypes (type)
1922 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node)
1923 && INTEGRAL_TYPE_P (type))
1924 arg = convert (integer_type_node, arg);
1925
1926 (*args)[arity] = arg;
1927 }
1928
1929 return args;
1930 }
1931
1932 /* Attach to PTR (a block) the declaration found in ENTRY. */
1933
int
attach_init_test_initialization_flags (treetreehash_entry **slot, tree block)
{
  /* ITE->value is the boolean flag DECL tracking whether a class has
     been initialized; hook it into BLOCK's declarations and emit a
     DECL_EXPR for it ahead of the existing body.  */
  treetreehash_entry *ite = *slot;

  if (block != error_mark_node)
    {
      if (TREE_CODE (block) == BIND_EXPR)
	{
	  tree body = BIND_EXPR_BODY (block);
	  DECL_CHAIN (ite->value) = BIND_EXPR_VARS (block);
	  BIND_EXPR_VARS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BIND_EXPR_BODY (block) = body;
	}
      else
	{
	  /* Same, but for a BLOCK node.  */
	  tree body = BLOCK_SUBBLOCKS (block);
	  TREE_CHAIN (ite->value) = BLOCK_EXPR_DECLS (block);
	  BLOCK_EXPR_DECLS (block) = ite->value;
	  body = build2 (COMPOUND_EXPR, void_type_node,
			 build1 (DECL_EXPR, void_type_node, ite->value), body);
	  BLOCK_SUBBLOCKS (block) = body;
	}

    }
  /* Nonzero keeps the hash-table traversal going.  */
  return true;
}
1963
/* Build an expression to initialize the class CLAS.
   If EXPR is non-NULL, returns an expression to first call the initializer
   (if it is needed) and then calls EXPR.  */

tree
build_class_init (tree clas, tree expr)
{
  tree init;

  /* An optimization: if CLAS is a superclass of the class we're
     compiling, we don't need to initialize it.  However, if CLAS is
     an interface, it won't necessarily be initialized, even if we
     implement it.  */
  if ((! CLASS_INTERFACE (TYPE_NAME (clas))
       && inherits_from_p (current_class, clas))
      || current_class == clas)
    return expr;

  if (always_initialize_class_p)
    {
      /* Unconditional runtime call to the class initializer.  */
      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
    }
  else
    {
      /* Guard the initializer call with a per-function boolean flag
	 so the runtime call is emitted at most once per activation.  */
      tree *init_test_decl;
      tree decl;
      init_test_decl = java_treetreehash_new
	(DECL_FUNCTION_INIT_TEST_TABLE (current_function_decl), clas);

      if (*init_test_decl == NULL)
	{
	  /* Build a declaration and mark it as a flag used to track
	     static class initializations.  */
	  decl = build_decl (input_location, VAR_DECL, NULL_TREE,
			     boolean_type_node);
	  MAYBE_CREATE_VAR_LANG_DECL_SPECIFIC (decl);
	  DECL_CONTEXT (decl) = current_function_decl;
	  DECL_INITIAL (decl) = boolean_false_node;
	  /* Don't emit any symbolic debugging info for this decl.  */
	  DECL_IGNORED_P (decl) = 1;
	  *init_test_decl = decl;
	}

      init = build_call_nary (void_type_node,
			      build_address_of (soft_initclass_node),
			      1, build_class_ref (clas));
      TREE_SIDE_EFFECTS (init) = 1;
      /* if (flag == false) init_class (clas);  */
      init = build3 (COND_EXPR, void_type_node,
		     build2 (EQ_EXPR, boolean_type_node,
			     *init_test_decl, boolean_false_node),
		     init, integer_zero_node);
      TREE_SIDE_EFFECTS (init) = 1;
      /* NOTE(review): TREE_TYPE (expr) is used here although EXPR may
	 be NULL_TREE on this path — presumably callers that reach this
	 branch always pass a non-null EXPR; confirm.  */
      init = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init,
		     build2 (MODIFY_EXPR, boolean_type_node,
			     *init_test_decl, boolean_true_node));
      TREE_SIDE_EFFECTS (init) = 1;
    }

  if (expr != NULL_TREE)
    {
      /* Sequence the initialization before EXPR.  */
      expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), init, expr);
      TREE_SIDE_EFFECTS (expr) = 1;
      return expr;
    }
  return init;
}
2033
2034
2035
2036 /* Rewrite expensive calls that require stack unwinding at runtime to
2037 cheaper alternatives. The logic here performs these
2038 transformations:
2039
2040 java.lang.Class.forName("foo") -> java.lang.Class.forName("foo", class$)
2041 java.lang.Class.getClassLoader() -> java.lang.Class.getClassLoader(class$)
2042
2043 */
2044
/* A rule describing how a call to CLASSNAME.METHOD with SIGNATURE is
   redirected to NEW_CLASSNAME with NEW_SIGNATURE.  */
typedef struct
{
  const char *classname;	/* Fully-qualified declaring class.  */
  const char *method;		/* Method name to match.  */
  const char *signature;	/* Original JNI-style signature.  */
  const char *new_classname;	/* Class the call is redirected to.  */
  const char *new_signature;	/* Signature of the replacement method.  */
  int flags;			/* ACC_* flags used if a dummy decl must
				   be created for the replacement.  */
  /* Hook that appends any extra argument(s) to the call, or NULL.  */
  void (*rewrite_arglist) (vec<tree, va_gc> **);
} rewrite_rule;
2055
2056 /* Add __builtin_return_address(0) to the end of an arglist. */
2057
2058
2059 static void
rewrite_arglist_getcaller(vec<tree,va_gc> ** arglist)2060 rewrite_arglist_getcaller (vec<tree, va_gc> **arglist)
2061 {
2062 tree retaddr
2063 = build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS),
2064 1, integer_zero_node);
2065
2066 DECL_UNINLINABLE (current_function_decl) = 1;
2067
2068 vec_safe_push (*arglist, retaddr);
2069 }
2070
2071 /* Add this.class to the end of an arglist. */
2072
2073 static void
rewrite_arglist_getclass(vec<tree,va_gc> ** arglist)2074 rewrite_arglist_getclass (vec<tree, va_gc> **arglist)
2075 {
2076 vec_safe_push (*arglist, build_class_ref (output_class));
2077 }
2078
/* The rewrite table; scanning stops at the all-NULL sentinel entry.  */
static rewrite_rule rules[] =
  /* Class.getClassLoader() gains the caller's class as argument.  */
  {{"java.lang.Class", "getClassLoader", "()Ljava/lang/ClassLoader;",
    "java.lang.Class", "(Ljava/lang/Class;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE, rewrite_arglist_getclass},

   /* Class.forName(name) gains the caller's class as argument.  */
   {"java.lang.Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;",
    "java.lang.Class", "(Ljava/lang/String;Ljava/lang/Class;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getclass},

   /* VMStackWalker.getCallingClass() gains the caller's return address.  */
   {"gnu.classpath.VMStackWalker", "getCallingClass", "()Ljava/lang/Class;",
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/Class;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   /* Likewise for getCallingClassLoader().  */
   {"gnu.classpath.VMStackWalker", "getCallingClassLoader",
    "()Ljava/lang/ClassLoader;",
    "gnu.classpath.VMStackWalker", "(Lgnu/gcj/RawData;)Ljava/lang/ClassLoader;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, rewrite_arglist_getcaller},

   /* Redirect to java.lang.String with no argument change.  */
   {"gnu.java.lang.VMCPStringBuilder", "toString", "([CII)Ljava/lang/String;",
    "java.lang.String", "([CII)Ljava/lang/String;",
    ACC_FINAL|ACC_PRIVATE|ACC_STATIC, NULL},

   {NULL, NULL, NULL, NULL, NULL, 0, NULL}};
2102
2103 /* True if this method is special, i.e. it's a private method that
2104 should be exported from a DSO. */
2105
2106 bool
special_method_p(tree candidate_method)2107 special_method_p (tree candidate_method)
2108 {
2109 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (candidate_method)));
2110 tree method = DECL_NAME (candidate_method);
2111 rewrite_rule *p;
2112
2113 for (p = rules; p->classname; p++)
2114 {
2115 if (get_identifier (p->classname) == context
2116 && get_identifier (p->method) == method)
2117 return true;
2118 }
2119 return false;
2120 }
2121
2122 /* Scan the rules list for replacements for *METHOD_P and replace the
2123 args accordingly. If the rewrite results in an access to a private
2124 method, update SPECIAL.*/
2125
2126 void
maybe_rewrite_invocation(tree * method_p,vec<tree,va_gc> ** arg_list_p,tree * method_signature_p,tree * special)2127 maybe_rewrite_invocation (tree *method_p, vec<tree, va_gc> **arg_list_p,
2128 tree *method_signature_p, tree *special)
2129 {
2130 tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
2131 rewrite_rule *p;
2132 *special = NULL_TREE;
2133
2134 for (p = rules; p->classname; p++)
2135 {
2136 if (get_identifier (p->classname) == context)
2137 {
2138 tree method = DECL_NAME (*method_p);
2139 if (get_identifier (p->method) == method
2140 && get_identifier (p->signature) == *method_signature_p)
2141 {
2142 tree maybe_method;
2143 tree destination_class
2144 = lookup_class (get_identifier (p->new_classname));
2145 gcc_assert (destination_class);
2146 maybe_method
2147 = lookup_java_method (destination_class,
2148 method,
2149 get_identifier (p->new_signature));
2150 if (! maybe_method && ! flag_verify_invocations)
2151 {
2152 maybe_method
2153 = add_method (destination_class, p->flags,
2154 method, get_identifier (p->new_signature));
2155 DECL_EXTERNAL (maybe_method) = 1;
2156 }
2157 *method_p = maybe_method;
2158 gcc_assert (*method_p);
2159 if (p->rewrite_arglist)
2160 p->rewrite_arglist (arg_list_p);
2161 *method_signature_p = get_identifier (p->new_signature);
2162 *special = integer_one_node;
2163
2164 break;
2165 }
2166 }
2167 }
2168 }
2169
2170
2171
/* Build a reference to the statically-resolved METHOD, suitable for a
   direct call.  SELF_TYPE is the class named by the constant pool;
   SPECIAL is the rewrite marker from maybe_rewrite_invocation.  */

tree
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
			tree self_type, tree method_signature ATTRIBUTE_UNUSED,
			vec<tree, va_gc> *arg_list ATTRIBUTE_UNUSED, tree special)
{
  tree func;
  if (is_compiled_class (self_type))
    {
      /* With indirect dispatch we have to use indirect calls for all
	 publicly visible methods or gcc will use PLT indirections
	 to reach them.  We also have to use indirect dispatch for all
	 external methods.  */
      if (! flag_indirect_dispatch
	  || (! DECL_EXTERNAL (method) && ! TREE_PUBLIC (method)))
	{
	  /* Direct reference: take the method's address.  */
	  func = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (method)),
			 method);
	}
      else
	{
	  /* Indirect reference through the atable.  */
	  tree table_index
	    = build_int_cst (NULL_TREE,
			     (get_symbol_table_index
			      (method, special,
			       &TYPE_ATABLE_METHODS (output_class))));
	  func
	    = build4 (ARRAY_REF,
		      TREE_TYPE (TREE_TYPE (TYPE_ATABLE_DECL (output_class))),
		      TYPE_ATABLE_DECL (output_class), table_index,
		      NULL_TREE, NULL_TREE);
	}
      func = convert (method_ptr_type_node, func);
    }
  else
    {
      /* We don't know whether the method has been (statically) compiled.
	 Compile this code to get a reference to the method's code:

	 SELF_TYPE->methods[METHOD_INDEX].ncode

      */

      int method_index = 0;
      tree meth, ref;

      /* The method might actually be declared in some superclass, so
	 we have to use its class context, not the caller's notion of
	 where the method is.  */
      self_type = DECL_CONTEXT (method);
      ref = build_class_ref (self_type);
      ref = build1 (INDIRECT_REF, class_type_node, ref);
      if (ncode_ident == NULL_TREE)
	ncode_ident = get_identifier ("ncode");
      if (methods_ident == NULL_TREE)
	methods_ident = get_identifier ("methods");
      ref = build3 (COMPONENT_REF, method_ptr_type_node, ref,
		    lookup_field (&class_type_node, methods_ident),
		    NULL_TREE);
      /* Find METHOD's position in its class's method chain; a miss is
	 a fatal internal inconsistency.  */
      for (meth = TYPE_METHODS (self_type);
	   ; meth = DECL_CHAIN (meth))
	{
	  if (method == meth)
	    break;
	  if (meth == NULL_TREE)
	    fatal_error (input_location, "method '%s' not found in class",
			 IDENTIFIER_POINTER (DECL_NAME (method)));
	  method_index++;
	}
      /* Scale the index to a byte offset into the methods array.  */
      method_index *= int_size_in_bytes (method_type_node);
      ref = fold_build_pointer_plus_hwi (ref, method_index);
      ref = build1 (INDIRECT_REF, method_type_node, ref);
      func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
		     ref, lookup_field (&method_type_node, ncode_ident),
		     NULL_TREE);
    }
  return func;
}
2249
2250 tree
invoke_build_dtable(int is_invoke_interface,vec<tree,va_gc> * arg_list)2251 invoke_build_dtable (int is_invoke_interface, vec<tree, va_gc> *arg_list)
2252 {
2253 tree dtable, objectref;
2254 tree saved = save_expr ((*arg_list)[0]);
2255
2256 (*arg_list)[0] = saved;
2257
2258 /* If we're dealing with interfaces and if the objectref
2259 argument is an array then get the dispatch table of the class
2260 Object rather than the one from the objectref. */
2261 objectref = (is_invoke_interface
2262 && is_array_type_p (TREE_TYPE (saved))
2263 ? build_class_ref (object_type_node) : saved);
2264
2265 if (dtable_ident == NULL_TREE)
2266 dtable_ident = get_identifier ("vtable");
2267 dtable = build_java_indirect_ref (object_type_node, objectref,
2268 flag_check_references);
2269 dtable = build3 (COMPONENT_REF, dtable_ptr_type, dtable,
2270 lookup_field (&object_type_node, dtable_ident), NULL_TREE);
2271
2272 return dtable;
2273 }
2274
2275 /* Determine the index in SYMBOL_TABLE for a reference to the decl
2276 T. If this decl has not been seen before, it will be added to the
2277 [oa]table_methods. If it has, the existing table slot will be
2278 reused. */
2279
2280 int
get_symbol_table_index(tree t,tree special,vec<method_entry,va_gc> ** symbol_table)2281 get_symbol_table_index (tree t, tree special,
2282 vec<method_entry, va_gc> **symbol_table)
2283 {
2284 method_entry *e;
2285 unsigned i;
2286 method_entry elem = {t, special};
2287
2288 FOR_EACH_VEC_SAFE_ELT (*symbol_table, i, e)
2289 if (t == e->method && special == e->special)
2290 goto done;
2291
2292 vec_safe_push (*symbol_table, elem);
2293
2294 done:
2295 return i + 1;
2296 }
2297
/* Build an expression computing the code address for a virtual call
   to METHOD through the dispatch table expression DTABLE.  SPECIAL is
   the rewrite marker used for otable indexing.  */

tree
build_invokevirtual (tree dtable, tree method, tree special)
{
  tree func;
  tree nativecode_ptr_ptr_type_node
    = build_pointer_type (nativecode_ptr_type_node);
  tree method_index;
  tree otable_index;

  if (flag_indirect_dispatch)
    {
      gcc_assert (! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))));

      /* The vtable offset is fetched at runtime from the otable.  */
      otable_index
	= build_int_cst (NULL_TREE, get_symbol_table_index
			 (method, special,
			  &TYPE_OTABLE_METHODS (output_class)));
      method_index = build4 (ARRAY_REF, integer_type_node,
			     TYPE_OTABLE_DECL (output_class),
			     otable_index, NULL_TREE, NULL_TREE);
    }
  else
    {
      /* We fetch the DECL_VINDEX field directly here, rather than
	 using get_method_index().  DECL_VINDEX is the true offset
	 from the vtable base to a method, regardless of any extra
	 words inserted at the start of the vtable.  */
      method_index = DECL_VINDEX (method);
      method_index = size_binop (MULT_EXPR, method_index,
				 TYPE_SIZE_UNIT (nativecode_ptr_ptr_type_node));
      if (TARGET_VTABLE_USES_DESCRIPTORS)
	method_index = size_binop (MULT_EXPR, method_index,
				   size_int (TARGET_VTABLE_USES_DESCRIPTORS));
    }

  func = fold_build_pointer_plus (dtable, method_index);

  if (TARGET_VTABLE_USES_DESCRIPTORS)
    /* On descriptor targets the slot address itself is used as the
       function pointer, not the word stored in the slot.  */
    func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
  else
    {
      /* Load the code pointer stored in the vtable slot.  */
      func = fold_convert (nativecode_ptr_ptr_type_node, func);
      func = build1 (INDIRECT_REF, nativecode_ptr_type_node, func);
    }

  return func;
}
2345
/* Cached identifier for the `class' field of a dispatch table.  */
static GTY(()) tree class_ident;

/* Build the runtime lookup for an invokeinterface of METHOD through
   the dispatch table expression DTABLE: a call through
   soft_lookupinterfacemethod_node that yields the code pointer.  */
tree
build_invokeinterface (tree dtable, tree method)
{
  tree interface;
  tree idx;

  /* We expand invokeinterface here.  */

  if (class_ident == NULL_TREE)
    class_ident = get_identifier ("class");

  /* Fetch the object's Class from its vtable.  */
  dtable = build_java_indirect_ref (dtable_type, dtable,
				    flag_check_references);
  dtable = build3 (COMPONENT_REF, class_ptr_type, dtable,
		   lookup_field (&dtable_type, class_ident), NULL_TREE);

  interface = DECL_CONTEXT (method);
  gcc_assert (CLASS_INTERFACE (TYPE_NAME (interface)));
  layout_class_methods (interface);

  if (flag_indirect_dispatch)
    {
      /* The itable stores consecutive (interface, index) entries; the
	 symbol-table index is doubled to address the pair, with the
	 interface at ITABLE_INDEX-1 and the method index at
	 ITABLE_INDEX.  */
      int itable_index
	= 2 * (get_symbol_table_index
	       (method, NULL_TREE, &TYPE_ITABLE_METHODS (output_class)));
      interface
	= build4 (ARRAY_REF,
		  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
		  TYPE_ITABLE_DECL (output_class),
		  build_int_cst (NULL_TREE, itable_index-1),
		  NULL_TREE, NULL_TREE);
      idx
	= build4 (ARRAY_REF,
		  TREE_TYPE (TREE_TYPE (TYPE_ITABLE_DECL (output_class))),
		  TYPE_ITABLE_DECL (output_class),
		  build_int_cst (NULL_TREE, itable_index),
		  NULL_TREE, NULL_TREE);
      interface = convert (class_ptr_type, interface);
      idx = convert (integer_type_node, idx);
    }
  else
    {
      idx = build_int_cst (NULL_TREE,
			   get_interface_method_index (method, interface));
      interface = build_class_ref (interface);
    }

  return build_call_nary (ptr_type_node,
			  build_address_of (soft_lookupinterfacemethod_node),
			  3, dtable, interface, idx);
}
2398
2399 /* Expand one of the invoke_* opcodes.
2400 OPCODE is the specific opcode.
2401 METHOD_REF_INDEX is an index into the constant pool.
2402 NARGS is the number of arguments, or -1 if not specified. */
2403
2404 static void
expand_invoke(int opcode,int method_ref_index,int nargs ATTRIBUTE_UNUSED)2405 expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
2406 {
2407 tree method_signature
2408 = COMPONENT_REF_SIGNATURE(¤t_jcf->cpool, method_ref_index);
2409 tree method_name = COMPONENT_REF_NAME (¤t_jcf->cpool,
2410 method_ref_index);
2411 tree self_type
2412 = get_class_constant (current_jcf,
2413 COMPONENT_REF_CLASS_INDEX(¤t_jcf->cpool,
2414 method_ref_index));
2415 const char *const self_name
2416 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2417 tree call, func, method, method_type;
2418 vec<tree, va_gc> *arg_list;
2419 tree check = NULL_TREE;
2420
2421 tree special = NULL_TREE;
2422
2423 if (! CLASS_LOADED_P (self_type))
2424 {
2425 load_class (self_type, 1);
2426 safe_layout_class (self_type);
2427 if (TREE_CODE (TYPE_SIZE (self_type)) == ERROR_MARK)
2428 fatal_error (input_location, "failed to find class '%s'", self_name);
2429 }
2430 layout_class_methods (self_type);
2431
2432 if (ID_INIT_P (method_name))
2433 method = lookup_java_constructor (self_type, method_signature);
2434 else
2435 method = lookup_java_method (self_type, method_name, method_signature);
2436
2437 /* We've found a method in a class other than the one in which it
2438 was wanted. This can happen if, for instance, we're trying to
2439 compile invokespecial super.equals().
2440 FIXME: This is a kludge. Rather than nullifying the result, we
2441 should change lookup_java_method() so that it doesn't search the
2442 superclass chain when we're BC-compiling. */
2443 if (! flag_verify_invocations
2444 && method
2445 && ! TYPE_ARRAY_P (self_type)
2446 && self_type != DECL_CONTEXT (method))
2447 method = NULL_TREE;
2448
2449 /* We've found a method in an interface, but this isn't an interface
2450 call. */
2451 if (opcode != OPCODE_invokeinterface
2452 && method
2453 && (CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method)))))
2454 method = NULL_TREE;
2455
2456 /* We've found a non-interface method but we are making an
2457 interface call. This can happen if the interface overrides a
2458 method in Object. */
2459 if (! flag_verify_invocations
2460 && opcode == OPCODE_invokeinterface
2461 && method
2462 && ! CLASS_INTERFACE (TYPE_NAME (DECL_CONTEXT (method))))
2463 method = NULL_TREE;
2464
2465 if (method == NULL_TREE)
2466 {
2467 if (flag_verify_invocations || ! flag_indirect_dispatch)
2468 {
2469 error ("class '%s' has no method named '%s' matching signature '%s'",
2470 self_name,
2471 IDENTIFIER_POINTER (method_name),
2472 IDENTIFIER_POINTER (method_signature));
2473 }
2474 else
2475 {
2476 int flags = ACC_PUBLIC;
2477 if (opcode == OPCODE_invokestatic)
2478 flags |= ACC_STATIC;
2479 if (opcode == OPCODE_invokeinterface)
2480 {
2481 flags |= ACC_INTERFACE | ACC_ABSTRACT;
2482 CLASS_INTERFACE (TYPE_NAME (self_type)) = 1;
2483 }
2484 method = add_method (self_type, flags, method_name,
2485 method_signature);
2486 DECL_ARTIFICIAL (method) = 1;
2487 METHOD_DUMMY (method) = 1;
2488 layout_class_method (self_type, NULL,
2489 method, NULL);
2490 }
2491 }
2492
2493 /* Invoke static can't invoke static/abstract method */
2494 if (method != NULL_TREE)
2495 {
2496 if (opcode == OPCODE_invokestatic)
2497 {
2498 if (!METHOD_STATIC (method))
2499 {
2500 error ("invokestatic on non static method");
2501 method = NULL_TREE;
2502 }
2503 else if (METHOD_ABSTRACT (method))
2504 {
2505 error ("invokestatic on abstract method");
2506 method = NULL_TREE;
2507 }
2508 }
2509 else
2510 {
2511 if (METHOD_STATIC (method))
2512 {
2513 error ("invoke[non-static] on static method");
2514 method = NULL_TREE;
2515 }
2516 }
2517 }
2518
2519 if (method == NULL_TREE)
2520 {
2521 /* If we got here, we emitted an error message above. So we
2522 just pop the arguments, push a properly-typed zero, and
2523 continue. */
2524 method_type = get_type_from_signature (method_signature);
2525 pop_arguments (method_type);
2526 if (opcode != OPCODE_invokestatic)
2527 pop_type (self_type);
2528 method_type = promote_type (TREE_TYPE (method_type));
2529 push_value (convert (method_type, integer_zero_node));
2530 return;
2531 }
2532
2533 arg_list = pop_arguments (TREE_TYPE (method));
2534 flush_quick_stack ();
2535
2536 maybe_rewrite_invocation (&method, &arg_list, &method_signature,
2537 &special);
2538 method_type = TREE_TYPE (method);
2539
2540 func = NULL_TREE;
2541 if (opcode == OPCODE_invokestatic)
2542 func = build_known_method_ref (method, method_type, self_type,
2543 method_signature, arg_list, special);
2544 else if (opcode == OPCODE_invokespecial
2545 || (opcode == OPCODE_invokevirtual
2546 && (METHOD_PRIVATE (method)
2547 || METHOD_FINAL (method)
2548 || CLASS_FINAL (TYPE_NAME (self_type)))))
2549 {
2550 /* If the object for the method call is null, we throw an
2551 exception. We don't do this if the object is the current
2552 method's `this'. In other cases we just rely on an
2553 optimization pass to eliminate redundant checks. FIXME:
2554 Unfortunately there doesn't seem to be a way to determine
2555 what the current method is right now.
2556 We do omit the check if we're calling <init>. */
2557 /* We use a SAVE_EXPR here to make sure we only evaluate
2558 the new `self' expression once. */
2559 tree save_arg = save_expr ((*arg_list)[0]);
2560 (*arg_list)[0] = save_arg;
2561 check = java_check_reference (save_arg, ! DECL_INIT_P (method));
2562 func = build_known_method_ref (method, method_type, self_type,
2563 method_signature, arg_list, special);
2564 }
2565 else
2566 {
2567 tree dtable = invoke_build_dtable (opcode == OPCODE_invokeinterface,
2568 arg_list);
2569 if (opcode == OPCODE_invokevirtual)
2570 func = build_invokevirtual (dtable, method, special);
2571 else
2572 func = build_invokeinterface (dtable, method);
2573 }
2574
2575 if (TREE_CODE (func) == ADDR_EXPR)
2576 TREE_TYPE (func) = build_pointer_type (method_type);
2577 else
2578 func = build1 (NOP_EXPR, build_pointer_type (method_type), func);
2579
2580 call = build_call_vec (TREE_TYPE (method_type), func, arg_list);
2581 TREE_SIDE_EFFECTS (call) = 1;
2582 call = check_for_builtin (method, call);
2583
2584 if (check != NULL_TREE)
2585 {
2586 call = build2 (COMPOUND_EXPR, TREE_TYPE (call), check, call);
2587 TREE_SIDE_EFFECTS (call) = 1;
2588 }
2589
2590 if (TREE_CODE (TREE_TYPE (method_type)) == VOID_TYPE)
2591 java_add_stmt (call);
2592 else
2593 {
2594 push_value (call);
2595 flush_quick_stack ();
2596 }
2597 }
2598
/* Create a stub which will be put into the vtable but which will call
   a JNI function.  */

tree
build_jni_stub (tree method)
{
  tree jnifunc, call, body, method_sig, arg_types;
  tree jniarg0, jniarg1, jniarg2, jniarg3;
  tree jni_func_type, tem;
  tree env_var, res_var = NULL_TREE, block;
  tree method_args;
  tree meth_var;
  tree bind;
  vec<tree, va_gc> *args = NULL;
  int args_size = 0;

  tree klass = DECL_CONTEXT (method);
  klass = build_class_ref (klass);

  gcc_assert (METHOD_NATIVE (method) && flag_jni);

  DECL_ARTIFICIAL (method) = 1;
  DECL_EXTERNAL (method) = 0;

  /* Local holding the JNIEnv pointer for this frame.  */
  env_var = build_decl (input_location,
			VAR_DECL, get_identifier ("env"), ptr_type_node);
  DECL_CONTEXT (env_var) = method;

  /* Local holding the JNI call's result, for non-void methods.  */
  if (TREE_TYPE (TREE_TYPE (method)) != void_type_node)
    {
      res_var = build_decl (input_location, VAR_DECL, get_identifier ("res"),
			    TREE_TYPE (TREE_TYPE (method)));
      DECL_CONTEXT (res_var) = method;
      DECL_CHAIN (env_var) = res_var;
    }

  method_args = DECL_ARGUMENTS (method);
  block = build_block (env_var, NULL_TREE, method_args, NULL_TREE);
  TREE_SIDE_EFFECTS (block) = 1;

  /* Compute the local `env' by calling _Jv_GetJNIEnvNewFrame.  */
  body = build2 (MODIFY_EXPR, ptr_type_node, env_var,
		 build_call_nary (ptr_type_node,
				  build_address_of (soft_getjnienvnewframe_node),
				  1, klass));

  /* The JNIEnv structure is the first argument to the JNI function.  */
  args_size += int_size_in_bytes (TREE_TYPE (env_var));
  vec_safe_push (args, env_var);

  /* For a static method the second argument is the class.  For a
     non-static method the second argument is `this'; that is already
     available in the argument list.  */
  if (METHOD_STATIC (method))
    {
      args_size += int_size_in_bytes (TREE_TYPE (klass));
      vec_safe_push (args, klass);
    }

  /* All the arguments to this method become arguments to the
     underlying JNI function.  If we had to wrap object arguments in a
     special way, we would do that here.  */
  for (tem = method_args; tem != NULL_TREE; tem = DECL_CHAIN (tem))
    {
      int arg_bits = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)));
#ifdef PARM_BOUNDARY
      /* Round each argument's size up to the parameter boundary when
	 accumulating the total stack size passed to the lookup.  */
      arg_bits = (((arg_bits + PARM_BOUNDARY - 1) / PARM_BOUNDARY)
                  * PARM_BOUNDARY);
#endif
      args_size += (arg_bits / BITS_PER_UNIT);

      vec_safe_push (args, tem);
    }
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));

  /* Argument types for static methods and the JNIEnv structure.
     FIXME: Write and use build_function_type_vec to avoid this.  */
  if (METHOD_STATIC (method))
    arg_types = tree_cons (NULL_TREE, object_ptr_type_node, arg_types);
  arg_types = tree_cons (NULL_TREE, ptr_type_node, arg_types);

  /* We call _Jv_LookupJNIMethod to find the actual underlying
     function pointer.  _Jv_LookupJNIMethod will throw the appropriate
     exception if this function is not found at runtime.  */
  method_sig = build_java_signature (TREE_TYPE (method));
  jniarg0 = klass;
  jniarg1 = build_utf8_ref (DECL_NAME (method));
  jniarg2 = build_utf8_ref (unmangle_classname
			    (IDENTIFIER_POINTER (method_sig),
			     IDENTIFIER_LENGTH (method_sig)));
  jniarg3 = build_int_cst (NULL_TREE, args_size);

  tem = build_function_type (TREE_TYPE (TREE_TYPE (method)), arg_types);

#ifdef MODIFY_JNI_METHOD_CALL
  tem = MODIFY_JNI_METHOD_CALL (tem);
#endif

  jni_func_type = build_pointer_type (tem);

  /* Use the actual function type, rather than a generic pointer type,
     such that this decl keeps the actual pointer type from being
     garbage-collected.  If it is, we end up using canonical types
     with different uids for equivalent function types, and this in
     turn causes utf8 identifiers and output order to vary.  */
  meth_var = build_decl (input_location,
			 VAR_DECL, get_identifier ("meth"), jni_func_type);
  TREE_STATIC (meth_var) = 1;
  TREE_PUBLIC (meth_var) = 0;
  DECL_EXTERNAL (meth_var) = 0;
  DECL_CONTEXT (meth_var) = method;
  DECL_ARTIFICIAL (meth_var) = 1;
  DECL_INITIAL (meth_var) = null_pointer_node;
  TREE_USED (meth_var) = 1;
  chainon (env_var, meth_var);
  build_result_decl (method);

  /* meth != NULL ? meth : (meth = lookup (...)) — the lookup result
     is cached in the static `meth' variable after the first call.  */
  jnifunc = build3 (COND_EXPR, jni_func_type,
		    build2 (NE_EXPR, boolean_type_node,
			    meth_var, build_int_cst (TREE_TYPE (meth_var), 0)),
		    meth_var,
		    build2 (MODIFY_EXPR, jni_func_type, meth_var,
			    build1
			    (NOP_EXPR, jni_func_type,
			     build_call_nary (ptr_type_node,
					      build_address_of
					      (soft_lookupjnimethod_node),
					      4,
					      jniarg0, jniarg1,
					      jniarg2, jniarg3))));

  /* Now we make the actual JNI call via the resulting function
     pointer.  */
  call = build_call_vec (TREE_TYPE (TREE_TYPE (method)), jnifunc, args);

  /* If the JNI call returned a result, capture it here.  If we had to
     unwrap JNI object results, we would do that here.  */
  if (res_var != NULL_TREE)
    {
      /* If the call returns an object, it may return a JNI weak
	 reference, in which case we must unwrap it.  */
      if (! JPRIMITIVE_TYPE_P (TREE_TYPE (TREE_TYPE (method))))
	call = build_call_nary (TREE_TYPE (TREE_TYPE (method)),
				build_address_of (soft_unwrapjni_node),
				1, call);
      call = build2 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (method)),
		     res_var, call);
    }

  TREE_SIDE_EFFECTS (call) = 1;

  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Now free the environment we allocated.  */
  call = build_call_nary (ptr_type_node,
			  build_address_of (soft_jnipopsystemframe_node),
			  1, env_var);
  TREE_SIDE_EFFECTS (call) = 1;
  body = build2 (COMPOUND_EXPR, void_type_node, body, call);
  TREE_SIDE_EFFECTS (body) = 1;

  /* Finally, do the return.  */
  if (res_var != NULL_TREE)
    {
      tree drt;
      gcc_assert (DECL_RESULT (method));
      /* Make sure we copy the result variable to the actual
	 result.  We use the type of the DECL_RESULT because it
	 might be different from the return type of the function:
	 it might be promoted.  */
      drt = TREE_TYPE (DECL_RESULT (method));
      if (drt != TREE_TYPE (res_var))
	res_var = build1 (CONVERT_EXPR, drt, res_var);
      res_var = build2 (MODIFY_EXPR, drt, DECL_RESULT (method), res_var);
      TREE_SIDE_EFFECTS (res_var) = 1;
    }

  body = build2 (COMPOUND_EXPR, void_type_node, body,
		 build1 (RETURN_EXPR, void_type_node, res_var));
  TREE_SIDE_EFFECTS (body) = 1;

  /* Prepend class initialization for static methods reachable from
     other classes.  */
  if (METHOD_STATIC (method)
      && (! METHOD_PRIVATE (method)
	  || INNER_CLASS_P (DECL_CONTEXT (method))))
    {
      tree init = build_call_expr (soft_initclass_node, 1,
				   klass);
      body = build2 (COMPOUND_EXPR, void_type_node, init, body);
      TREE_SIDE_EFFECTS (body) = 1;
    }

  bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
		 body, block);
  return bind;
}
2797
2798
2799 /* Given lvalue EXP, return a volatile expression that references the
2800 same object. */
2801
2802 tree
java_modify_addr_for_volatile(tree exp)2803 java_modify_addr_for_volatile (tree exp)
2804 {
2805 tree exp_type = TREE_TYPE (exp);
2806 tree v_type
2807 = build_qualified_type (exp_type,
2808 TYPE_QUALS (exp_type) | TYPE_QUAL_VOLATILE);
2809 tree addr = build_fold_addr_expr (exp);
2810 v_type = build_pointer_type (v_type);
2811 addr = fold_convert (v_type, addr);
2812 exp = build_fold_indirect_ref (addr);
2813 return exp;
2814 }
2815
2816
2817 /* Expand an operation to extract from or store into a field.
2818 IS_STATIC is 1 iff the field is static.
2819 IS_PUTTING is 1 for putting into a field; 0 for getting from the field.
2820 FIELD_REF_INDEX is an index into the constant pool. */
2821
2822 static void
expand_java_field_op(int is_static,int is_putting,int field_ref_index)2823 expand_java_field_op (int is_static, int is_putting, int field_ref_index)
2824 {
2825 tree self_type
2826 = get_class_constant (current_jcf,
2827 COMPONENT_REF_CLASS_INDEX (¤t_jcf->cpool,
2828 field_ref_index));
2829 const char *self_name
2830 = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
2831 tree field_name = COMPONENT_REF_NAME (¤t_jcf->cpool, field_ref_index);
2832 tree field_signature = COMPONENT_REF_SIGNATURE (¤t_jcf->cpool,
2833 field_ref_index);
2834 tree field_type = get_type_from_signature (field_signature);
2835 tree new_value = is_putting ? pop_value (field_type) : NULL_TREE;
2836 tree field_ref;
2837 int is_error = 0;
2838 tree original_self_type = self_type;
2839 tree field_decl;
2840 tree modify_expr;
2841
2842 if (! CLASS_LOADED_P (self_type))
2843 load_class (self_type, 1);
2844 field_decl = lookup_field (&self_type, field_name);
2845 if (field_decl == error_mark_node)
2846 {
2847 is_error = 1;
2848 }
2849 else if (field_decl == NULL_TREE)
2850 {
2851 if (! flag_verify_invocations)
2852 {
2853 int flags = ACC_PUBLIC;
2854 if (is_static)
2855 flags |= ACC_STATIC;
2856 self_type = original_self_type;
2857 field_decl = add_field (original_self_type, field_name,
2858 field_type, flags);
2859 DECL_ARTIFICIAL (field_decl) = 1;
2860 DECL_IGNORED_P (field_decl) = 1;
2861 #if 0
2862 /* FIXME: We should be pessimistic about volatility. We
2863 don't know one way or another, but this is safe.
2864 However, doing this has bad effects on code quality. We
2865 need to look at better ways to do this. */
2866 TREE_THIS_VOLATILE (field_decl) = 1;
2867 #endif
2868 }
2869 else
2870 {
2871 error ("missing field '%s' in '%s'",
2872 IDENTIFIER_POINTER (field_name), self_name);
2873 is_error = 1;
2874 }
2875 }
2876 else if (build_java_signature (TREE_TYPE (field_decl)) != field_signature)
2877 {
2878 error ("mismatching signature for field '%s' in '%s'",
2879 IDENTIFIER_POINTER (field_name), self_name);
2880 is_error = 1;
2881 }
2882 field_ref = is_static ? NULL_TREE : pop_value (self_type);
2883 if (is_error)
2884 {
2885 if (! is_putting)
2886 push_value (convert (field_type, integer_zero_node));
2887 flush_quick_stack ();
2888 return;
2889 }
2890
2891 field_ref = build_field_ref (field_ref, self_type, field_name);
2892 if (is_static
2893 && ! flag_indirect_dispatch)
2894 {
2895 tree context = DECL_CONTEXT (field_ref);
2896 if (context != self_type && CLASS_INTERFACE (TYPE_NAME (context)))
2897 field_ref = build_class_init (context, field_ref);
2898 else
2899 field_ref = build_class_init (self_type, field_ref);
2900 }
2901 if (is_putting)
2902 {
2903 flush_quick_stack ();
2904 if (FIELD_FINAL (field_decl))
2905 {
2906 if (DECL_CONTEXT (field_decl) != current_class)
2907 error ("assignment to final field %q+D not in field%'s class",
2908 field_decl);
2909 /* We used to check for assignments to final fields not
2910 occurring in the class initializer or in a constructor
2911 here. However, this constraint doesn't seem to be
2912 enforced by the JVM. */
2913 }
2914
2915 if (TREE_THIS_VOLATILE (field_decl))
2916 field_ref = java_modify_addr_for_volatile (field_ref);
2917
2918 modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (field_ref),
2919 field_ref, new_value);
2920
2921 if (TREE_THIS_VOLATILE (field_decl))
2922 {
2923 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2924 java_add_stmt (build_call_expr (sync, 0));
2925 }
2926
2927 java_add_stmt (modify_expr);
2928 }
2929 else
2930 {
2931 tree temp = build_decl (input_location,
2932 VAR_DECL, NULL_TREE, TREE_TYPE (field_ref));
2933 java_add_local_var (temp);
2934
2935 if (TREE_THIS_VOLATILE (field_decl))
2936 field_ref = java_modify_addr_for_volatile (field_ref);
2937
2938 modify_expr
2939 = build2 (MODIFY_EXPR, TREE_TYPE (field_ref), temp, field_ref);
2940 java_add_stmt (modify_expr);
2941
2942 if (TREE_THIS_VOLATILE (field_decl))
2943 {
2944 tree sync = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
2945 java_add_stmt (build_call_expr (sync, 0));
2946 }
2947
2948 push_value (temp);
2949 }
2950 TREE_THIS_VOLATILE (field_ref) = TREE_THIS_VOLATILE (field_decl);
2951 }
2952
2953 static void
load_type_state(int pc)2954 load_type_state (int pc)
2955 {
2956 int i;
2957 tree vec = (*type_states)[pc];
2958 int cur_length = TREE_VEC_LENGTH (vec);
2959 stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
2960 for (i = 0; i < cur_length; i++)
2961 type_map [i] = TREE_VEC_ELT (vec, i);
2962 }
2963
2964 /* Go over METHOD's bytecode and note instruction starts in
2965 instruction_bits[]. */
2966
void
note_instructions (JCF *jcf, tree method)
{
  int PC;
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);

  /* Communication variables for the CONST_INDEX/VAR_INDEX and
     PRE_BRANCH/PRE_JSR macros below.  */
  int saw_index;
  jint INT_temp;

#undef RET /* Defined by config/i386/i386.h */
#undef PTR
#define BCODE byte_ops
#define BYTE_type_node byte_type_node
#define SHORT_type_node short_type_node
#define INT_type_node int_type_node
#define LONG_type_node long_type_node
#define CHAR_type_node char_type_node
#define PTR_type_node ptr_type_node
#define FLOAT_type_node float_type_node
#define DOUBLE_type_node double_type_node
#define VOID_type_node void_type_node
#define CONST_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define CONST_INDEX_2 (saw_index = 1, IMMEDIATE_u2)
#define VAR_INDEX_1 (saw_index = 1, IMMEDIATE_u1)
#define VAR_INDEX_2 (saw_index = 1, IMMEDIATE_u2)

#define CHECK_PC_IN_RANGE(PC) ((void)1) /* Already handled by verifier. */

  /* Position the reader at METHOD's bytecode and (re)allocate the
     per-PC bookkeeping arrays used by the rest of the expander.  */
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;
  instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
  memset (instruction_bits, 0, length + 1);
  vec_alloc (type_states, length + 1);
  type_states->quick_grow_cleared (length + 1);

  /* This pass figures out which PC can be the targets of jumps. */
  for (PC = 0; PC < length;)
    {
      int oldpc = PC; /* PC at instruction start. */
      instruction_bits [PC] |= BCODE_INSTRUCTION_START;
      switch (byte_ops[PC++])
	{
/* The switch body is generated from javaop.def: each opcode expands to
   a case that consumes its operands via the PRE_* macros, whose only
   job in this pass is to advance PC and record branch targets.  */
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
        case OPCODE: \
	  PRE_##OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
	  break;

/* Record a branch target (relative offsets are from the opcode's own
   address, OLDPC).  */
#define NOTE_LABEL(PC) note_label(oldpc, PC)

#define PRE_PUSHC(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_LOAD(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STORE(OPERAND_TYPE, OPERAND_VALUE) (void)(OPERAND_VALUE);
#define PRE_STACK(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_UNOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_BINOP(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_CONVERT2(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  PRE_SPECIAL_##INSTRUCTION(OPERAND_TYPE)
#define PRE_SPECIAL_IINC(OPERAND_TYPE) \
  ((void) IMMEDIATE_u1, (void) IMMEDIATE_s1)
#define PRE_SPECIAL_ENTER(IGNORE) /* nothing */
#define PRE_SPECIAL_EXIT(IGNORE) /* nothing */
#define PRE_SPECIAL_THROW(IGNORE) /* nothing */
#define PRE_SPECIAL_BREAK(IGNORE) /* nothing */

/* two forms of wide instructions */
#define PRE_SPECIAL_WIDE(IGNORE) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    if (modified_opcode == OPCODE_iinc)	\
      { \
	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
	(void) IMMEDIATE_s2;	/* constbyte1 and constbyte2 */ \
      } \
    else \
      { \
	(void) IMMEDIATE_u2;	/* indexbyte1 and indexbyte2 */ \
      } \
  }

#define PRE_IMPL(IGNORE1, IGNORE2) /* nothing */

#define PRE_MONITOR(OPERAND_TYPE, OPERAND_VALUE) /* nothing */

#define PRE_RETURN(OPERAND_TYPE, OPERAND_VALUE) /* nothing */
#define PRE_ARRAY(OPERAND_TYPE, SUBOP) \
	  PRE_ARRAY_##SUBOP(OPERAND_TYPE)
#define PRE_ARRAY_LOAD(TYPE) /* nothing */
#define PRE_ARRAY_STORE(TYPE) /* nothing */
#define PRE_ARRAY_LENGTH(TYPE) /* nothing */
#define PRE_ARRAY_NEW(TYPE) PRE_ARRAY_NEW_##TYPE
#define PRE_ARRAY_NEW_NUM ((void) IMMEDIATE_u1)
#define PRE_ARRAY_NEW_PTR ((void) IMMEDIATE_u2)
#define PRE_ARRAY_NEW_MULTI ((void) IMMEDIATE_u2, (void) IMMEDIATE_u1)

/* Branch-like opcodes: note their targets so labels get emitted.  */
#define PRE_TEST(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_COND(OPERAND_TYPE, OPERAND_VALUE) NOTE_LABEL (oldpc+IMMEDIATE_s2)
#define PRE_BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);
#define PRE_JSR(OPERAND_TYPE, OPERAND_VALUE) \
  saw_index = 0;  INT_temp = (OPERAND_VALUE); \
  NOTE_LABEL (PC); \
  if (!saw_index)  NOTE_LABEL(oldpc + INT_temp);

#define PRE_RET(OPERAND_TYPE, OPERAND_VALUE)  (void)(OPERAND_VALUE)

/* tableswitch/lookupswitch: skip alignment padding, then note the
   default target and every case target.  */
#define PRE_SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; PRE_##TABLE_OR_LOOKUP##_SWITCH

#define PRE_LOOKUP_SWITCH						\
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc);			\
    if (npairs >= 0) \
      while (--npairs >= 0) { \
       jint match ATTRIBUTE_UNUSED = IMMEDIATE_s4; \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_TABLE_SWITCH				\
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    NOTE_LABEL (default_offset+oldpc);	\
    if (low <= high) \
      while (low++ <= high) { \
       jint offset = IMMEDIATE_s4; \
       NOTE_LABEL (offset+oldpc); } \
  }

#define PRE_FIELD(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_OBJECT(MAYBE_STATIC, PUT_OR_GET) (void)(IMMEDIATE_u2);
#define PRE_INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  (void)(IMMEDIATE_u2); \
  PC += 2 * IS_INTERFACE /* for invokeinterface */;

#include "javaop.def"
#undef JAVAOP
	}
    } /* for */
}
3111
/* Translate the bytecode of METHOD (read through JCF) into GENERIC
   statements: emit labels at branch targets, propagate line-number
   information, and nop-out unverified (dead) code regions.  */

void
expand_byte_code (JCF *jcf, tree method)
{
  int PC;
  int i;
  const unsigned char *linenumber_pointer;
  int dead_code_index = -1;	/* Start PC of the current dead region,
				   or -1 when inside live code.  */
  unsigned char* byte_ops;
  long length = DECL_CODE_LENGTH (method);
  location_t max_location = input_location;

  stack_pointer = 0;
  JCF_SEEK (jcf, DECL_CODE_OFFSET (method));
  byte_ops = jcf->read_ptr;

  /* We make an initial pass of the line number table, to note
     which instructions have associated line number entries. */
  linenumber_pointer = linenumber_table;
  for (i = 0; i < linenumber_count; i++)
    {
      int pc = GET_u2 (linenumber_pointer);
      linenumber_pointer += 4;
      if (pc >= length)
	warning (0, "invalid PC in line number table");
      else
	{
	  if ((instruction_bits[pc] & BCODE_HAS_LINENUMBER) != 0)
	    instruction_bits[pc] |= BCODE_HAS_MULTI_LINENUMBERS;
	  instruction_bits[pc] |= BCODE_HAS_LINENUMBER;
	}
    }  

  /* Run the bytecode verifier; it also sets BCODE_VERIFIED bits and
     records per-PC type maps that load_type_state reads back.  */
  if (! verify_jvm_instructions_new (jcf, byte_ops, length))
    return;

  promote_arguments ();
  cache_this_class_ref (method);
  cache_cpool_data_ref ();

  /* Translate bytecodes.  */
  linenumber_pointer = linenumber_table;
  for (PC = 0; PC < length;)
    {
      if ((instruction_bits [PC] & BCODE_TARGET) != 0 || PC == 0)
	{
	  tree label = lookup_label (PC);
          flush_quick_stack ();
	  if ((instruction_bits [PC] & BCODE_TARGET) != 0)
	    java_add_stmt (build1 (LABEL_EXPR, void_type_node, label));
	  /* At a verified target, reset the local/stack type model to
	     what the verifier recorded for this PC.  */
	  if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
	    load_type_state (PC);
	}

      if (! (instruction_bits [PC] & BCODE_VERIFIED))
	{
	  if (dead_code_index == -1)
	    {
	      /* This is the start of a region of unreachable bytecodes.
                 They still need to be processed in order for EH ranges
                 to get handled correctly.  However, we can simply
                 replace these bytecodes with nops.  */
	      dead_code_index = PC;
            }
          
          /* Turn this bytecode into a nop.  */
          byte_ops[PC] = 0x0;
        }
      else
        {
	  if (dead_code_index != -1)
	    {
              /* We've just reached the end of a region of dead code.  */
	      if (extra_warnings)
		warning (0, "unreachable bytecode from %d to before %d",
			 dead_code_index, PC);
              dead_code_index = -1;
            }
	}

      /* Handle possible line number entry for this PC.

	 This code handles out-of-order and multiple linenumbers per PC,
	 but is optimized for the case of line numbers increasing
	 monotonically with PC. */
      if ((instruction_bits[PC] & BCODE_HAS_LINENUMBER) != 0)
	{
	  /* If the entries are out of order (or there are several for
	     this PC), rescan the table from the beginning.  */
	  if ((instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS) != 0
	      || GET_u2 (linenumber_pointer) != PC)
	    linenumber_pointer = linenumber_table;
	  while (linenumber_pointer < linenumber_table + linenumber_count * 4)
	    {
	      int pc = GET_u2 (linenumber_pointer);
	      linenumber_pointer += 4;
	      if (pc == PC)
		{
		  int line = GET_u2 (linenumber_pointer - 2);
		  input_location = linemap_line_start (line_table, line, 1);
		  if (input_location > max_location)
		    max_location = input_location;
		  if (!(instruction_bits[PC] & BCODE_HAS_MULTI_LINENUMBERS))
		    break;
		}
	    }
	}
      maybe_pushlevels (PC);
      PC = process_jvm_instruction (PC, byte_ops, length);
      maybe_poplevels (PC);
    } /* for */

  uncache_this_class_ref (method);

  if (dead_code_index != -1)
    {
      /* We've just reached the end of a region of dead code.  */
      if (extra_warnings)
	warning (0, "unreachable bytecode from %d to the end of the method",
		 dead_code_index);
    }

  DECL_FUNCTION_LAST_LINE (method) = max_location;
}
3233
3234 static void
java_push_constant_from_pool(JCF * jcf,int index)3235 java_push_constant_from_pool (JCF *jcf, int index)
3236 {
3237 tree c;
3238 if (JPOOL_TAG (jcf, index) == CONSTANT_String)
3239 {
3240 tree name;
3241 name = get_name_constant (jcf, JPOOL_USHORT1 (jcf, index));
3242 index = alloc_name_constant (CONSTANT_String, name);
3243 c = build_ref_from_constant_pool (index);
3244 c = convert (promote_type (string_type_node), c);
3245 }
3246 else if (JPOOL_TAG (jcf, index) == CONSTANT_Class
3247 || JPOOL_TAG (jcf, index) == CONSTANT_ResolvedClass)
3248 {
3249 tree record = get_class_constant (jcf, index);
3250 c = build_class_ref (record);
3251 }
3252 else
3253 c = get_constant (jcf, index);
3254 push_value (c);
3255 }
3256
/* Expand the single JVM instruction at PC in BYTE_OPS into GENERIC,
   returning the PC of the following instruction.  The opcode dispatch
   is generated from javaop.def via the OPKIND macros defined below.  */

int
process_jvm_instruction (int PC, const unsigned char* byte_ops,
			 long length ATTRIBUTE_UNUSED)
{ 
  const char *opname; /* Temporary ??? */
  int oldpc = PC; /* PC at instruction start. */

  /* If the instruction is at the beginning of an exception handler,
     replace the top of the stack with the thrown object reference.  */
  if (instruction_bits [PC] & BCODE_EXCEPTION_TARGET)
    {
      /* Note that the verifier will not emit a type map at all for
	 dead exception handlers.  In this case we just ignore the
	 situation.  */
      if ((instruction_bits[PC] & BCODE_VERIFIED) != 0)
	{
	  tree type = pop_type (promote_type (throwable_type_node));
	  push_value (build_exception_object_ref (type));
	}
    }

  switch (byte_ops[PC++])
    {
/* Each opcode in javaop.def becomes a case that records OPNAME (for
   diagnostics) and expands its OPKIND macro to do the real work.  */
#define JAVAOP(OPNAME, OPCODE, OPKIND, OPERAND_TYPE, OPERAND_VALUE) \
    case OPCODE: \
      opname = #OPNAME; \
      OPKIND(OPERAND_TYPE, OPERAND_VALUE); \
      break;

#define RET(OPERAND_TYPE, OPERAND_VALUE) 				\
  {									\
    int saw_index = 0;							\
    int index     = OPERAND_VALUE;					\
    (void) saw_index;  /* Avoid set but not used warning.  */		\
    build_java_ret							\
      (find_local_variable (index, return_address_type_node, oldpc));	\
  }

#define JSR(OPERAND_TYPE, OPERAND_VALUE) \
  {						    \
    /* OPERAND_VALUE may have side-effects on PC */ \
    int opvalue = OPERAND_VALUE;		    \
    build_java_jsr (oldpc + opvalue, PC);	    \
  }

/* Push a constant onto the stack. */
#define PUSHC(OPERAND_TYPE, OPERAND_VALUE) \
  { int saw_index = 0;  int ival = (OPERAND_VALUE); \
    if (saw_index) java_push_constant_from_pool (current_jcf, ival); \
    else expand_java_pushc (ival, OPERAND_TYPE##_type_node); }

/* internal macro added for use by the WIDE case */
#define LOAD_INTERNAL(OPTYPE, OPVALUE) \
  expand_load_internal (OPVALUE, type_map[OPVALUE], oldpc);

/* Push local variable onto the opcode stack. */
#define LOAD(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    LOAD_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }

#define RETURN(OPERAND_TYPE, OPERAND_VALUE) \
  expand_java_return (OPERAND_TYPE##_type_node)

#define REM_EXPR TRUNC_MOD_EXPR
#define BINOP(OPERAND_TYPE, OPERAND_VALUE) \
  expand_java_binop (OPERAND_TYPE##_type_node, OPERAND_VALUE##_EXPR)

#define FIELD(IS_STATIC, IS_PUT) \
  expand_java_field_op (IS_STATIC, IS_PUT, IMMEDIATE_u2)

/* Conditional branches: TEST compares against zero/null, COND
   compares two stack operands.  */
#define TEST(OPERAND_TYPE, CONDITION) \
  expand_test (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)

#define COND(OPERAND_TYPE, CONDITION) \
  expand_cond (CONDITION##_EXPR, OPERAND_TYPE##_type_node, oldpc+IMMEDIATE_s2)

#define BRANCH(OPERAND_TYPE, OPERAND_VALUE) \
  BRANCH_##OPERAND_TYPE (OPERAND_VALUE)

#define BRANCH_GOTO(OPERAND_VALUE) \
  expand_java_goto (oldpc + OPERAND_VALUE)

#define BRANCH_CALL(OPERAND_VALUE) \
  expand_java_call (oldpc + OPERAND_VALUE, oldpc)

#if 0
#define BRANCH_RETURN(OPERAND_VALUE) \
  { \
    tree type = OPERAND_TYPE##_type_node; \
    tree value = find_local_variable (OPERAND_VALUE, type, oldpc); \
    expand_java_ret (value); \
  }
#endif

#define NOT_IMPL(OPERAND_TYPE, OPERAND_VALUE) \
	  fprintf (stderr, "%3d: %s ", oldpc, opname); \
	  fprintf (stderr, "(not implemented)\n")
#define NOT_IMPL1(OPERAND_VALUE) \
	  fprintf (stderr, "%3d: %s ", oldpc, opname); \
	  fprintf (stderr, "(not implemented)\n")

#define BRANCH_RETURN(OPERAND_VALUE) NOT_IMPL1(OPERAND_VALUE)

#define STACK(SUBOP, COUNT) STACK_##SUBOP (COUNT)

#define STACK_POP(COUNT) java_stack_pop (COUNT)

#define STACK_SWAP(COUNT) java_stack_swap()

#define STACK_DUP(COUNT) java_stack_dup (COUNT, 0)
#define STACK_DUPx1(COUNT) java_stack_dup (COUNT, 1)
#define STACK_DUPx2(COUNT) java_stack_dup (COUNT, 2)

/* tableswitch/lookupswitch: skip the alignment padding, then expand
   the default and each case target.  */
#define SWITCH(OPERAND_TYPE, TABLE_OR_LOOKUP) \
  PC = (PC + 3) / 4 * 4; TABLE_OR_LOOKUP##_SWITCH

#define LOOKUP_SWITCH \
  { jint default_offset = IMMEDIATE_s4;  jint npairs = IMMEDIATE_s4; \
    tree selector = pop_value (INT_type_node); \
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
    while (--npairs >= 0) \
      { \
	jint match = IMMEDIATE_s4; jint offset = IMMEDIATE_s4; \
	expand_java_add_case (switch_expr, match, oldpc + offset); \
      } \
  }

#define TABLE_SWITCH \
  { jint default_offset = IMMEDIATE_s4; \
    jint low = IMMEDIATE_s4; jint high = IMMEDIATE_s4; \
    tree selector = pop_value (INT_type_node); \
    tree switch_expr = expand_java_switch (selector, oldpc + default_offset); \
    for (; low <= high; low++) \
      { \
        jint offset = IMMEDIATE_s4; \
	expand_java_add_case (switch_expr, low, oldpc + offset); \
      } \
  }

#define INVOKE(MAYBE_STATIC, IS_INTERFACE) \
  { int opcode = byte_ops[PC-1]; \
    int method_ref_index = IMMEDIATE_u2; \
    int nargs; \
    if (IS_INTERFACE) { nargs = IMMEDIATE_u1;  (void) IMMEDIATE_u1; } \
    else nargs = -1; \
    expand_invoke (opcode, method_ref_index, nargs); \
  }

/* Handle new, checkcast, instanceof */
#define OBJECT(TYPE, OP) \
  expand_java_##OP (get_class_constant (current_jcf, IMMEDIATE_u2))

#define ARRAY(OPERAND_TYPE, SUBOP) ARRAY_##SUBOP(OPERAND_TYPE)

#define ARRAY_LOAD(OPERAND_TYPE) 			\
  {							\
    expand_java_arrayload( OPERAND_TYPE##_type_node );	\
  }

#define ARRAY_STORE(OPERAND_TYPE)			\
  {							\
    expand_java_arraystore( OPERAND_TYPE##_type_node );	\
  }

#define ARRAY_LENGTH(OPERAND_TYPE) expand_java_array_length();
#define ARRAY_NEW(OPERAND_TYPE) ARRAY_NEW_##OPERAND_TYPE()
#define ARRAY_NEW_PTR()							\
    push_value (build_anewarray (get_class_constant (current_jcf,	\
						     IMMEDIATE_u2),	\
				 pop_value (int_type_node)));
#define ARRAY_NEW_NUM()				\
  {						\
    int atype = IMMEDIATE_u1;			\
    push_value (build_newarray (atype, pop_value (int_type_node)));\
  }
#define ARRAY_NEW_MULTI()					\
  {								\
    tree klass = get_class_constant (current_jcf, IMMEDIATE_u2 );	\
    int  ndims = IMMEDIATE_u1;					\
    expand_java_multianewarray( klass, ndims );			\
  }

#define UNOP(OPERAND_TYPE, OPERAND_VALUE) \
  push_value (fold_build1 (NEGATE_EXPR, OPERAND_TYPE##_type_node, \
			   pop_value (OPERAND_TYPE##_type_node)));

#define CONVERT2(FROM_TYPE, TO_TYPE)					 \
  {									 \
    push_value (build1 (NOP_EXPR, int_type_node,			 \
			(convert (TO_TYPE##_type_node,			 \
				  pop_value (FROM_TYPE##_type_node))))); \
  }

#define CONVERT(FROM_TYPE, TO_TYPE)				\
  {								\
    push_value (convert (TO_TYPE##_type_node,	                \
			 pop_value (FROM_TYPE##_type_node)));	\
  }

/* internal macro added for use by the WIDE case 
   Added TREE_TYPE (decl) assignment, apbianco  */
#define STORE_INTERNAL(OPTYPE, OPVALUE)				\
  {								\
    tree decl, value;						\
    int index = OPVALUE;					\
    tree type = OPTYPE;						\
    value = pop_value (type);					\
    type = TREE_TYPE (value);					\
    decl = find_local_variable (index, type, oldpc);		\
    set_local_type (index, type);				\
    java_add_stmt (build2 (MODIFY_EXPR, type, decl, value));	\
  }

#define STORE(OPERAND_TYPE, OPERAND_VALUE) \
  { \
    /* have to do this since OPERAND_VALUE may have side-effects */ \
    int opvalue = OPERAND_VALUE; \
    STORE_INTERNAL(OPERAND_TYPE##_type_node, opvalue); \
  }

#define SPECIAL(OPERAND_TYPE, INSTRUCTION) \
  SPECIAL_##INSTRUCTION(OPERAND_TYPE)

#define SPECIAL_ENTER(IGNORED) MONITOR_OPERATION (soft_monitorenter_node)
#define SPECIAL_EXIT(IGNORED)  MONITOR_OPERATION (soft_monitorexit_node)

#define MONITOR_OPERATION(call)			\
  {						\
    tree o = pop_value (ptr_type_node);		\
    tree c;					\
    flush_quick_stack ();			\
    c = build_java_monitor (call, o);		\
    TREE_SIDE_EFFECTS (c) = 1;			\
    java_add_stmt (c);				\
  }

#define SPECIAL_IINC(IGNORED) \
  { \
    unsigned int local_var_index = IMMEDIATE_u1; \
    int ival = IMMEDIATE_s1; \
    expand_iinc(local_var_index, ival, oldpc); \
  }

/* The `wide' prefix: re-dispatch on the modified opcode with 16-bit
   local-variable indices (and a 16-bit constant for iinc).  */
#define SPECIAL_WIDE(IGNORED) \
  { \
    int modified_opcode = IMMEDIATE_u1; \
    unsigned int local_var_index = IMMEDIATE_u2; \
    switch (modified_opcode) \
      { \
      case OPCODE_iinc: \
	{ \
	  int ival = IMMEDIATE_s2; \
	  expand_iinc (local_var_index, ival, oldpc); \
	  break; \
	} \
      case OPCODE_iload: \
      case OPCODE_lload: \
      case OPCODE_fload: \
      case OPCODE_dload: \
      case OPCODE_aload: \
	{ \
	  /* duplicate code from LOAD macro */ \
	  LOAD_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      case OPCODE_istore: \
      case OPCODE_lstore: \
      case OPCODE_fstore: \
      case OPCODE_dstore: \
      case OPCODE_astore: \
	{ \
	  STORE_INTERNAL(operand_type[modified_opcode], local_var_index); \
	  break; \
	} \
      default: \
	error ("unrecognized wide sub-instruction"); \
      } \
  }

#define SPECIAL_THROW(IGNORED) \
  build_java_athrow (pop_value (throwable_type_node))

#define SPECIAL_BREAK NOT_IMPL1
#define IMPL          NOT_IMPL

#include "javaop.def"
#undef JAVAOP
    default:
      fprintf (stderr, "%3d: unknown(%3d)\n", oldpc, byte_ops[PC]);
    }
  return PC;
}
3552
3553 /* Return the opcode at PC in the code section pointed to by
3554 CODE_OFFSET. */
3555
3556 static unsigned char
peek_opcode_at_pc(JCF * jcf,int code_offset,int pc)3557 peek_opcode_at_pc (JCF *jcf, int code_offset, int pc)
3558 {
3559 unsigned char opcode;
3560 long absolute_offset = (long)JCF_TELL (jcf);
3561
3562 JCF_SEEK (jcf, code_offset);
3563 opcode = jcf->read_ptr [pc];
3564 JCF_SEEK (jcf, absolute_offset);
3565 return opcode;
3566 }
3567
3568 /* Some bytecode compilers are emitting accurate LocalVariableTable
3569 attributes. Here's an example:
3570
3571 PC <t>store_<n>
3572 PC+1 ...
3573
3574 Attribute "LocalVariableTable"
3575 slot #<n>: ... (PC: PC+1 length: L)
3576
3577 This is accurate because the local in slot <n> really exists after
3578 the opcode at PC is executed, hence from PC+1 to PC+1+L.
3579
3580 This procedure recognizes this situation and extends the live range
3581 of the local in SLOT to START_PC-1 or START_PC-2 (depending on the
3582 length of the store instruction.)
3583
3584 This function is used by `give_name_to_locals' so that a local's
3585 DECL features a DECL_LOCAL_START_PC such that the first related
3586 store operation will use DECL as a destination, not an unrelated
3587 temporary created for the occasion.
3588
   This function uses a global (instruction_bits) that `note_instructions'
   should have allocated and filled properly. */
3591
3592 int
maybe_adjust_start_pc(struct JCF * jcf,int code_offset,int start_pc,int slot)3593 maybe_adjust_start_pc (struct JCF *jcf, int code_offset,
3594 int start_pc, int slot)
3595 {
3596 int first, index, opcode;
3597 int pc, insn_pc;
3598 int wide_found = 0;
3599
3600 if (!start_pc)
3601 return start_pc;
3602
3603 first = index = -1;
3604
3605 /* Find last previous instruction and remember it */
3606 for (pc = start_pc-1; pc; pc--)
3607 if (instruction_bits [pc] & BCODE_INSTRUCTION_START)
3608 break;
3609 insn_pc = pc;
3610
3611 /* Retrieve the instruction, handle `wide'. */
3612 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3613 if (opcode == OPCODE_wide)
3614 {
3615 wide_found = 1;
3616 opcode = (int) peek_opcode_at_pc (jcf, code_offset, pc++);
3617 }
3618
3619 switch (opcode)
3620 {
3621 case OPCODE_astore_0:
3622 case OPCODE_astore_1:
3623 case OPCODE_astore_2:
3624 case OPCODE_astore_3:
3625 first = OPCODE_astore_0;
3626 break;
3627
3628 case OPCODE_istore_0:
3629 case OPCODE_istore_1:
3630 case OPCODE_istore_2:
3631 case OPCODE_istore_3:
3632 first = OPCODE_istore_0;
3633 break;
3634
3635 case OPCODE_lstore_0:
3636 case OPCODE_lstore_1:
3637 case OPCODE_lstore_2:
3638 case OPCODE_lstore_3:
3639 first = OPCODE_lstore_0;
3640 break;
3641
3642 case OPCODE_fstore_0:
3643 case OPCODE_fstore_1:
3644 case OPCODE_fstore_2:
3645 case OPCODE_fstore_3:
3646 first = OPCODE_fstore_0;
3647 break;
3648
3649 case OPCODE_dstore_0:
3650 case OPCODE_dstore_1:
3651 case OPCODE_dstore_2:
3652 case OPCODE_dstore_3:
3653 first = OPCODE_dstore_0;
3654 break;
3655
3656 case OPCODE_astore:
3657 case OPCODE_istore:
3658 case OPCODE_lstore:
3659 case OPCODE_fstore:
3660 case OPCODE_dstore:
3661 index = peek_opcode_at_pc (jcf, code_offset, pc);
3662 if (wide_found)
3663 {
3664 int other = peek_opcode_at_pc (jcf, code_offset, ++pc);
3665 index = (other << 8) + index;
3666 }
3667 break;
3668 }
3669
3670 /* Now we decide: first >0 means we have a <t>store_<n>, index >0
3671 means we have a <t>store. */
3672 if ((first > 0 && opcode - first == slot) || (index > 0 && index == slot))
3673 start_pc = insn_pc;
3674
3675 return start_pc;
3676 }
3677
3678 /* Build a node to represent empty statements and blocks. */
3679
3680 tree
build_java_empty_stmt(void)3681 build_java_empty_stmt (void)
3682 {
3683 tree t = build_empty_stmt (input_location);
3684 return t;
3685 }
3686
3687 /* Promote all args of integral type before generating any code. */
3688
3689 static void
promote_arguments(void)3690 promote_arguments (void)
3691 {
3692 int i;
3693 tree arg;
3694 for (arg = DECL_ARGUMENTS (current_function_decl), i = 0;
3695 arg != NULL_TREE; arg = DECL_CHAIN (arg), i++)
3696 {
3697 tree arg_type = TREE_TYPE (arg);
3698 if (INTEGRAL_TYPE_P (arg_type)
3699 && TYPE_PRECISION (arg_type) < 32)
3700 {
3701 tree copy = find_local_variable (i, integer_type_node, -1);
3702 java_add_stmt (build2 (MODIFY_EXPR, integer_type_node,
3703 copy,
3704 fold_convert (integer_type_node, arg)));
3705 }
3706 if (TYPE_IS_WIDE (arg_type))
3707 i++;
3708 }
3709 }
3710
3711 /* Create a local variable that points to the constant pool. */
3712
3713 static void
cache_cpool_data_ref(void)3714 cache_cpool_data_ref (void)
3715 {
3716 if (optimize)
3717 {
3718 tree cpool;
3719 tree d = build_constant_data_ref (flag_indirect_classes);
3720 tree cpool_ptr = build_decl (input_location, VAR_DECL, NULL_TREE,
3721 build_pointer_type (TREE_TYPE (d)));
3722 java_add_local_var (cpool_ptr);
3723 TREE_CONSTANT (cpool_ptr) = 1;
3724
3725 java_add_stmt (build2 (MODIFY_EXPR, TREE_TYPE (cpool_ptr),
3726 cpool_ptr, build_address_of (d)));
3727 cpool = build1 (INDIRECT_REF, TREE_TYPE (d), cpool_ptr);
3728 TREE_THIS_NOTRAP (cpool) = 1;
3729 TYPE_CPOOL_DATA_REF (output_class) = cpool;
3730 }
3731 }
3732
3733 #include "gt-java-expr.h"
3734