1 /* Write out a Java(TM) class file.
2    Copyright (C) 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
3 
4 This file is part of GNU CC.
5 
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10 
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 You should have received a copy of the GNU General Public License
16 along with GNU CC; see the file COPYING.  If not, write to
17 the Free Software Foundation, 59 Temple Place - Suite 330,
18 Boston, MA 02111-1307, USA.
19 
20 Java and all Java-based marks are trademarks or registered trademarks
21 of Sun Microsystems, Inc. in the United States and other countries.
22 The Free Software Foundation is independent of Sun Microsystems, Inc.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "jcf.h"
27 #include "tree.h"
28 #include "real.h"
29 #include "java-tree.h"
30 #include "obstack.h"
31 #undef AND
32 #include "rtl.h"
33 #include "flags.h"
34 #include "java-opcodes.h"
35 #include "parse.h" /* for BLOCK_EXPR_BODY */
36 #include "buffer.h"
37 #include "toplev.h"
38 #include "ggc.h"
39 
40 #ifndef DIR_SEPARATOR
41 #define DIR_SEPARATOR '/'
42 #endif
43 
44 extern struct obstack temporary_obstack;
45 
46 /* Base directory in which `.class' files should be written.
47    NULL means to put the file into the same directory as the
48    corresponding .java file.  */
49 char *jcf_write_base_directory = NULL;
50 
51 /* Make sure bytecode.data is big enough for at least N more bytes. */
52 
53 #define RESERVE(N) \
54   do { CHECK_OP(state); \
55     if (state->bytecode.ptr + (N) > state->bytecode.limit) \
56     buffer_grow (&state->bytecode, N); } while (0)
57 
58 /* Add a 1-byte instruction/operand I to bytecode.data,
59    assuming space has already been RESERVE'd. */
60 
61 #define OP1(I) (*state->bytecode.ptr++ = (I), CHECK_OP(state))
62 
63 /* Like OP1, but I is a 2-byte big endian integer. */
64 
65 #define OP2(I) \
66   do { int _i = (I); OP1 (_i >> 8);  OP1 (_i); CHECK_OP(state); } while (0)
67 
68 /* Like OP1, but I is a 4-byte big endian integer. */
69 
70 #define OP4(I) \
71   do { int _i = (I);  OP1 (_i >> 24);  OP1 (_i >> 16); \
72        OP1 (_i >> 8); OP1 (_i); CHECK_OP(state); } while (0)
73 
74 /* Macro to call each time we push I words on the JVM stack. */
75 
76 #define NOTE_PUSH(I) \
77   do { state->code_SP += (I); \
78     if (state->code_SP > state->code_SP_max) \
79       state->code_SP_max = state->code_SP; } while (0)
80 
81 /* Macro to call each time we pop I words from the JVM stack. */
82 
83 #define NOTE_POP(I) \
84   do { state->code_SP -= (I); if (state->code_SP < 0) abort(); } while (0)
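
/* Illustrative sketch (not part of the original source): a typical emission
   sequence reserves space first, then writes the opcode and its operands,
   and finally records the net stack effect.  For example, pushing a 2-word
   constant from constant-pool slot INDEX looks like:

     RESERVE (3);
     OP1 (OPCODE_ldc2_w);
     OP2 (index);
     NOTE_PUSH (2);

   RESERVE must precede the OP* macros so they never write past
   state->bytecode.limit.  */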
85 
86 /* A chunk or segment of a .class file. */
87 
88 struct chunk
89 {
90   /* The next segment of this .class file. */
91   struct chunk *next;
92 
93   /* The actual data in this segment to be written to the .class file. */
94   unsigned char *data;
95 
96   /* The size of the segment to be written to the .class file. */
97   int size;
98 };
99 
100 #define PENDING_CLEANUP_PC (-3)
101 #define PENDING_EXIT_PC (-2)
102 #define UNDEFINED_PC (-1)
103 
104 /* Each "block" represents a label plus the bytecode instructions following.
105    There may be branches out of the block, but no incoming jumps, except
106    to the beginning of the block.
107 
108    If (pc < 0), the jcf_block is not an actual block (i.e. it has no
109    associated code yet), but it is an undefined label.
110 */
111 
112 struct jcf_block
113 {
114   /* For blocks that are defined, the next block (in pc order).
115      For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
116      or of a cleanup expression (from a TRY_FINALLY_EXPR),
117      this is the next (outer) such end label, in a stack headed by
118      labeled_blocks in jcf_partial. */
119   struct jcf_block *next;
120 
121   /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
122      pc is PENDING_EXIT_PC.
123      In the not-yet-defined end label for pending cleanup subroutine,
124      pc is PENDING_CLEANUP_PC.
125      For other not-yet-defined labels, pc is UNDEFINED_PC.
126 
127      If the label has been defined:
128      Until perform_relocations is finished, this is the maximum possible
129      value of the bytecode offset at the beginning of this block.
130      After perform_relocations, it is the actual offset (pc). */
131   int pc;
132 
133   int linenumber;
134 
135   /* After finish_jcf_block is called, the actual instructions
136      contained in this block.  Before that NULL, and the instructions
137      are in state->bytecode. */
138   union {
139     struct chunk *chunk;
140 
141     /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
142        covered by the cleanup. */
143     struct jcf_block *start_label;
144   } v;
145 
146   union {
147     /* Set of relocations (in reverse offset order) for this block. */
148     struct jcf_relocation *relocations;
149 
150     /* If this block is that of the not-yet-defined end label of
151        a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
152        If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
153     tree labeled_block;
154   } u;
155 };
156 
157 /* A "relocation" type for the 0-3 bytes of padding at the start
158    of a tableswitch or a lookupswitch. */
159 #define SWITCH_ALIGN_RELOC 4
160 
161 /* A relocation type for the labels in a tableswitch or a lookupswitch;
162    these are relative to the start of the instruction, but (due to
163    the 0-3 bytes of padding), we don't know the offset before relocation. */
164 #define BLOCK_START_RELOC 1
165 
166 struct jcf_relocation
167 {
168   /* Next relocation for the current jcf_block. */
169   struct jcf_relocation *next;
170 
171   /* The (byte) offset within the current block that needs to be relocated. */
172   HOST_WIDE_INT offset;
173 
174   /* 0 if offset is a 4-byte relative offset.
175      4 (SWITCH_ALIGN_RELOC) if offset points to 0-3 padding bytes inserted
176      for proper alignment in tableswitch/lookupswitch instructions.
177      1 (BLOCK_START_RELOC) if offset points to a 4-byte offset relative
178      to the start of the containing block.
179      -1 if offset is a 2-byte relative offset.
180      < -1 if offset is the address of an instruction with a 2-byte offset
181      that does not have a corresponding 4-byte offset version, in which
182      case the absolute value of kind is the inverted opcode.
183      > 4 if offset is the address of an instruction (such as jsr) with a
184      2-byte offset that does have a corresponding 4-byte offset version,
185      in which case kind is the opcode of the 4-byte version (such as jsr_w). */
186   int kind;
187 
188   /* The label the relocation wants to actually transfer to. */
189   struct jcf_block *label;
190 };
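
/* Illustrative sketch (not part of the original source) of how the branch
   emitters below fill in 'kind':

     emit_goto:  emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
                 kind > 4: a 2-byte goto that may be widened to goto_w.
     emit_if:    emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
                 kind < -1: only a 2-byte form exists, so a too-long branch is
                 handled by inverting the condition around a goto_w.
     tableswitch/lookupswitch entries use kind == BLOCK_START_RELOC, a 4-byte
                 offset relative to the start of the instruction's block.  */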
191 
192 #define RELOCATION_VALUE_0 ((HOST_WIDE_INT)0)
193 #define RELOCATION_VALUE_1 ((HOST_WIDE_INT)1)
194 
195 /* State for single catch clause. */
196 
197 struct jcf_handler
198 {
199   struct jcf_handler *next;
200 
201   struct jcf_block *start_label;
202   struct jcf_block *end_label;
203   struct jcf_block *handler_label;
204 
205   /* The sub-class of Throwable handled, or NULL_TREE (for finally). */
206   tree type;
207 };
208 
209 /* State for the current switch statement. */
210 
211 struct jcf_switch_state
212 {
213   struct jcf_switch_state *prev;
214   struct jcf_block *default_label;
215 
216   struct jcf_relocation *cases;
217   int num_cases;
218   HOST_WIDE_INT min_case, max_case;
219 };
220 
221 /* This structure is used to contain the various pieces that will
222    become a .class file. */
223 
224 struct jcf_partial
225 {
226   struct chunk *first;
227   struct chunk *chunk;
228   struct obstack *chunk_obstack;
229   tree current_method;
230 
231   /* List of basic blocks for the current method. */
232   struct jcf_block *blocks;
233   struct jcf_block *last_block;
234 
235   struct localvar_info *first_lvar;
236   struct localvar_info *last_lvar;
237   int lvar_count;
238 
239   CPool cpool;
240 
241   int linenumber_count;
242 
243   /* Until perform_relocations, this is an upper bound on the number
244      of bytes (so far) in the instructions for the current method. */
245   int code_length;
246 
247   /* Stack of undefined ending labels for LABELED_BLOCK_EXPR. */
248   struct jcf_block *labeled_blocks;
249 
250   /* The current stack size (stack pointer) in the current method. */
251   int code_SP;
252 
253   /* The largest extent of stack size (stack pointer) in the current method. */
254   int code_SP_max;
255 
256   /* Contains a mapping from local var slot number to localvar_info. */
257   struct buffer localvars;
258 
259   /* The buffer allocated for bytecode for the current jcf_block. */
260   struct buffer bytecode;
261 
262   /* Chain of exception handlers for the current method. */
263   struct jcf_handler *handlers;
264 
265   /* Last element in handlers chain. */
266   struct jcf_handler *last_handler;
267 
268   /* Number of exception handlers for the current method. */
269   int num_handlers;
270 
271   /* Number of finalizers we are currently nested within. */
272   int num_finalizers;
273 
274   /* If non-NULL, use this for the return value. */
275   tree return_value_decl;
276 
277   /* Information about the current switch statement. */
278   struct jcf_switch_state *sw_state;
279 
280   /* The count of jsr instructions that have been emitted.  */
281   long num_jsrs;
282 };
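
/* A rough sketch (not part of the original source) of how these pieces fit
   together for one class; the actual sequencing lives in the class-file
   generation code later in this file:

     init_jcf_state (state, work);        -- set up obstack, cpool, buffers
     for each method:
       init_jcf_method (state, method);
       generate_bytecode_insns (body, IGNORE_TARGET, state);
       perform_relocations (state);       -- resolve branch offsets, widen jumps
     chunks = generate_classfile (clas, state);
     write_chunks (stream, chunks);
     release_jcf_state (state);
*/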
283 
284 static void generate_bytecode_insns PARAMS ((tree, int, struct jcf_partial *));
285 static struct chunk * alloc_chunk PARAMS ((struct chunk *, unsigned char *,
286 					  int, struct obstack *));
287 static unsigned char * append_chunk PARAMS ((unsigned char *, int,
288 					    struct jcf_partial *));
289 static void append_chunk_copy PARAMS ((unsigned char *, int,
290 				      struct jcf_partial *));
291 static struct jcf_block * gen_jcf_label PARAMS ((struct jcf_partial *));
292 static void finish_jcf_block PARAMS ((struct jcf_partial *));
293 static void define_jcf_label PARAMS ((struct jcf_block *,
294 				     struct jcf_partial *));
295 static struct jcf_block * get_jcf_label_here PARAMS ((struct jcf_partial *));
296 static void put_linenumber PARAMS ((int, struct jcf_partial *));
297 static void localvar_alloc PARAMS ((tree, struct jcf_partial *));
298 static void maybe_free_localvar PARAMS ((tree, struct jcf_partial *, int));
299 static int get_access_flags PARAMS ((tree));
300 static void write_chunks PARAMS ((FILE *, struct chunk *));
301 static int adjust_typed_op PARAMS ((tree, int));
302 static void generate_bytecode_conditional PARAMS ((tree, struct jcf_block *,
303 						  struct jcf_block *, int,
304 						  struct jcf_partial *));
305 static void generate_bytecode_return PARAMS ((tree, struct jcf_partial *));
306 static void perform_relocations PARAMS ((struct jcf_partial *));
307 static void init_jcf_state PARAMS ((struct jcf_partial *, struct obstack *));
308 static void init_jcf_method PARAMS ((struct jcf_partial *, tree));
309 static void release_jcf_state PARAMS ((struct jcf_partial *));
310 static struct chunk * generate_classfile PARAMS ((tree, struct jcf_partial *));
311 static struct jcf_handler *alloc_handler PARAMS ((struct jcf_block *,
312 						 struct jcf_block *,
313 						 struct jcf_partial *));
314 static void emit_iinc PARAMS ((tree, HOST_WIDE_INT, struct jcf_partial *));
315 static void emit_reloc PARAMS ((HOST_WIDE_INT, int, struct jcf_block *,
316 			       struct jcf_partial *));
317 static void push_constant1 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
318 static void push_constant2 PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
319 static void push_int_const PARAMS ((HOST_WIDE_INT, struct jcf_partial *));
320 static int find_constant_wide PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
321 				      struct jcf_partial *));
322 static void push_long_const PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
323 				    struct jcf_partial *));
324 static int find_constant_index PARAMS ((tree, struct jcf_partial *));
327 static void field_op PARAMS ((tree, int, struct jcf_partial *));
328 static void maybe_wide PARAMS ((int, int, struct jcf_partial *));
329 static void emit_dup PARAMS ((int, int, struct jcf_partial *));
330 static void emit_pop PARAMS ((int, struct jcf_partial *));
331 static void emit_load_or_store PARAMS ((tree, int, struct jcf_partial *));
332 static void emit_load PARAMS ((tree, struct jcf_partial *));
333 static void emit_store PARAMS ((tree, struct jcf_partial *));
334 static void emit_unop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
335 static void emit_binop PARAMS ((enum java_opcode, tree, struct jcf_partial *));
338 static void emit_switch_reloc PARAMS ((struct jcf_block *,
339 				      struct jcf_partial *));
340 static void emit_case_reloc PARAMS ((struct jcf_relocation *,
341 				    struct jcf_partial *));
342 static void emit_if PARAMS ((struct jcf_block *, int, int,
343 			    struct jcf_partial *));
344 static void emit_goto PARAMS ((struct jcf_block *, struct jcf_partial *));
345 static void emit_jsr PARAMS ((struct jcf_block *, struct jcf_partial *));
346 static void call_cleanups PARAMS ((struct jcf_block *, struct jcf_partial *));
347 static char *make_class_file_name PARAMS ((tree));
348 static unsigned char *append_synthetic_attribute PARAMS ((struct jcf_partial *));
349 static void append_innerclasses_attribute PARAMS ((struct jcf_partial *, tree));
350 static void append_innerclasses_attribute_entry PARAMS ((struct jcf_partial *, tree, tree));
351 static void append_gcj_attribute PARAMS ((struct jcf_partial *, tree));
352 
353 /* Utility macros for appending (big-endian) data to a buffer.
354    We assume a local variable 'ptr' points into where we want to
355    write next, and we assume enough space has been allocated. */
356 
357 #ifdef ENABLE_JC1_CHECKING
358 static int CHECK_PUT PARAMS ((void *, struct jcf_partial *, int));
359 
360 static int
361 CHECK_PUT (ptr, state, i)
362      void *ptr;
363      struct jcf_partial *state;
364      int i;
365 {
366   if ((unsigned char *) ptr < state->chunk->data
367       || (unsigned char *) ptr + i > state->chunk->data + state->chunk->size)
368     abort ();
369 
370   return 0;
371 }
372 #else
373 #define CHECK_PUT(PTR, STATE, I) ((void)0)
374 #endif
375 
376 #define PUT1(X)  (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
377 #define PUT2(X)  (PUT1((X) >> 8), PUT1((X) & 0xFF))
378 #define PUT4(X)  (PUT2((X) >> 16), PUT2((X) & 0xFFFF))
379 #define PUTN(P, N)  (CHECK_PUT(ptr, state, N), memcpy(ptr, P, N), ptr += (N))
380 
381 /* There are some cases below where CHECK_PUT is guaranteed to fail.
382    Use the following macros in those specific cases.  */
383 #define UNSAFE_PUT1(X)  (*ptr++ = (X))
384 #define UNSAFE_PUT2(X)  (UNSAFE_PUT1((X) >> 8), UNSAFE_PUT1((X) & 0xFF))
385 #define UNSAFE_PUT4(X)  (UNSAFE_PUT2((X) >> 16), UNSAFE_PUT2((X) & 0xFFFF))
386 #define UNSAFE_PUTN(P, N)  (memcpy(ptr, P, N), ptr += (N))
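
/* Illustrative sketch (not part of the original source): callers obtain
   'ptr' from append_chunk and then write big-endian fields, e.g.

     unsigned char *ptr = append_chunk (NULL, 6, state);
     PUT2 (name_index);
     PUT4 (attribute_length);

   With ENABLE_JC1_CHECKING defined, CHECK_PUT verifies that each write
   stays within the current chunk; the UNSAFE_* variants skip that check.  */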
387 
388 
389 /* Allocate a new chunk on obstack WORK, and link it in after LAST.
390    Set the data and size fields to DATA and SIZE, respectively.
391    However, if DATA is NULL and SIZE>0, allocate a buffer as well. */
392 
393 static struct chunk *
394 alloc_chunk (last, data, size, work)
395      struct chunk *last;
396      unsigned char *data;
397      int size;
398      struct obstack *work;
399 {
400   struct chunk *chunk = (struct chunk *)
401     obstack_alloc (work, sizeof(struct chunk));
402 
403   if (data == NULL && size > 0)
404     data = obstack_alloc (work, size);
405 
406   chunk->next = NULL;
407   chunk->data = data;
408   chunk->size = size;
409   if (last != NULL)
410     last->next = chunk;
411   return chunk;
412 }
413 
414 #ifdef ENABLE_JC1_CHECKING
415 static int CHECK_OP PARAMS ((struct jcf_partial *));
416 
417 static int
418 CHECK_OP (state)
419      struct jcf_partial *state;
420 {
421   if (state->bytecode.ptr > state->bytecode.limit)
422     abort ();
423 
424   return 0;
425 }
426 #else
427 #define CHECK_OP(STATE) ((void) 0)
428 #endif
429 
430 static unsigned char *
431 append_chunk (data, size, state)
432      unsigned char *data;
433      int size;
434      struct jcf_partial *state;
435 {
436   state->chunk = alloc_chunk (state->chunk, data, size, state->chunk_obstack);
437   if (state->first == NULL)
438     state->first = state->chunk;
439   return state->chunk->data;
440 }
441 
442 static void
443 append_chunk_copy (data, size, state)
444      unsigned char *data;
445      int size;
446      struct jcf_partial *state;
447 {
448   unsigned char *ptr = append_chunk (NULL, size, state);
449   memcpy (ptr, data, size);
450 }
451 
452 static struct jcf_block *
453 gen_jcf_label (state)
454      struct jcf_partial *state;
455 {
456   struct jcf_block *block = (struct jcf_block *)
457     obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
458   block->next =	NULL;
459   block->linenumber = -1;
460   block->pc = UNDEFINED_PC;
461   return block;
462 }
463 
464 static void
465 finish_jcf_block (state)
466      struct jcf_partial *state;
467 {
468   struct jcf_block *block = state->last_block;
469   struct jcf_relocation *reloc;
470   int code_length = BUFFER_LENGTH (&state->bytecode);
471   int pc = state->code_length;
472   append_chunk_copy (state->bytecode.data, code_length, state);
473   BUFFER_RESET (&state->bytecode);
474   block->v.chunk = state->chunk;
475 
476   /* Calculate code_length to the maximum value it can have. */
477   pc += block->v.chunk->size;
478   for (reloc = block->u.relocations;  reloc != NULL;  reloc = reloc->next)
479     {
480       int kind = reloc->kind;
481       if (kind == SWITCH_ALIGN_RELOC)
482 	pc += 3;
483       else if (kind > BLOCK_START_RELOC)
484 	pc += 2; /* 2-byte offset may grow to 4-byte offset */
485       else if (kind < -1)
486 	pc += 5; /* May need to add a goto_w. */
487     }
488   state->code_length = pc;
489 }
490 
491 static void
492 define_jcf_label (label, state)
493      struct jcf_block *label;
494      struct jcf_partial *state;
495 {
496   if (state->last_block != NULL)
497     finish_jcf_block (state);
498   label->pc = state->code_length;
499   if (state->blocks == NULL)
500     state->blocks = label;
501   else
502     state->last_block->next = label;
503   state->last_block = label;
504   label->next = NULL;
505   label->u.relocations = NULL;
506 }
507 
508 static struct jcf_block *
509 get_jcf_label_here (state)
510      struct jcf_partial *state;
511 {
512   if (state->last_block != NULL && BUFFER_LENGTH (&state->bytecode) == 0)
513     return state->last_block;
514   else
515     {
516       struct jcf_block *label = gen_jcf_label (state);
517       define_jcf_label (label, state);
518       return label;
519     }
520 }
521 
522 /* Note a line number entry for the current PC and given LINE. */
523 
524 static void
525 put_linenumber (line, state)
526      int line;
527      struct jcf_partial *state;
528 {
529   struct jcf_block *label = get_jcf_label_here (state);
530   if (label->linenumber > 0)
531     {
532       label = gen_jcf_label (state);
533       define_jcf_label (label, state);
534     }
535   label->linenumber = line;
536   state->linenumber_count++;
537 }
538 
539 /* Allocate a new jcf_handler, for a catch clause that catches exceptions
540    in the range (START_LABEL, END_LABEL). */
541 
542 static struct jcf_handler *
543 alloc_handler (start_label, end_label, state)
544      struct jcf_block *start_label;
545      struct jcf_block *end_label;
546      struct jcf_partial *state;
547 {
548   struct jcf_handler *handler = (struct jcf_handler *)
549     obstack_alloc (state->chunk_obstack, sizeof (struct jcf_handler));
550   handler->start_label = start_label;
551   handler->end_label = end_label;
552   handler->handler_label = get_jcf_label_here (state);
553   if (state->handlers == NULL)
554     state->handlers = handler;
555   else
556     state->last_handler->next = handler;
557   state->last_handler = handler;
558   handler->next = NULL;
559   state->num_handlers++;
560   return handler;
561 }
562 
563 
564 /* The index of jvm local variable allocated for this DECL.
565    This is assigned when generating .class files;
566    contrast DECL_LOCAL_SLOT_NUMBER which is set when *reading* a .class file.
567    (We don't allocate DECL_LANG_SPECIFIC for locals from Java source code.) */
568 
569 #define DECL_LOCAL_INDEX(DECL) DECL_ALIGN(DECL)
570 
571 struct localvar_info
572 {
573   struct localvar_info *next;
574 
575   tree decl;
576   struct jcf_block *start_label;
577   struct jcf_block *end_label;
578 };
579 
580 #define localvar_buffer ((struct localvar_info**) state->localvars.data)
581 #define localvar_max \
582   ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
583 
584 static void
585 localvar_alloc (decl, state)
586      tree decl;
587      struct jcf_partial *state;
588 {
589   struct jcf_block *start_label = get_jcf_label_here (state);
590   int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
591   int index;
592   register struct localvar_info *info;
593   register struct localvar_info **ptr = localvar_buffer;
594   register struct localvar_info **limit
595     = (struct localvar_info**) state->localvars.ptr;
596   for (index = 0;  ptr < limit;  index++, ptr++)
597     {
598       if (ptr[0] == NULL
599 	  && (! wide || ((ptr+1) < limit && ptr[1] == NULL)))
600 	break;
601     }
602   if (ptr == limit)
603     {
604       buffer_grow (&state->localvars, 2 * sizeof (struct localvar_info*));
605       ptr = (struct localvar_info**) state->localvars.data + index;
606       state->localvars.ptr = (unsigned char *) (ptr + 1 + wide);
607     }
608   info = (struct localvar_info *)
609     obstack_alloc (state->chunk_obstack, sizeof (struct localvar_info));
610   ptr[0] = info;
611   if (wide)
612     ptr[1] = (struct localvar_info *)(~0);
613   DECL_LOCAL_INDEX (decl) = index;
614   info->decl = decl;
615   info->start_label = start_label;
616 
617   if (debug_info_level > DINFO_LEVEL_TERSE
618       && DECL_NAME (decl) != NULL_TREE)
619     {
620       /* Generate debugging info. */
621       info->next = NULL;
622       if (state->last_lvar != NULL)
623 	state->last_lvar->next = info;
624       else
625 	state->first_lvar = info;
626       state->last_lvar = info;
627       state->lvar_count++;
628     }
629 }
630 
631 static void
632 maybe_free_localvar (decl, state, really)
633      tree decl;
634      struct jcf_partial *state;
635      int really;
636 {
637   struct jcf_block *end_label = get_jcf_label_here (state);
638   int index = DECL_LOCAL_INDEX (decl);
639   register struct localvar_info **ptr = &localvar_buffer [index];
640   register struct localvar_info *info = *ptr;
641   int wide = TYPE_IS_WIDE (TREE_TYPE (decl));
642 
643   info->end_label = end_label;
644 
645   if (info->decl != decl)
646     abort ();
647   if (! really)
648     return;
649   ptr[0] = NULL;
650   if (wide)
651     {
652       if (ptr[1] !=  (struct localvar_info *)(~0))
653 	abort ();
654       ptr[1] = NULL;
655     }
656 }
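
/* Note (an illustrative summary, not original text): an 'int' local takes a
   single free slot, while a 'long' or 'double' (TYPE_IS_WIDE) needs two
   consecutive free slots; e.g. a 'double' at index 3 occupies slots 3 and 4,
   and slot 4 holds the sentinel ~0 until maybe_free_localvar releases the
   pair.  */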
657 
658 
659 #define STACK_TARGET 1
660 #define IGNORE_TARGET 2
661 
662 /* Get the access flags of a class (TYPE_DECL), a method (FUNCTION_DECL), or
663    a field (FIELD_DECL or VAR_DECL, if static), as encoded in a .class file. */
664 
665 static int
666 get_access_flags (decl)
667     tree decl;
668 {
669   int flags = 0;
670   int isfield = TREE_CODE (decl) == FIELD_DECL || TREE_CODE (decl) == VAR_DECL;
671   if (CLASS_PUBLIC (decl))  /* same as FIELD_PUBLIC and METHOD_PUBLIC */
672     flags |= ACC_PUBLIC;
673   if (CLASS_FINAL (decl))  /* same as FIELD_FINAL and METHOD_FINAL */
674     flags |= ACC_FINAL;
675   if (isfield || TREE_CODE (decl) == FUNCTION_DECL)
676     {
677       if (TREE_PROTECTED (decl))
678 	flags |= ACC_PROTECTED;
679       if (TREE_PRIVATE (decl))
680 	flags |= ACC_PRIVATE;
681     }
682   else if (TREE_CODE (decl) == TYPE_DECL)
683     {
684       if (CLASS_SUPER (decl))
685 	flags |= ACC_SUPER;
686       if (CLASS_ABSTRACT (decl))
687 	flags |= ACC_ABSTRACT;
688       if (CLASS_INTERFACE (decl))
689 	flags |= ACC_INTERFACE;
690       if (CLASS_STATIC (decl))
691 	flags |= ACC_STATIC;
692       if (CLASS_PRIVATE (decl))
693 	flags |= ACC_PRIVATE;
694       if (CLASS_PROTECTED (decl))
695 	flags |= ACC_PROTECTED;
696       if (ANONYMOUS_CLASS_P (TREE_TYPE (decl))
697 	  || LOCAL_CLASS_P (TREE_TYPE (decl)))
698 	flags |= ACC_PRIVATE;
699       if (CLASS_STRICTFP (decl))
700 	flags |= ACC_STRICT;
701     }
702   else
703     abort ();
704 
705   if (TREE_CODE (decl) == FUNCTION_DECL)
706     {
707       if (METHOD_NATIVE (decl))
708 	flags |= ACC_NATIVE;
709       if (METHOD_STATIC (decl))
710 	flags |= ACC_STATIC;
711       if (METHOD_SYNCHRONIZED (decl))
712 	flags |= ACC_SYNCHRONIZED;
713       if (METHOD_ABSTRACT (decl))
714 	flags |= ACC_ABSTRACT;
715       if (METHOD_STRICTFP (decl))
716 	flags |= ACC_STRICT;
717     }
718   if (isfield)
719     {
720       if (FIELD_STATIC (decl))
721 	flags |= ACC_STATIC;
722       if (FIELD_VOLATILE (decl))
723 	flags |= ACC_VOLATILE;
724       if (FIELD_TRANSIENT (decl))
725 	flags |= ACC_TRANSIENT;
726     }
727   return flags;
728 }
729 
730 /* Write the list of segments starting at CHUNKS to STREAM. */
731 
732 static void
733 write_chunks (stream, chunks)
734      FILE* stream;
735      struct chunk *chunks;
736 {
737   for (;  chunks != NULL;  chunks = chunks->next)
738     fwrite (chunks->data, chunks->size, 1, stream);
739 }
740 
741 /* Push a 1-word constant in the constant pool at the given INDEX.
742    (Caller is responsible for doing NOTE_PUSH.) */
743 
744 static void
745 push_constant1 (index, state)
746      HOST_WIDE_INT index;
747      struct jcf_partial *state;
748 {
749   RESERVE (3);
750   if (index < 256)
751     {
752       OP1 (OPCODE_ldc);
753       OP1 (index);
754     }
755   else
756     {
757       OP1 (OPCODE_ldc_w);
758       OP2 (index);
759     }
760 }
761 
762 /* Push a 2-word constant in the constant pool at the given INDEX.
763    (Caller is responsible for doing NOTE_PUSH.) */
764 
765 static void
766 push_constant2 (index, state)
767      HOST_WIDE_INT index;
768      struct jcf_partial *state;
769 {
770   RESERVE (3);
771   OP1 (OPCODE_ldc2_w);
772   OP2 (index);
773 }
774 
775 /* Push 32-bit integer constant on VM stack.
776    Caller is responsible for doing NOTE_PUSH. */
777 
778 static void
779 push_int_const (i, state)
780      HOST_WIDE_INT i;
781      struct jcf_partial *state;
782 {
783   RESERVE(3);
784   if (i >= -1 && i <= 5)
785     OP1(OPCODE_iconst_0 + i);
786   else if (i >= -128 && i < 128)
787     {
788       OP1(OPCODE_bipush);
789       OP1(i);
790     }
791   else if (i >= -32768 && i < 32768)
792     {
793       OP1(OPCODE_sipush);
794       OP2(i);
795     }
796   else
797     {
798       i = find_constant1 (&state->cpool, CONSTANT_Integer,
799 			  (jword)(i & 0xFFFFFFFF));
800       push_constant1 (i, state);
801     }
802 }
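
/* Worked examples for push_int_const (illustrative, not original text):

     i = 3       ->  iconst_3
     i = 100     ->  bipush 100
     i = 1000    ->  sipush 1000
     i = 100000  ->  ldc <CONSTANT_Integer 100000>

   In each case the caller still performs NOTE_PUSH (1).  */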
803 
804 static int
805 find_constant_wide (lo, hi, state)
806      HOST_WIDE_INT lo, hi;
807      struct jcf_partial *state;
808 {
809   HOST_WIDE_INT w1, w2;
810   lshift_double (lo, hi, -32, 64, &w1, &w2, 1);
811   return find_constant2 (&state->cpool, CONSTANT_Long,
812 			 (jword)(w1 & 0xFFFFFFFF), (jword)(lo & 0xFFFFFFFF));
813 }
814 
815 /* Find or allocate a constant pool entry for the given VALUE.
816    Return the index in the constant pool. */
817 
818 static int
819 find_constant_index (value, state)
820      tree value;
821      struct jcf_partial *state;
822 {
823   if (TREE_CODE (value) == INTEGER_CST)
824     {
825       if (TYPE_PRECISION (TREE_TYPE (value)) <= 32)
826 	return find_constant1 (&state->cpool, CONSTANT_Integer,
827 			       (jword)(TREE_INT_CST_LOW (value) & 0xFFFFFFFF));
828       else
829 	return find_constant_wide (TREE_INT_CST_LOW (value),
830 				   TREE_INT_CST_HIGH (value), state);
831     }
832   else if (TREE_CODE (value) == REAL_CST)
833     {
834       long words[2];
835 
836       real_to_target (words, &TREE_REAL_CST (value),
837 		      TYPE_MODE (TREE_TYPE (value)));
838       words[0] &= 0xffffffff;
839       words[1] &= 0xffffffff;
840 
841       if (TYPE_PRECISION (TREE_TYPE (value)) == 32)
842 	return find_constant1 (&state->cpool, CONSTANT_Float, (jword)words[0]);
843       else
844 	return find_constant2 (&state->cpool, CONSTANT_Double,
845 			       (jword)words[1-FLOAT_WORDS_BIG_ENDIAN],
846 			       (jword)words[FLOAT_WORDS_BIG_ENDIAN]);
847     }
848   else if (TREE_CODE (value) == STRING_CST)
849     return find_string_constant (&state->cpool, value);
850 
851   else
852     abort ();
853 }
854 
855 /* Push 64-bit long constant on VM stack.
856    Caller is responsible for doing NOTE_PUSH. */
857 
858 static void
859 push_long_const (lo, hi, state)
860      HOST_WIDE_INT lo, hi;
861      struct jcf_partial *state;
862 {
863   HOST_WIDE_INT highpart, dummy;
864   jint lowpart = WORD_TO_INT (lo);
865 
866   rshift_double (lo, hi, 32, 64, &highpart, &dummy, 1);
867 
868   if (highpart == 0 && (lowpart == 0 || lowpart == 1))
869     {
870       RESERVE(1);
871       OP1(OPCODE_lconst_0 + lowpart);
872     }
873   else if ((highpart == 0 && lowpart > 0 && lowpart < 32768)
874 	   || (highpart == -1 && lowpart < 0 && lowpart >= -32768))
875       {
876         push_int_const (lowpart, state);
877         RESERVE (1);
878         OP1 (OPCODE_i2l);
879       }
880   else
881     push_constant2 (find_constant_wide (lo, hi, state), state);
882 }
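
/* Worked examples for push_long_const (illustrative, not original text):

     0 or 1                   ->  lconst_0 / lconst_1
     small values, e.g. 1000  ->  sipush 1000; i2l
     anything else            ->  ldc2_w <CONSTANT_Long>

   The caller performs NOTE_PUSH (2).  */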
883 
884 static void
885 field_op (field, opcode, state)
886      tree field;
887      int opcode;
888      struct jcf_partial *state;
889 {
890   int index = find_fieldref_index (&state->cpool, field);
891   RESERVE (3);
892   OP1 (opcode);
893   OP2 (index);
894 }
895 
896 /* Returns an integer in the range 0 (for 'int') through 7 (for 'short'),
897    with 4 for an object reference, which matches the pattern of how JVM
898    opcodes typically depend on the operand type. */
899 
900 static int
901 adjust_typed_op (type, max)
902      tree type;
903      int max;
904 {
905   switch (TREE_CODE (type))
906     {
907     case POINTER_TYPE:
908     case RECORD_TYPE:   return 4;
909     case BOOLEAN_TYPE:
910       return TYPE_PRECISION (type) == 32 || max < 5 ? 0 : 5;
911     case CHAR_TYPE:
912       return TYPE_PRECISION (type) == 32 || max < 6 ? 0 : 6;
913     case INTEGER_TYPE:
914       switch (TYPE_PRECISION (type))
915 	{
916 	case  8:       return max < 5 ? 0 : 5;
917 	case 16:       return max < 7 ? 0 : 7;
918 	case 32:       return 0;
919 	case 64:       return 1;
920 	}
921       break;
922     case REAL_TYPE:
923       switch (TYPE_PRECISION (type))
924 	{
925 	case 32:       return 2;
926 	case 64:       return 3;
927 	}
928       break;
929     default:
930       break;
931     }
932   abort ();
933 }
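
/* Illustrative use (not part of the original source): the result is added
   to a "base" opcode to select the typed variant, e.g.

     OP1 (OPCODE_iload  + adjust_typed_op (type, 4));   -- iload .. aload
     OP1 (OPCODE_iaload + adjust_typed_op (type, 7));   -- iaload .. saload

   as done in emit_load_or_store and the ARRAY_REF case of
   generate_bytecode_insns below.  */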
934 
935 static void
936 maybe_wide (opcode, index, state)
937      int opcode, index;
938      struct jcf_partial *state;
939 {
940   if (index >= 256)
941     {
942       RESERVE (4);
943       OP1 (OPCODE_wide);
944       OP1 (opcode);
945       OP2 (index);
946     }
947   else
948     {
949       RESERVE (2);
950       OP1 (opcode);
951       OP1 (index);
952     }
953 }
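
/* For instance (illustrative, not original text): maybe_wide (OPCODE_iload,
   300, state) emits the 4-byte sequence "wide iload 300", whereas an index
   below 256 gets the plain 2-byte "iload <index>" form.  */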
954 
955 /* Compile code to duplicate with offset, where
956    SIZE is the size of the stack item to duplicate (1 or 2), and
957    OFFSET is where to insert the result (must be 0, 1, or 2).
958    (The new words get inserted at stack[SP-size-offset].) */
959 
960 static void
961 emit_dup (size, offset, state)
962      int size, offset;
963      struct jcf_partial *state;
964 {
965   int kind;
966   if (size == 0)
967     return;
968   RESERVE(1);
969   if (offset == 0)
970     kind = size == 1 ? OPCODE_dup : OPCODE_dup2;
971   else if (offset == 1)
972     kind = size == 1 ? OPCODE_dup_x1 : OPCODE_dup2_x1;
973   else if (offset == 2)
974     kind = size == 1 ? OPCODE_dup_x2 : OPCODE_dup2_x2;
975   else
976     abort();
977   OP1 (kind);
978   NOTE_PUSH (size);
979 }
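
/* For instance (illustrative, not original text): emit_dup (1, 0, state)
   emits dup; emit_dup (2, 0, state) emits dup2 (for a long/double); and
   emit_dup (1, 2, state) emits dup_x2, which places the copy beneath the
   two words that were under the original top value.  */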
980 
981 static void
982 emit_pop (size, state)
983      int size;
984      struct jcf_partial *state;
985 {
986   RESERVE (1);
987   OP1 (OPCODE_pop - 1 + size);
988 }
989 
990 static void
991 emit_iinc (var, value, state)
992      tree var;
993      HOST_WIDE_INT value;
994      struct jcf_partial *state;
995 {
996   int slot = DECL_LOCAL_INDEX (var);
997 
998   if (value < -128 || value > 127 || slot >= 256)
999     {
1000       RESERVE (6);
1001       OP1 (OPCODE_wide);
1002       OP1 (OPCODE_iinc);
1003       OP2 (slot);
1004       OP2 (value);
1005     }
1006   else
1007     {
1008       RESERVE (3);
1009       OP1 (OPCODE_iinc);
1010       OP1 (slot);
1011       OP1 (value);
1012     }
1013 }
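
/* For instance (illustrative, not original text): incrementing local slot 2
   by 1 emits the 3-byte "iinc 2 1"; a slot >= 256 or an increment outside
   -128..127 forces the 6-byte "wide iinc <slot:16> <delta:16>" form.  */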
1014 
1015 static void
1016 emit_load_or_store (var, opcode, state)
1017      tree var;    /* Variable to load from or store into. */
1018      int opcode;  /* Either OPCODE_iload or OPCODE_istore. */
1019      struct jcf_partial *state;
1020 {
1021   tree type = TREE_TYPE (var);
1022   int kind = adjust_typed_op (type, 4);
1023   int index = DECL_LOCAL_INDEX (var);
1024   if (index <= 3)
1025     {
1026       RESERVE (1);
1027       OP1 (opcode + 5 + 4 * kind + index);    /* [ilfda]{load,store}_[0123] */
1028     }
1029   else
1030     maybe_wide (opcode + kind, index, state);  /* [ilfda]{load,store} */
1031 }
1032 
1033 static void
1034 emit_load (var, state)
1035      tree var;
1036      struct jcf_partial *state;
1037 {
1038   emit_load_or_store (var, OPCODE_iload, state);
1039   NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1040 }
1041 
1042 static void
1043 emit_store (var, state)
1044      tree var;
1045      struct jcf_partial *state;
1046 {
1047   emit_load_or_store (var, OPCODE_istore, state);
1048   NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (var)) ? 2 : 1);
1049 }
1050 
1051 static void
1052 emit_unop (opcode, type, state)
1053      enum java_opcode opcode;
1054      tree type ATTRIBUTE_UNUSED;
1055      struct jcf_partial *state;
1056 {
1057   RESERVE(1);
1058   OP1 (opcode);
1059 }
1060 
1061 static void
1062 emit_binop (opcode, type, state)
1063      enum java_opcode opcode;
1064      tree type;
1065      struct jcf_partial *state;
1066 {
1067   int size = TYPE_IS_WIDE (type) ? 2 : 1;
1068   RESERVE(1);
1069   OP1 (opcode);
1070   NOTE_POP (size);
1071 }
1072 
1073 static void
1074 emit_reloc (value, kind, target, state)
1075      HOST_WIDE_INT value;
1076      int kind;
1077      struct jcf_block *target;
1078      struct jcf_partial *state;
1079 {
1080   struct jcf_relocation *reloc = (struct jcf_relocation *)
1081     obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1082   struct jcf_block *block = state->last_block;
1083   reloc->next = block->u.relocations;
1084   block->u.relocations = reloc;
1085   reloc->offset = BUFFER_LENGTH (&state->bytecode);
1086   reloc->label = target;
1087   reloc->kind = kind;
1088   if (kind == 0 || kind == BLOCK_START_RELOC)
1089     OP4 (value);
1090   else if (kind != SWITCH_ALIGN_RELOC)
1091     OP2 (value);
1092 }
1093 
1094 static void
1095 emit_switch_reloc (label, state)
1096      struct jcf_block *label;
1097      struct jcf_partial *state;
1098 {
1099   emit_reloc (RELOCATION_VALUE_0, BLOCK_START_RELOC, label, state);
1100 }
1101 
1102 /* Similar to emit_switch_reloc,
1103    but re-uses an existing case reloc. */
1104 
1105 static void
1106 emit_case_reloc (reloc, state)
1107      struct jcf_relocation *reloc;
1108      struct jcf_partial *state;
1109 {
1110   struct jcf_block *block = state->last_block;
1111   reloc->next = block->u.relocations;
1112   block->u.relocations = reloc;
1113   reloc->offset = BUFFER_LENGTH (&state->bytecode);
1114   reloc->kind = BLOCK_START_RELOC;
1115   OP4 (0);
1116 }
1117 
1118 /* Emit a conditional jump to TARGET with a 2-byte relative jump offset.
1119    The opcode is OPCODE, the inverted opcode is INV_OPCODE. */
1120 
1121 static void
1122 emit_if (target, opcode, inv_opcode, state)
1123      struct jcf_block *target;
1124      int opcode, inv_opcode;
1125      struct jcf_partial *state;
1126 {
1127   RESERVE(3);
1128   OP1 (opcode);
1129   /* value is 1 byte from reloc back to start of instruction.  */
1130   emit_reloc (RELOCATION_VALUE_1, - inv_opcode, target, state);
1131 }
1132 
1133 static void
1134 emit_goto (target, state)
1135      struct jcf_block *target;
1136      struct jcf_partial *state;
1137 {
1138   RESERVE(3);
1139   OP1 (OPCODE_goto);
1140   /* Value is 1 byte from reloc back to start of instruction.  */
1141   emit_reloc (RELOCATION_VALUE_1, OPCODE_goto_w, target, state);
1142 }
1143 
1144 static void
1145 emit_jsr (target, state)
1146      struct jcf_block *target;
1147      struct jcf_partial *state;
1148 {
1149   RESERVE(3);
1150   OP1 (OPCODE_jsr);
1151   /* Value is 1 byte from reloc back to start of instruction.  */
1152   emit_reloc (RELOCATION_VALUE_1, OPCODE_jsr_w, target, state);
1153   state->num_jsrs++;
1154 }
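
/* Illustrative sketch (not part of the original source): a forward branch
   is emitted against a label that is defined later, e.g.

     struct jcf_block *target = gen_jcf_label (state);
     emit_goto (target, state);
     ...
     define_jcf_label (target, state);

   Each emitter writes the opcode plus a 2-byte placeholder; the
   RELOCATION_VALUE_1 it passes records that the offset field starts 1 byte
   after the instruction, and perform_relocations later fills in (and, if
   necessary, widens) the actual offsets.  */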
1155 
1156 /* Generate code to evaluate EXP.  If the result is true,
1157    branch to TRUE_LABEL; otherwise, branch to FALSE_LABEL.
1158    TRUE_BRANCH_FIRST is a code generation hint that the
1159    TRUE_LABEL may follow right after this. (The idea is that we
1160    may be able to optimize away GOTO TRUE_LABEL; TRUE_LABEL:) */
1161 
1162 static void
1163 generate_bytecode_conditional (exp, true_label, false_label,
1164 			       true_branch_first, state)
1165      tree exp;
1166      struct jcf_block *true_label;
1167      struct jcf_block *false_label;
1168      int true_branch_first;
1169      struct jcf_partial *state;
1170 {
1171   tree exp0, exp1, type;
1172   int save_SP = state->code_SP;
1173   enum java_opcode op, negop;
1174   switch (TREE_CODE (exp))
1175     {
1176     case INTEGER_CST:
1177       emit_goto (integer_zerop (exp) ? false_label : true_label, state);
1178       break;
1179     case COND_EXPR:
1180       {
1181 	struct jcf_block *then_label = gen_jcf_label (state);
1182 	struct jcf_block *else_label = gen_jcf_label (state);
1183 	int save_SP_before, save_SP_after;
1184 	generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1185 				       then_label, else_label, 1, state);
1186 	define_jcf_label (then_label, state);
1187 	save_SP_before = state->code_SP;
1188 	generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1189 				       true_label, false_label, 1, state);
1190 	save_SP_after = state->code_SP;
1191 	state->code_SP = save_SP_before;
1192 	define_jcf_label (else_label, state);
1193 	generate_bytecode_conditional (TREE_OPERAND (exp, 2),
1194 				       true_label, false_label,
1195 				       true_branch_first, state);
1196 	if (state->code_SP != save_SP_after)
1197 	  abort ();
1198       }
1199       break;
1200     case TRUTH_NOT_EXPR:
1201       generate_bytecode_conditional (TREE_OPERAND (exp, 0), false_label,
1202 				     true_label, ! true_branch_first, state);
1203       break;
1204     case TRUTH_ANDIF_EXPR:
1205       {
1206 	struct jcf_block *next_label = gen_jcf_label (state);
1207 	generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1208 				       next_label, false_label, 1, state);
1209 	define_jcf_label (next_label, state);
1210 	generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1211 				       true_label, false_label, 1, state);
1212       }
1213       break;
1214     case TRUTH_ORIF_EXPR:
1215       {
1216 	struct jcf_block *next_label = gen_jcf_label (state);
1217 	generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1218 				       true_label, next_label, 1, state);
1219 	define_jcf_label (next_label, state);
1220 	generate_bytecode_conditional (TREE_OPERAND (exp, 1),
1221 				       true_label, false_label, 1, state);
1222       }
1223       break;
1224     compare_1:
1225       /* Assuming op is one of the 2-operand if_icmp<COND> instructions,
1226 	 set it to the corresponding 1-operand if<COND> instruction. */
1227       op = op - 6;
1228       /* FALLTHROUGH */
1229     compare_2:
1230       /* The opcodes with their inverses are allocated in pairs.
1231 	 E.g.  The inverse of if_icmplt (161) is if_icmpge (162). */
1232       negop = (op & 1) ? op + 1 : op - 1;
1233     compare_2_ptr:
1234       if (true_branch_first)
1235 	{
1236 	  emit_if (false_label, negop, op, state);
1237 	  emit_goto (true_label, state);
1238 	}
1239       else
1240 	{
1241 	  emit_if (true_label, op, negop, state);
1242 	  emit_goto (false_label, state);
1243 	}
1244       break;
1245     case EQ_EXPR:
1246       op = OPCODE_if_icmpeq;
1247       goto compare;
1248     case NE_EXPR:
1249       op = OPCODE_if_icmpne;
1250       goto compare;
1251     case GT_EXPR:
1252       op = OPCODE_if_icmpgt;
1253       goto compare;
1254     case LT_EXPR:
1255       op = OPCODE_if_icmplt;
1256       goto compare;
1257     case GE_EXPR:
1258       op = OPCODE_if_icmpge;
1259       goto compare;
1260     case LE_EXPR:
1261       op = OPCODE_if_icmple;
1262       goto compare;
1263     compare:
1264       exp0 = TREE_OPERAND (exp, 0);
1265       exp1 = TREE_OPERAND (exp, 1);
1266       type = TREE_TYPE (exp0);
1267       switch (TREE_CODE (type))
1268 	{
1269 	  int opf;
1270 	case POINTER_TYPE:  case RECORD_TYPE:
1271 	  switch (TREE_CODE (exp))
1272 	    {
1273 	    case EQ_EXPR:  op = OPCODE_if_acmpeq;  break;
1274 	    case NE_EXPR:  op = OPCODE_if_acmpne;  break;
1275 	    default:  abort();
1276 	    }
1277 	  if (integer_zerop (exp1) || integer_zerop (exp0))
1278 	    {
1279 	      generate_bytecode_insns (integer_zerop (exp0) ? exp1 : exp0,
1280 				       STACK_TARGET, state);
1281 	      op = op + (OPCODE_ifnull - OPCODE_if_acmpeq);
1282 	      negop = (op & 1) ? op - 1 : op + 1;
1283 	      NOTE_POP (1);
1284 	      goto compare_2_ptr;
1285 	    }
1286 	  generate_bytecode_insns (exp0, STACK_TARGET, state);
1287 	  generate_bytecode_insns (exp1, STACK_TARGET, state);
1288 	  NOTE_POP (2);
1289 	  goto compare_2;
1290 	case REAL_TYPE:
1291 	  generate_bytecode_insns (exp0, STACK_TARGET, state);
1292 	  generate_bytecode_insns (exp1, STACK_TARGET, state);
1293 	  if (op == OPCODE_if_icmplt || op == OPCODE_if_icmple)
1294 	    opf = OPCODE_fcmpg;
1295 	  else
1296 	    opf = OPCODE_fcmpl;
1297 	  if (TYPE_PRECISION (type) > 32)
1298 	    {
1299 	      opf += 2;
1300 	      NOTE_POP (4);
1301 	    }
1302 	  else
1303 	    NOTE_POP (2);
1304 	  RESERVE (1);
1305 	  OP1 (opf);
1306 	  goto compare_1;
1307 	case INTEGER_TYPE:
1308 	  if (TYPE_PRECISION (type) > 32)
1309 	    {
1310 	      generate_bytecode_insns (exp0, STACK_TARGET, state);
1311 	      generate_bytecode_insns (exp1, STACK_TARGET, state);
1312 	      NOTE_POP (4);
1313 	      RESERVE (1);
1314 	      OP1 (OPCODE_lcmp);
1315 	      goto compare_1;
1316 	    }
1317 	  /* FALLTHROUGH */
1318 	default:
1319 	  if (integer_zerop (exp1))
1320 	    {
1321 	      generate_bytecode_insns (exp0, STACK_TARGET, state);
1322 	      NOTE_POP (1);
1323 	      goto compare_1;
1324 	    }
1325 	  if (integer_zerop (exp0))
1326 	    {
1327 	      switch (op)
1328 		{
1329 		case OPCODE_if_icmplt:
1330 		case OPCODE_if_icmpge:
1331 		  op += 2;
1332 		  break;
1333 		case OPCODE_if_icmpgt:
1334 		case OPCODE_if_icmple:
1335 		  op -= 2;
1336 		  break;
1337 		default:
1338 		  break;
1339 		}
1340 	      generate_bytecode_insns (exp1, STACK_TARGET, state);
1341 	      NOTE_POP (1);
1342 	      goto compare_1;
1343 	    }
1344 	  generate_bytecode_insns (exp0, STACK_TARGET, state);
1345 	  generate_bytecode_insns (exp1, STACK_TARGET, state);
1346 	  NOTE_POP (2);
1347 	  goto compare_2;
1348 	}
1349 
1350     default:
1351       generate_bytecode_insns (exp, STACK_TARGET, state);
1352       NOTE_POP (1);
1353       if (true_branch_first)
1354 	{
1355 	  emit_if (false_label, OPCODE_ifeq, OPCODE_ifne, state);
1356 	  emit_goto (true_label, state);
1357 	}
1358       else
1359 	{
1360 	  emit_if (true_label, OPCODE_ifne, OPCODE_ifeq, state);
1361 	  emit_goto (false_label, state);
1362 	}
1363       break;
1364     }
1365   if (save_SP != state->code_SP)
1366     abort ();
1367 }
1368 
1369 /* Call pending cleanups, i.e. those for surrounding TRY_FINALLY_EXPRs,
1370    but only as far out as LIMIT (since we are about to jump to the
1371    exit label that is LIMIT). */
1372 
1373 static void
1374 call_cleanups (limit, state)
1375      struct jcf_block *limit;
1376      struct jcf_partial *state;
1377 {
1378   struct jcf_block *block = state->labeled_blocks;
1379   for (;  block != limit;  block = block->next)
1380     {
1381       if (block->pc == PENDING_CLEANUP_PC)
1382 	emit_jsr (block, state);
1383     }
1384 }
1385 
1386 static void
1387 generate_bytecode_return (exp, state)
1388      tree exp;
1389      struct jcf_partial *state;
1390 {
1391   tree return_type = TREE_TYPE (TREE_TYPE (state->current_method));
1392   int returns_void = TREE_CODE (return_type) == VOID_TYPE;
1393   int op;
1394  again:
1395   if (exp != NULL)
1396     {
1397       switch (TREE_CODE (exp))
1398 	{
1399  	case COMPOUND_EXPR:
1400 	  generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET,
1401 				   state);
1402 	  exp = TREE_OPERAND (exp, 1);
1403 	  goto again;
1404 	case COND_EXPR:
1405 	  {
1406 	    struct jcf_block *then_label = gen_jcf_label (state);
1407 	    struct jcf_block *else_label = gen_jcf_label (state);
1408 	    generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1409 					   then_label, else_label, 1, state);
1410 	    define_jcf_label (then_label, state);
1411 	    generate_bytecode_return (TREE_OPERAND (exp, 1), state);
1412 	    define_jcf_label (else_label, state);
1413 	    generate_bytecode_return (TREE_OPERAND (exp, 2), state);
1414 	  }
1415 	  return;
1416 	default:
1417 	  generate_bytecode_insns (exp,
1418 				   returns_void ? IGNORE_TARGET
1419 				   : STACK_TARGET, state);
1420 	}
1421     }
1422   if (returns_void)
1423     {
1424       op = OPCODE_return;
1425       call_cleanups (NULL, state);
1426     }
1427   else
1428     {
1429       op = OPCODE_ireturn + adjust_typed_op (return_type, 4);
1430       if (state->num_finalizers > 0)
1431 	{
1432 	  if (state->return_value_decl == NULL_TREE)
1433 	    {
1434 	      state->return_value_decl
1435 		= build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1436 	      localvar_alloc (state->return_value_decl, state);
1437 	    }
1438 	  emit_store (state->return_value_decl, state);
1439 	  call_cleanups (NULL, state);
1440 	  emit_load (state->return_value_decl, state);
1441 	  /* If we call maybe_free_localvar (state->return_value_decl, state, 1),
1442 	     then we risk the saved decl being erroneously re-used in the
1443 	     finalizer.  Instead, we keep the state->return_value_decl
1444 	     allocated through the rest of the method.  This is not
1445 	     the greatest solution, but it is at least simple and safe. */
1446 	}
1447     }
1448   RESERVE (1);
1449   OP1 (op);
1450 }
1451 
1452 /* Generate bytecode for sub-expression EXP of METHOD.
1453    TARGET is one of STACK_TARGET or IGNORE_TARGET. */
1454 
1455 static void
1456 generate_bytecode_insns (exp, target, state)
1457      tree exp;
1458      int target;
1459      struct jcf_partial *state;
1460 {
1461   tree type, arg;
1462   enum java_opcode jopcode;
1463   int op;
1464   HOST_WIDE_INT value;
1465   int post_op;
1466   int size;
1467   int offset;
1468 
1469   if (exp == NULL && target == IGNORE_TARGET)
1470     return;
1471 
1472   type = TREE_TYPE (exp);
1473 
1474   switch (TREE_CODE (exp))
1475     {
1476     case BLOCK:
1477       if (BLOCK_EXPR_BODY (exp))
1478 	{
1479 	  tree local;
1480 	  tree body = BLOCK_EXPR_BODY (exp);
1481 	  long jsrs = state->num_jsrs;
1482 	  for (local = BLOCK_EXPR_DECLS (exp); local; )
1483 	    {
1484 	      tree next = TREE_CHAIN (local);
1485 	      localvar_alloc (local, state);
1486 	      local = next;
1487 	    }
1488 	  /* Avoid deep recursion for long blocks. */
1489 	  while (TREE_CODE (body) == COMPOUND_EXPR)
1490 	    {
1491 	      generate_bytecode_insns (TREE_OPERAND (body, 0), target, state);
1492 	      body = TREE_OPERAND (body, 1);
1493 	    }
1494 	  generate_bytecode_insns (body, target, state);
1495 
1496 	  for (local = BLOCK_EXPR_DECLS (exp); local; )
1497 	    {
1498 	      tree next = TREE_CHAIN (local);
1499 	      maybe_free_localvar (local, state, state->num_jsrs <= jsrs);
1500 	      local = next;
1501 	    }
1502 	}
1503       break;
1504     case COMPOUND_EXPR:
1505       generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
1506       /* Normally the first operand to a COMPOUND_EXPR must complete
1507 	 normally.  However, in the special case of a do-while
1508 	 statement this is not necessarily the case.  */
1509       if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0)))
1510 	generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1511       break;
1512     case EXPR_WITH_FILE_LOCATION:
1513       {
1514 	const char *saved_input_filename = input_filename;
1515 	tree body = EXPR_WFL_NODE (exp);
1516 	int saved_lineno = lineno;
1517 	if (body == empty_stmt_node)
1518 	  break;
1519 	input_filename = EXPR_WFL_FILENAME (exp);
1520 	lineno = EXPR_WFL_LINENO (exp);
1521 	if (EXPR_WFL_EMIT_LINE_NOTE (exp) && lineno > 0
1522 	    && debug_info_level > DINFO_LEVEL_NONE)
1523 	  put_linenumber (lineno, state);
1524 	generate_bytecode_insns (body, target, state);
1525 	input_filename = saved_input_filename;
1526 	lineno = saved_lineno;
1527       }
1528       break;
1529     case INTEGER_CST:
1530       if (target == IGNORE_TARGET) ; /* do nothing */
1531       else if (TREE_CODE (type) == POINTER_TYPE)
1532 	{
1533 	  if (! integer_zerop (exp))
1534 	    abort();
1535 	  RESERVE(1);
1536 	  OP1 (OPCODE_aconst_null);
1537 	  NOTE_PUSH (1);
1538 	}
1539       else if (TYPE_PRECISION (type) <= 32)
1540 	{
1541 	  push_int_const (TREE_INT_CST_LOW (exp), state);
1542 	  NOTE_PUSH (1);
1543 	}
1544       else
1545 	{
1546 	  push_long_const (TREE_INT_CST_LOW (exp), TREE_INT_CST_HIGH (exp),
1547 			   state);
1548 	  NOTE_PUSH (2);
1549 	}
1550       break;
1551     case REAL_CST:
1552       {
1553 	int prec = TYPE_PRECISION (type) >> 5;
1554 	RESERVE(1);
1555 	if (real_zerop (exp) && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (exp)))
1556 	  OP1 (prec == 1 ? OPCODE_fconst_0 : OPCODE_dconst_0);
1557 	else if (real_onep (exp))
1558 	  OP1 (prec == 1 ? OPCODE_fconst_1 : OPCODE_dconst_1);
1559 	/* FIXME Should also use fconst_2 for 2.0f.
1560 	   Also, should use iconst_2/ldc followed by i2f/i2d
1561 	   for other float/double when the value is a small integer. */
1562 	else
1563 	  {
1564 	    offset = find_constant_index (exp, state);
1565 	    if (prec == 1)
1566 	      push_constant1 (offset, state);
1567 	    else
1568 	      push_constant2 (offset, state);
1569 	  }
1570 	NOTE_PUSH (prec);
1571       }
1572       break;
1573     case STRING_CST:
1574       push_constant1 (find_string_constant (&state->cpool, exp), state);
1575       NOTE_PUSH (1);
1576       break;
1577     case VAR_DECL:
1578       if (TREE_STATIC (exp))
1579 	{
1580 	  field_op (exp, OPCODE_getstatic, state);
1581 	  NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1582 	  break;
1583 	}
1584       /* ... fall through ... */
1585     case PARM_DECL:
1586       emit_load (exp, state);
1587       break;
1588     case NON_LVALUE_EXPR:
1589     case INDIRECT_REF:
1590       generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1591       break;
1592     case ARRAY_REF:
1593       generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
1594       generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1595       if (target != IGNORE_TARGET)
1596 	{
1597 	  jopcode = OPCODE_iaload + adjust_typed_op (type, 7);
1598 	  RESERVE(1);
1599 	  OP1 (jopcode);
1600 	  if (! TYPE_IS_WIDE (type))
1601 	    NOTE_POP (1);
1602 	}
1603       break;
1604     case COMPONENT_REF:
1605       {
1606 	tree obj = TREE_OPERAND (exp, 0);
1607 	tree field = TREE_OPERAND (exp, 1);
1608 	int is_static = FIELD_STATIC (field);
1609 	generate_bytecode_insns (obj,
1610 				 is_static ? IGNORE_TARGET : target, state);
1611 	if (target != IGNORE_TARGET)
1612 	  {
1613 	    if (DECL_NAME (field) == length_identifier_node && !is_static
1614 		&& TYPE_ARRAY_P (TREE_TYPE (obj)))
1615 	      {
1616 		RESERVE (1);
1617 		OP1 (OPCODE_arraylength);
1618 	      }
1619 	    else
1620 	      {
1621 		field_op (field, is_static ? OPCODE_getstatic : OPCODE_getfield,
1622 			  state);
1623 		if (! is_static)
1624 		  NOTE_POP (1);
1625 		NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
1626 	      }
1627 	  }
1628       }
1629       break;
1630     case TRUTH_ANDIF_EXPR:
1631     case TRUTH_ORIF_EXPR:
1632     case EQ_EXPR:
1633     case NE_EXPR:
1634     case GT_EXPR:
1635     case LT_EXPR:
1636     case GE_EXPR:
1637     case LE_EXPR:
1638       {
1639 	struct jcf_block *then_label = gen_jcf_label (state);
1640 	struct jcf_block *else_label = gen_jcf_label (state);
1641 	struct jcf_block *end_label = gen_jcf_label (state);
1642 	generate_bytecode_conditional (exp,
1643 				       then_label, else_label, 1, state);
1644 	define_jcf_label (then_label, state);
1645 	push_int_const (1, state);
1646 	emit_goto (end_label, state);
1647 	define_jcf_label (else_label, state);
1648 	push_int_const (0, state);
1649 	define_jcf_label (end_label, state);
1650 	NOTE_PUSH (1);
1651       }
1652       break;
1653     case COND_EXPR:
1654       {
1655 	struct jcf_block *then_label = gen_jcf_label (state);
1656 	struct jcf_block *else_label = gen_jcf_label (state);
1657 	struct jcf_block *end_label = gen_jcf_label (state);
1658 	generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1659 				       then_label, else_label, 1, state);
1660 	define_jcf_label (then_label, state);
1661 	generate_bytecode_insns (TREE_OPERAND (exp, 1), target, state);
1662 	if (CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 1))
1663 	    /* Not all expressions have CAN_COMPLETE_NORMALLY set properly. */
1664 	    || TREE_CODE (TREE_TYPE (exp)) != VOID_TYPE)
1665 	  emit_goto (end_label, state);
1666 	define_jcf_label (else_label, state);
1667 	generate_bytecode_insns (TREE_OPERAND (exp, 2), target, state);
1668 	define_jcf_label (end_label, state);
1669 	/* COND_EXPR can be used in a binop. The stack must be adjusted. */
1670 	if (TREE_TYPE (exp) != void_type_node)
1671 	  NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
1672       }
1673       break;
1674     case CASE_EXPR:
1675       {
1676 	struct jcf_switch_state *sw_state = state->sw_state;
1677 	struct jcf_relocation *reloc = (struct jcf_relocation *)
1678 	  obstack_alloc (state->chunk_obstack, sizeof (struct jcf_relocation));
1679 	HOST_WIDE_INT case_value = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
1680 	reloc->kind = 0;
1681 	reloc->label = get_jcf_label_here (state);
1682 	reloc->offset = case_value;
1683 	reloc->next = sw_state->cases;
1684 	sw_state->cases = reloc;
1685 	if (sw_state->num_cases == 0)
1686 	  {
1687 	    sw_state->min_case = case_value;
1688 	    sw_state->max_case = case_value;
1689 	  }
1690 	else
1691 	  {
1692 	    if (case_value < sw_state->min_case)
1693 	      sw_state->min_case = case_value;
1694 	    if (case_value > sw_state->max_case)
1695 	      sw_state->max_case = case_value;
1696 	  }
1697 	sw_state->num_cases++;
1698       }
1699       break;
1700     case DEFAULT_EXPR:
1701       state->sw_state->default_label = get_jcf_label_here (state);
1702       break;
1703 
1704     case SWITCH_EXPR:
1705       {
1706 	/* The SWITCH_EXPR has three parts, generated in the following order:
1707 	   1.  the switch_expression (the value used to select the correct case);
1708 	   2.  the switch_body;
1709 	   3.  the switch_instruction (the tableswitch/lookupswitch instruction).
1710 	   After code generation, we will re-order them in the order 1, 3, 2.
1711 	   This is to avoid any extra GOTOs. */
1712 	struct jcf_switch_state sw_state;
1713 	struct jcf_block *expression_last; /* Last block of the switch_expression. */
1714 	struct jcf_block *body_last; /* Last block of the switch_body. */
1715 	struct jcf_block *switch_instruction;  /* First block of switch_instruction. */
1716 	struct jcf_block *instruction_last; /* Last block of the switch_instruction. */
1717 	struct jcf_block *body_block;
1718 	int switch_length;
1719 	sw_state.prev = state->sw_state;
1720 	state->sw_state = &sw_state;
1721 	sw_state.cases = NULL;
1722 	sw_state.num_cases = 0;
1723 	sw_state.default_label = NULL;
1724 	generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1725 	expression_last = state->last_block;
1726 	/* Force a new block here.  */
1727 	body_block = gen_jcf_label (state);
1728 	define_jcf_label (body_block, state);
1729 	generate_bytecode_insns (TREE_OPERAND (exp, 1), IGNORE_TARGET, state);
1730 	body_last = state->last_block;
1731 
1732 	switch_instruction = gen_jcf_label (state);
1733 	define_jcf_label (switch_instruction, state);
1734 	if (sw_state.default_label == NULL)
1735 	  sw_state.default_label = gen_jcf_label (state);
1736 
1737 	if (sw_state.num_cases <= 1)
1738 	  {
1739 	    if (sw_state.num_cases == 0)
1740 	      {
1741 		emit_pop (1, state);
1742 		NOTE_POP (1);
1743 	      }
1744 	    else
1745 	      {
1746 		push_int_const (sw_state.cases->offset, state);
1747 		NOTE_PUSH (1);
1748 		emit_if (sw_state.cases->label,
1749 			 OPCODE_if_icmpeq, OPCODE_if_icmpne, state);
1750 	      }
1751 	    emit_goto (sw_state.default_label, state);
1752 	  }
1753 	else
1754 	  {
1755 	    HOST_WIDE_INT i;
1756 	    unsigned HOST_WIDE_INT delta;
1757 	    /* Copy the chain of relocs into a sorted array. */
1758 	    struct jcf_relocation **relocs = (struct jcf_relocation **)
1759 	      xmalloc (sw_state.num_cases * sizeof (struct jcf_relocation *));
1760 	    /* The relocs array is a buffer with a gap.
1761 	       The assumption is that cases will normally come in "runs". */
1762 	    int gap_start = 0;
1763 	    int gap_end = sw_state.num_cases;
1764 	    struct jcf_relocation *reloc;
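	    /* Insertion sort using the gap: entries at [0, gap_start) and
	       [gap_end, num_cases) are already sorted.  The two loops below
	       slide the gap so that every entry before it is <= CASE_VALUE
	       and every entry after it is >= CASE_VALUE, then the new reloc
	       is stored at the front of the gap. */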
1765 	    for (reloc = sw_state.cases;  reloc != NULL;  reloc = reloc->next)
1766 	      {
1767 		HOST_WIDE_INT case_value = reloc->offset;
1768 		while (gap_end < sw_state.num_cases)
1769 		  {
1770 		    struct jcf_relocation *end = relocs[gap_end];
1771 		    if (case_value <= end->offset)
1772 		      break;
1773 		    relocs[gap_start++] = end;
1774 		    gap_end++;
1775 		  }
1776 		while (gap_start > 0)
1777 		  {
1778 		    struct jcf_relocation *before = relocs[gap_start-1];
1779 		    if (case_value >= before->offset)
1780 		      break;
1781 		    relocs[--gap_end] = before;
1782 		    gap_start--;
1783 		  }
1784 		relocs[gap_start++] = reloc;
1785 		/* Note we don't check for duplicates.  This is
1786 		   handled by the parser.  */
1787 	      }
1788 
1789 	    /* We could have DELTA < 0 if sw_state.min_case is
1790 	       something like Integer.MIN_VALUE.  That is why delta is
1791 	       unsigned.  */
1792 	    delta = sw_state.max_case - sw_state.min_case;
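	    /* Choose the switch encoding by size: a tableswitch needs 4
	       bytes for every value in [min_case, max_case] plus a fixed
	       header, while a lookupswitch needs 8 bytes per actual case.
	       Prefer tableswitch when the table would be at least roughly
	       half full. */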
1793 	    if (2 * (unsigned) sw_state.num_cases >= delta)
1794 	      { /* Use tableswitch. */
1795 		int index = 0;
1796 		RESERVE (13 + 4 * (sw_state.max_case - sw_state.min_case + 1));
1797 		OP1 (OPCODE_tableswitch);
1798 		emit_reloc (RELOCATION_VALUE_0,
1799 			    SWITCH_ALIGN_RELOC, NULL, state);
1800 		emit_switch_reloc (sw_state.default_label, state);
1801 		OP4 (sw_state.min_case);
1802 		OP4 (sw_state.max_case);
1803 		for (i = sw_state.min_case; ; )
1804 		  {
1805 		    reloc = relocs[index];
1806 		    if (i == reloc->offset)
1807 		      {
1808 			emit_case_reloc (reloc, state);
1809 			if (i == sw_state.max_case)
1810 			  break;
1811 			index++;
1812 		      }
1813 		    else
1814 		      emit_switch_reloc (sw_state.default_label, state);
1815 		    i++;
1816 		  }
1817 	      }
1818 	    else
1819 	      { /* Use lookupswitch. */
1820 		RESERVE(9 + 8 * sw_state.num_cases);
1821 		OP1 (OPCODE_lookupswitch);
1822 		emit_reloc (RELOCATION_VALUE_0,
1823 			    SWITCH_ALIGN_RELOC, NULL, state);
1824 		emit_switch_reloc (sw_state.default_label, state);
1825 		OP4 (sw_state.num_cases);
1826 		for (i = 0;  i < sw_state.num_cases;  i++)
1827 		  {
1828 		    struct jcf_relocation *reloc = relocs[i];
1829 		    OP4 (reloc->offset);
1830 		    emit_case_reloc (reloc, state);
1831 		  }
1832 	      }
1833 	    free (relocs);
1834 	  }
1835 
1836 	instruction_last = state->last_block;
1837 	if (sw_state.default_label->pc < 0)
1838 	  define_jcf_label (sw_state.default_label, state);
1839 	else /* Force a new block. */
1840 	  sw_state.default_label = get_jcf_label_here (state);
1841 	/* Now re-arrange the blocks so the switch_instruction
1842 	   comes before the switch_body. */
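	/* The blocks were emitted in the order expression, body, switch
	   instruction.  Relinking the block and chunk chains below yields
	   expression, switch instruction, body, and each body block's pc
	   is shifted up by the length of the switch instruction. */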
1843 	switch_length = state->code_length - switch_instruction->pc;
1844 	switch_instruction->pc = body_block->pc;
1845 	instruction_last->next = body_block;
1846 	instruction_last->v.chunk->next = body_block->v.chunk;
1847 	expression_last->next = switch_instruction;
1848 	expression_last->v.chunk->next = switch_instruction->v.chunk;
1849 	body_last->next = sw_state.default_label;
1850 	body_last->v.chunk->next = NULL;
1851 	state->chunk = body_last->v.chunk;
1852 	for (;  body_block != sw_state.default_label;  body_block = body_block->next)
1853 	  body_block->pc += switch_length;
1854 
1855 	state->sw_state = sw_state.prev;
1856 	break;
1857       }
1858 
1859     case RETURN_EXPR:
1860       exp = TREE_OPERAND (exp, 0);
1861       if (exp == NULL_TREE)
1862 	exp = empty_stmt_node;
1863       else if (TREE_CODE (exp) != MODIFY_EXPR)
1864 	abort ();
1865       else
1866 	exp = TREE_OPERAND (exp, 1);
1867       generate_bytecode_return (exp, state);
1868       break;
1869     case LABELED_BLOCK_EXPR:
1870       {
1871 	struct jcf_block *end_label = gen_jcf_label (state);
1872 	end_label->next = state->labeled_blocks;
1873 	state->labeled_blocks = end_label;
1874 	end_label->pc = PENDING_EXIT_PC;
1875 	end_label->u.labeled_block = exp;
1876 	if (LABELED_BLOCK_BODY (exp))
1877 	  generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
1878 	if (state->labeled_blocks != end_label)
1879 	  abort();
1880 	state->labeled_blocks = end_label->next;
1881 	define_jcf_label (end_label, state);
1882       }
1883       break;
1884     case LOOP_EXPR:
1885       {
1886 	tree body = TREE_OPERAND (exp, 0);
1887 #if 0
1888 	if (TREE_CODE (body) == COMPOUND_EXPR
1889 	    && TREE_CODE (TREE_OPERAND (body, 0)) == EXIT_EXPR)
1890 	  {
1891 	    /* Optimize:  H: if (TEST) GOTO L; BODY; GOTO H; L:
1892 	       to:  GOTO L;  BODY;  L:  if (!TEST) GOTO L; */
1893 	    struct jcf_block *head_label;
1894 	    struct jcf_block *body_label;
1895 	    struct jcf_block *end_label = gen_jcf_label (state);
1896 	    struct jcf_block *exit_label = state->labeled_blocks;
1897 	    head_label = gen_jcf_label (state);
1898 	    emit_goto (head_label, state);
1899 	    body_label = get_jcf_label_here (state);
1900 	    generate_bytecode_insns (TREE_OPERAND (body, 1), target, state);
1901 	    define_jcf_label (head_label, state);
1902 	    generate_bytecode_conditional (TREE_OPERAND (body, 0),
1903 					   end_label, body_label, 1, state);
1904 	    define_jcf_label (end_label, state);
1905 	  }
1906 	else
1907 #endif
1908 	  {
1909 	    struct jcf_block *head_label = get_jcf_label_here (state);
1910 	    generate_bytecode_insns (body, IGNORE_TARGET, state);
1911 	    if (CAN_COMPLETE_NORMALLY (body))
1912 	      emit_goto (head_label, state);
1913 	  }
1914       }
1915       break;
1916     case EXIT_EXPR:
1917       {
1918 	struct jcf_block *label = state->labeled_blocks;
1919 	struct jcf_block *end_label = gen_jcf_label (state);
1920 	generate_bytecode_conditional (TREE_OPERAND (exp, 0),
1921 				       label, end_label, 0, state);
1922 	define_jcf_label (end_label, state);
1923       }
1924       break;
1925     case EXIT_BLOCK_EXPR:
1926       {
1927 	struct jcf_block *label = state->labeled_blocks;
1928 	if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
1929 	while (label->u.labeled_block != TREE_OPERAND (exp, 0))
1930 	  label = label->next;
1931 	call_cleanups (label, state);
1932 	emit_goto (label, state);
1933       }
1934       break;
1935 
1936     case PREDECREMENT_EXPR:  value = -1; post_op = 0;  goto increment;
1937     case PREINCREMENT_EXPR:  value =  1; post_op = 0;  goto increment;
1938     case POSTDECREMENT_EXPR: value = -1; post_op = 1;  goto increment;
1939     case POSTINCREMENT_EXPR: value =  1; post_op = 1;  goto increment;
1940     increment:
1941 
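      /* VALUE gives the direction of the update (+1 for increment, -1 for
	 decrement); POST_OP says whether the expression's result is the
	 old value (post-increment/decrement) or the new one. */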
1942       arg = TREE_OPERAND (exp, 1);
1943       exp = TREE_OPERAND (exp, 0);
1944       type = TREE_TYPE (exp);
1945       size = TYPE_IS_WIDE (type) ? 2 : 1;
1946       if ((TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1947 	  && ! TREE_STATIC (exp)
1948 	  && TREE_CODE (type) == INTEGER_TYPE
1949 	  && TYPE_PRECISION (type) == 32)
1950 	{
1951 	  if (target != IGNORE_TARGET && post_op)
1952 	    emit_load (exp, state);
1953 	  emit_iinc (exp, value, state);
1954 	  if (target != IGNORE_TARGET && ! post_op)
1955 	    emit_load (exp, state);
1956 	  break;
1957 	}
1958       if (TREE_CODE (exp) == COMPONENT_REF)
1959 	{
1960 	  generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1961 	  emit_dup (1, 0, state);
1962 	  /* Stack:  ..., objectref, objectref. */
1963 	  field_op (TREE_OPERAND (exp, 1), OPCODE_getfield, state);
1964 	  NOTE_PUSH (size-1);
1965 	  /* Stack:  ..., objectref, oldvalue. */
1966 	  offset = 1;
1967 	}
1968       else if (TREE_CODE (exp) == ARRAY_REF)
1969 	{
1970 	  generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
1971 	  generate_bytecode_insns (TREE_OPERAND (exp, 1), STACK_TARGET, state);
1972 	  emit_dup (2, 0, state);
1973 	  /* Stack:  ..., array, index, array, index. */
1974 	  jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (exp), 7);
1975 	  RESERVE(1);
1976 	  OP1 (jopcode);
1977 	  NOTE_POP (2-size);
1978 	  /* Stack:  ..., array, index, oldvalue. */
1979 	  offset = 2;
1980 	}
1981       else if (TREE_CODE (exp) == VAR_DECL || TREE_CODE (exp) == PARM_DECL)
1982 	{
1983 	  generate_bytecode_insns (exp, STACK_TARGET, state);
1984 	  /* Stack:  ..., oldvalue. */
1985 	  offset = 0;
1986 	}
1987       else
1988 	abort ();
1989 
1990       if (target != IGNORE_TARGET && post_op)
1991 	emit_dup (size, offset, state);
1992       /* Stack, if ARRAY_REF:  ..., [result, ] array, index, oldvalue. */
1993       /* Stack, if COMPONENT_REF:  ..., [result, ] objectref, oldvalue. */
1994       /* Stack, otherwise:  ..., [result, ] oldvalue. */
1995       generate_bytecode_insns (arg, STACK_TARGET, state);
1996       emit_binop ((value >= 0 ? OPCODE_iadd : OPCODE_isub)
1997 		  + adjust_typed_op (type, 3),
1998 		  type, state);
1999       if (target != IGNORE_TARGET && ! post_op)
2000 	emit_dup (size, offset, state);
2001       /* Stack, if ARRAY_REF:  ..., [result, ] array, index, newvalue. */
2002       /* Stack, if COMPONENT_REF:  ..., [result, ] objectref, newvalue. */
2003       /* Stack, otherwise:  ..., [result, ] newvalue. */
2004       goto finish_assignment;
2005 
2006     case MODIFY_EXPR:
2007       {
2008 	tree lhs = TREE_OPERAND (exp, 0);
2009 	tree rhs = TREE_OPERAND (exp, 1);
2010 	int offset = 0;
2011 
2012 	/* See if we can use the iinc instruction. */
2013 	if ((TREE_CODE (lhs) == VAR_DECL || TREE_CODE (lhs) == PARM_DECL)
2014 	    && ! TREE_STATIC (lhs)
2015 	    && TREE_CODE (TREE_TYPE (lhs)) == INTEGER_TYPE
2016 	    && TYPE_PRECISION (TREE_TYPE (lhs)) == 32
2017 	    && (TREE_CODE (rhs) == PLUS_EXPR || TREE_CODE (rhs) == MINUS_EXPR))
2018 	  {
2019 	    tree arg0 = TREE_OPERAND (rhs, 0);
2020 	    tree arg1 = TREE_OPERAND (rhs, 1);
2021 	    HOST_WIDE_INT min_value = -32768;
2022 	    HOST_WIDE_INT max_value = 32767;
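	    /* A (wide) iinc takes a signed 16-bit increment.  For a
	       MINUS_EXPR the constant is negated below, so the acceptable
	       range shifts up by one, to [-32767, 32768]. */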
2023 	    if (TREE_CODE (rhs) == MINUS_EXPR)
2024 	      {
2025 		min_value++;
2026 		max_value++;
2027 	      }
2028 	    else if (arg1 == lhs)
2029 	      {
2030 		arg0 = arg1;
2031 		arg1 = TREE_OPERAND (rhs, 0);
2032 	      }
2033 	    if (lhs == arg0 && TREE_CODE (arg1) == INTEGER_CST)
2034 	      {
2035 		HOST_WIDE_INT hi_value = TREE_INT_CST_HIGH (arg1);
2036 		value = TREE_INT_CST_LOW (arg1);
2037 		if ((hi_value == 0 && value <= max_value)
2038 		    || (hi_value == -1 && value >= min_value))
2039 		  {
2040 		    if (TREE_CODE (rhs) == MINUS_EXPR)
2041 		      value = -value;
2042 		    emit_iinc (lhs, value, state);
2043 		    if (target != IGNORE_TARGET)
2044 		      emit_load (lhs, state);
2045 		    break;
2046 		  }
2047 	      }
2048 	  }
2049 
2050 	if (TREE_CODE (lhs) == COMPONENT_REF)
2051 	  {
2052 	    generate_bytecode_insns (TREE_OPERAND (lhs, 0),
2053 				     STACK_TARGET, state);
2054 	    offset = 1;
2055 	  }
2056 	else if (TREE_CODE (lhs) == ARRAY_REF)
2057 	  {
2058 	    generate_bytecode_insns (TREE_OPERAND(lhs, 0),
2059 				     STACK_TARGET, state);
2060 	    generate_bytecode_insns (TREE_OPERAND(lhs, 1),
2061 				     STACK_TARGET, state);
2062 	    offset = 2;
2063 	  }
2064 	else
2065 	  offset = 0;
2066 
2067 	/* If the rhs is a binary expression and the left operand is
2068 	   `==' to the lhs then we have an OP= expression.  In this
2069 	   case we must do some special processing.  */
2070 	if (TREE_CODE_CLASS (TREE_CODE (rhs)) == '2'
2071 	    && lhs == TREE_OPERAND (rhs, 0))
2072 	  {
2073 	    if (TREE_CODE (lhs) == COMPONENT_REF)
2074 	      {
2075 		tree field = TREE_OPERAND (lhs, 1);
2076 		if (! FIELD_STATIC (field))
2077 		  {
2078 		    /* Duplicate the object reference so we can get
2079 		       the field.  */
2080 		    emit_dup (TYPE_IS_WIDE (field) ? 2 : 1, 0, state);
2081 		    NOTE_POP (1);
2082 		  }
2083 		field_op (field, (FIELD_STATIC (field)
2084 				  ? OPCODE_getstatic
2085 				  : OPCODE_getfield),
2086 			  state);
2087 
2088 		NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2089 	      }
2090 	    else if (TREE_CODE (lhs) == VAR_DECL
2091 		     || TREE_CODE (lhs) == PARM_DECL)
2092 	      {
2093 		if (FIELD_STATIC (lhs))
2094 		  {
2095 		    field_op (lhs, OPCODE_getstatic, state);
2096 		    NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2097 		  }
2098 		else
2099 		  emit_load (lhs, state);
2100 	      }
2101 	    else if (TREE_CODE (lhs) == ARRAY_REF)
2102 	      {
2103 		/* Duplicate the array and index, which are on the
2104 		   stack, so that we can load the old value.  */
2105 		emit_dup (2, 0, state);
2106 		NOTE_POP (2);
2107 		jopcode = OPCODE_iaload + adjust_typed_op (TREE_TYPE (lhs), 7);
2108 		RESERVE (1);
2109 		OP1 (jopcode);
2110 		NOTE_PUSH (TYPE_IS_WIDE (TREE_TYPE (lhs)) ? 2 : 1);
2111 	      }
2112 	    else
2113 	      abort ();
2114 
2115 	    /* This function correctly handles the case where the LHS
2116 	       of a binary expression is NULL_TREE.  */
2117 	    rhs = build (TREE_CODE (rhs), TREE_TYPE (rhs),
2118 			 NULL_TREE, TREE_OPERAND (rhs, 1));
2119 	  }
2120 
2121 	generate_bytecode_insns (rhs, STACK_TARGET, state);
2122 	if (target != IGNORE_TARGET)
2123 	  emit_dup (TYPE_IS_WIDE (type) ? 2 : 1 , offset, state);
2124 	exp = lhs;
2125       }
2126       /* FALLTHROUGH */
2127 
2128     finish_assignment:
2129       if (TREE_CODE (exp) == COMPONENT_REF)
2130 	{
2131 	  tree field = TREE_OPERAND (exp, 1);
2132 	  if (! FIELD_STATIC (field))
2133 	    NOTE_POP (1);
2134 	  field_op (field,
2135 		    FIELD_STATIC (field) ? OPCODE_putstatic : OPCODE_putfield,
2136 		    state);
2137 
2138 	  NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (field)) ? 2 : 1);
2139 	}
2140       else if (TREE_CODE (exp) == VAR_DECL
2141 	       || TREE_CODE (exp) == PARM_DECL)
2142 	{
2143 	  if (FIELD_STATIC (exp))
2144 	    {
2145 	      field_op (exp, OPCODE_putstatic, state);
2146 	      NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 2 : 1);
2147 	    }
2148 	  else
2149 	    emit_store (exp, state);
2150 	}
2151       else if (TREE_CODE (exp) == ARRAY_REF)
2152 	{
2153 	  jopcode = OPCODE_iastore + adjust_typed_op (TREE_TYPE (exp), 7);
2154 	  RESERVE (1);
2155 	  OP1 (jopcode);
2156 	  NOTE_POP (TYPE_IS_WIDE (TREE_TYPE (exp)) ? 4 : 3);
2157 	}
2158       else
2159 	abort ();
2160       break;
2161     case PLUS_EXPR:
2162       jopcode = OPCODE_iadd;
2163       goto binop;
2164     case MINUS_EXPR:
2165       jopcode = OPCODE_isub;
2166       goto binop;
2167     case MULT_EXPR:
2168       jopcode = OPCODE_imul;
2169       goto binop;
2170     case TRUNC_DIV_EXPR:
2171     case RDIV_EXPR:
2172       jopcode = OPCODE_idiv;
2173       goto binop;
2174     case TRUNC_MOD_EXPR:
2175       jopcode = OPCODE_irem;
2176       goto binop;
2177     case LSHIFT_EXPR:   jopcode = OPCODE_ishl;   goto binop;
2178     case RSHIFT_EXPR:   jopcode = OPCODE_ishr;   goto binop;
2179     case URSHIFT_EXPR:  jopcode = OPCODE_iushr;  goto binop;
2180     case TRUTH_AND_EXPR:
2181     case BIT_AND_EXPR:  jopcode = OPCODE_iand;   goto binop;
2182     case TRUTH_OR_EXPR:
2183     case BIT_IOR_EXPR:  jopcode = OPCODE_ior;    goto binop;
2184     case TRUTH_XOR_EXPR:
2185     case BIT_XOR_EXPR:  jopcode = OPCODE_ixor;   goto binop;
2186     binop:
2187     {
2188       tree arg0 = TREE_OPERAND (exp, 0);
2189       tree arg1 = TREE_OPERAND (exp, 1);
2190       jopcode += adjust_typed_op (type, 3);
2191       if (arg0 == arg1 && TREE_CODE (arg0) == SAVE_EXPR)
2192 	{
2193 	  /* fold may (e.g) convert 2*x to x+x. */
2194 	  generate_bytecode_insns (TREE_OPERAND (arg0, 0), target, state);
2195 	  emit_dup (TYPE_PRECISION (TREE_TYPE (arg0)) > 32 ? 2 : 1, 0, state);
2196 	}
2197       else
2198 	{
2199 	  /* ARG0 will be NULL_TREE if we're handling an `OP='
2200 	     expression.  In this case the stack already holds the
2201 	     LHS.  See the MODIFY_EXPR case.  */
2202 	  if (arg0 != NULL_TREE)
2203 	    generate_bytecode_insns (arg0, target, state);
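	  /* The JVM's long shifts (lshl/lshr/lushr) still take an int
	     shift count, so narrow the right operand to int. */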
2204 	  if (jopcode >= OPCODE_lshl && jopcode <= OPCODE_lushr)
2205 	    arg1 = convert (int_type_node, arg1);
2206 	  generate_bytecode_insns (arg1, target, state);
2207 	}
2208       /* For most binary operations, both operands and the result have the
2209 	 same type.  Shift operations are different.  Using arg1's type
2210 	 gets us the correct SP adjustment in all cases. */
2211       if (target == STACK_TARGET)
2212 	emit_binop (jopcode, TREE_TYPE (arg1), state);
2213       break;
2214     }
2215     case TRUTH_NOT_EXPR:
2216     case BIT_NOT_EXPR:
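      /* Both forms are lowered to an xor: BIT_NOT xors with -1 (all ones),
	 TRUTH_NOT xors the boolean with 1. */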
2217       generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2218       if (target == STACK_TARGET)
2219 	{
2220 	  int is_long = TYPE_PRECISION (TREE_TYPE (exp)) > 32;
2221 	  push_int_const (TREE_CODE (exp) == BIT_NOT_EXPR ? -1 : 1, state);
2222 	  RESERVE (2);
2223 	  if (is_long)
2224 	    OP1 (OPCODE_i2l);
2225 	  NOTE_PUSH (1 + is_long);
2226 	  OP1 (OPCODE_ixor + is_long);
2227 	  NOTE_POP (1 + is_long);
2228 	}
2229       break;
2230     case NEGATE_EXPR:
2231       jopcode = OPCODE_ineg;
2232       jopcode += adjust_typed_op (type, 3);
2233       generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2234       if (target == STACK_TARGET)
2235 	emit_unop (jopcode, type, state);
2236       break;
2237     case INSTANCEOF_EXPR:
2238       {
2239 	int index = find_class_constant (&state->cpool, TREE_OPERAND (exp, 1));
2240 	generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2241 	RESERVE (3);
2242 	OP1 (OPCODE_instanceof);
2243 	OP2 (index);
2244       }
2245       break;
2246     case SAVE_EXPR:
2247       generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2248       break;
2249     case CONVERT_EXPR:
2250     case NOP_EXPR:
2251     case FLOAT_EXPR:
2252     case FIX_TRUNC_EXPR:
2253       {
2254 	tree src = TREE_OPERAND (exp, 0);
2255 	tree src_type = TREE_TYPE (src);
2256 	tree dst_type = TREE_TYPE (exp);
2257 	generate_bytecode_insns (TREE_OPERAND (exp, 0), target, state);
2258 	if (target == IGNORE_TARGET || src_type == dst_type)
2259 	  break;
2260 	if (TREE_CODE (dst_type) == POINTER_TYPE)
2261 	  {
2262 	    if (TREE_CODE (exp) == CONVERT_EXPR)
2263 	      {
2264 		int index = find_class_constant (&state->cpool,
2265 						 TREE_TYPE (dst_type));
2266 		RESERVE (3);
2267 		OP1 (OPCODE_checkcast);
2268 		OP2 (index);
2269 	      }
2270 	  }
2271 	else /* Convert numeric types. */
2272 	  {
2273 	    int wide_src = TYPE_PRECISION (src_type) > 32;
2274 	    int wide_dst = TYPE_PRECISION (dst_type) > 32;
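	    /* The opcode arithmetic below relies on the JVM numbering the
	       conversion instructions consecutively (i2f/i2d, l2f/l2d, and
	       f2i/f2l/f2d/d2i/d2l/d2f), so adding WIDE_DST (and 3*WIDE_SRC)
	       selects the right one. */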
2275 	    NOTE_POP (1 + wide_src);
2276 	    RESERVE (1);
2277 	    if (TREE_CODE (dst_type) == REAL_TYPE)
2278 	      {
2279 		if (TREE_CODE (src_type) == REAL_TYPE)
2280 		  OP1 (wide_dst ? OPCODE_f2d : OPCODE_d2f);
2281 		else if (TYPE_PRECISION (src_type) == 64)
2282 		  OP1 (OPCODE_l2f + wide_dst);
2283 		else
2284 		  OP1 (OPCODE_i2f + wide_dst);
2285 	      }
2286 	    else /* Convert to integral type. */
2287 	      {
2288 		if (TREE_CODE (src_type) == REAL_TYPE)
2289 		  OP1 (OPCODE_f2i + wide_dst + 3 * wide_src);
2290 		else if (wide_dst)
2291 		  OP1 (OPCODE_i2l);
2292 		else if (wide_src)
2293 		  OP1 (OPCODE_l2i);
2294 		if (TYPE_PRECISION (dst_type) < 32)
2295 		  {
2296 		    RESERVE (1);
2297 		    /* Already converted to int, if needed. */
2298 		    if (TYPE_PRECISION (dst_type) <= 8)
2299 		      OP1 (OPCODE_i2b);
2300 		    else if (TREE_UNSIGNED (dst_type))
2301 		      OP1 (OPCODE_i2c);
2302 		    else
2303 		      OP1 (OPCODE_i2s);
2304 		  }
2305 	      }
2306 	    NOTE_PUSH (1 + wide_dst);
2307 	  }
2308       }
2309       break;
2310 
2311     case TRY_EXPR:
2312       {
2313 	tree try_clause = TREE_OPERAND (exp, 0);
2314 	struct jcf_block *start_label = get_jcf_label_here (state);
2315 	struct jcf_block *end_label;  /* End of try clause. */
2316 	struct jcf_block *finished_label = gen_jcf_label (state);
2317 	tree clause = TREE_OPERAND (exp, 1);
2318 	if (target != IGNORE_TARGET)
2319 	  abort ();
2320 	generate_bytecode_insns (try_clause, IGNORE_TARGET, state);
2321 	end_label = get_jcf_label_here (state);
2322 	if (end_label == start_label)
2323 	  break;
2324 	if (CAN_COMPLETE_NORMALLY (try_clause))
2325 	  emit_goto (finished_label, state);
2326 	while (clause != NULL_TREE)
2327 	  {
2328 	    tree catch_clause = TREE_OPERAND (clause, 0);
2329 	    tree exception_decl = BLOCK_EXPR_DECLS (catch_clause);
2330 	    struct jcf_handler *handler = alloc_handler (start_label,
2331 							 end_label, state);
2332 	    if (exception_decl == NULL_TREE)
2333 	      handler->type = NULL_TREE;
2334 	    else
2335 	      handler->type = TREE_TYPE (TREE_TYPE (exception_decl));
2336 	    generate_bytecode_insns (catch_clause, IGNORE_TARGET, state);
2337 	    clause = TREE_CHAIN (clause);
2338 	    if (CAN_COMPLETE_NORMALLY (catch_clause) && clause != NULL_TREE)
2339 	      emit_goto (finished_label, state);
2340 	  }
2341 	define_jcf_label (finished_label, state);
2342       }
2343       break;
2344 
2345     case TRY_FINALLY_EXPR:
2346       {
2347 	struct jcf_block *finished_label = NULL;
2348 	struct jcf_block *finally_label, *start_label, *end_label;
2349 	struct jcf_handler *handler;
2350 	tree try_block = TREE_OPERAND (exp, 0);
2351 	tree finally = TREE_OPERAND (exp, 1);
2352 	tree return_link = NULL_TREE, exception_decl = NULL_TREE;
2353 
2354 	tree exception_type;
2355 
2356 	finally_label = gen_jcf_label (state);
2357 	start_label = get_jcf_label_here (state);
2358 	/* If the `finally' clause can complete normally, we emit it
2359 	   as a subroutine and let the other clauses call it via
2360 	   `jsr'.  If it can't complete normally, then we simply emit
2361 	   `goto's directly to it.  */
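	/* In the subroutine case the code emitted below has roughly this
	   shape:
	     try-body; jsr FINALLY; goto DONE;
	     HANDLER: astore exc; jsr FINALLY; aload exc; athrow;
	     FINALLY: astore return_link; finally-body; ret return_link;
	     DONE: */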
2362 	if (CAN_COMPLETE_NORMALLY (finally))
2363 	  {
2364 	    finally_label->pc = PENDING_CLEANUP_PC;
2365 	    finally_label->next = state->labeled_blocks;
2366 	    state->labeled_blocks = finally_label;
2367 	    state->num_finalizers++;
2368 	  }
2369 
2370 	generate_bytecode_insns (try_block, target, state);
2371 
2372 	if (CAN_COMPLETE_NORMALLY (finally))
2373 	  {
2374 	    if (state->labeled_blocks != finally_label)
2375 	      abort();
2376 	    state->labeled_blocks = finally_label->next;
2377 	  }
2378 	end_label = get_jcf_label_here (state);
2379 
2380 	if (end_label == start_label)
2381 	  {
2382 	    state->num_finalizers--;
2383 	    define_jcf_label (finally_label, state);
2384 	    generate_bytecode_insns (finally, IGNORE_TARGET, state);
2385 	    break;
2386 	  }
2387 
2388 	if (CAN_COMPLETE_NORMALLY (finally))
2389 	  {
2390 	    return_link = build_decl (VAR_DECL, NULL_TREE,
2391 				      return_address_type_node);
2392 	    finished_label = gen_jcf_label (state);
2393 	  }
2394 
2395 	if (CAN_COMPLETE_NORMALLY (try_block))
2396 	  {
2397 	    if (CAN_COMPLETE_NORMALLY (finally))
2398 	      {
2399 		emit_jsr (finally_label, state);
2400 		emit_goto (finished_label, state);
2401 	      }
2402 	    else
2403 	      emit_goto (finally_label, state);
2404 	  }
2405 
2406 	/* Handle exceptions.  */
2407 
2408 	exception_type = build_pointer_type (throwable_type_node);
2409 	if (CAN_COMPLETE_NORMALLY (finally))
2410 	  {
2411 	    /* We're going to generate a subroutine, so we'll need to
2412 	       save and restore the exception around the `jsr'.  */
2413 	    exception_decl = build_decl (VAR_DECL, NULL_TREE, exception_type);
2414 	    localvar_alloc (return_link, state);
2415 	  }
2416 	handler = alloc_handler (start_label, end_label, state);
2417 	handler->type = NULL_TREE;
2418 	if (CAN_COMPLETE_NORMALLY (finally))
2419 	  {
2420 	    localvar_alloc (exception_decl, state);
2421 	    NOTE_PUSH (1);
2422 	    emit_store (exception_decl, state);
2423 	    emit_jsr (finally_label, state);
2424 	    emit_load (exception_decl, state);
2425 	    RESERVE (1);
2426 	    OP1 (OPCODE_athrow);
2427 	    NOTE_POP (1);
2428 	  }
2429 	else
2430 	  {
2431 	    /* We're not generating a subroutine.  In this case we can
2432 	       simply have the exception handler pop the exception and
2433 	       then fall through to the `finally' block.  */
2434 	    NOTE_PUSH (1);
2435 	    emit_pop (1, state);
2436 	    NOTE_POP (1);
2437 	  }
2438 
2439 	/* The finally block.  If we're generating a subroutine, first
2440 	   save return PC into return_link.  Otherwise, just generate
2441 	   the code for the `finally' block.  */
2442 	define_jcf_label (finally_label, state);
2443 	if (CAN_COMPLETE_NORMALLY (finally))
2444 	  {
2445 	    NOTE_PUSH (1);
2446 	    emit_store (return_link, state);
2447 	  }
2448 
2449 	generate_bytecode_insns (finally, IGNORE_TARGET, state);
2450 	if (CAN_COMPLETE_NORMALLY (finally))
2451 	  {
2452 	    maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
2453 	    maybe_free_localvar (exception_decl, state, 1);
2454 	    maybe_free_localvar (return_link, state, 1);
2455 	    define_jcf_label (finished_label, state);
2456 	  }
2457       }
2458       break;
2459     case THROW_EXPR:
2460       generate_bytecode_insns (TREE_OPERAND (exp, 0), STACK_TARGET, state);
2461       RESERVE (1);
2462       OP1 (OPCODE_athrow);
2463       break;
2464     case NEW_ARRAY_INIT:
2465       {
2466 	tree values = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
2467 	tree array_type = TREE_TYPE (TREE_TYPE (exp));
2468 	tree element_type = TYPE_ARRAY_ELEMENT (array_type);
2469 	HOST_WIDE_INT length = java_array_type_length (array_type);
2470 	if (target == IGNORE_TARGET)
2471 	  {
2472 	    for ( ;  values != NULL_TREE;  values = TREE_CHAIN (values))
2473 	      generate_bytecode_insns (TREE_VALUE (values), target, state);
2474 	    break;
2475 	  }
2476 	push_int_const (length, state);
2477 	NOTE_PUSH (1);
2478 	RESERVE (3);
2479 	if (JPRIMITIVE_TYPE_P (element_type))
2480 	  {
2481 	    int atype = encode_newarray_type (element_type);
2482 	    OP1 (OPCODE_newarray);
2483 	    OP1 (atype);
2484 	  }
2485 	else
2486 	  {
2487 	    int index = find_class_constant (&state->cpool,
2488 					     TREE_TYPE (element_type));
2489 	    OP1 (OPCODE_anewarray);
2490 	    OP2 (index);
2491 	  }
2492 	offset = 0;
2493 	jopcode = OPCODE_iastore + adjust_typed_op (element_type, 7);
2494 	for ( ;  values != NULL_TREE;  values = TREE_CHAIN (values), offset++)
2495 	  {
2496 	    int save_SP = state->code_SP;
2497 	    emit_dup (1, 0, state);
2498 	    push_int_const (offset, state);
2499 	    NOTE_PUSH (1);
2500 	    generate_bytecode_insns (TREE_VALUE (values), STACK_TARGET, state);
2501 	    RESERVE (1);
2502 	    OP1 (jopcode);
2503 	    state->code_SP = save_SP;
2504 	  }
2505       }
2506       break;
2507     case JAVA_EXC_OBJ_EXPR:
2508       NOTE_PUSH (1);  /* Pushed by exception system. */
2509       break;
2510     case NEW_CLASS_EXPR:
2511       {
2512 	tree class = TREE_TYPE (TREE_TYPE (exp));
2513 	int need_result = target != IGNORE_TARGET;
2514 	int index = find_class_constant (&state->cpool, class);
2515 	RESERVE (4);
2516 	OP1 (OPCODE_new);
2517 	OP2 (index);
2518 	if (need_result)
2519 	  OP1 (OPCODE_dup);
2520 	NOTE_PUSH (1 + need_result);
2521       }
2522       /* ... fall through ... */
2523     case CALL_EXPR:
2524       {
2525 	tree f = TREE_OPERAND (exp, 0);
2526 	tree x = TREE_OPERAND (exp, 1);
2527 	int save_SP = state->code_SP;
2528 	int nargs;
2529 	if (TREE_CODE (f) == ADDR_EXPR)
2530 	  f = TREE_OPERAND (f, 0);
2531 	if (f == soft_newarray_node)
2532 	  {
2533 	    int type_code = TREE_INT_CST_LOW (TREE_VALUE (x));
2534 	    generate_bytecode_insns (TREE_VALUE (TREE_CHAIN (x)),
2535 				     STACK_TARGET, state);
2536 	    RESERVE (2);
2537 	    OP1 (OPCODE_newarray);
2538 	    OP1 (type_code);
2539 	    break;
2540 	  }
2541 	else if (f == soft_multianewarray_node)
2542 	  {
2543 	    int ndims;
2544 	    int idim;
2545 	    int index = find_class_constant (&state->cpool,
2546 					     TREE_TYPE (TREE_TYPE (exp)));
2547 	    x = TREE_CHAIN (x);  /* Skip class argument. */
2548 	    ndims = TREE_INT_CST_LOW (TREE_VALUE (x));
2549 	    for (idim = ndims;  --idim >= 0; )
2550 	      {
2551 		x = TREE_CHAIN (x);
2552 		generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2553 	      }
2554 	    RESERVE (4);
2555 	    OP1 (OPCODE_multianewarray);
2556 	    OP2 (index);
2557 	    OP1 (ndims);
2558 	    break;
2559 	  }
2560 	else if (f == soft_anewarray_node)
2561 	  {
2562 	    tree cl = TYPE_ARRAY_ELEMENT (TREE_TYPE (TREE_TYPE (exp)));
2563 	    int index = find_class_constant (&state->cpool, TREE_TYPE (cl));
2564 	    generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2565 	    RESERVE (3);
2566 	    OP1 (OPCODE_anewarray);
2567 	    OP2 (index);
2568 	    break;
2569 	  }
2570 	else if (f == soft_monitorenter_node
2571 		 || f == soft_monitorexit_node
2572 		 || f == throw_node)
2573 	  {
2574 	    if (f == soft_monitorenter_node)
2575 	      op = OPCODE_monitorenter;
2576 	    else if (f == soft_monitorexit_node)
2577 	      op = OPCODE_monitorexit;
2578 	    else
2579 	      op = OPCODE_athrow;
2580 	    generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2581 	    RESERVE (1);
2582 	    OP1 (op);
2583 	    NOTE_POP (1);
2584 	    break;
2585 	  }
2586 	for ( ;  x != NULL_TREE;  x = TREE_CHAIN (x))
2587 	  {
2588 	    generate_bytecode_insns (TREE_VALUE (x), STACK_TARGET, state);
2589 	  }
2590 	nargs = state->code_SP - save_SP;
2591 	state->code_SP = save_SP;
2592 	if (f == soft_fmod_node)
2593 	  {
2594 	    RESERVE (1);
2595 	    OP1 (OPCODE_drem);
2596 	    NOTE_PUSH (2);
2597 	    break;
2598 	  }
2599 	if (TREE_CODE (exp) == NEW_CLASS_EXPR)
2600 	  NOTE_POP (1);  /* Pop implicit this. */
2601 	if (TREE_CODE (f) == FUNCTION_DECL && DECL_CONTEXT (f) != NULL_TREE)
2602 	  {
2603 	    tree context = DECL_CONTEXT (f);
2604 	    int index, interface = 0;
2605 	    RESERVE (5);
2606 	    if (METHOD_STATIC (f))
2607 	      OP1 (OPCODE_invokestatic);
2608 	    else if (DECL_CONSTRUCTOR_P (f) || CALL_USING_SUPER (exp)
2609 		|| METHOD_PRIVATE (f))
2610 	      OP1 (OPCODE_invokespecial);
2611 	    else
2612 	      {
2613 		if (CLASS_INTERFACE (TYPE_NAME (context)))
2614 		  {
2615 		    tree arg1 = TREE_VALUE (TREE_OPERAND (exp, 1));
2616 		    context = TREE_TYPE (TREE_TYPE (arg1));
2617 		    if (CLASS_INTERFACE (TYPE_NAME (context)))
2618 		      interface = 1;
2619 		  }
2620 		if (interface)
2621 		  OP1 (OPCODE_invokeinterface);
2622 		else
2623 		  OP1 (OPCODE_invokevirtual);
2624 	      }
2625 	    index = find_methodref_with_class_index (&state->cpool, f, context);
2626 	    OP2 (index);
2627 	    if (interface)
2628 	      {
2629 		if (nargs <= 0)
2630 		  abort ();
2631 
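		/* invokeinterface carries an explicit count of argument
		   slots (including `this') followed by a reserved zero
		   byte. */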
2632 		OP1 (nargs);
2633 		OP1 (0);
2634 	      }
2635 	    f = TREE_TYPE (TREE_TYPE (f));
2636 	    if (TREE_CODE (f) != VOID_TYPE)
2637 	      {
2638 		int size = TYPE_IS_WIDE (f) ? 2 : 1;
2639 		if (target == IGNORE_TARGET)
2640 		  emit_pop (size, state);
2641 		else
2642 		  NOTE_PUSH (size);
2643 	      }
2644 	    break;
2645 	  }
2646       }
2647       /* fall through */
2648     notimpl:
2649     default:
2650       error("internal error in generate_bytecode_insn - tree code not implemented: %s",
2651 	    tree_code_name [(int) TREE_CODE (exp)]);
2652     }
2653 }
2654 
2655 static void
2656 perform_relocations (state)
2657      struct jcf_partial *state;
2658 {
2659   struct jcf_block *block;
2660   struct jcf_relocation *reloc;
2661   int pc;
2662   int shrink;
2663 
2664   /* Before we start, the pc field of each block is an upper bound on
2665      the block's start pc (it may be less, if previous blocks need less
2666      than their maximum).
2667 
2668      The minimum size of each block is in the block's chunk->size. */
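  /* Two passes: the first walks the blocks, drops redundant gotos, decides
     which branches fit in a signed 16-bit offset and which must be widened,
     and assigns final pcs; the second rewrites each chunk (back to front,
     since relocations are stored in reverse order), patching branch offsets
     and expanding the branches that need wide forms. */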
2669 
2670   /* First, figure out the actual locations of each block. */
2671   pc = 0;
2672   shrink = 0;
2673   for (block = state->blocks;  block != NULL;  block = block->next)
2674     {
2675       int block_size = block->v.chunk->size;
2676 
2677       block->pc = pc;
2678 
2679       /* Optimize GOTO L; L: by getting rid of the redundant goto.
2680 	 Assumes relocations are in reverse order. */
2681       reloc = block->u.relocations;
2682       while (reloc != NULL
2683 	     && reloc->kind == OPCODE_goto_w
2684 	     && reloc->label->pc == block->next->pc
2685 	     && reloc->offset + 2 == block_size)
2686 	{
2687 	  reloc = reloc->next;
2688 	  block->u.relocations = reloc;
2689 	  block->v.chunk->size -= 3;
2690 	  block_size -= 3;
2691 	  shrink += 3;
2692 	}
2693 
2694       /* Optimize GOTO L; ... L: GOTO X by changing the first goto to
2695 	 jump directly to X.  We're careful here to avoid an infinite
2696 	 loop if the `goto's themselves form one.  We do this
2697 	 optimization because we can generate a goto-to-goto for some
2698 	 try/finally blocks.  */
2699       while (reloc != NULL
2700 	     && reloc->kind == OPCODE_goto_w
2701 	     && reloc->label != block
2702 	     && reloc->label->v.chunk->data != NULL
2703 	     && reloc->label->v.chunk->data[0] == OPCODE_goto)
2704 	{
2705 	  /* Find the reloc for the first instruction of the
2706 	     destination block.  */
2707 	  struct jcf_relocation *first_reloc;
2708 	  for (first_reloc = reloc->label->u.relocations;
2709 	       first_reloc;
2710 	       first_reloc = first_reloc->next)
2711 	    {
2712 	      if (first_reloc->offset == 1
2713 		  && first_reloc->kind == OPCODE_goto_w)
2714 		{
2715 		  reloc->label = first_reloc->label;
2716 		  break;
2717 		}
2718 	    }
2719 
2720 	  /* If we didn't do anything, exit the loop.  */
2721 	  if (first_reloc == NULL)
2722 	    break;
2723 	}
2724 
2725       for (reloc = block->u.relocations;  reloc != NULL;  reloc = reloc->next)
2726 	{
2727 	  if (reloc->kind == SWITCH_ALIGN_RELOC)
2728 	    {
2729 	      /* We assume this is the first relocation in this block,
2730 		 so we know its final pc. */
2731 	      int where = pc + reloc->offset;
2732 	      int pad = ((where + 3) & ~3) - where;
2733 	      block_size += pad;
2734 	    }
2735 	  else if (reloc->kind < -1 || reloc->kind > BLOCK_START_RELOC)
2736 	    {
2737 	      int delta = reloc->label->pc - (pc + reloc->offset - 1);
2738 	      int expand = reloc->kind > 0 ? 2 : 5;
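	      /* If the target is within a signed 16-bit offset the short
		 form suffices: mark the reloc as kind -1 (a plain 2-byte
		 offset) and add the bytes saved to SHRINK.  Otherwise the
		 block grows by EXPAND bytes: 2 to widen goto/jsr into
		 goto_w/jsr_w, 5 to add a goto_w after an inverted if<cond>. */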
2739 
2740 	      if (delta > 0)
2741 		delta -= shrink;
2742 	      if (delta >= -32768 && delta <= 32767)
2743 		{
2744 		  shrink += expand;
2745 		  reloc->kind = -1;
2746 		}
2747 	      else
2748 		block_size += expand;
2749 	    }
2750 	}
2751       pc += block_size;
2752     }
2753 
2754   for (block = state->blocks;  block != NULL;  block = block->next)
2755     {
2756       struct chunk *chunk = block->v.chunk;
2757       int old_size = chunk->size;
2758       int next_pc = block->next == NULL ? pc : block->next->pc;
2759       int new_size = next_pc - block->pc;
2760       unsigned char *new_ptr;
2761       unsigned char *old_buffer = chunk->data;
2762       unsigned char *old_ptr = old_buffer + old_size;
2763       if (new_size != old_size)
2764 	{
2765 	  chunk->data = (unsigned char *)
2766 	    obstack_alloc (state->chunk_obstack, new_size);
2767 	  chunk->size = new_size;
2768 	}
2769       new_ptr = chunk->data + new_size;
2770 
2771       /* We do the relocations from back to front, because
2772 	 the relocations are in reverse order. */
2773       for (reloc = block->u.relocations; ; reloc = reloc->next)
2774 	{
2775 	  /* new_ptr and old_ptr point into the new and old buffers,
2776 	     respectively.  (If no relocations cause the buffer to
2777 	     grow, the buffer will be the same buffer, and new_ptr==old_ptr.)
2778 	     The bytes at higher address have been copied and relocations
2779 	     handled; those at lower addresses remain to process. */
2780 
2781 	  /* START is the lowest old index of the piece to be copied with no
2782 	     relocation, i.e. just past the piece that does need relocation. */
2783 	  int start = reloc == NULL ? 0
2784 	    : reloc->kind == SWITCH_ALIGN_RELOC ? reloc->offset
2785 	    : (reloc->kind == 0 || reloc->kind == BLOCK_START_RELOC)
2786 	    ? reloc->offset + 4
2787 	    : reloc->offset + 2;
2788 	  int32 value;
2789 	  int new_offset;
2790 	  int n = (old_ptr - old_buffer) - start;
2791 	  new_ptr -= n;
2792 	  old_ptr -= n;
2793 	  if (n > 0)
2794 	    memcpy (new_ptr, old_ptr, n);
2795 	  if (old_ptr == old_buffer)
2796 	    break;
2797 
2798 	  new_offset = new_ptr - chunk->data;
2799 	  new_offset -= (reloc->kind == -1 ? 2 : 4);
2800 	  if (reloc->kind == 0)
2801 	    {
2802 	      old_ptr -= 4;
2803 	      value = GET_u4 (old_ptr);
2804 	    }
2805 	  else if (reloc->kind == BLOCK_START_RELOC)
2806 	    {
2807 	      old_ptr -= 4;
2808 	      value = 0;
2809 	      new_offset = 0;
2810 	    }
2811 	  else if (reloc->kind == SWITCH_ALIGN_RELOC)
2812 	    {
2813 	      int where = block->pc + reloc->offset;
2814 	      int pad = ((where + 3) & ~3) - where;
2815 	      while (--pad >= 0)
2816 		*--new_ptr = 0;
2817 	      continue;
2818 	    }
2819 	  else
2820 	    {
2821 	      old_ptr -= 2;
2822 	      value = GET_u2 (old_ptr);
2823 	    }
2824 	  value += reloc->label->pc - (block->pc + new_offset);
2825 	  *--new_ptr = (unsigned char) value;  value >>= 8;
2826 	  *--new_ptr = (unsigned char) value;  value >>= 8;
2827 	  if (reloc->kind != -1)
2828 	    {
2829 	      *--new_ptr = (unsigned char) value;  value >>= 8;
2830 	      *--new_ptr = (unsigned char) value;
2831 	    }
2832 	  if (reloc->kind > BLOCK_START_RELOC)
2833 	    {
2834 	      /* Convert: OP TARGET to: OP_w TARGET;  (OP is goto or jsr). */
2835 	      --old_ptr;
2836 	      *--new_ptr = reloc->kind;
2837 	    }
2838 	  else if (reloc->kind < -1)
2839 	    {
2840 	      /* Convert: ifCOND TARGET to: ifNCOND T; goto_w TARGET; T: */
2841 	      --old_ptr;
2842 	      *--new_ptr = OPCODE_goto_w;
2843 	      *--new_ptr = 3;
2844 	      *--new_ptr = 0;
2845 	      *--new_ptr = - reloc->kind;
2846 	    }
2847 	}
2848       if (new_ptr != chunk->data)
2849 	abort ();
2850     }
2851   state->code_length = pc;
2852 }
2853 
2854 static void
2855 init_jcf_state (state, work)
2856      struct jcf_partial *state;
2857      struct obstack *work;
2858 {
2859   state->chunk_obstack = work;
2860   state->first = state->chunk = NULL;
2861   CPOOL_INIT (&state->cpool);
2862   BUFFER_INIT (&state->localvars);
2863   BUFFER_INIT (&state->bytecode);
2864 }
2865 
2866 static void
2867 init_jcf_method (state, method)
2868      struct jcf_partial *state;
2869      tree method;
2870 {
2871   state->current_method = method;
2872   state->blocks = state->last_block = NULL;
2873   state->linenumber_count = 0;
2874   state->first_lvar = state->last_lvar = NULL;
2875   state->lvar_count = 0;
2876   state->labeled_blocks = NULL;
2877   state->code_length = 0;
2878   BUFFER_RESET (&state->bytecode);
2879   BUFFER_RESET (&state->localvars);
2880   state->code_SP = 0;
2881   state->code_SP_max = 0;
2882   state->handlers = NULL;
2883   state->last_handler = NULL;
2884   state->num_handlers = 0;
2885   state->num_finalizers = 0;
2886   state->return_value_decl = NULL_TREE;
2887 }
2888 
2889 static void
2890 release_jcf_state (state)
2891      struct jcf_partial *state;
2892 {
2893   CPOOL_FINISH (&state->cpool);
2894   obstack_free (state->chunk_obstack, state->first);
2895 }
2896 
2897 /* Generate and return a list of chunks containing the class CLAS
2898    in the .class file representation.  The list can be written to a
2899    .class file using write_chunks.  Allocate chunks from obstack WORK. */
2900 
2901 static GTY(()) tree SourceFile_node;
2902 static struct chunk *
2903 generate_classfile (clas, state)
2904      tree clas;
2905      struct jcf_partial *state;
2906 {
2907   struct chunk *cpool_chunk;
2908   const char *source_file, *s;
2909   char *ptr;
2910   int i;
2911   char *fields_count_ptr;
2912   int fields_count = 0;
2913   char *methods_count_ptr;
2914   int methods_count = 0;
2915   tree part;
2916   int total_supers
2917     = clas == object_type_node ? 0
2918     : TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (clas));
2919 
2920   ptr = append_chunk (NULL, 8, state);
2921   PUT4 (0xCafeBabe);  /* Magic number */
2922   PUT2 (3);  /* Minor version */
2923   PUT2 (45);  /* Major version */
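  /* 45.3 is the original JDK 1.0/1.1 class file version; later JVMs
     still accept it. */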
2924 
2925   append_chunk (NULL, 0, state);
2926   cpool_chunk = state->chunk;
2927 
2928   /* Next allocate the chunk containing access_flags through fields_count. */
2929   if (clas == object_type_node)
2930     i = 10;
2931   else
2932     i = 8 + 2 * total_supers;
2933   ptr = append_chunk (NULL, i, state);
2934   i = get_access_flags (TYPE_NAME (clas));
2935   if (! (i & ACC_INTERFACE))
2936     i |= ACC_SUPER;
2937   PUT2 (i); /* access_flags */
2938   i = find_class_constant (&state->cpool, clas);  PUT2 (i);  /* this_class */
2939   if (clas == object_type_node)
2940     {
2941       PUT2(0);  /* super_class */
2942       PUT2(0);  /* interfaces_count */
2943     }
2944   else
2945     {
2946       tree basetypes = TYPE_BINFO_BASETYPES (clas);
2947       tree base = BINFO_TYPE (TREE_VEC_ELT (basetypes, 0));
2948       int j = find_class_constant (&state->cpool, base);
2949       PUT2 (j);  /* super_class */
2950       PUT2 (total_supers - 1);  /* interfaces_count */
2951       for (i = 1;  i < total_supers;  i++)
2952 	{
2953 	  base = BINFO_TYPE (TREE_VEC_ELT (basetypes, i));
2954 	  j = find_class_constant (&state->cpool, base);
2955 	  PUT2 (j);
2956 	}
2957     }
2958   fields_count_ptr = ptr;
2959 
2960   for (part = TYPE_FIELDS (clas);  part;  part = TREE_CHAIN (part))
2961     {
2962       int have_value, attr_count = 0;
2963       if (DECL_NAME (part) == NULL_TREE || DECL_ARTIFICIAL (part))
2964 	continue;
2965       ptr = append_chunk (NULL, 8, state);
2966       i = get_access_flags (part);  PUT2 (i);
2967       i = find_utf8_constant (&state->cpool, DECL_NAME (part));  PUT2 (i);
2968       i = find_utf8_constant (&state->cpool,
2969 			      build_java_signature (TREE_TYPE (part)));
2970       PUT2(i);
2971       have_value = DECL_INITIAL (part) != NULL_TREE
2972 	&& FIELD_STATIC (part) && CONSTANT_VALUE_P (DECL_INITIAL (part))
2973 	&& FIELD_FINAL (part)
2974 	&& (JPRIMITIVE_TYPE_P (TREE_TYPE (part))
2975 	    || TREE_TYPE (part) == string_ptr_type_node);
2976       if (have_value)
2977 	attr_count++;
2978 
2979       if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part) || FIELD_SYNTHETIC (part))
2980 	attr_count++;
2981 
2982       PUT2 (attr_count);  /* attributes_count */
2983       if (have_value)
2984 	{
2985 	  tree init = DECL_INITIAL (part);
2986 	  static tree ConstantValue_node = NULL_TREE;
2987 	  if (TREE_TYPE (part) != TREE_TYPE (init))
2988 	    fatal_error ("field initializer type mismatch");
2989 	  ptr = append_chunk (NULL, 8, state);
2990 	  if (ConstantValue_node == NULL_TREE)
2991 	    ConstantValue_node = get_identifier ("ConstantValue");
2992 	  i = find_utf8_constant (&state->cpool, ConstantValue_node);
2993 	  PUT2 (i);  /* attribute_name_index */
2994 	  PUT4 (2); /* attribute_length */
2995 	  i = find_constant_index (init, state);  PUT2 (i);
2996 	}
2997       /* Emit the "Synthetic" attribute for val$<x> and this$<n>
2998 	 fields and other fields which need it.  */
2999       if (FIELD_THISN (part) || FIELD_LOCAL_ALIAS (part)
3000 	  || FIELD_SYNTHETIC (part))
3001 	ptr = append_synthetic_attribute (state);
3002       fields_count++;
3003     }
3004   ptr = fields_count_ptr;  UNSAFE_PUT2 (fields_count);
3005 
3006   ptr = methods_count_ptr = append_chunk (NULL, 2, state);
3007   PUT2 (0);
3008 
3009   for (part = TYPE_METHODS (clas);  part;  part = TREE_CHAIN (part))
3010     {
3011       struct jcf_block *block;
3012       tree function_body = DECL_FUNCTION_BODY (part);
3013       tree body = function_body == NULL_TREE ? NULL_TREE
3014 	: BLOCK_EXPR_BODY (function_body);
3015       tree name = DECL_CONSTRUCTOR_P (part) ? init_identifier_node
3016 	: DECL_NAME (part);
3017       tree type = TREE_TYPE (part);
3018       tree save_function = current_function_decl;
3019       int synthetic_p = 0;
3020       current_function_decl = part;
3021       ptr = append_chunk (NULL, 8, state);
3022       i = get_access_flags (part);  PUT2 (i);
3023       i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
3024       i = find_utf8_constant (&state->cpool, build_java_signature (type));
3025       PUT2 (i);
3026       i = (body != NULL_TREE) + (DECL_FUNCTION_THROWS (part) != NULL_TREE);
3027 
3028       /* Make room for the Synthetic attribute (of zero length).  */
3029       if (DECL_FINIT_P (part)
3030 	  || DECL_INSTINIT_P (part)
3031 	  || OUTER_FIELD_ACCESS_IDENTIFIER_P (DECL_NAME (part))
3032 	  || TYPE_DOT_CLASS (clas) == part)
3033 	{
3034 	  i++;
3035 	  synthetic_p = 1;
3036 	}
3037 
3038       PUT2 (i);   /* attributes_count */
3039 
3040       if (synthetic_p)
3041 	ptr = append_synthetic_attribute (state);
3042 
3043       if (body != NULL_TREE)
3044 	{
3045 	  int code_attributes_count = 0;
3046 	  static tree Code_node = NULL_TREE;
3047 	  tree t;
3048 	  char *attr_len_ptr;
3049 	  struct jcf_handler *handler;
3050 	  if (Code_node == NULL_TREE)
3051 	    Code_node = get_identifier ("Code");
3052 	  ptr = append_chunk (NULL, 14, state);
3053 	  i = find_utf8_constant (&state->cpool, Code_node);  PUT2 (i);
3054 	  attr_len_ptr = ptr;
3055 	  init_jcf_method (state, part);
3056 	  get_jcf_label_here (state);  /* Force a first block. */
3057 	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
3058 	    localvar_alloc (t, state);
3059 	  state->num_jsrs = 0;
3060 	  generate_bytecode_insns (body, IGNORE_TARGET, state);
3061 	  if (CAN_COMPLETE_NORMALLY (body))
3062 	    {
3063 	      if (TREE_CODE (TREE_TYPE (type)) != VOID_TYPE)
3064 		abort();
3065 	      RESERVE (1);
3066 	      OP1 (OPCODE_return);
3067 	    }
3068 	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
3069 	    maybe_free_localvar (t, state, 1);
3070 	  if (state->return_value_decl != NULL_TREE)
3071 	    maybe_free_localvar (state->return_value_decl, state, 1);
3072 	  finish_jcf_block (state);
3073 	  perform_relocations (state);
3074 
3075 	  ptr = attr_len_ptr;
3076 	  i = 8 + state->code_length + 4 + 8 * state->num_handlers;
3077 	  if (state->linenumber_count > 0)
3078 	    {
3079 	      code_attributes_count++;
3080 	      i += 8 + 4 * state->linenumber_count;
3081 	    }
3082 	  if (state->lvar_count > 0)
3083 	    {
3084 	      code_attributes_count++;
3085 	      i += 8 + 10 * state->lvar_count;
3086 	    }
3087 	  UNSAFE_PUT4 (i); /* attribute_length */
3088 	  UNSAFE_PUT2 (state->code_SP_max);  /* max_stack */
3089 	  UNSAFE_PUT2 (localvar_max);  /* max_locals */
3090 	  UNSAFE_PUT4 (state->code_length);
3091 
3092 	  /* Emit the exception table. */
3093 	  ptr = append_chunk (NULL, 2 + 8 * state->num_handlers, state);
3094 	  PUT2 (state->num_handlers);  /* exception_table_length */
3095 	  handler = state->handlers;
3096 	  for (; handler != NULL;  handler = handler->next)
3097 	    {
3098 	      int type_index;
3099 	      PUT2 (handler->start_label->pc);
3100 	      PUT2 (handler->end_label->pc);
3101 	      PUT2 (handler->handler_label->pc);
3102 	      if (handler->type == NULL_TREE)
3103 		type_index = 0;
3104 	      else
3105 		type_index = find_class_constant (&state->cpool,
3106 						  handler->type);
3107 	      PUT2 (type_index);
3108 	    }
3109 
3110 	  ptr = append_chunk (NULL, 2, state);
3111 	  PUT2 (code_attributes_count);
3112 
3113 	  /* Write the LineNumberTable attribute. */
3114 	  if (state->linenumber_count > 0)
3115 	    {
3116 	      static tree LineNumberTable_node = NULL_TREE;
3117 	      ptr = append_chunk (NULL,
3118 				  8 + 4 * state->linenumber_count, state);
3119 	      if (LineNumberTable_node == NULL_TREE)
3120 		LineNumberTable_node = get_identifier ("LineNumberTable");
3121 	      i = find_utf8_constant (&state->cpool, LineNumberTable_node);
3122 	      PUT2 (i);  /* attribute_name_index */
3123 	      i = 2+4*state->linenumber_count;  PUT4(i); /* attribute_length */
3124 	      i = state->linenumber_count;  PUT2 (i);
3125 	      for (block = state->blocks;  block != NULL;  block = block->next)
3126 		{
3127 		  int line = block->linenumber;
3128 		  if (line > 0)
3129 		    {
3130 		      PUT2 (block->pc);
3131 		      PUT2 (line);
3132 		    }
3133 		}
3134 	    }
3135 
3136 	  /* Write the LocalVariableTable attribute. */
3137 	  if (state->lvar_count > 0)
3138 	    {
3139 	      static tree LocalVariableTable_node = NULL_TREE;
3140 	      struct localvar_info *lvar = state->first_lvar;
3141 	      ptr = append_chunk (NULL, 8 + 10 * state->lvar_count, state);
3142 	      if (LocalVariableTable_node == NULL_TREE)
3143 		LocalVariableTable_node = get_identifier("LocalVariableTable");
3144 	      i = find_utf8_constant (&state->cpool, LocalVariableTable_node);
3145 	      PUT2 (i);  /* attribute_name_index */
3146 	      i = 2 + 10 * state->lvar_count;  PUT4 (i); /* attribute_length */
3147 	      i = state->lvar_count;  PUT2 (i);
3148 	      for ( ; lvar != NULL;  lvar = lvar->next)
3149 		{
3150 		  tree name = DECL_NAME (lvar->decl);
3151 		  tree sig = build_java_signature (TREE_TYPE (lvar->decl));
3152 		  i = lvar->start_label->pc;  PUT2 (i);
3153 		  i = lvar->end_label->pc - i;  PUT2 (i);
3154 		  i = find_utf8_constant (&state->cpool, name);  PUT2 (i);
3155 		  i = find_utf8_constant (&state->cpool, sig);  PUT2 (i);
3156 		  i = DECL_LOCAL_INDEX (lvar->decl);  PUT2 (i);
3157 		}
3158 	    }
3159 	}
3160       if (DECL_FUNCTION_THROWS (part) != NULL_TREE)
3161 	{
3162 	  tree t = DECL_FUNCTION_THROWS (part);
3163 	  int throws_count = list_length (t);
3164 	  static tree Exceptions_node = NULL_TREE;
3165 	  if (Exceptions_node == NULL_TREE)
3166 	    Exceptions_node = get_identifier ("Exceptions");
3167 	  ptr = append_chunk (NULL, 8 + 2 * throws_count, state);
3168 	  i = find_utf8_constant (&state->cpool, Exceptions_node);
3169 	  PUT2 (i);  /* attribute_name_index */
3170 	  i = 2 + 2 * throws_count;  PUT4(i); /* attribute_length */
3171 	  i = throws_count;  PUT2 (i);
3172 	  for (;  t != NULL_TREE;  t = TREE_CHAIN (t))
3173 	    {
3174 	      i = find_class_constant (&state->cpool, TREE_VALUE (t));
3175 	      PUT2 (i);
3176 	    }
3177 	}
3178       methods_count++;
3179       current_function_decl = save_function;
3180     }
3181   ptr = methods_count_ptr;  UNSAFE_PUT2 (methods_count);
3182 
3183   source_file = DECL_SOURCE_FILE (TYPE_NAME (clas));
3184   for (s = source_file; ; s++)
3185     {
3186       char ch = *s;
3187       if (ch == '\0')
3188 	break;
3189       if (ch == '/' || ch == '\\')
3190 	source_file = s+1;
3191     }
3192   ptr = append_chunk (NULL, 10, state);
3193 
3194   i = 1;		/* Source file always exists as an attribute */
3195   if (INNER_CLASS_TYPE_P (clas) || DECL_INNER_CLASS_LIST (TYPE_NAME (clas)))
3196     i++;
3197   if (clas == object_type_node)
3198     i++;
3199   PUT2 (i);			/* attributes_count */
3200 
3201   /* generate the SourceFile attribute. */
3202   if (SourceFile_node == NULL_TREE)
3203     {
3204       SourceFile_node = get_identifier ("SourceFile");
3205     }
3206 
3207   i = find_utf8_constant (&state->cpool, SourceFile_node);
3208   PUT2 (i);  /* attribute_name_index */
3209   PUT4 (2);
3210   i = find_utf8_constant (&state->cpool, get_identifier (source_file));
3211   PUT2 (i);
3212   append_gcj_attribute (state, clas);
3213   append_innerclasses_attribute (state, clas);
3214 
3215   /* Now finally generate the contents of the constant pool chunk. */
3216   i = count_constant_pool_bytes (&state->cpool);
3217   ptr = obstack_alloc (state->chunk_obstack, i);
3218   cpool_chunk->data = ptr;
3219   cpool_chunk->size = i;
3220   write_constant_pool (&state->cpool, ptr, i);
3221   return state->first;
3222 }
3223 
3224 static GTY(()) tree Synthetic_node;
3225 static unsigned char *
3226 append_synthetic_attribute (state)
3227      struct jcf_partial *state;
3228 {
3229   unsigned char *ptr = append_chunk (NULL, 6, state);
3230   int i;
3231 
3232   if (Synthetic_node == NULL_TREE)
3233     {
3234       Synthetic_node = get_identifier ("Synthetic");
3235     }
3236   i = find_utf8_constant (&state->cpool, Synthetic_node);
3237   PUT2 (i);		/* Attribute string index */
3238   PUT4 (0);		/* Attribute length */
3239 
3240   return ptr;
3241 }
3242 
3243 static void
3244 append_gcj_attribute (state, class)
3245      struct jcf_partial *state;
3246      tree class;
3247 {
3248   unsigned char *ptr;
3249   int i;
3250 
3251   if (class != object_type_node)
3252     return;
3253 
3254   ptr = append_chunk (NULL, 6, state); /* 2+4 */
3255   i = find_utf8_constant (&state->cpool,
3256 			  get_identifier ("gnu.gcj.gcj-compiled"));
3257   PUT2 (i);			/* Attribute string index */
3258   PUT4 (0);			/* Attribute length */
3259 }
3260 
3261 static tree InnerClasses_node;
3262 static void
3263 append_innerclasses_attribute (state, class)
3264      struct jcf_partial *state;
3265      tree class;
3266 {
3267   tree orig_decl = TYPE_NAME (class);
3268   tree current, decl;
3269   int length = 0, i;
3270   unsigned char *ptr, *length_marker, *number_marker;
3271 
3272   if (!INNER_CLASS_TYPE_P (class) && !DECL_INNER_CLASS_LIST (orig_decl))
3273     return;
3274 
3275   ptr = append_chunk (NULL, 8, state); /* 2+4+2 */
3276 
3277   if (InnerClasses_node == NULL_TREE)
3278     {
3279       InnerClasses_node = get_identifier ("InnerClasses");
3280     }
3281   i = find_utf8_constant (&state->cpool, InnerClasses_node);
3282   PUT2 (i);
3283   length_marker = ptr; PUT4 (0); /* length, to be later patched */
3284   number_marker = ptr; PUT2 (0); /* number of classes, to be later patched */
3285 
3286   /* Generate the entries for all inner classes visible from the one we
3287      process: the class itself, its enclosing classes, and the inner
3288      classes it declares. */
3288   while (class && INNER_CLASS_TYPE_P (class))
3289     {
3290       const char *n;
3291 
3292       decl = TYPE_NAME (class);
3293       n = IDENTIFIER_POINTER (DECL_NAME (decl)) +
3294 	IDENTIFIER_LENGTH (DECL_NAME (decl));
3295 
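      /* Mangled names of nested classes contain a `$' before the simple
	 inner-class name (e.g. "Outer$Inner"), so back up to just past
	 the last `$' to recover the simple name.  */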
3296       while (n[-1] != '$')
3297 	n--;
3298       append_innerclasses_attribute_entry (state, decl, get_identifier (n));
3299       length++;
3300 
3301       class = TREE_TYPE (DECL_CONTEXT (TYPE_NAME (class)));
3302     }
3303 
3304   decl = orig_decl;
3305   for (current = DECL_INNER_CLASS_LIST (decl);
3306        current; current = TREE_CHAIN (current))
3307     {
3308       append_innerclasses_attribute_entry (state, TREE_PURPOSE (current),
3309 					   TREE_VALUE (current));
3310       length++;
3311     }
3312 
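  /* Each classes[] entry written above is 8 bytes (four u2 fields), and
     the attribute body also starts with a u2 number_of_classes, so the
     attribute_length is 8*length + 2.  */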
3313   ptr = length_marker; PUT4 (8*length+2);
3314   ptr = number_marker; PUT2 (length);
3315 }
3316 
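/* Append one classes[] entry of the InnerClasses attribute: the
   inner_class_info_index (icii), outer_class_info_index (ocii),
   inner_name_index (ini) and inner_class_access_flags (icaf), each a u2,
   8 bytes in all.  */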
3317 static void
3318 append_innerclasses_attribute_entry (state, decl, name)
3319      struct jcf_partial *state;
3320      tree decl, name;
3321 {
3322   int icii, icaf;
3323   int ocii = 0, ini = 0;
3324   unsigned char *ptr = append_chunk (NULL, 8, state);
3325 
3326   icii = find_class_constant (&state->cpool, TREE_TYPE (decl));
3327 
3328   /* Sun's implementation seems to set ocii to 0 for inner classes
3329      (which aren't considered members of the class they're defined
3330      in).  The specification says that if the class is anonymous,
3331      inner_name_index must be zero.  */
3332   if (!ANONYMOUS_CLASS_P (TREE_TYPE (decl)))
3333     {
3334       ocii = find_class_constant (&state->cpool,
3335 				  TREE_TYPE (DECL_CONTEXT (decl)));
3336       ini = find_utf8_constant (&state->cpool, name);
3337     }
3338   icaf = get_access_flags (decl);
3339 
3340   PUT2 (icii); PUT2 (ocii); PUT2 (ini);  PUT2 (icaf);
3341 }
3342 
3343 static char *
3344 make_class_file_name (clas)
3345      tree clas;
3346 {
3347   const char *dname, *cname, *slash;
3348   char *r;
3349   struct stat sb;
3350   char sep;
3351 
3352   cname = IDENTIFIER_POINTER (identifier_subst (DECL_NAME (TYPE_NAME (clas)),
3353 						"", '.', DIR_SEPARATOR,
3354 						".class"));
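  /* For example, a class named foo.bar.Baz yields the name
     "foo/bar/Baz.class" (with DIR_SEPARATOR as the separator).  */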
3355   if (jcf_write_base_directory == NULL)
3356     {
3357       /* Make sure we put the class file into the .java file's
3358 	 directory, and not into some subdirectory thereof.  */
3359       char *t;
3360       dname = DECL_SOURCE_FILE (TYPE_NAME (clas));
3361       slash = strrchr (dname, DIR_SEPARATOR);
3362 #ifdef DIR_SEPARATOR_2
3363       if (! slash)
3364         slash = strrchr (dname, DIR_SEPARATOR_2);
3365 #endif
3366       if (! slash)
3367         {
3368           dname = ".";
3369           slash = dname + 1;
3370           sep = DIR_SEPARATOR;
3371         }
3372       else
3373         sep = *slash;
3374 
3375       t = strrchr (cname, DIR_SEPARATOR);
3376       if (t)
3377 	cname = t + 1;
3378     }
3379   else
3380     {
3381       char *s;
3382 
3383       dname = jcf_write_base_directory;
3384 
3385       s = strrchr (dname, DIR_SEPARATOR);
3386 #ifdef DIR_SEPARATOR_2
3387       if (! s)
3388         s = strrchr (dname, DIR_SEPARATOR_2);
3389 #endif
3390       if (s)
3391         sep = *s;
3392       else
3393         sep = DIR_SEPARATOR;
3394 
3395       slash = dname + strlen (dname);
3396     }
3397 
3398   r = xmalloc (slash - dname + strlen (cname) + 2);
3399   strncpy (r, dname, slash - dname);
3400   r[slash - dname] = sep;
3401   strcpy (&r[slash - dname + 1], cname);
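  /* For example, with a base directory of "/tmp" and class foo.bar.Baz,
     r is now "/tmp/foo/bar/Baz.class".  */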
3402 
3403   /* We try to make new directories when we need them.  We only do
3404      this for directories which "might not" exist.  For instance, we
3405      assume the `-d' directory exists, but we don't assume that any
3406      subdirectory below it exists.  It might be worthwhile to keep
3407      track of which directories we've created to avoid gratuitous
3408      stat()s.  */
3409   dname = r + (slash - dname) + 1;
3410   while (1)
3411     {
3412       char *s = strchr (dname, sep);
3413       if (s == NULL)
3414 	break;
3415       *s = '\0';
3416       if (stat (r, &sb) == -1
3417 	  /* Try to make it.  */
3418 	  && mkdir (r, 0755) == -1)
3419 	fatal_io_error ("can't create directory %s", r);
3420 
3421       *s = sep;
3422       /* Skip consecutive separators.  */
3423       for (dname = s + 1; *dname && *dname == sep; ++dname)
3424 	;
3425     }
3426 
3427   return r;
3428 }
3429 
3430 /* Write out the contents of a class (RECORD_TYPE) CLAS, as a .class file.
3431    The output .class file name is make_class_file_name(CLAS). */
3432 
3433 void
3434 write_classfile (clas)
3435      tree clas;
3436 {
3437   struct obstack *work = &temporary_obstack;
3438   struct jcf_partial state[1];
3439   char *class_file_name = make_class_file_name (clas);
3440   struct chunk *chunks;
3441 
3442   if (class_file_name != NULL)
3443     {
3444       FILE *stream;
3445       char *temporary_file_name;
3446 
3447       /* The .class file is initially written to a ".tmp" file so that
3448 	 if multiple instances of the compiler are running at once
3449 	 they do not see partially formed class files. */
3450       temporary_file_name = concat (class_file_name, ".tmp", NULL);
3451       stream = fopen (temporary_file_name, "wb");
3452       if (stream == NULL)
3453 	fatal_io_error ("can't open %s for writing", temporary_file_name);
3454 
3455       jcf_dependency_add_target (class_file_name);
3456       init_jcf_state (state, work);
3457       chunks = generate_classfile (clas, state);
3458       write_chunks (stream, chunks);
3459       if (fclose (stream))
3460 	fatal_io_error ("error closing %s", temporary_file_name);
3461 
3462       /* If a file named by the string pointed to by `new' exists
3463          prior to the call to the `rename' function, the behaviour
3464          is implementation-defined.  ISO 9899-1990 7.9.4.2.
3465 
3466          For example, on Win32 with MSVCRT, it is an error. */
3467 
3468       unlink (class_file_name);
3469 
3470       if (rename (temporary_file_name, class_file_name) == -1)
3471 	{
3472 	  remove (temporary_file_name);
3473 	  fatal_io_error ("can't create %s", class_file_name);
3474 	}
3475       free (temporary_file_name);
3476       free (class_file_name);
3477     }
3478   release_jcf_state (state);
3479 }
3480 
3481 /* TODO:
3482    string concatenation
3483    synchronized statement
3484    */
3485 
3486 #include "gt-java-jcf-write.h"
3487