// boehm.cc - interface between libjava and Boehm GC.

/* Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003  Free Software Foundation

   This file is part of libgcj.

This software is copyrighted work licensed under the terms of the
Libgcj License.  Please consult the file "LIBGCJ_LICENSE" for
details.  */

#include <config.h>

#include <stdio.h>
#include <limits.h>

#include <jvm.h>
#include <gcj/cni.h>

#include <java/lang/Class.h>
#include <java/lang/reflect/Modifier.h>
#include <java-interp.h>

// More nastiness: the GC wants to define TRUE and FALSE.  We don't
// need the Java definitions (themselves a hack), so we undefine them.
#undef TRUE
#undef FALSE

extern "C"
{
#include <private/gc_pmark.h>
#include <gc_gcj.h>

#ifdef THREAD_LOCAL_ALLOC
# define GC_REDIRECT_TO_LOCAL
# include <gc_local_alloc.h>
#endif

  // These aren't declared in any Boehm GC header.
  void GC_finalize_all (void);
  ptr_t GC_debug_generic_malloc (size_t size, int k, GC_EXTRA_PARAMS);
};

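// MAYBE_MARK wraps the collector's GC_MARK_AND_PUSH macro: if Obj looks
// like a pointer into the GC heap, it is pushed onto the mark stack
// (advancing Top, which must stay below Limit); otherwise it is ignored.
// The Exit label argument is unused by this definition and appears to be
// retained only for compatibility with earlier expansions of the macro.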
#define MAYBE_MARK(Obj, Top, Limit, Source, Exit)  \
	Top=GC_MARK_AND_PUSH((GC_PTR)Obj, Top, Limit, (GC_PTR *)Source)

// `kind' index used when allocating Java arrays.
static int array_kind_x;

// Freelist used for Java arrays.
static ptr_t *array_free_list;

// Lock used to protect access to Boehm's GC_enable/GC_disable functions.
static _Jv_Mutex_t disable_gc_mutex;



// This is called by the GC during the mark phase.  It marks a Java
// object.  We use `void *' arguments and return, and not what the
// Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkObj (void *addr, void *msp, void *msl, void * /* env */)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobject obj = (jobject) addr;

  // FIXME: if env is 1, this object was allocated through the debug
  // interface, and addr points to the beginning of the debug header.
  // In that case, we should really add the size of the header to addr.

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // The object might not yet have its vtable set, or it might
  // really be an object on the freelist.  In either case, the vtable slot
  // will either be 0, or it will point to a cleared object.
  // This assumes Java objects have size at least 3 words,
  // including the header.   But this should remain true, since this
  // should only be used with debugging allocation or with large objects.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
    // Every object has a sync_info pointer.
    p = (ptr_t) obj->sync_info;
    MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, obj, o2label);

  if (__builtin_expect (klass == &java::lang::Class::class$, false))
    {
      // Currently we allocate some of the memory referenced from class objects
      // as pointerfree memory, and then mark it more intelligently here.
      // We ensure that the ClassClass mark descriptor forces invocation of
      // this procedure.
      // Correctness of this is subtle, but it looks OK to me for now.  For the incremental
      // collector, we need to make sure that the class object is written whenever
      // any of the subobjects are altered and may need rescanning.  This may be tricky
      // during construction, and this may not be the right way to do this with
      // incremental collection.
      // If we overflow the mark stack, we will rescan the class object, so we should
      // be OK.  The same applies if we redo the mark phase because win32 unmapped part
      // of our root set.		- HB
      jclass c = (jclass) addr;

      p = (ptr_t) c->name;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c3label);
      p = (ptr_t) c->superclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c4label);
      for (int i = 0; i < c->constants.size; ++i)
	{
	  /* FIXME: We could make this more precise by using the tags -KKT */
	  p = (ptr_t) c->constants.data[i].p;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5label);
	}

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
	{
	  p = (ptr_t) c->constants.tags;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5alabel);
	  p = (ptr_t) c->constants.data;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5blabel);
	  p = (ptr_t) c->vtable;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c5clabel);
	}
#endif

      // If the class is an array, then the methods field holds a
      // pointer to the element class.  If the class is primitive,
      // then the methods field holds a pointer to the array class.
      p = (ptr_t) c->methods;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c6label);

      // The vtable might have been set, but the rest of the class
      // could still be uninitialized.  If this is the case, then
      // c.isArray will SEGV.  We check for this, and if it is the
      // case we just return.
      if (__builtin_expect (c->name == NULL, false))
	return mark_stack_ptr;

      if (! c->isArray() && ! c->isPrimitive())
	{
	  // Scan each method in the cases where `methods' really
	  // points to a methods structure.
	  for (int i = 0; i < c->method_count; ++i)
	    {
	      p = (ptr_t) c->methods[i].name;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
			     cm1label);
	      p = (ptr_t) c->methods[i].signature;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
			     cm2label);
	    }
	}

      // Mark all the fields.
      p = (ptr_t) c->fields;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8label);
      for (int i = 0; i < c->field_count; ++i)
	{
	  _Jv_Field* field = &c->fields[i];

#ifndef COMPACT_FIELDS
	  p = (ptr_t) field->name;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8alabel);
#endif
	  p = (ptr_t) field->type;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8blabel);

	  // For the interpreter, we also need to mark the memory
	  // containing static members.
	  if ((field->flags & java::lang::reflect::Modifier::STATIC))
	    {
	      p = (ptr_t) field->u.addr;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c8clabel);

	      // Also, if the static member is a reference, mark the
	      // value it points to.  We check isResolved since marking
	      // can happen before memory is allocated for static
	      // members.
	      if (JvFieldIsRef (field) && field->isResolved())
		{
		  jobject val = *(jobject*) field->u.addr;
		  p = (ptr_t) val;
		  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
			      c, c8elabel);
		}
	    }
	}

      p = (ptr_t) c->vtable;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, c9label);
      p = (ptr_t) c->interfaces;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cAlabel);
      for (int i = 0; i < c->interface_count; ++i)
	{
	  p = (ptr_t) c->interfaces[i];
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cClabel);
	}
      p = (ptr_t) c->loader;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cBlabel);
      p = (ptr_t) c->arrayclass;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cDlabel);
      p = (ptr_t) c->protectionDomain;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cPlabel);
      p = (ptr_t) c->hack_signers;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c, cSlabel);

#ifdef INTERPRETER
      if (_Jv_IsInterpretedClass (c))
	{
	  _Jv_InterpClass* ic = (_Jv_InterpClass*) c;

	  p = (ptr_t) ic->interpreted_methods;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cElabel);

	  for (int i = 0; i < c->method_count; i++)
	    {
	      p = (ptr_t) ic->interpreted_methods[i];
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, \
			  cFlabel);

	      // Mark the direct-threaded code.
	      if ((c->methods[i].accflags
		   & java::lang::reflect::Modifier::NATIVE) == 0)
		{
		  _Jv_InterpMethod *im
		    = (_Jv_InterpMethod *) ic->interpreted_methods[i];
		  if (im)
		    {
		      p = (ptr_t) im->prepared;
		      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, \
				  cFlabel);
		    }
		}

	      // The interpreter installs a heap-allocated trampoline
	      // here, so we'll mark it.
	      p = (ptr_t) c->methods[i].ncode;
	      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, c,
			  cm3label);
	    }

	  p = (ptr_t) ic->field_initializers;
	  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, ic, cGlabel);

	}
#endif

    }
  else
    {
      // NOTE: each class only holds information about the class
      // itself.  So we must do the marking for the entire inheritance
      // tree in order to mark all fields.  FIXME: what about
      // interfaces?  We skip Object here, because Object only has a
      // sync_info, and we handled that earlier.
      // Note: occasionally `klass' can be null.  For instance, this
      // can happen if a GC occurs between the point where an object
      // is allocated and where the vtbl slot is set.
      while (klass && klass != &java::lang::Object::class$)
	{
	  jfieldID field = JvGetFirstInstanceField (klass);
	  jint max = JvNumInstanceFields (klass);

	  for (int i = 0; i < max; ++i)
	    {
	      if (JvFieldIsRef (field))
		{
		  jobject val = JvGetObjectField (obj, field);
		  p = (ptr_t) val;
		  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit,
			      obj, elabel);
		}
	      field = field->getNextField ();
	    }
	  klass = klass->getSuperclass();
	}
    }

  return mark_stack_ptr;
}

// This is called by the GC during the mark phase.  It marks a Java
// array (of objects).  We use `void *' arguments and return, and not
// what the Boehm GC wants, to avoid pollution in our headers.
void *
_Jv_MarkArray (void *addr, void *msp, void *msl, void * /*env*/)
{
  mse *mark_stack_ptr = (mse *) msp;
  mse *mark_stack_limit = (mse *) msl;
  jobjectArray array = (jobjectArray) addr;

  _Jv_VTable *dt = *(_Jv_VTable **) addr;
  // Assumes size >= 3 words.  That's currently true since arrays have
  // a vtable, sync pointer, and size.  If the sync pointer goes away,
  // we may need to round up the size.
  if (__builtin_expect (! dt || !(dt -> get_finalizer()), false))
    return mark_stack_ptr;
  jclass klass = dt->clas;
  ptr_t p;

# ifndef JV_HASH_SYNCHRONIZATION
    // Every object has a sync_info pointer.
    p = (ptr_t) array->sync_info;
    MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e1label);
# endif
  // Mark the object's class.
  p = (ptr_t) klass;
  MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, &(dt -> clas), o2label);

  for (int i = 0; i < JvGetArrayLength (array); ++i)
    {
      jobject obj = elements (array)[i];
      p = (ptr_t) obj;
      MAYBE_MARK (p, mark_stack_ptr, mark_stack_limit, array, e2label);
    }

  return mark_stack_ptr;
}

// Generate a GC marking descriptor for a class.
//
// We assume that the gcj mark proc has index 0.  This is a dubious assumption,
// since another one could be registered first.  But the compiler also
// knows this, so in that case everything else will break, too.
#define GCJ_DEFAULT_DESCR GC_MAKE_PROC(GC_GCJ_RESERVED_MARK_PROC_INDEX,0)
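// GCJ_DEFAULT_DESCR dispatches marking to mark procedure 0, which
// _Jv_InitGC below registers as _Jv_MarkObj via GC_init_gcj_malloc.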

void *
_Jv_BuildGCDescr(jclass self)
{
  jlong desc = 0;
  jint bits_per_word = CHAR_BIT * sizeof (void *);

  // Note: for now we only consider a bitmap mark descriptor.  We
  // could also handle the case where the first N fields of a type are
  // references.  However, this is not very likely to be used by many
  // classes, and it is easier to compute things this way.

  // The vtable pointer.
  desc |= 1ULL << (bits_per_word - 1);
#ifndef JV_HASH_SYNCHRONIZATION
  // The sync_info field.
  desc |= 1ULL << (bits_per_word - 2);
#endif

  for (jclass klass = self; klass != NULL; klass = klass->getSuperclass())
    {
      jfieldID field = JvGetFirstInstanceField(klass);
      int count = JvNumInstanceFields(klass);

      for (int i = 0; i < count; ++i)
	{
	  if (field->isRef())
	    {
	      unsigned int off = field->getOffset();
	      // If we run into a weird situation, we bail.
	      if (off % sizeof (void *) != 0)
		return (void *) (GCJ_DEFAULT_DESCR);
	      off /= sizeof (void *);
	      // If we find a field outside the range of our bitmap,
	      // fall back to procedure marker. The bottom 2 bits are
	      // reserved.
	      if (off >= bits_per_word - 2)
		return (void *) (GCJ_DEFAULT_DESCR);
	      desc |= 1ULL << (bits_per_word - off - 1);
	    }

	  field = field->getNextField();
	}
    }

  // For bitmap mark type, bottom bits are 01.
  desc |= 1;
  // Bogus warning avoidance (on many platforms).
  return (void *) (unsigned long) desc;
}
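
// Illustrative example (not used by the code): on a 64-bit target
// without JV_HASH_SYNCHRONIZATION, a class whose only reference field
// sits at word offset 2 ends up with the bitmap descriptor
//
//   desc = (1ULL << 63)   // vtable pointer
//        | (1ULL << 62)   // sync_info
//        | (1ULL << 61)   // reference field at word offset 2
//        | 1;             // low bits 01: bitmap descriptor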

// Allocate some space that is known to be pointer-free.
void *
_Jv_AllocBytes (jsize size)
{
  void *r = GC_MALLOC_ATOMIC (size);
  // We have to explicitly zero memory here, as the GC doesn't
  // guarantee that PTRFREE allocations are zeroed.  Note that we
  // don't have to do this for other allocation types because we set
  // the `ok_init' flag in the type descriptor.
  memset (r, 0, size);
  return r;
}

// Allocate space for a new Java array.
// Used only for arrays of objects.
void *
_Jv_AllocArray (jsize size, jclass klass)
{
  void *obj;
  const jsize min_heap_addr = 16*1024;
  // A heuristic.  If size is less than this value, the size
  // stored in the array can't possibly be misinterpreted as
  // a pointer.   Thus we lose nothing by scanning the object
  // completely conservatively, since no misidentification can
  // take place.
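  // (Illustration: a 100-element object array stores 100 in its length
  // word; a value that small can never be a valid heap address, so a
  // fully conservative scan cannot be confused by it.)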

#ifdef GC_DEBUG
  // There isn't much to lose by scanning this conservatively.
  // If we didn't, the mark proc would have to understand that
  // it needed to skip the header.
  obj = GC_MALLOC(size);
#else
  if (size < min_heap_addr)
    obj = GC_MALLOC(size);
  else
    obj = GC_generic_malloc (size, array_kind_x);
#endif
  *((_Jv_VTable **) obj) = klass->vtable;
  return obj;
}

/* Allocate space for a new non-Java object, which does not have the usual
   Java object header but may contain pointers to other GC'ed objects. */
void *
_Jv_AllocRawObj (jsize size)
{
  return (void *) GC_MALLOC (size);
}

static void
call_finalizer (GC_PTR obj, GC_PTR client_data)
{
  _Jv_FinalizerFunc *fn = (_Jv_FinalizerFunc *) client_data;
  jobject jobj = (jobject) obj;

  (*fn) (jobj);
}

void
_Jv_RegisterFinalizer (void *object, _Jv_FinalizerFunc *meth)
{
  GC_REGISTER_FINALIZER_NO_ORDER (object, call_finalizer, (GC_PTR) meth,
				  NULL, NULL);
}

void
_Jv_RunFinalizers (void)
{
  GC_invoke_finalizers ();
}

void
_Jv_RunAllFinalizers (void)
{
  GC_finalize_all ();
}

void
_Jv_RunGC (void)
{
  GC_gcollect ();
}

long
_Jv_GCTotalMemory (void)
{
  return GC_get_heap_size ();
}

long
_Jv_GCFreeMemory (void)
{
  return GC_get_free_bytes ();
}

void
_Jv_GCSetInitialHeapSize (size_t size)
{
  size_t current = GC_get_heap_size ();
  if (size > current)
    GC_expand_hp (size - current);
}

void
_Jv_GCSetMaximumHeapSize (size_t size)
{
  GC_set_max_heap_size ((GC_word) size);
}

// From boehm's misc.c
extern "C" void GC_enable();
extern "C" void GC_disable();

void
_Jv_DisableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_disable();
  _Jv_MutexUnlock (&disable_gc_mutex);
}

void
_Jv_EnableGC (void)
{
  _Jv_MutexLock (&disable_gc_mutex);
  GC_enable();
  _Jv_MutexUnlock (&disable_gc_mutex);
}

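// Out-of-memory handler installed as GC_oom_fn in _Jv_InitGC below.  It
// throws OutOfMemoryError and therefore never returns, which is why no
// value is produced despite the void * return type.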
static void * handle_out_of_memory(size_t)
{
  _Jv_ThrowNoMemory();
}

void
_Jv_InitGC (void)
{
  int proc;

  // Ignore pointers that do not point to the start of an object.
  GC_all_interior_pointers = 0;

  // Configure the collector to use the bitmap marking descriptors that we
  // stash in the class vtable.
  GC_init_gcj_malloc (0, (void *) _Jv_MarkObj);

  // Cause an out of memory error to be thrown from the allocators,
  // instead of returning 0.  This is cheaper than checking on allocation.
  GC_oom_fn = handle_out_of_memory;

  GC_java_finalization = 1;

  // We use a different mark procedure for object arrays. This code
  // configures a different object `kind' for object array allocation and
  // marking. FIXME: see above.
  array_free_list = (ptr_t *) GC_generic_malloc_inner ((MAXOBJSZ + 1)
						       * sizeof (ptr_t),
						       PTRFREE);
  memset (array_free_list, 0, (MAXOBJSZ + 1) * sizeof (ptr_t));

  proc = GC_n_mark_procs++;
  GC_mark_procs[proc] = (GC_mark_proc) _Jv_MarkArray;

  array_kind_x = GC_n_kinds++;
  GC_obj_kinds[array_kind_x].ok_freelist = array_free_list;
  GC_obj_kinds[array_kind_x].ok_reclaim_list = 0;
  GC_obj_kinds[array_kind_x].ok_descriptor = GC_MAKE_PROC (proc, 0);
  GC_obj_kinds[array_kind_x].ok_relocate_descr = FALSE;
  GC_obj_kinds[array_kind_x].ok_init = TRUE;
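  // Setting ok_init asks the allocator to clear objects of this kind,
  // matching the assumption noted in _Jv_AllocBytes above for the
  // non-PTRFREE allocation types.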

  _Jv_MutexInit (&disable_gc_mutex);
}

#ifdef JV_HASH_SYNCHRONIZATION
// Allocate an object with a fake vtable pointer, which causes only
// the first field (beyond the fake vtable pointer) to be traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_one_vtable = {
    0, 			// class pointer
    (void *)(2 * sizeof(void *)),
			// descriptor; scan 2 words incl. vtable ptr.
			// Least significant bits must be zero to
			// identify this as a length descriptor
    {0}			// First method
};

void *
_Jv_AllocTraceOne (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_one_vtable);
}

// Ditto, but the first two fields (beyond the fake vtable pointer)
// are traced.
// Eventually this should probably be generalized.

static _Jv_VTable trace_two_vtable =
{
  0, 			// class pointer
  (void *)(3 * sizeof(void *)),
			// descriptor; scan 3 words incl. vtable ptr.
  {0}			// First method
};

void *
_Jv_AllocTraceTwo (jsize size /* includes vtable slot */)
{
  return GC_GCJ_MALLOC (size, &trace_two_vtable);
}

#endif /* JV_HASH_SYNCHRONIZATION */

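// With GC_finalize_on_demand set, the collector never runs finalizers
// itself; it only calls the notifier when finalizers become runnable and
// leaves it to the runtime to call _Jv_RunFinalizers in response.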
void
_Jv_GCInitializeFinalizers (void (*notifier) (void))
{
  GC_finalize_on_demand = 1;
  GC_finalizer_notifier = notifier;
}

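// Register *objp as a weak pointer: the collector clears it once the
// object it currently refers to is found to be otherwise unreachable.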
void
_Jv_GCRegisterDisappearingLink (jobject *objp)
{
  GC_general_register_disappearing_link ((GC_PTR *) objp, (GC_PTR) *objp);
}

jboolean
_Jv_GCCanReclaimSoftReference (jobject)
{
  // For now, always reclaim soft references.  FIXME.
  return true;
}