1 /* 2 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers 3 * Copyright (c) 1991-1995 by Xerox Corporation. All rights reserved. 4 * Copyright (c) 2005 Hewlett-Packard Development Company, L.P. 5 * 6 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED 7 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK. 8 * 9 * Permission is hereby granted to use or copy this program 10 * for any purpose, provided the above notices are retained on all copies. 11 * Permission to modify the code and to distribute modified code is granted, 12 * provided the above notices are retained, and a notice that the code was 13 * modified is included with the above copyright notice. 14 */ 15 16 #ifndef GC_INLINE_H 17 #define GC_INLINE_H 18 19 /* WARNING: */ 20 /* Note that for these routines, it is the clients responsibility to */ 21 /* add the extra byte at the end to deal with one-past-the-end pointers.*/ 22 /* In the standard collector configuration, the collector assumes that */ 23 /* such a byte has been added, and hence does not trace the last word */ 24 /* in the resulting object. */ 25 /* This is not an issue if the collector is compiled with */ 26 /* DONT_ADD_BYTE_AT_END, or if GC_all_interior_pointers is not set. */ 27 /* This interface is most useful for compilers that generate C. */ 28 /* It is also used internally for thread-local allocation. */ 29 /* Manual use is hereby discouraged. */ 30 31 #include "gc.h" 32 #include "gc_tiny_fl.h" 33 34 #if __GNUC__ >= 3 35 # define GC_EXPECT(expr, outcome) __builtin_expect(expr,outcome) 36 /* Equivalent to (expr), but predict that usually (expr)==outcome. */ 37 #else 38 # define GC_EXPECT(expr, outcome) (expr) 39 #endif /* __GNUC__ */ 40 41 #ifndef GC_ASSERT 42 # define GC_ASSERT(expr) /* empty */ 43 #endif 44 45 /* Store a pointer to a list of newly allocated objects of kind k and */ 46 /* size lb in *result. The caller must make sure that *result is */ 47 /* traced even if objects are ptrfree. 
*/
GC_API void GC_CALL GC_generic_malloc_many(size_t /* lb */, int /* k */,
                                           void ** /* result */);

/* The ultimately general inline allocation macro.  Allocate an object */
/* of size granules, putting the resulting pointer in result.  Tiny_fl */
/* is a "tiny" free list array, which will be used first, if the size  */
/* is appropriate.  If granules is too large, we allocate with         */
/* default_expr instead.  If we need to refill the free list, we use   */
/* GC_generic_malloc_many with the indicated kind.                     */
/* Tiny_fl should be an array of GC_TINY_FREELISTS void * pointers.    */
/* If num_direct is nonzero, and the individual free list pointers     */
/* are initialized to (void *)1, then we allocate num_direct granules  */
/* directly via default_expr before putting multiple objects into the  */
/* tiny_fl entry.  If num_direct is zero, then the free lists may also */
/* be initialized to (void *)0.                                        */
/* Note that we use the zeroth free list to hold objects 1 granule in  */
/* size that are used to satisfy size 0 allocation requests.           */
/* We rely on much of this hopefully getting optimized away in the     */
/* num_direct = 0 case.                                                */
/* Particularly if granules is constant, this should generate a small  */
/* amount of code.
*/ 69 # define GC_FAST_MALLOC_GRANS(result,granules,tiny_fl,num_direct,\ 70 kind,default_expr,init) \ 71 { \ 72 if (GC_EXPECT((granules) >= GC_TINY_FREELISTS,0)) { \ 73 result = (default_expr); \ 74 } else { \ 75 void **my_fl = (tiny_fl) + (granules); \ 76 void *my_entry=*my_fl; \ 77 void *next; \ 78 \ 79 while (GC_EXPECT((GC_word)my_entry \ 80 <= (num_direct) + GC_TINY_FREELISTS + 1, 0)) { \ 81 /* Entry contains counter or NULL */ \ 82 if ((GC_word)my_entry - 1 < (num_direct)) { \ 83 /* Small counter value, not NULL */ \ 84 *my_fl = (char *)my_entry + (granules) + 1; \ 85 result = (default_expr); \ 86 goto out; \ 87 } else { \ 88 /* Large counter or NULL */ \ 89 GC_generic_malloc_many(((granules) == 0? GC_GRANULE_BYTES : \ 90 GC_RAW_BYTES_FROM_INDEX(granules)), \ 91 kind, my_fl); \ 92 my_entry = *my_fl; \ 93 if (my_entry == 0) { \ 94 result = (*GC_get_oom_fn())((granules)*GC_GRANULE_BYTES); \ 95 goto out; \ 96 } \ 97 } \ 98 } \ 99 next = *(void **)(my_entry); \ 100 result = (void *)my_entry; \ 101 *my_fl = next; \ 102 init; \ 103 PREFETCH_FOR_WRITE(next); \ 104 GC_ASSERT(GC_size(result) >= (granules)*GC_GRANULE_BYTES); \ 105 GC_ASSERT((kind) == PTRFREE || ((GC_word *)result)[1] == 0); \ 106 out: ; \ 107 } \ 108 } 109 110 # define GC_WORDS_TO_WHOLE_GRANULES(n) \ 111 GC_WORDS_TO_GRANULES((n) + GC_GRANULE_WORDS - 1) 112 113 /* Allocate n words (NOT BYTES). X is made to point to the result. */ 114 /* This should really only be used if GC_all_interior_pointers is */ 115 /* not set, or DONT_ADD_BYTE_AT_END is set. See above. */ 116 /* The semantics changed in version 7.0; we no longer lock, and */ 117 /* the caller is responsible for supplying a cleared tiny_fl */ 118 /* free list array. For single-threaded applications, this may be */ 119 /* a global array. 
*/ 120 # define GC_MALLOC_WORDS(result,n,tiny_fl) \ 121 { \ 122 size_t grans = GC_WORDS_TO_WHOLE_GRANULES(n); \ 123 GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \ 124 NORMAL, GC_malloc(grans*GC_GRANULE_BYTES), \ 125 *(void **)(result) = 0); \ 126 } 127 128 # define GC_MALLOC_ATOMIC_WORDS(result,n,tiny_fl) \ 129 { \ 130 size_t grans = GC_WORDS_TO_WHOLE_GRANULES(n); \ 131 GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \ 132 PTRFREE, GC_malloc_atomic(grans*GC_GRANULE_BYTES), \ 133 (void)0 /* no initialization */); \ 134 } 135 136 /* And once more for two word initialized objects: */ 137 # define GC_CONS(result, first, second, tiny_fl) \ 138 { \ 139 size_t grans = GC_WORDS_TO_WHOLE_GRANULES(2); \ 140 GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \ 141 NORMAL, GC_malloc(grans*GC_GRANULE_BYTES), \ 142 *(void **)(result) = (void *)(first)); \ 143 ((void **)(result))[1] = (void *)(second); \ 144 } 145 146 #endif /* !GC_INLINE_H */ 147