1 /* File: "mem.c" */
2
3 /* Copyright (c) 1994-2018 by Marc Feeley, All Rights Reserved. */
4
5 #define ___INCLUDED_FROM_MEM
6 #define ___VERSION 409003
7 #include "gambit.h"
8
9 #include "os_setup.h"
10 #include "os_base.h"
11 #include "os_time.h"
12 #include "setup.h"
13 #include "mem.h"
14 #include "c_intf.h"
15 #include "actlog.h"
16
17 /* The following includes are needed for debugging. */
18
19 #include <stdlib.h>
20 #include <string.h>
21
22
23 /*---------------------------------------------------------------------------*/
24
25 #ifdef ___DEBUG_GARBAGE_COLLECT
26
27 /*
28 * Defining the symbol ENABLE_CONSISTENCY_CHECKS will enable the GC to
29 * perform checks that detect when the heap is in an inconsistent
30 * state. This is useful to detect bugs in the GC and the rest of the
31 * system. To perform the consistency checks, the verbosity level in
32 * ___GSTATE->setup_params.debug_settings must be at least 1. The checks are
33 * very extensive and consequently are expensive. They should only be
34 * used for debugging.
35 */
36
37 #define ENABLE_CONSISTENCY_CHECKS
38
39
40 /*
41 * Defining the symbol SHOW_FRAMES will cause the GC to print out a
42 * trace of the continuation frames that are processed.
43 */
44
45 #undef SHOW_FRAMES
46
47
48 #define ENABLE_GC_TRACE_PHASES
49 #define ENABLE_GC_ACTLOG_PHASES
50 #define ENABLE_GC_ACTLOG_SCAN_COMPLETE_HEAP_CHUNK
51
52 #endif
53
54
55 /*---------------------------------------------------------------------------*/
56
57 /*
58 * Object representation.
59 *
60 * Memory allocated Scheme objects can be allocated using one of three
61 * allocation strategies:
62 *
63 * Permanently allocated:
64 * These objects, called 'permanent objects' for short, are never
65 * moved or reclaimed, and all pointers to memory allocated
66 * objects they contain must point to permanent objects. As a
67 * consequence, the GC does not have to scan permanent objects.
68 * Permanent objects can be allocated on the C heap, but they are
69 * typically allocated in C global variables and structures that
70 * are set up when the program starts up or when a module is
71 * dynamically loaded.
72 *
73 * Still dynamically allocated:
74 * These objects, called 'still objects' for short, are allocated
75 * on the C heap. Still objects are never moved but they can be
76 * reclaimed by the GC. A mark-and-sweep GC is used to
77 * garbage-collect still objects.
78 *
79 * Movable dynamically allocated:
80 * These objects, called 'movable objects' for short, are allocated
81 * in an area of memory that is managed by a compacting GC. The GC
82 * can move and reclaim movable objects.
83 *
84 * Scheme objects are encoded using integers of type ___WORD. A
85 * ___WORD either encodes an immediate value or encodes a pointer
86 * when the object is memory allocated. The two lower bits of a
87 * ___WORD contain a primary type tag for the object and the other
88 * bits contain the immediate value or the pointer. Because all
89 * memory allocated objects are aligned on ___WORD boundaries (and a
90 * ___WORD is either 4 or 8 bytes), the two lower bits of pointers
91 * are zero and can be used to store the tag without reducing the
92 * address space. The four tags are:
93 *
94 * immediate:
95 * ___tFIXNUM object is a small integer (fixnum)
96 * ___tSPECIAL object is a boolean, character, or other immediate
97 *
98 * memory allocated:
99 * if ___USE_SAME_TAG_FOR_PAIRS_AND_SUBTYPED is defined
100 * ___tMEM1 = ___tSUBTYPED = ___tPAIR subtyped object, possibly a pair
101 * ___tMEM2 contained object, or a pair
102 * otherwise
103 * ___tMEM1 = ___tSUBTYPED subtyped object, but not a pair
104 * ___tMEM2 = ___tPAIR a pair
105 *
106 * A special type of object exists to support object finalization:
107 * 'will' objects. Wills contain a weak reference to an object, the
108 * testator and a strong reference to a procedure, the action
109 * procedure. A will becomes executable when its testator object is
110 * not strongly reachable (i.e. the testator object is either
111 * unreachable or only reachable using paths from the roots that
112 * traverse at least one weak reference). When the GC detects that a
113 * will has become executable it is placed on a list of executable
114 * wills. Following the GC, this list is traversed to invoke the
115 * action procedures.
116 *
117 * All memory allocated objects, including pairs, are composed of at
118 * least a head and a body. The head is a single ___WORD that
119 * contains 3 "head" tag bits (the 3 lower bits), a subtype tag (the
120 * next 5 bits), and the length of the object in bytes (the remaining
121 * bits). The head is followed by the body of the object, which
122 * contains the rest of the information associated with the object.
123 * Depending on the subtype, the body can contain raw binary data
124 * (such as when the object is a string) and Scheme objects (such as
125 * when the object is a vector). Memory allocated objects have the
126 * following layout:
127 *
128 * _head_ _____body______
129 * / \ / \
130 * +--------+--------+--------+
131 * |llllssst| | |
132 * +--------+--------+--------+
133 * ^ ^ ^
134 * | | |
135 * length | |
136 * subtype head tag
137 *
138 * Of the 8 possible head tags, only 5 are currently used:
139 *
140 * ___PERM (P) the object is a permanent object
141 * ___STILL (S) the object is a still object
142 * ___MOVABLE0 (M) the object is a movable object in generation 0
143 * ___FORW (F) the object has been moved by the GC (counts as 2 tags)
144 *
145 * Permanent objects have the following layout:
146 *
147 * _head_ _____body______
148 * / \ / \
149 * +--------+--------+--------+
150 * | P| | |
151 * +--------+--------+--------+
152 *
153 * Still objects have the following layout:
154 *
155 * _link_ _ref__ length _mark_ _head_ _____body______
156 * / \ / count\ / \ / \ / \ / \
157 * +--------+--------+--------+--------+--------+--------+--------+
158 * | | | | | S| | |
159 * +--------+--------+--------+--------+--------+--------+--------+
160 *
161 * All still objects are linked in a list using the 'link' field. The
162 * 'refcount' field contains a reference count, which counts the
163 * number of pointers to this object that are hidden from the GC
164 * (typically these hidden pointers are in C data structures). When
165 * 'refcount' is zero, the object will survive a GC only if it is
166 * pointed to by a GC root or a live Scheme object. The 'length'
167 * field contains the length of the object and is only used to
168 * maintain statistics on the space allocated. The 'mark' field is
169 * used by the GC to indicate that the object has been marked (at the
170 * start of a GC it is set to -1). The 'mark' field links all objects
171 * that have been marked but have not yet been scanned. It contains a
172 * pointer to the next still object that needs to be scanned.
173 *
174 * Movable objects have the following layout:
175 *
176 * _head_ _____body______
177 * / \ / \
178 * +--------+--------+--------+
179 * | M| | |
180 * +--------+--------+--------+
181 *
182 * When a movable object is moved by the GC, the head is replaced
183 * with a pointer to the copy, tagged with ___FORW.
184 *
185 * Layout of body.
186 *
187 * _head_ __________body__________
188 * / \ / \
189 * +--------+--------+--------+--------+
190 * | | field_0| field_1| etc. |
191 * +--------+--------+--------+--------+
192 *
193 * Some types of objects have bodies that only contain pointers to
194 * other Scheme objects. For example, pairs have two fields (car and
195 * cdr) and vectors have one field per element. Other object types
196 * have bodies that only contain raw binary data (such as strings and
197 * bignums). The remaining object types have bodies that contain both
198 * pointers to Scheme objects and raw binary data. Their layout is
199 * summarized below.
200 *
201 * Symbols:
202 * subtype = ___sSYMBOL
203 * field_0 = name (a string or a fixnum <n> for a symbol named "g<n>")
204 * field_1 = hash code (non-negative fixnum)
205 * field_2 = link to next symbol in symbol table (#f for uninterned)
206 * field_3 = C pointer to global variable (0 if none exists)
207 *
208 * Note: interned symbols must be permanently allocated;
209 * uninterned symbols can be permanent, still or movable
210 *
211 * Keywords:
212 * subtype = ___sKEYWORD
213 * field_0 = name (a string or a fixnum <n> for a keyword named "g<n>")
214 * field_1 = hash code (non-negative fixnum)
215 * field_2 = link to next symbol in keyword table (#f for uninterned)
216 *
217 * Procedures:
218 *
219 * nonclosures (toplevel procedures)
220 * subtype = ___sPROCEDURE (length contains parameter descriptor)
221 * field_0 = C pointer to this object
222 * field_1 = C pointer to label (only when using gcc)
223 * field_2 = C pointer to host C procedure
224 *
225 * closures:
226 * subtype = ___sPROCEDURE
227 * field_0 = C pointer to entry procedure
228 * field_1 = free variable 1
229 * field_2 = free variable 2
230 * ...
231 *
232 * Note: the entry procedure must be a nonclosure procedure
233 *
234 * Return points:
235 * subtype = ___sRETURN
236 * field_0 = return frame descriptor
237 * field_1 = C pointer to label (only when using gcc)
238 * field_2 = C pointer to host C procedure
239 *
240 * Wills:
241 * subtype = ___sWEAK
242 * field_0 = C pointer to field_0 of next will in list
243 * field_1 = testator object
244 * field_2 = action procedure
245 *
246 * Note: wills must be movable
247 *
248 * GC hash tables:
249 * subtype = ___sWEAK
250 * field_0 = C pointer to field_0 of next GC hash table in list
251 * field_1 = flags
252 * field_2 = count*2 (twice number of active key-value entries)
253 * field_3 = used*2 (twice number of total entries including deleted)
254 * field_4 = key of entry #0
255 * field_5 = value of entry #0
256 * ...
257 *
258 * Continuations:
259 * subtype = ___sCONTINUATION
260 * field_0 = first frame (C pointer to stack at first and then Scheme obj)
261 * field_1 = dynamic environment (#f when continuation is delimited)
262 *
263 * Frame:
264 * subtype = ___sFRAME
265 * field_0 = return address
266 * field_1 = frame slot 1
267 * field_2 = frame slot 2
268 * ...
269 */
270
271
272 /*---------------------------------------------------------------------------*/
273
/*
 * Shorthand accessors for the memory management state.  Fields with a
 * trailing "_" live in the "mem" substructure of either the processor
 * state (per-processor fields, via ___PSTATE_MEM) or the VM state
 * (fields shared by all processors of a VM, via ___VMSTATE_MEM).
 * Both macro families implicitly reference the local ___ps variable.
 */

#define ___PSTATE_MEM(var) ___ps->mem.var
#define ___VMSTATE_MEM(var) ___VMSTATE_FROM_PSTATE(___ps)->mem.var

/* Per-processor allocation state (stack and heap zones of msections). */

#define tospace_offset ___PSTATE_MEM(tospace_offset_)
#define msection_free_list ___PSTATE_MEM(msection_free_list_)
#define stack_msection ___PSTATE_MEM(stack_msection_)
#define alloc_stack_start ___PSTATE_MEM(alloc_stack_start_)
#define alloc_stack_ptr ___PSTATE_MEM(alloc_stack_ptr_)
#define alloc_stack_limit ___PSTATE_MEM(alloc_stack_limit_)
#define heap_msection ___PSTATE_MEM(heap_msection_)
#define alloc_heap_start ___PSTATE_MEM(alloc_heap_start_)
#define alloc_heap_ptr ___PSTATE_MEM(alloc_heap_ptr_)
#define alloc_heap_limit ___PSTATE_MEM(alloc_heap_limit_)
#define alloc_heap_chunk_start ___PSTATE_MEM(alloc_heap_chunk_start_)
#define alloc_heap_chunk_limit ___PSTATE_MEM(alloc_heap_chunk_limit_)

#ifndef ___SINGLE_THREADED_VMS
#define heap_chunks_to_scan_lock ___PSTATE_MEM(heap_chunks_to_scan_lock_)
#endif

/* Per-processor GC working state (scan queues, still-object lists,
   weak reference and will processing, allocation statistics). */

#define heap_chunks_to_scan ___PSTATE_MEM(heap_chunks_to_scan_)
#define heap_chunks_to_scan_head ___PSTATE_MEM(heap_chunks_to_scan_head_)
#define heap_chunks_to_scan_tail ___PSTATE_MEM(heap_chunks_to_scan_tail_)
#define scan_ptr ___PSTATE_MEM(scan_ptr_)
#define still_objs_to_scan ___PSTATE_MEM(still_objs_to_scan_)
#define still_objs ___PSTATE_MEM(still_objs_)
#define words_still_objs ___PSTATE_MEM(words_still_objs_)
#define words_still_objs_deferred ___PSTATE_MEM(words_still_objs_deferred_)
#define bytes_allocated_minus_occupied ___PSTATE_MEM(bytes_allocated_minus_occupied_)
#define rc_head ___PSTATE_MEM(rc_head_)
#define traverse_weak_refs ___PSTATE_MEM(traverse_weak_refs_)
#define nonexecutable_wills ___PSTATE_MEM(nonexecutable_wills_)
#define executable_wills ___PSTATE_MEM(executable_wills_)
#define reached_gc_hash_tables ___PSTATE_MEM(reached_gc_hash_tables_)
#define words_prev_msections ___PSTATE_MEM(words_prev_msections_)
#define stack_fudge_used ___PSTATE_MEM(stack_fudge_used_)
#define heap_fudge_used ___PSTATE_MEM(heap_fudge_used_)

#ifdef ___DEBUG_GARBAGE_COLLECT
/* Extra per-processor bookkeeping used only by the debugging GC. */
#define reference_location ___PSTATE_MEM(reference_location_)
#define container_body ___PSTATE_MEM(container_body_)
#define mark_array_call_line ___PSTATE_MEM(mark_array_call_line_)
#endif

/* VM-wide heap configuration and msection table. */

#define heap_size ___VMSTATE_MEM(heap_size_)
#define normal_overflow_reserve ___VMSTATE_MEM(normal_overflow_reserve_)
#define overflow_reserve ___VMSTATE_MEM(overflow_reserve_)
#define occupied_words_movable ___VMSTATE_MEM(occupied_words_movable_)
#define occupied_words_still ___VMSTATE_MEM(occupied_words_still_)
#define the_msections ___VMSTATE_MEM(the_msections_)
#define alloc_msection ___VMSTATE_MEM(alloc_msection_)
#define nb_msections_assigned ___VMSTATE_MEM(nb_msections_assigned_)
#define target_processor_count ___VMSTATE_MEM(target_processor_count_)

#ifndef ___SINGLE_THREADED_VMS
/* VM-wide synchronization for allocation and parallel scanning. */
#define misc_mem_lock ___VMSTATE_MEM(misc_mem_lock_)
#define alloc_mem_lock ___VMSTATE_MEM(alloc_mem_lock_)
#define scan_termination_mutex ___VMSTATE_MEM(scan_termination_mutex_)
#define scan_termination_condvar ___VMSTATE_MEM(scan_termination_condvar_)
#define scan_workers_count ___VMSTATE_MEM(scan_workers_count_)
#endif

/* VM-wide GC statistics (cumulative and for the latest collection). */

#define nb_gcs ___VMSTATE_MEM(nb_gcs_)
#define gc_user_time ___VMSTATE_MEM(gc_user_time_)
#define gc_sys_time ___VMSTATE_MEM(gc_sys_time_)
#define gc_real_time ___VMSTATE_MEM(gc_real_time_)

#define latest_gc_user_time ___VMSTATE_MEM(latest_gc_user_time_)
#define latest_gc_sys_time ___VMSTATE_MEM(latest_gc_sys_time_)
#define latest_gc_real_time ___VMSTATE_MEM(latest_gc_real_time_)
#define latest_gc_heap_size ___VMSTATE_MEM(latest_gc_heap_size_)
#define latest_gc_alloc ___VMSTATE_MEM(latest_gc_alloc_)
#define latest_gc_live ___VMSTATE_MEM(latest_gc_live_)
#define latest_gc_movable ___VMSTATE_MEM(latest_gc_movable_)
#define latest_gc_still ___VMSTATE_MEM(latest_gc_still_)
349
/* words occupied by this processor by movable objects */

/*
 * The factor of 2 accounts for the tospace half of each msection
 * (only the fromspace halves are counted by the terms below) —
 * NOTE(review): presumed from the fromspace/tospace split described
 * above; confirm against the GC's space accounting.
 */

#define words_movable_objs(ps) \
(2*(ps->mem.words_prev_msections_ \
    + (ps->mem.alloc_heap_ptr_ - ps->mem.alloc_heap_start_) \
    + (ps->mem.alloc_stack_start_ - ps->mem.alloc_stack_ptr_)))

/* bytes occupied by this processor */

/*
 * Fixed: the movable-words term now uses the macro parameter 'ps'
 * instead of hardcoding '___ps'.  The old expansion silently computed
 * the movable words of the *local* processor state regardless of the
 * argument; expansion is unchanged at call sites that pass ___ps.
 */

#define bytes_occupied(ps) \
(___CAST(___F64,ps->mem.words_still_objs_ \
         + ps->mem.words_still_objs_deferred_ \
         + words_movable_objs(ps)) * ___WS)
363
364 /*---------------------------------------------------------------------------*/
365
#ifdef ___SINGLE_THREADED_VMS

/* Single-threaded VM: the lock operations compile away to nothing. */

#define ALLOC_MEM_LOCK()
#define ALLOC_MEM_UNLOCK()
#define MISC_MEM_LOCK()
#define MISC_MEM_UNLOCK()

#else

/* Multiprocessor VM: serialize allocation and miscellaneous memory
   state updates with VM-wide spinlocks. */

#define ALLOC_MEM_LOCK() ___SPINLOCK_LOCK(alloc_mem_lock)
#define ALLOC_MEM_UNLOCK() ___SPINLOCK_UNLOCK(alloc_mem_lock)
#define MISC_MEM_LOCK() ___SPINLOCK_LOCK(misc_mem_lock)
#define MISC_MEM_UNLOCK() ___SPINLOCK_UNLOCK(misc_mem_lock)

#endif
381
382
383 /*---------------------------------------------------------------------------*/
384
385 /*
386 * Memory for movable objects (including continuation frames) are
387 * allocated in fixed size msections. Each processor starts off with
388 * an msection for its stack (for allocating continuation frames) and
389 * an msection for allocating small objects. Stack allocations are
390 * done by decrementing the stack pointer and small objects are
391 * allocated by incrementing the heap pointer. Each msection is
392 * divided in two zones of equal size, used as a fromspace and tospace
393 * by the garbage collector. When a processor fills a fromspace, a
394 * new msection is obtained from a list of free msections to continue
395 * allocating. The diagram below shows a possible layout of msections
396 * for a situation with 3 processors and 12 msections:
397 *
398 * sp hp
399 * v v
400 * +---+---+-------+ +-----+-+-------+ +----+--+-------+ +--+----+-------+
401 * 0 | |###| | |#####| | | |####| | | |##| | |
402 * +---+---+-------+ +-----+-+-------+ +----+--+-------+ +--+----+-------+
403 *
404 * sp hp
405 * v v
406 * +-+-----+-------+ +--+----+-------+
407 * 1 | |#####| | |##| | |
408 * +-+-----+-------+ +--+----+-------+
409 *
410 * sp hp
411 * v v
412 * +----+--+-------+ +----+--+-------+ +-----+-+-------+
413 * 2 | |##| | |####| | | |#####| | |
414 * +----+--+-------+ +----+--+-------+ +-----+-+-------+
415 *
416 *
417 * free list of msections (msections yet to be assigned to processors):
418 * +-------+-------+ +-------+-------+ +-------+-------+
419 * | | | | | | | | |
420 * +-------+-------+ +-------+-------+ +-------+-------+
421 *
422 *
423 * In this example, 4 msections are assigned to processor 0. The first one
424 * is being used as a stack to allocate continuation frames. The other
425 * msections of that processor are being used for allocating small objects
426 * (two msections are full and the last is not yet full). There are
427 * three msections in the free list of msections.
428 *
429 * The heap size is defined as the space occupied by still objects and
430 * msections at the end of the latest garbage collection. To determine
431 * when to trigger a garbage collection, the memory manager needs to know
432 * approximately (but conservatively) how much of the heap is free. The
433 * free heap space is defined as the heap size minus the space of the
434 * msections assigned to processors minus the space occupied by still
435 * objects minus the overflow reserve. When an allocation request would
436 * cause the free heap space to become negative, a garbage collection is
437 * performed first to free space and possibly resize the heap.
438 *
439 * A given msection can be used for allocating small objects or for
440 * allocating continuation frames or for both. The position of the
441 * various pointers is as follows (only the fromspace is shown).
442 *
443 * Msection only used for allocating movable objects:
444 *
445 * <-------------------------- ___MSECTION_SIZE/2 ------------------------->
446 * +----+----+---------------------------------------------------------------+
447 * |obj1|obj2| |<-___MSECTION_FUDGE->|
448 * +----+----+---------------------------------------------------------------+
449 * ^ ^ ^ ^
450 * | | | |
451 * | alloc_heap_ptr ___ps->heap_limit alloc_heap_limit
452 * alloc_heap_start
453 *
454 * Msection only used for allocating continuation frames:
455 *
456 * <-------------------------- ___MSECTION_SIZE/2 ------------------------->
457 * +-----------------------------------------------------------+------+------+
458 * |<-___MSECTION_FUDGE->| |frame2|frame1|
459 * +-----------------------------------------------------------+------+------+
460 * ^ ^ ^ ^
461 * | | | |
462 * alloc_stack_limit ___ps->stack_limit alloc_stack_ptr |
463 * alloc_stack_start
464 *
465 * Msection used for allocating movable objects and allocating
466 * continuation frames:
467 *
468 * <-------------------------- ___MSECTION_SIZE/2 ------------------------->
469 * +----+-------------------------------------------------------------+------+
470 * |objs| |<-___MSECTION_FUDGE->|<-___MSECTION_FUDGE->| |frames|
471 * +----+-------------------------------------------------------------+------+
472 * ^ ^ ^ ^ ^ ^ ^
473 * | | | | | | |
474 * | | | alloc_heap_limit alloc_stack_limit | | |
475 * | | ___ps->heap_limit ___ps->stack_limit | |
476 * | alloc_heap_ptr alloc_stack_ptr |
477 * alloc_heap_start alloc_stack_start
478 */
479
/* Total heap space in words: all msections plus still objects. */

#define compute_heap_space() \
(___CAST(___SIZE_TS,the_msections->nb_sections) * ___MSECTION_SIZE + occupied_words_still)

/* Heap space in words already assigned to processors plus still objects. */

#define compute_assigned_heap_space() \
(___CAST(___SIZE_TS,nb_msections_assigned) * ___MSECTION_SIZE + occupied_words_still)

/* Conservative estimate of free heap space; when an allocation would
   make this negative a garbage collection is triggered (see the
   explanation above). */

#define compute_free_heap_space() \
(heap_size - compute_assigned_heap_space() - overflow_reserve)
488
489 /*---------------------------------------------------------------------------*/
490
/* Constants related to representation of permanent and still objects: */

/* Word offsets (from the start of the allocated block) of the handle,
   body and header of a permanent object.  When handles are not used
   the handle coincides with the body. */

#ifdef ___USE_HANDLES
#define ___PERM_HANDLE 0
#define ___PERM_BODY 2
#else
#define ___PERM_HANDLE ___PERM_BODY
#define ___PERM_BODY 1
#endif
#define ___PERM_HEADER (___PERM_BODY-1)

/* Word offsets of the fields of a still object (see the layout
   diagram above: link, refcount, length, mark, then head and body). */

#define ___STILL_LINK 0
#define ___STILL_REFCOUNT 1
#define ___STILL_LENGTH 2
#define ___STILL_MARK 3
#ifdef ___USE_HANDLES
#define ___STILL_HANDLE 4
#define ___STILL_BODY 6
#else
#define ___STILL_HANDLE ___STILL_BODY
#define ___STILL_BODY (5+1)/************/
#endif
#define ___STILL_HEADER (___STILL_BODY-1)
514
515
516 /*---------------------------------------------------------------------------*/
517
518 /* Allocation and reclamation of aligned blocks of memory. */
519
520
521 /*
522 * 'alloc_mem_aligned_aux (words, multiplier, modulus, heap)'
523 * allocates an aligned block of memory (using '___alloc_mem' when
524 * heap is false, and '___alloc_mem_heap' when heap is true). 'words'
525 * is the size of the block in words and 'multiplier' and 'modulus'
526 * specify its alignment in words. 'multiplier' must be a power of
527 * two and 0<=modulus<multiplier. The pointer returned corresponds to
528 * an address that is equal to (i*multiplier+modulus)*sizeof (___WORD)
529 * for some 'i'.
530 */
531
___HIDDEN void *alloc_mem_aligned_aux
___P((___SIZE_TS words,
      unsigned int multiplier,
      unsigned int modulus,
      ___BOOL heap),
     (words,
      multiplier,
      modulus,
      heap)
___SIZE_TS words;
unsigned int multiplier;
unsigned int modulus;
___BOOL heap;)
{
  void *container; /* pointer to block returned by ___alloc_mem{_heap} */
  unsigned int extra; /* space for alignment to multiplier */

  /* Make sure alignment is sufficient for pointers */

  if (multiplier < sizeof (void*) / ___WS)
    multiplier = sizeof (void*) / ___WS;

  /* How many extra bytes are needed for padding */

  /* worst-case padding to reach a multiple of multiplier words... */
  extra = (multiplier * ___WS) - 1;
  /* ...plus room for the container back-pointer when the modulus
     offset would not already leave space for it below 'ptr' */
  if (modulus < sizeof (void*) / ___WS)
    extra += sizeof (void*);

  if (heap)
    container = ___ALLOC_MEM_HEAP(extra + (words+modulus) * ___WS);
  else
    container = ___ALLOC_MEM(extra + (words+modulus) * ___WS);

  if (container == 0)
    return 0;
  else
    {
      /* round (container+extra) down to a multiple of multiplier words,
         then add the modulus offset; the result satisfies
         ptr == (i*multiplier+modulus)*___WS for some i */
      void *ptr = ___CAST(void*,
                          (((___CAST(___WORD,container) + extra) &
                            -___CAST(___WORD,multiplier * ___WS)) +
                           modulus * ___WS));
      /* store the container pointer at the pointer-aligned address just
         below ptr, so free_mem_aligned{_heap} can recover it */
      void **cptr = ___CAST(void**,
                            (___CAST(___WORD,ptr) - ___CAST(___WORD,sizeof (void*))) &
                            -___CAST(___WORD,sizeof (void*)));

      *cptr = container;
      return ptr;
    }
}
581
582
583 ___HIDDEN void *alloc_mem_aligned
584 ___P((___SIZE_TS words,
585 unsigned int multiplier,
586 unsigned int modulus),
587 (words,
588 multiplier,
589 modulus)
590 ___SIZE_TS words;
591 unsigned int multiplier;
592 unsigned int modulus;)
593 {
594 return alloc_mem_aligned_aux (words, multiplier, modulus, 0);
595 }
596
597
598 ___HIDDEN void *alloc_mem_aligned_heap
599 ___P((___SIZE_TS words,
600 unsigned int multiplier,
601 unsigned int modulus),
602 (words,
603 multiplier,
604 modulus)
605 ___SIZE_TS words;
606 unsigned int multiplier;
607 unsigned int modulus;)
608 {
609 return alloc_mem_aligned_aux (words, multiplier, modulus, 1);
610 }
611
612
613 /*
614 * 'free_mem_aligned (ptr)' reclaims the aligned block of memory 'ptr'
615 * that was allocated using 'alloc_mem_aligned'.
616 */
617
618 ___HIDDEN void free_mem_aligned
619 ___P((void *ptr),
620 (ptr)
621 void *ptr;)
622 {
623 void **cptr = ___CAST(void**,
624 (___CAST(___WORD,ptr) - ___CAST(___WORD,sizeof (void*))) &
625 -___CAST(___WORD,sizeof (void*)));
626 ___FREE_MEM(*cptr);
627 }
628
629
630 /*
631 * 'free_mem_aligned_heap (ptr)' reclaims the aligned block of memory
632 * 'ptr' that was allocated using 'alloc_mem_aligned_heap'.
633 */
634
635 ___HIDDEN void free_mem_aligned_heap
636 ___P((void *ptr),
637 (ptr)
638 void *ptr;)
639 {
640 void **cptr = ___CAST(void**,
641 (___CAST(___WORD,ptr) - ___CAST(___WORD,sizeof (void*))) &
642 -___CAST(___WORD,sizeof (void*)));
643 ___FREE_MEM_HEAP(*cptr);
644 }
645
646
647 /*---------------------------------------------------------------------------*/
648
649 /* Allocation of reference counted blocks of memory. */
650
651 ___HIDDEN void setup_rc
652 ___P((___processor_state ___ps),
653 (___ps)
654 ___processor_state ___ps;)
655 {
656 rc_head.prev = &rc_head;
657 rc_head.next = &rc_head;
658 rc_head.refcount = 1;
659 rc_head.data = ___FAL;
660 }
661
662 ___HIDDEN void cleanup_rc
663 ___P((___processor_state ___ps),
664 (___ps)
665 ___processor_state ___ps;)
666 {
667 ___rc_header *h = rc_head.next;
668
669 rc_head.prev = &rc_head;
670 rc_head.next = &rc_head;
671
672 while (h != &rc_head)
673 {
674 ___rc_header *next = h->next;
675 ___FREE_MEM(h);
676 h = next;
677 }
678 }
679
680
/*
 * '___alloc_rc (bytes)' allocates a reference counted block of
 * 'bytes' bytes and returns a pointer to its user area (just past the
 * ___rc_header).  The block starts with refcount 1 (owned by the
 * caller) and data ___FAL, and is linked onto this processor's list
 * of reference counted blocks.  Returns 0 on allocation failure.
 */

___EXP_FUNC(void*,___alloc_rc)
___P((___PSD
      ___SIZE_T bytes),
     (___PSV
      bytes)
___PSDKR
___SIZE_T bytes;)
{
  ___PSGET
  ___rc_header *h = ___CAST(___rc_header*,
                            ___ALLOC_MEM(bytes + sizeof (___rc_header)));

  if (h == 0)
    return 0; /* out of memory */

  {
    /* link the new block at the tail of the circular list */

    ___rc_header *sentinel = &rc_head;
    ___rc_header *last = sentinel->prev;

    h->next = sentinel;
    h->prev = last;
    last->next = h;
    sentinel->prev = h;
  }

  h->refcount = 1; /* initial reference owned by the caller */
  h->data = ___FAL;

  return ___CAST(void*,h+1); /* user memory starts after the header */
}
711
712
/*
 * '___release_rc (ptr)' drops one reference to the reference counted
 * block whose user area is 'ptr'.  When the count reaches zero the
 * block is unlinked from its list and freed.  A null 'ptr' is
 * ignored.
 */

___EXP_FUNC(void,___release_rc)
___P((void *ptr),
(ptr)
void *ptr;)
{
  ___rc_header *h;

  if (ptr == 0)
    return;

  h = ___CAST(___rc_header*,ptr) - 1; /* recover the header */

  if (--h->refcount != 0)
    return; /* other references remain */

  /* unlink from the circular doubly-linked list and reclaim */

  h->next->prev = h->prev;
  h->prev->next = h->next;

  ___FREE_MEM(h);
}
734
735
/*
 * '___addref_rc (ptr)' adds one reference to the reference counted
 * block whose user area is 'ptr'.  A null 'ptr' is ignored.
 */

___EXP_FUNC(void,___addref_rc)
___P((void *ptr),
(ptr)
void *ptr;)
{
  if (ptr == 0)
    return;

  (___CAST(___rc_header*,ptr) - 1)->refcount++;
}
747
748
___EXP_FUNC(___SCMOBJ,___data_rc)749 ___EXP_FUNC(___SCMOBJ,___data_rc)
750 ___P((void *ptr),
751 (ptr)
752 void *ptr;)
753 {
754 ___rc_header *h = ___CAST(___rc_header*,ptr) - 1;
755 return h->data;
756 }
757
758
___EXP_FUNC(void,___set_data_rc)759 ___EXP_FUNC(void,___set_data_rc)
760 ___P((void *ptr,
761 ___SCMOBJ val),
762 (ptr,
763 val)
764 void *ptr;
765 ___SCMOBJ val;)
766 {
767 ___rc_header *h = ___CAST(___rc_header*,ptr) - 1;
768 h->data = val;
769 }
770
771
772 /*---------------------------------------------------------------------------*/
773
774 /* Allocation of movable objects. */
775
776 /*
777 * 'find_msection (ms, ptr)' finds the position in the 'ms->sections'
778 * array of the msection that contains the pointer 'ptr'. More
779 * precisely, if ___ALLOC_MEM_UP is defined, it returns the integer
780 * 'i' (-1<=i<=n-1) such that 'ptr' is between the start of section i
781 * and section i+1. -1 is returned if 'ptr' is lower than the lowest
782 * section and 'n' is returned if 'ptr' is not lower than the highest
783 * section. If ___ALLOC_MEM_UP is not defined, it returns the integer
784 * 'i' (0<=i<=n) such that 'ptr' is between the start of section i and
785 * section i-1. n is returned if 'ptr' is lower than the lowest
786 * section and 0 is returned if 'ptr' is not lower than the highest
787 * section.
788 */
789
/* Binary search over ms->sections (sorted by address) for the section
   containing 'ptr'; see the comment above for the exact boundary
   conventions of the return value under each ___ALLOC_MEM_UP setting. */

___HIDDEN int find_msection
___P((___msections *ms,
      void *ptr),
     (ms,
      ptr)
___msections *ms;
void *ptr;)
{
  int ns = ms->nb_sections;              /* number of sections */
  ___msection **sections = ms->sections; /* array ordered by address */
  int lo, hi;

#ifdef ___ALLOC_MEM_UP
  /* 'ptr' below every section => -1 (also covers the empty case) */
  if (ns == 0 ||
      ptr < ___CAST(void*,sections[0]))
    return -1;
#else
  /* 'ptr' below every section => ns (also covers the empty case) */
  if (ns == 0 ||
      ptr < ___CAST(void*,sections[ns-1]))
    return ns;
#endif

  /* binary search */

  lo = 0;
  hi = ns-1;

  /* loop invariant: lo <= find_msection (ms, ptr) <= hi */

  while (lo < hi)
    {
      int mid = (lo+hi) / 2; /* lo <= mid < hi */
#ifdef ___ALLOC_MEM_UP
      if (ptr < ___CAST(void*,sections[mid+1])) hi = mid; else lo = mid+1;
#else
      if (ptr < ___CAST(void*,sections[mid])) lo = mid+1; else hi = mid;
#endif
    }

  return lo;
}
831
832
833 /*
834 * 'adjust_msections (msp, n)' contracts or expands the msections
835 * pointed to by 'msp' so that it contains 'n' sections. When the
836 * msections is contracted, the last sections allocated (i.e. those at
837 * the end of the doubly-linked list of sections) will be reclaimed.
838 * When expanding the msections there may not be enough memory to
839 * allocate new sections so the operation may fail. However
840 * 'adjust_msections' will always leave the msections in a consistent
841 * state and there will be at least as many sections as when the
842 * expansion was started. Failure can be detected by checking the
843 * 'nb_sections' field.
844 */
845
___HIDDEN void adjust_msections
___P((___msections **msp,
      int n),
     (msp,
      n)
___msections **msp;
int n;)
{
  int max_ns, ns;
  ___msections *ms = *msp;
  ___msection *hd;
  ___msection *tl;

  /* Snapshot the current state (or an empty state if no msections
     structure exists yet). */

  if (ms == 0)
    {
      max_ns = 0;
      ns = 0;
      hd = 0;
      tl = 0;
    }
  else
    {
      max_ns = ms->max_nb_sections;
      ns = ms->nb_sections;
      hd = ms->head;
      tl = ms->tail;
    }

  if (ms == 0 || n > max_ns)
    {
      /* must allocate a new msections structure */

      ___msections *new_ms;
      int i;

      while (n > max_ns) /* grow max_nb_sections until big enough */
        max_ns = 2*max_ns + 1;

      new_ms = ___CAST(___msections*,
                       alloc_mem_aligned
                       (___WORDS(___sizeof_msections(max_ns)),
                        1,
                        0));

      if (new_ms == 0)
        return; /* failure: *msp is left unchanged and consistent */

      /* copy the old bookkeeping and section table into the new
         structure, then discard the old one */

      new_ms->max_nb_sections = max_ns;
      new_ms->nb_sections = ns;
      new_ms->head = hd;
      new_ms->tail = tl;

      for (i=ns-1; i>=0; i--)
        new_ms->sections[i] = ms->sections[i];

      if (ms != 0)
        free_mem_aligned (ms);

      ms = new_ms;

      *msp = ms;
    }

  if (n < ns)
    {
      /* contraction of the msections */

      int j;

      /* remove sections from the tail of the doubly-linked list,
         i.e. the most recently allocated ones */

      while (ns > n)
        {
          ___msection *s = tl;

          tl = tl->prev;

          if (tl == 0)
            hd = 0;
          else
            tl->next = 0;

          /* close the gap left in the address-ordered 'sections'
             array, keeping each section's 'pos' index in sync */

          for (j=s->pos; j<ns-1; j++)
            {
              ms->sections[j] = ms->sections[j+1];
              ms->sections[j]->pos = j;
            }

          free_mem_aligned_heap (s);

          ns--;
        }

      ms->nb_sections = ns;
      ms->head = hd;
      ms->tail = tl;

      /*
       * Contraction of the msections structure is not performed
       * because there is typically very little memory to be
       * reclaimed.
       */
    }
  else
    {
      /* expansion of the msections */

      int i, j;

      while (ns < n)
        {
          ___msection *s = ___CAST(___msection*,
                                   alloc_mem_aligned_heap
                                   (___WORDS(___sizeof_msection(___MSECTION_SIZE)),
                                    1,
                                    0));

          if (s == 0)
            return; /* failure: structure is consistent with ns sections */

          /* find where the new section belongs in the address-ordered
             'sections' array */

          i = find_msection (ms, ___CAST(void*,s));

#ifdef ___ALLOC_MEM_UP
          i++;
#endif

          /* shift the tail of the array up one slot to make room,
             keeping each section's 'pos' index in sync */

          for (j=ns; j>i; j--)
            {
              ms->sections[j] = ms->sections[j-1];
              ms->sections[j]->pos = j;
            }

          ms->sections[i] = s;

          /* append the new section to the allocation-ordered
             doubly-linked list */

          if (tl == 0)
            {
              hd = s;
              s->index = 0;
            }
          else
            {
              tl->next = s;
              s->index = tl->index + 1;
            }

          s->pos = i;
          s->prev = tl;
          s->next = 0;

          tl = s;

          /* commit after each section so a later failure still leaves
             a consistent state */

          ms->nb_sections = ++ns;
          ms->head = hd;
          ms->tail = tl;
        }
    }
}
1001
1002
1003 /*
1004 * 'free_msections (msp)' releases all memory associated with the
1005 * msections pointed to by 'msp'.
1006 */
1007
1008 ___HIDDEN void free_msections
1009 ___P((___msections **msp),
1010 (msp)
1011 ___msections **msp;)
1012 {
1013 ___msections *ms = *msp;
1014
1015 if (ms != 0)
1016 {
1017 int i;
1018
1019 for (i=ms->nb_sections-1; i>=0; i--)
1020 free_mem_aligned_heap (ms->sections[i]);
1021
1022 free_mem_aligned (ms);
1023
1024 *msp = 0;
1025 }
1026 }
1027
1028
1029 /*---------------------------------------------------------------------------*/
1030
1031 /* Allocation of permanent objects. */
1032
1033 /*
1034 * 'alloc_mem_aligned_psection (words, multiplier, modulus)' allocates
1035 * an aligned block of memory inside a new psection. 'words' is the
1036 * size of the block in words and 'multiplier' and 'modulus' specify
1037 * its alignment in words. 'multiplier' must be a power of two and
1038 * 0<=modulus<multiplier. The pointer returned corresponds to an
1039 * address that is equal to (i*multiplier+modulus)*sizeof (___WORD) for
1040 * some 'i'.
1041 */
1042
___HIDDEN void *alloc_mem_aligned_psection
   ___P((___SIZE_TS words,
         unsigned int multiplier,
         unsigned int modulus),
        (words,
         multiplier,
         modulus)
___SIZE_TS words;
unsigned int multiplier;
unsigned int modulus;)
{
  void *container;

  /* Make sure alignment is sufficient for pointers */

  if (multiplier < sizeof (void*) / ___WS)
    multiplier = sizeof (void*) / ___WS;

  /* Make space for psection link and modulus */

  /*
   * Advance 'modulus' by a multiple of 'multiplier' words that is
   * large enough to hold the psection link pointer stored at the
   * start of the container, so the returned block still satisfies
   * the requested (multiplier,modulus) alignment.
   * NOTE(review): relies on 'multiplier' being a power of two, as
   * required by the contract documented above.
   */

  if (modulus < (sizeof (void*) + ___WS - 1) / ___WS)
    modulus += ((sizeof (void*) + multiplier * ___WS - 1) / ___WS) &
               -multiplier;

  /* Allocate container */

  container = alloc_mem_aligned_heap (words+modulus, multiplier, 0);

  if (container == 0)
    return 0; /* propagate out-of-memory to caller */

  /* Link the new psection at the head of the global psection list. */

  *___CAST(void**,container) = ___GSTATE->mem.psections;
  ___GSTATE->mem.psections = container;

  /* Skip over the link pointer (and padding) to the aligned payload. */

  return ___CAST(void*,___CAST(___WORD*,container) + modulus);
}
1078
1079
1080 /*
1081 * 'alloc_mem_aligned_perm (words, multiplier, modulus)' allocates an
1082 * aligned block of memory inside a psection. If there is enough free
1083 * space in a previously allocated psection that psection is used,
1084 * otherwise a new psection is allocated. 'words' is the size of the
1085 * block in words and 'multiplier' and 'modulus' specify its alignment
1086 * in words. 'multiplier' must be a power of two and
1087 * 0<=modulus<multiplier. The pointer returned corresponds to an
1088 * address that is equal to (i*multiplier+modulus)*sizeof (___WORD) for
1089 * some 'i'.
1090 */
1091
___HIDDEN void *alloc_mem_aligned_perm
   ___P((___SIZE_TS words,
         int multiplier,
         int modulus),
        (words,
         multiplier,
         modulus)
___SIZE_TS words;
int multiplier;
int modulus;)
{
  ___SIZE_TS waste;
  ___WORD *base;

  /*
   * Try to satisfy request in current psection.
   */

  if (___GSTATE->mem.palloc_ptr != 0)
    {
      ___WORD *new_palloc_ptr;

      /*
       * Round the current allocation pointer up to the next address
       * of the form (i*multiplier+modulus)*sizeof (___WORD).
       * NOTE(review): relies on 'multiplier' being a power of two so
       * that (multiplier * -___WS) acts as an alignment mask.
       */

      base = ___CAST(___WORD*,
                     ___CAST(___WORD,___GSTATE->mem.palloc_ptr+multiplier-1-modulus) &
                     (multiplier * -___WS)) +
             modulus;

      new_palloc_ptr = base + words;

      if (new_palloc_ptr <= ___GSTATE->mem.palloc_limit) /* did it fit in the psection? */
        {
          ___GSTATE->mem.palloc_ptr = new_palloc_ptr;
          return base;
        }

      /* Words that would go unused at the end of the current psection. */

      waste = ___GSTATE->mem.palloc_limit - ___GSTATE->mem.palloc_ptr;
    }
  else
    waste = 0;

  /*
   * Request can't be satisfied in current psection so we must
   * allocate a new psection.
   */

  /*
   * If abandoning the current psection would waste too much space, or
   * the request is larger than a standard psection, serve the request
   * from a dedicated psection and keep the current psection as the
   * allocation target for future (smaller) requests.
   */

  if (waste > ___PSECTION_WASTE || words > ___PSECTION_SIZE)
    return alloc_mem_aligned_psection (words, multiplier, modulus);

  base = ___CAST(___WORD*,
                 alloc_mem_aligned_psection
                   (___PSECTION_SIZE,
                    multiplier,
                    modulus));

  if (base != 0)
    {
      /* Make the new psection the current allocation target. */
      ___GSTATE->mem.palloc_ptr = base + words;
      ___GSTATE->mem.palloc_limit = base + ___PSECTION_SIZE;
    }

  return base;
}
1154
1155
1156 ___HIDDEN void free_psections ___PVOID
1157 {
1158 void *base = ___GSTATE->mem.psections;
1159
1160 ___GSTATE->mem.psections = 0;
1161
1162 while (base != 0)
1163 {
1164 void *link = *___CAST(void**,base);
1165 free_mem_aligned_heap (base);
1166 base = link;
1167 }
1168 }
1169
1170
1171 void ___glo_list_setup ___PVOID
1172 {
1173 int i;
1174
1175 ___GSTATE->mem.glo_list.count = 0;
1176
1177 for (i=___GLO_SUBLIST_COUNT-1; i>=0; i--)
1178 {
1179 ___glo_sublist_struct *sl = &___GSTATE->mem.glo_list.sublist[i];
1180 sl->head = 0;
1181 sl->tail = 0;
1182 }
1183 }
1184
1185
1186 void ___glo_list_add
1187 ___P((___glo_struct *glo),
1188 (glo)
1189 ___glo_struct *glo;)
1190 {
1191 int i = ___GSTATE->mem.glo_list.count++ % ___GLO_SUBLIST_COUNT;
1192 ___glo_sublist_struct *sl = &___GSTATE->mem.glo_list.sublist[i];
1193
1194 glo->next = 0;
1195
1196 if (sl->head == 0)
1197 sl->head = glo;
1198 else
1199 sl->tail->next = glo;
1200
1201 sl->tail = glo;
1202 }
1203
1204
/*
 * Search every sublist of the global variable list for a
 * ___glo_struct whose prm cell (when prm is true) or val cell (when
 * prm is false) contains obj.  Among all sublists, the match found at
 * the smallest depth is returned; 0 is returned when there is no
 * match.
 */

___glo_struct *___glo_list_search_obj
   ___P((___SCMOBJ obj,
         ___BOOL prm),
        (obj,
         prm)
___SCMOBJ obj;
___BOOL prm;)
{
  ___glo_struct *glo = 0;
  int glo_depth = 999999999; /* sentinel: effectively "no match yet" */
  int i;

  for (i=___GLO_SUBLIST_COUNT-1; i>=0; i--)
    {
      ___glo_sublist_struct *sl = &___GSTATE->mem.glo_list.sublist[i];
      ___glo_struct *probe = sl->head;
      int probe_depth = 0;

      if (prm)
        {
          /* Stop scanning deeper than the best match found so far. */
          while (probe != 0 && ___PRMCELL(probe->prm) != obj)
            {
              probe = probe->next;
              if (++probe_depth > glo_depth) break;
            }
        }
      else
        {
          while (probe != 0 && ___GLOCELL(probe->val) != obj)
            {
              probe = probe->next;
              if (++probe_depth > glo_depth) break;
            }
        }

      /*
       * probe is nonzero either on a match or after the depth cutoff;
       * the depth comparison below rejects the cutoff case (its
       * probe_depth exceeds glo_depth by construction).
       */

      if (probe != 0)
        {
          if (glo == 0 || probe_depth <= glo_depth)
            {
              glo = probe;
              glo_depth = probe_depth;
            }
        }
    }

  return glo;
}
1252
1253
1254 ___SCMOBJ ___glo_struct_to_global_var
1255 ___P((___glo_struct *glo),
1256 (glo)
1257 ___glo_struct *glo;)
1258 {
1259 ___SCMOBJ result = ___FAL;
1260
1261 if (glo != 0)
1262 {
1263 int len = ___INT(___VECTORLENGTH(___GSTATE->symbol_table));
1264 int i;
1265
1266 for (i=1; i<len; i++)
1267 {
1268 ___SCMOBJ probe = ___FIELD(___GSTATE->symbol_table,i);
1269
1270 while (probe != ___NUL)
1271 {
1272 if (___GLOBALVARSTRUCT(probe) == glo)
1273 {
1274 result = probe;
1275 goto end_search;
1276 }
1277 probe = ___FIELD(probe,___SYMKEY_NEXT);
1278 }
1279 }
1280 end_search:;
1281 }
1282
1283 return result;
1284 }
1285
1286
1287 ___SCMOBJ ___obj_to_global_var
1288 ___P((___SCMOBJ obj,
1289 ___BOOL prm),
1290 (obj,
1291 prm)
1292 ___SCMOBJ obj;
1293 ___BOOL prm;)
1294 {
1295 /*
1296 * Find the global variable that is bound to the object obj.
1297 * If prm is true then the prm field of the global variable
1298 * is checked, otherwise the val field is checked.
1299 */
1300
1301 return ___glo_struct_to_global_var (___glo_list_search_obj (obj, prm));
1302 }
1303
1304
/*
 * Ensure the symbol 'sym' has an associated global variable
 * structure, allocating one (as a permanent object) on first use.
 * Returns sym, or a fixnum error code when allocation fails.
 */

___SCMOBJ ___make_global_var
   ___P((___SCMOBJ sym),
        (sym)
___SCMOBJ sym;)
{
  if (___GLOBALVARSTRUCT(sym) == 0)
    {
      ___glo_struct *glo = ___CAST(___glo_struct*,
                                   alloc_mem_aligned_perm
                                     (___WORDS(sizeof (___glo_struct)),
                                      1,
                                      0));

      if (glo == 0)
        return ___FIX(___HEAP_OVERFLOW_ERR);

#ifdef ___SINGLE_VM
      glo->val = ___UNB1; /* the cell itself holds the (unbound) value */
#else
      /*
       * NOTE(review): in the multiple-VM build, val is seeded from the
       * running global variable count -- presumably an index into
       * per-VM storage; confirm against ___GLOCELL's definition.
       */
      glo->val = ___GSTATE->mem.glo_list.count;
#endif

      ___glo_list_add (glo);

      ___PRMCELL(glo->prm) = ___FAL;

      ___FIELD(sym,___SYMBOL_GLOBAL) = ___CAST(___SCMOBJ,glo);
    }

  return sym;
}
1336
1337
1338 #ifdef ___USE_find_global_var_bound_to
1339
1340 ___SCMOBJ ___find_global_var_bound_to
1341 ___P((___SCMOBJ val),
1342 (val)
1343 ___SCMOBJ val;)
1344 {
1345 ___SCMOBJ sym = ___NUL;
1346 int i;
1347
1348 for (i = ___INT(___VECTORLENGTH(___GSTATE->symbol_table)) - 1; i>0; i--)
1349 {
1350 sym = ___FIELD(___GSTATE->symbol_table,i);
1351
1352 while (sym != ___NUL)
1353 {
1354 ___glo_struct *g = ___GLOBALVARSTRUCT(sym);
1355
1356 if (g != 0 &&
1357 (___PRMCELL(g->prm) == val || ___GLOCELL(g->val) == val))
1358 {
1359 i = 0;
1360 break;
1361 }
1362
1363 sym = ___FIELD(sym,___SYMKEY_NEXT);
1364 }
1365 }
1366
1367 return sym;
1368 }
1369
1370 #endif
1371
1372
1373 /*---------------------------------------------------------------------------*/
1374
1375 /*
1376 * '___still_obj_refcount_inc (obj)' increments the reference count of
1377 * the still object 'obj'.
1378 */
1379
___EXP_FUNC(void,___still_obj_refcount_inc)
   ___P((___WORD obj),
        (obj)
___WORD obj;)
{
  /*
   * The refcount word lives in the still object's prefix, at offset
   * ___STILL_REFCOUNT-___STILL_BODY words before the body (see
   * alloc_scmobj_still, which initializes base[___STILL_REFCOUNT]).
   */
  ___BODY0(obj)[___STILL_REFCOUNT-___STILL_BODY]++;
}
1387
1388
1389 /*
1390 * '___still_obj_refcount_dec (obj)' decrements the reference count of
1391 * the still object 'obj'.
1392 */
1393
___EXP_FUNC(void,___still_obj_refcount_dec)
   ___P((___WORD obj),
        (obj)
___WORD obj;)
{
  /* Mirror of ___still_obj_refcount_inc; no check for underflow. */
  ___BODY0(obj)[___STILL_REFCOUNT-___STILL_BODY]--;
}
1401
1402
1403 /*---------------------------------------------------------------------------*/
1404
1405 /*
1406 * '___alloc_scmobj (___ps, subtype, bytes)' allocates a permanent or
1407 * still Scheme object (depending on '___ps') of subtype 'subtype'
1408 * with a body containing 'bytes' bytes, and returns it as an encoded
1409 * Scheme object. When '___ps' is NULL, a permanent object is
1410 * allocated, and when '___ps' is not NULL, a still object is
1411 * allocated in the heap of that processor's VM. The initialization
1412 * of the object's body must be done by the caller. In the case of
1413 * still objects this initialization must be done before the next
1414 * allocation is requested. The 'refcount' field of still objects is
1415 * initially 1. A fixnum error code is returned when there is an
1416 * error.
1417 */
1418
___HIDDEN ___WORD alloc_scmobj_perm
   ___P((int subtype,
         ___SIZE_TS bytes),
        (subtype,
         bytes)
int subtype;
___SIZE_TS bytes;)
{
  /*
   * Allocate a permanent Scheme object of 'subtype' with a body of
   * 'bytes' bytes (see the general contract documented above).
   * Returns the encoded object, or a fixnum error code.
   */

  void *ptr;
  ___WORD *base;
  ___WORD *body;
  ___SIZE_TS words = ___PERM_BODY + ___WORDS(bytes); /* prefix + body */

  /*
   * Some objects, such as ___sFOREIGN, ___sS64VECTOR, ___sU64VECTOR,
   * ___sF64VECTOR, ___sFLONUM and ___sBIGNUM, must have a body that
   * is aligned on a multiple of 8 on some machines.  Here, we force
   * alignment to a multiple of 8 even if not necessary in all cases
   * because it is typically more efficient due to a better
   * utilization of the cache.
   */

  /* The modulus places base+___PERM_BODY (the body) on the 8-byte
     boundary, not base itself. */

  ptr = alloc_mem_aligned_perm (words,
                                8>>___LWS,
                                (-___PERM_BODY)&((8>>___LWS)-1));

  if (ptr == 0)
    return ___FIX(___HEAP_OVERFLOW_ERR);

  base = ___CAST(___WORD*,ptr);
  body = base + ___PERM_BODY;

#ifdef ___USE_HANDLES
  base[___PERM_HANDLE] = ___CAST(___WORD,body); /* handle points at the body */
#endif

  base[___PERM_HEADER] = ___MAKE_HD(bytes, subtype, ___PERM);

  /* Pairs and other subtypes use different pointer tag encodings. */

  if (subtype == ___sPAIR)
    return ___PAIR_FROM_BODY(body);
  else
    return ___SUBTYPED_FROM_BODY(body);
}
1462
1463
___HIDDEN ___WORD alloc_scmobj_still
   ___P((___processor_state ___ps,
         int subtype,
         ___SIZE_TS bytes),
        (___ps,
         subtype,
         bytes)
___processor_state ___ps;
int subtype;
___SIZE_TS bytes;)
{
  /*
   * Allocate a still Scheme object of 'subtype' with a body of
   * 'bytes' bytes in the heap of processor ___ps's VM.  Heap space is
   * accounted for at the VM level, but small allocations are batched:
   * their sizes accumulate (presumably in the file-scope variable
   * words_still_objs_deferred -- defined earlier in the file) until
   * the total exceeds ___MAX_STILL_DEFERRED, at which point the whole
   * batch is charged under the allocation lock.  Returns the encoded
   * object, or a fixnum error code on heap overflow.
   */

  void *ptr;
  ___WORD *base;
  ___WORD *body;
  ___SIZE_TS words = ___STILL_BODY + ___WORDS(bytes);
  ___SIZE_TS words_including_deferred = words + words_still_objs_deferred;

#ifdef CALL_GC_FREQUENTLY
  if (--___gc_calls_to_punt < 0) goto invoke_gc; /* GC stress testing */
#endif

  if (words_including_deferred <= ___MAX_STILL_DEFERRED)
    {
      /*
       * Allocate the still object and defer its accounting at the VM
       * level.
       */

      /*
       * Some objects, such as ___sFOREIGN, ___sS64VECTOR,
       * ___sU64VECTOR, ___sF64VECTOR, ___sFLONUM and ___sBIGNUM, must
       * have a body that is aligned on a multiple of 8 on some
       * machines.  Here, we force alignment to a multiple of 8 even
       * if not necessary in all cases because it is typically more
       * efficient due to a better utilization of the cache.
       */

      if ((ptr = alloc_mem_aligned_heap (words,
                                         8>>___LWS,
                                         (-___STILL_BODY)&((8>>___LWS)-1)))
          == 0)
        {
          /*
           * Couldn't allocate the still object (probably the C heap is full).
           */

          return ___FIX(___HEAP_OVERFLOW_ERR);
        }

      words_still_objs_deferred = words_including_deferred;
    }
  else
    {
      /*
       * The space for the still object (and the deferred previous
       * ones) is considerable, so the availability of the space must
       * be checked at the VM level.  The VM's memory allocation lock
       * must be acquired to ensure correct bookkeeping.
       */

      ALLOC_MEM_LOCK();

      if (words_including_deferred <= compute_free_heap_space())
        {
          /*
           * There is sufficient free heap space, so no need to call GC.
           */

          occupied_words_still += words_including_deferred;

          ALLOC_MEM_UNLOCK();

          /*
           * Space accounting for previous still objects is now accounted
           * for at the VM level.
           */

          words_still_objs_deferred = 0;
        }
      else
        {
          /*
           * There is insufficient free heap space, so call GC.
           */

          ALLOC_MEM_UNLOCK();

#ifdef CALL_GC_FREQUENTLY
     invoke_gc:
#endif

          /* NOTE(review): presumably the GC charges 'words' itself when
             it succeeds -- see the matching debit below on failure. */

          if (___garbage_collect (___PSP words))
            return ___FIX(___HEAP_OVERFLOW_ERR);
        }

      /*
       * Allocate the still object.  See comments above for other call
       * to alloc_mem_aligned_heap.
       */

      if ((ptr = alloc_mem_aligned_heap (words,
                                         8>>___LWS,
                                         (-___STILL_BODY)&((8>>___LWS)-1)))
          == 0)
        {
          /*
           * Couldn't allocate the still object (probably the C heap is full).
           * So undo its accounting at the VM level.
           */

          ALLOC_MEM_LOCK();

          occupied_words_still -= words;

          ALLOC_MEM_UNLOCK();

          return ___FIX(___HEAP_OVERFLOW_ERR);
        }
    }

  /* Initialize still object and add it to the still_objs list. */

  base = ___CAST(___WORD*,ptr);
  body = base + ___STILL_BODY;

  base[___STILL_LINK] = still_objs;
  still_objs = ___CAST(___WORD,base);
  base[___STILL_REFCOUNT] = 1; /* caller owns one reference */
  base[___STILL_LENGTH] = words;

#ifdef ___USE_HANDLES
  base[___STILL_HANDLE] = ___CAST(___WORD,body);
#endif

  base[___STILL_HEADER] = ___MAKE_HD(bytes, subtype, ___STILL);

  /* Return tagged reference to still object. */

  if (subtype == ___sPAIR)
    return ___PAIR_FROM_BODY(body);
  else
    return ___SUBTYPED_FROM_BODY(body);
}
1607
1608
___EXP_FUNC(___WORD,___alloc_scmobj)1609 ___EXP_FUNC(___WORD,___alloc_scmobj)
1610 ___P((___processor_state ___ps,
1611 int subtype,
1612 ___SIZE_TS bytes),
1613 (___ps,
1614 subtype,
1615 bytes)
1616 ___processor_state ___ps;
1617 int subtype;
1618 ___SIZE_TS bytes;)
1619 {
1620 if (___ps == NULL)
1621 return alloc_scmobj_perm (subtype, bytes);
1622 else
1623 return alloc_scmobj_still (___ps, subtype, bytes);
1624 }
1625
1626
___EXP_FUNC(___SCMOBJ,___release_scmobj)1627 ___EXP_FUNC(___SCMOBJ,___release_scmobj)
1628 ___P((___SCMOBJ obj),
1629 (obj)
1630 ___SCMOBJ obj;)
1631 {
1632 if (___MEM_ALLOCATED(obj) &&
1633 ___HD_TYP(___HEADER(obj)) == ___STILL)
1634 ___still_obj_refcount_dec (obj);
1635 return obj;
1636 }
1637
1638
1639 /*
1640 * '___make_pair (___ps, car, cdr)' creates a Scheme pair having the
1641 * values 'car' and 'cdr' in its CAR and CDR fields. The 'car' and
1642 * 'cdr' arguments must not be movable objects and any still object
1643 * must be reachable some other way or have a nonzero refcount. A
1644 * permanent or still object is allocated, depending on '___ps'. When
1645 * '___ps' is NULL, a permanent object is allocated, and when '___ps'
1646 * is not NULL, a still object is allocated in the heap of that
1647 * processor. The 'refcount' field of still objects is initially 1.
1648 * A fixnum error code is returned when there is an error.
1649 */
1650
___EXP_FUNC(___WORD,___make_pair)1651 ___EXP_FUNC(___WORD,___make_pair)
1652 ___P((___processor_state ___ps,
1653 ___WORD car,
1654 ___WORD cdr),
1655 (___ps,
1656 car,
1657 cdr)
1658 ___processor_state ___ps;
1659 ___WORD car;
1660 ___WORD cdr;)
1661 {
1662 ___WORD obj = ___alloc_scmobj (___ps, ___sPAIR, ___PAIR_SIZE<<___LWS);
1663
1664 if (!___FIXNUMP(obj))
1665 {
1666 ___CAR_FIELD(obj) = car;
1667 ___CDR_FIELD(obj) = cdr;
1668 }
1669
1670 return obj;
1671 }
1672
1673
1674 /*
1675 * '___make_vector (___ps, length, init)' creates a Scheme vector of
1676 * length 'length' and initialized with the value 'init'. The 'init'
1677 * argument must not be a movable object and if it is a still object
1678 * it must be reachable some other way or have a nonzero refcount. A
1679 * permanent or still object is allocated, depending on '___ps'. When
1680 * '___ps' is NULL, a permanent object is allocated, and when '___ps'
1681 * is not NULL, a still object is allocated in the heap of that
1682 * processor. The 'refcount' field of still objects is initially 1.
1683 * A fixnum error code is returned when there is an error.
1684 */
1685
___EXP_FUNC(___WORD,___make_vector)1686 ___EXP_FUNC(___WORD,___make_vector)
1687 ___P((___processor_state ___ps,
1688 ___SIZE_TS length,
1689 ___WORD init),
1690 (___ps,
1691 length,
1692 init)
1693 ___processor_state ___ps;
1694 ___SIZE_TS length;
1695 ___WORD init;)
1696 {
1697 if (length > ___CAST(___WORD,___LMASK >> (___LF+___LWS)))
1698 return ___FIX(___HEAP_OVERFLOW_ERR);
1699 else
1700 {
1701 ___WORD obj = ___alloc_scmobj (___ps, ___sVECTOR, length<<___LWS);
1702
1703 if (!___FIXNUMP(obj))
1704 {
1705 int i;
1706 for (i=0; i<length; i++)
1707 ___FIELD(obj, i) = init;
1708 }
1709
1710 return obj;
1711 }
1712 }
1713
1714
1715 /*---------------------------------------------------------------------------*/
1716
1717 /*
1718 * Routines to manage symbol table, keyword table and global variable
1719 * table.
1720 */
1721
1722 /*
1723 * The hashing functions '___hash_UTF_8_string (str)' and
1724 * '___hash_scheme_string (str)' must compute the same value as the
1725 * function 'targ-hash' in the file gsc/_t-c-3.scm. A fixnum error
1726 * code is returned when there is an error.
1727 *
1728 * These functions implement an adaptation of the FNV1a hash algorithm
1729 * (see https://tools.ietf.org/html/draft-eastlake-fnv-12). Instead
1730 * of iterating over bytes, an iteration over Unicode code points is
1731 * used. This will give the same result if the string contains only
1732 * ISO-8859-1 characters. However, only the lower 29 bits of the
1733 * standard 32 bit FNV1a algorithm are returned so the result fits in
1734 * a fixnum on a 32 bit word architecture.
1735 */
1736
1737 #define FN1a_prime 0x01000193
1738 #define FN1a_offset_basis 0x011C9DC5
1739
1740 #define HASH_STEP(h,c) (((h)^(c)) * FN1a_prime) & ___MAX_FIX32
1741
___SCMOBJ ___hash_UTF_8_string
   ___P((___UTF_8STRING str),
        (str)
___UTF_8STRING str;)
{
  /*
   * FNV1a hash of the code points of the NUL terminated UTF-8 string
   * str (see the algorithm comment above).  Returns a fixnum hash, or
   * the fixnum error code ___CTOS_UTF_8STRING_ERR when str is not
   * valid UTF-8 or contains a code point above ___MAX_CHR.
   */

  ___UM32 h = FN1a_offset_basis;
  ___UTF_8STRING p = str;
  ___UCS_4 c;

  for (;;)
    {
      ___UTF_8STRING start = p;
      c = ___UTF_8_get (&p);
      if (p == start || c > ___MAX_CHR) /* no progress = decode error */
        return ___FIX(___CTOS_UTF_8STRING_ERR);
      if (c == 0) /* terminating NUL is not hashed */
        break;
      h = HASH_STEP(h,c);
    }

  return ___FIX(h);
}
1764
1765
1766 ___SCMOBJ ___hash_scheme_string
1767 ___P((___SCMOBJ str),
1768 (str)
1769 ___SCMOBJ str;)
1770 {
1771 ___SIZE_T i, n = ___INT(___STRINGLENGTH(str));
1772 ___UM32 h = FN1a_offset_basis;
1773
1774 for (i=0; i<n; i++)
1775 h = HASH_STEP(h,___INT(___STRINGREF(str,___FIX(i))));
1776
1777 return ___FIX(h);
1778 }
1779
1780
1781 ___HIDDEN ___SCMOBJ symkey_table
1782 ___P((unsigned int subtype),
1783 (subtype)
1784 unsigned int subtype;)
1785 {
1786 switch (subtype)
1787 {
1788 case ___sKEYWORD:
1789 return ___GSTATE->keyword_table;
1790 default: /* assume ___sSYMBOL */
1791 return ___GSTATE->symbol_table;
1792 }
1793 }
1794
1795
1796 ___HIDDEN void symkey_table_set
1797 ___P((unsigned int subtype,
1798 ___SCMOBJ new_table),
1799 (subtype,
1800 new_table)
1801 unsigned int subtype;
1802 ___SCMOBJ new_table;)
1803 {
1804 switch (subtype)
1805 {
1806 case ___sKEYWORD:
1807 ___GSTATE->keyword_table = new_table;
1808 break;
1809 default: /* assume ___sSYMBOL */
1810 ___GSTATE->symbol_table = new_table;
1811 break;
1812 }
1813 }
1814
1815
1816 ___HIDDEN ___SCMOBJ alloc_symkey_table
1817 ___P((unsigned int subtype,
1818 ___SIZE_TS length),
1819 (subtype,
1820 length)
1821 unsigned int subtype;
1822 ___SIZE_TS length;)
1823 {
1824 ___SCMOBJ tbl = ___make_vector (NULL, length+1, ___NUL);
1825
1826 if (!___FIXNUMP(tbl))
1827 ___FIELD(tbl,0) = ___FIX(0);
1828
1829 return tbl;
1830 }
1831
1832
/*
 * Add the symbol or keyword 'symkey' to the appropriate interning
 * table (a vector of bucket chains whose slot 0 holds the entry
 * count), growing and rehashing the table when its load gets too
 * high.
 */

void ___intern_symkey
   ___P((___SCMOBJ symkey),
        (symkey)
___SCMOBJ symkey;)
{
  unsigned int subtype = ___INT(___SUBTYPE(symkey));
  ___SCMOBJ tbl = symkey_table (subtype);
  int i = ___INT(___FIELD(symkey,___SYMKEY_HASH))
          % (___INT(___VECTORLENGTH(tbl)) - 1)
          + 1; /* bucket index; buckets occupy slots 1..len-1 */

  /*
   * Add symbol/keyword to the appropriate list.
   */

  ___FIELD(symkey,___SYMKEY_NEXT) = ___FIELD(tbl,i);
  ___FIELD(tbl,i) = symkey;

  ___FIELD(tbl,0) = ___FIXADD(___FIELD(tbl,0),___FIX(1));

  /*
   * Grow and rehash the table when it is too loaded (above an average
   * list length of 4).
   */

  if (___INT(___FIELD(tbl,0)) > ___INT(___VECTORLENGTH(tbl)) * 4)
    {
      int new_len = (___INT(___VECTORLENGTH(tbl))-1) * 2;
      ___SCMOBJ newtbl = alloc_symkey_table (subtype, new_len);

      /* On allocation failure, silently keep using the old table. */

      if (!___FIXNUMP(newtbl))
        {
          for (i=___INT(___VECTORLENGTH(tbl))-1; i>0; i--)
            {
              ___SCMOBJ probe = ___FIELD(tbl,i);

              /* Relink every entry of this bucket into its new bucket. */

              while (probe != ___NUL)
                {
                  ___SCMOBJ symkey = probe;
                  int j = ___INT(___FIELD(symkey,___SYMKEY_HASH))%new_len + 1;

                  probe = ___FIELD(symkey,___SYMKEY_NEXT);
                  ___FIELD(symkey,___SYMKEY_NEXT) = ___FIELD(newtbl,j);
                  ___FIELD(newtbl,j) = symkey;
                }
            }

          ___FIELD(newtbl,0) = ___FIELD(tbl,0);

          symkey_table_set (subtype, newtbl);
        }
    }
}
1886
1887
1888 ___SCMOBJ ___new_symkey
1889 ___P((___SCMOBJ name, /* name must be a permanent object */
1890 unsigned int subtype),
1891 (name,
1892 subtype)
1893 ___SCMOBJ name;
1894 unsigned int subtype;)
1895 {
1896 ___SCMOBJ obj;
1897 ___SCMOBJ tbl;
1898
1899 switch (subtype)
1900 {
1901 case ___sKEYWORD:
1902 obj = ___alloc_scmobj (NULL, ___sKEYWORD, ___KEYWORD_SIZE<<___LWS);
1903 break;
1904 default: /* assume ___sSYMBOL */
1905 obj = ___alloc_scmobj (NULL, ___sSYMBOL, ___SYMBOL_SIZE<<___LWS);
1906 break;
1907 }
1908
1909 if (___FIXNUMP(obj))
1910 return obj;
1911
1912 tbl = symkey_table (subtype);
1913
1914 /* object layout is same for ___sSYMBOL and ___sKEYWORD */
1915
1916 ___FIELD(obj,___SYMKEY_NAME) = name;
1917 ___FIELD(obj,___SYMKEY_HASH) = ___hash_scheme_string (name);
1918
1919 if (subtype == ___sSYMBOL)
1920 ___FIELD(obj,___SYMBOL_GLOBAL) = ___CAST(___SCMOBJ,___CAST(___glo_struct*,0));
1921
1922 ___intern_symkey (obj);
1923
1924 return obj;
1925 }
1926
1927
/*
 * Look up the interned symbol or keyword whose name equals the NUL
 * terminated UTF-8 string str.  Returns the interned object, ___FAL
 * when none exists, or a fixnum error code when str is not valid
 * UTF-8.
 */

___SCMOBJ ___find_symkey_from_UTF_8_string
   ___P((char *str,
         unsigned int subtype),
        (str,
         subtype)
char *str;
unsigned int subtype;)
{
  ___SCMOBJ tbl;
  ___SCMOBJ probe;
  ___SCMOBJ h = ___hash_UTF_8_string (str);

  if (h < ___FIX(0)) /* propagate UTF-8 decoding error */
    return h;

  tbl = symkey_table (subtype);
  probe = ___FIELD(tbl, ___INT(h) % (___INT(___VECTORLENGTH(tbl))-1) + 1);

  /* Scan the bucket chain, comparing names code point by code point. */

  while (probe != ___NUL)
    {
      ___SCMOBJ name = ___FIELD(probe,___SYMKEY_NAME);
      ___SIZE_T i;
      ___SIZE_T n = ___INT(___STRINGLENGTH(name));
      ___UTF_8STRING p = str;
      for (i=0; i<n; i++)
        if (___UTF_8_get (&p) !=
            ___CAST(___UCS_4,___INT(___STRINGREF(name,___FIX(i)))))
          goto next;
      if (___UTF_8_get (&p) == 0) /* str must end exactly where name does */
        return probe;
    next:
      probe = ___FIELD(probe,___SYMKEY_NEXT);
    }

  return ___FAL;
}
1964
1965
/*
 * Look up the interned symbol or keyword whose name equals the Scheme
 * string str.  Returns the interned object or ___FAL when none
 * exists.
 */

___SCMOBJ ___find_symkey_from_scheme_string
   ___P((___SCMOBJ str,
         unsigned int subtype),
        (str,
         subtype)
___SCMOBJ str;
unsigned int subtype;)
{
  ___SCMOBJ tbl;
  ___SCMOBJ probe;
  ___SCMOBJ h = ___hash_scheme_string (str);

  tbl = symkey_table (subtype);
  probe = ___FIELD(tbl, ___INT(h) % (___INT(___VECTORLENGTH(tbl))-1) + 1);

  /* Scan the bucket chain; compare lengths first, then characters. */

  while (probe != ___NUL)
    {
      ___SCMOBJ name = ___FIELD(probe,___SYMKEY_NAME);
      ___SIZE_TS i = 0;
      ___SIZE_TS n = ___INT(___STRINGLENGTH(name));
      if (___INT(___STRINGLENGTH(str)) == n)
        {
          for (i=0; i<n; i++)
            if (___STRINGREF(str,___FIX(i)) != ___STRINGREF(name,___FIX(i)))
              goto next;
          return probe;
        }
    next:
      probe = ___FIELD(probe,___SYMKEY_NEXT);
    }

  return ___FAL;
}
1999
2000
2001 ___SCMOBJ ___make_symkey_from_UTF_8_string
2002 ___P((___UTF_8STRING str,
2003 unsigned int subtype),
2004 (str,
2005 subtype)
2006 ___UTF_8STRING str;
2007 unsigned int subtype;)
2008 {
2009 ___SCMOBJ obj = ___find_symkey_from_UTF_8_string (str, subtype);
2010
2011 if (obj == ___FAL)
2012 {
2013 ___SCMOBJ name;
2014 ___SCMOBJ err;
2015
2016 if ((err = ___NONNULLUTF_8STRING_to_SCMOBJ
2017 (NULL, /* allocate as permanent object */
2018 str,
2019 &name,
2020 -1))
2021 != ___FIX(___NO_ERR))
2022 return err;
2023
2024 obj = ___new_symkey (name, subtype);
2025 }
2026
2027 return obj;
2028 }
2029
2030
2031 ___SCMOBJ ___make_symkey_from_scheme_string
2032 ___P((___SCMOBJ str,
2033 unsigned int subtype),
2034 (str,
2035 subtype)
2036 ___SCMOBJ str;
2037 unsigned int subtype;)
2038 {
2039 ___SCMOBJ obj = ___find_symkey_from_scheme_string (str, subtype);
2040
2041 if (obj == ___FAL)
2042 {
2043 ___SIZE_T n = ___INT(___STRINGLENGTH(str));
2044 ___SCMOBJ name = ___alloc_scmobj (NULL, ___sSTRING, n<<___LCS);
2045
2046 if (___FIXNUMP(name))
2047 return name;
2048
2049 memmove (___BODY_AS(name,___tSUBTYPED),
2050 ___BODY_AS(str,___tSUBTYPED),
2051 n<<___LCS);
2052
2053 obj = ___new_symkey (name, subtype);
2054 }
2055
2056 return obj;
2057 }
2058
2059
2060 void ___for_each_symkey
2061 ___P((unsigned int subtype,
2062 void (*visit) (___SCMOBJ symkey, void *data),
2063 void *data),
2064 (subtype,
2065 visit,
2066 data)
2067 unsigned int subtype;
2068 void (*visit) ();
2069 void *data;)
2070 {
2071 ___SCMOBJ tbl = symkey_table (subtype);
2072 int i;
2073
2074 for (i=___INT(___VECTORLENGTH(tbl))-1; i>0; i--)
2075 {
2076 ___SCMOBJ probe = ___FIELD(tbl, i);
2077
2078 while (probe != ___NUL)
2079 {
2080 visit (probe, data);
2081 probe = ___FIELD(probe,___SYMKEY_NEXT);
2082 }
2083 }
2084 }
2085
2086
2087 /*---------------------------------------------------------------------------*/
2088
2089
#define fromspace_offset ((___MSECTION_SIZE>>1) - tospace_offset)

/* Parenthesize the expansions and the 'ms' argument so these macros
   compose safely inside larger expressions (e.g. pointer arithmetic
   and comparisons). */

#define start_of_fromspace(ms) ((ms)->base + fromspace_offset)

#define start_of_tospace(ms) ((ms)->base + tospace_offset)
2095
2096
2097 /*---------------------------------------------------------------------------*/
2098
2099 #ifdef ___DEBUG_GARBAGE_COLLECT
2100
2101
2102 #define ZAP_USING_INVALID_HEAD_TAG_not
2103 #define ZAP_PATTERN ___CAST(___WORD,0xcafebabe)
2104 #define INVALID_HEAD_TAG 4
2105
2106
2107 char *subtype_to_string
2108 ___P((int subtype),
2109 (subtype)
2110 int subtype;)
2111 {
2112 switch (subtype)
2113 {
2114 case ___sVECTOR: return "vector";
2115 case ___sPAIR: return "pair";
2116 case ___sRATNUM: return "ratnum";
2117 case ___sCPXNUM: return "cpxnum";
2118 case ___sSTRUCTURE: return "structure";
2119 case ___sBOXVALUES: return "boxvalues";
2120 case ___sMEROON: return "meroon";
2121 case ___sJAZZ: return "jazz";
2122 case ___sSYMBOL: return "symbol";
2123 case ___sKEYWORD: return "keyword";
2124 case ___sFRAME: return "frame";
2125 case ___sCONTINUATION: return "continuation";
2126 case ___sPROMISE: return "promise";
2127 case ___sWEAK: return "weak";
2128 case ___sPROCEDURE: return "procedure";
2129 case ___sRETURN: return "return";
2130 case ___sFOREIGN: return "foreign";
2131 case ___sSTRING: return "string";
2132 case ___sS8VECTOR: return "s8vector";
2133 case ___sU8VECTOR: return "u8vector";
2134 case ___sS16VECTOR: return "s16vector";
2135 case ___sU16VECTOR: return "u16vector";
2136 case ___sS32VECTOR: return "s32vector";
2137 case ___sU32VECTOR: return "u32vector";
2138 case ___sF32VECTOR: return "f32vector";
2139 case ___sS64VECTOR: return "s64vector";
2140 case ___sU64VECTOR: return "u64vector";
2141 case ___sF64VECTOR: return "f64vector";
2142 case ___sFLONUM: return "flonum";
2143 case ___sBIGNUM: return "bignum";
2144 default: return "UNKNOWN SUBTYPE";
2145 }
2146 }
2147
2148 void print_value
2149 ___P((___SCMOBJ val),
2150 (val)
2151 ___SCMOBJ val;)
2152 {
2153 ___SCMOBJ ___temp;
2154 if (___MEM_ALLOCATED(val))
2155 {
2156 ___WORD* body = ___BODY0(val);
2157 ___WORD head = body[-1];
2158 int subtype;
2159 int shift = 0;
2160
2161 if (___TYP(head) == ___FORW)
2162 {
2163 /* indirect forwarding pointer */
2164 body = ___BODY0(head);
2165 head = body[-1];
2166 }
2167
2168 if (head == ZAP_PATTERN)
2169 ___printf ("[WARNING: HEAD=ZAP_PATTERN] ");
2170 else if (___HD_TYP(head) == INVALID_HEAD_TAG)
2171 {
2172 ___printf ("[WARNING: HEAD HAS INVALID TAG] ");
2173 shift = ___HTB;
2174 }
2175
2176 head >>= shift;
2177 subtype = ___HD_SUBTYPE(head);
2178
2179 if (subtype == ___sPAIR)
2180 {
2181 ___printf ("0x%" ___PRIxWORD " (... . ...)", val);
2182 }
2183 else
2184 {
2185 ___SCMOBJ sym;
2186 if (subtype == ___sPROCEDURE || subtype == ___sRETURN)
2187 {
2188 ___printf ("0x%" ___PRIxWORD " ", val);
2189 if (subtype == ___sPROCEDURE)
2190 ___printf ("#<procedure ");
2191 else
2192 ___printf ("#<return ");
2193 if ((sym = ___find_global_var_bound_to (val)) != ___NUL)
2194 print_value (___FIELD(sym,___SYMKEY_NAME));
2195 else
2196 {
2197 if (___HD_TYP(head) == ___PERM)
2198 {
2199 ___SCMOBJ *start = &body[-1];
2200 ___SCMOBJ *ptr = start;
2201 while (!___TESTHEADERTAG(*ptr,___sVECTOR))
2202 ptr -= ___LABEL_SIZE;
2203 ptr += ___LABEL_SIZE;
2204 if (ptr == start)
2205 ___printf ("???");
2206 else
2207 {
2208 ___printf ("%d in ", (start-ptr)/___LABEL_SIZE);
2209 print_value (___TAG(ptr,___tSUBTYPED));
2210 }
2211 }
2212 else
2213 ___printf ("???");
2214 }
2215 ___printf (">");
2216 }
2217 else if (subtype == ___sSTRING)
2218 {
2219 int i;
2220 ___SCMOBJ str = ___SUBTYPED_FROM_BODY(body);
2221 ___printf ("\"");
2222 for (i=0; i<___INT(___STRINGLENGTH(str)); i++)
2223 ___printf ("%c", ___INT(___STRINGREF(str,___FIX(i))));
2224 ___printf ("\"");
2225 }
2226 else if (subtype == ___sSYMBOL)
2227 {
2228 ___printf ("#<symbol ");
2229 print_value (body[___SYMKEY_NAME]>>shift);
2230 ___printf (">");
2231 }
2232 else
2233 {
2234 ___printf ("#<%s>", subtype_to_string (subtype));
2235 }
2236 }
2237 }
2238 else if (___FIXNUMP(val))
2239 ___printf ("%d", ___INT(val));
2240 else if (___CHARP(val))
2241 ___printf ("#\\x%x", ___INT(val));
2242 else if (val == ___FAL)
2243 ___printf ("#f");
2244 else if (val == ___TRU)
2245 ___printf ("#t");
2246 else if (val == ___NUL)
2247 ___printf ("()");
2248 else if (val == ___EOF)
2249 ___printf ("#!eof");
2250 else if (val == ___VOID)
2251 ___printf ("#!void");
2252 else if (val == ___ABSENT)
2253 ___printf ("#absent");
2254 else if (val == ___UNB1)
2255 ___printf ("#!unbound");
2256 else if (val == ___UNB2)
2257 ___printf ("#!unbound2");
2258 else if (val == ___OPTIONAL)
2259 ___printf ("#!optional");
2260 else if (val == ___KEYOBJ)
2261 ___printf ("#!key");
2262 else if (val == ___REST)
2263 ___printf ("#!rest");
2264 else if (val == ___UNUSED)
2265 ___printf ("#unused");
2266 else if (val == ___DELETED)
2267 ___printf ("#deleted");
2268 else
2269 ___printf ("#unknown(0x" ___PRIxWORD ")", val);
2270 }
2271
2272 #endif
2273
2274
2275 #ifdef ENABLE_CONSISTENCY_CHECKS
2276
2277 #define IN_OBJECT 0
2278 #define IN_REGISTER 1
2279 #define IN_SAVED 2
2280 #define IN_PROCESSOR_SCMOBJ 3
2281 #define IN_VM_SCMOBJ 4
2282 #define IN_SYMKEY_TABLE 5
2283 #define IN_GLOBAL_VAR 6
2284 #define IN_WILL_LIST 7
2285 #define IN_CONTINUATION 8
2286 #define IN_RC 9
2287
2288
2289 ___HIDDEN void print_prefix
2290 ___P((char *prefix,
2291 int indent),
2292 (prefix,
2293 indent)
2294 char *prefix;
2295 int indent;)
2296 {
2297 int i;
2298
2299 ___printf ("%s", prefix);
2300
2301 for (i=0; i<indent; i++)
2302 ___printf (" ");
2303 }
2304
2305
/*
 * Print a one-line-per-object debugging representation of obj,
 * preceded by `prefix` and indented by `indent` spaces.  Vectors,
 * pairs, symbols and keywords are printed recursively up to
 * max_depth levels; deeper structure is elided as "...".  Other
 * subtypes are printed as their subtype name only.
 */

___HIDDEN void print_object
   ___P((___WORD obj,
         int max_depth,
         char *prefix,
         int indent),
        (obj,
         max_depth,
         prefix,
         indent)
___WORD obj;
int max_depth;
char *prefix;
int indent;)
{
  int typ = ___TYP(obj);

  print_prefix (prefix, indent);

  if (typ == ___tFIXNUM)
    ___printf ("%d\n", ___INT(obj));
  else if (typ == ___tSPECIAL)
    {
      /* nonnegative special values encode characters */
      if (obj >= 0)
        ___printf ("#\\%c\n", ___INT(obj));
      else if (obj == ___FAL)
        ___printf ("#f\n");
      else if (obj == ___TRU)
        ___printf ("#t\n");
      else if (obj == ___NUL)
        ___printf ("()\n");
      else if (obj == ___EOF)
        ___printf ("#!eof\n");
      else if (obj == ___VOID)
        ___printf ("#!void\n");
      else if (obj == ___ABSENT)
        ___printf ("#<absent>\n");
      else if (obj == ___UNB1)
        ___printf ("#<unbound1>\n");
      else if (obj == ___UNB2)
        ___printf ("#<unbound2>\n");
      else if (obj == ___OPTIONAL)
        ___printf ("#!optional\n");
      else if (obj == ___KEYOBJ)
        ___printf ("#!key\n");
      else if (obj == ___REST)
        ___printf ("#!rest\n");
      else if (obj == ___UNUSED)
        ___printf ("#<unused>\n");
      else if (obj == ___DELETED)
        ___printf ("#<deleted>\n");
      else
        ___printf ("#<unknown 0x%" ___PRIxWORD ">\n", obj);
    }
  else
    {
      /* memory allocated object: examine its header */

      ___WORD* body = ___BODY0(obj);
      ___WORD head = body[-1];
      int subtype;
      int shift = 0;

      if (___TYP(head) == ___FORW)
        {
          /* indirect forwarding pointer */
          body = ___BODY0(head);
          head = body[-1];
        }

      /*
       * If the object's words were zapped by zap_section (shifted
       * left by ___HTB with INVALID_HEAD_TAG in the low bits), shift
       * right by ___HTB to recover the original word values.
       */
      if (___HD_TYP(head) == INVALID_HEAD_TAG)
        shift = ___HTB;

      head >>= shift;
      subtype = ___HD_SUBTYPE(head);

      switch (subtype)
        {
        case ___sVECTOR:
          if (max_depth > 0)
            {
              int i;
              ___printf ("#(\n");
              for (i=0; i<___CAST(int,___HD_WORDS(head)); i++)
                print_object (___FIELD(obj,i)>>shift, max_depth-1, prefix, indent+2);
              print_prefix (prefix, indent);
              ___printf (")\n");
            }
          else
            ___printf ("#(...)\n");
          break;
        case ___sPAIR:
          if (max_depth > 0)
            {
              ___printf ("(\n");
              print_object (___CAR(obj)>>shift, max_depth-1, prefix, indent+1);
              print_prefix (prefix, indent);
              ___printf (" .\n");
              print_object (___CDR(obj)>>shift, max_depth-1, prefix, indent+1);
              print_prefix (prefix, indent);
              ___printf (")\n");
            }
          else
            ___printf ("(...)\n");
          break;
        case ___sRATNUM:
          ___printf ("RATNUM\n");
          break;
        case ___sCPXNUM:
          ___printf ("CPXNUM\n");
          break;
        case ___sSTRUCTURE:
          ___printf ("STRUCTURE\n");
          break;
        case ___sBOXVALUES:
          ___printf ("BOXVALUES\n");
          break;
        case ___sMEROON:
          ___printf ("MEROON\n");
          break;
        case ___sSYMBOL:
          ___printf ("SYMBOL ");
          print_object (___FIELD(obj,___SYMKEY_NAME)>>shift, max_depth-1, "", 0);
          break;
        case ___sKEYWORD:
          ___printf ("KEYWORD ");
          print_object (___FIELD(obj,___SYMKEY_NAME)>>shift, max_depth-1, "", 0);
          break;
        case ___sFRAME:
          ___printf ("FRAME\n");
          break;
        case ___sCONTINUATION:
          ___printf ("CONTINUATION\n");
          break;
        case ___sPROMISE:
          ___printf ("PROMISE\n");
          break;
        case ___sWEAK:
          ___printf ("WEAK\n");
          break;
        case ___sPROCEDURE:
          ___printf ("PROCEDURE\n");
          break;
        case ___sRETURN:
          ___printf ("RETURN\n");
          break;
        case ___sFOREIGN:
          ___printf ("FOREIGN\n");
          break;
        case ___sSTRING:
          {
            int i;
            int len = ___HD_BYTES(head)>>___LCS;
            ___printf ("STRING ");
            for (i=0; i<len; i++)
              ___printf ("%c", ___INT(___STRINGREF(obj,___FIX(i))));
            ___printf ("\n");
          }
          break;
        case ___sS8VECTOR:
          ___printf ("S8VECTOR\n");
          break;
        case ___sU8VECTOR:
          ___printf ("U8VECTOR\n");
          break;
        case ___sS16VECTOR:
          ___printf ("S16VECTOR\n");
          break;
        case ___sU16VECTOR:
          ___printf ("U16VECTOR\n");
          break;
        case ___sS32VECTOR:
          ___printf ("S32VECTOR\n");
          break;
        case ___sU32VECTOR:
          ___printf ("U32VECTOR\n");
          break;
        case ___sF32VECTOR:
          ___printf ("F32VECTOR\n");
          break;
        case ___sS64VECTOR:
          ___printf ("S64VECTOR\n");
          break;
        case ___sU64VECTOR:
          ___printf ("U64VECTOR\n");
          break;
        case ___sF64VECTOR:
          ___printf ("F64VECTOR\n");
          break;
        case ___sFLONUM:
          ___printf ("FLONUM\n");
          break;
        case ___sBIGNUM:
          ___printf ("BIGNUM\n");
          break;
        default:
          ___printf ("UNKNOWN\n");
          break;
        }
    }
}
2504
2505
2506 ___HIDDEN void print_global_var_name
2507 ___P((___glo_struct *glo),
2508 (glo)
2509 ___glo_struct *glo;)
2510 {
2511 ___SCMOBJ sym = ___NUL;
2512 int i;
2513
2514 for (i = ___INT(___VECTORLENGTH(___GSTATE->symbol_table)) - 1; i>0; i--)
2515 {
2516 sym = ___FIELD(___GSTATE->symbol_table,i);
2517
2518 while (sym != ___NUL)
2519 {
2520 ___SCMOBJ g = ___FIELD(sym,___SYMBOL_GLOBAL);
2521
2522 if (g != ___FIX(0))
2523 {
2524 ___glo_struct *p = ___CAST(___glo_struct*,g);
2525
2526 if (p == glo)
2527 {
2528 ___SCMOBJ name = ___FIELD(sym,___SYMKEY_NAME);
2529 for (i=0; i<___INT(___STRINGLENGTH(name)); i++)
2530 ___printf ("%c", ___INT(___STRINGREF(name,___FIX(i))));
2531 i = 0;
2532 break;
2533 }
2534 }
2535
2536 sym = ___FIELD(sym,___SYMKEY_NEXT);
2537 }
2538 }
2539 }
2540
2541
2542 ___HIDDEN void dump_memory_map
2543 ___P((___PSDNC),
2544 (___PSVNC)
2545 ___PSDKR)
2546 {
2547 ___PSGET
2548 int ns = the_msections->nb_sections;
2549 ___msection **sections = the_msections->sections;
2550 int i;
2551
2552 ___printf (">>> Memory map:\n");
2553
2554 for (i=0; i<ns; i++)
2555 ___printf (">>> msection %2d: %p .. %p .. %p\n",
2556 i,
2557 sections[i]->base,
2558 sections[i]->base + (___MSECTION_SIZE>>1),
2559 sections[i]->base + ___MSECTION_SIZE);
2560
2561 ___printf (">>> alloc_msection = %p\n", alloc_msection);
2562 ___printf (">>> stack_msection = %p\n", stack_msection);
2563 ___printf (">>> heap_msection = %p\n", heap_msection);
2564 ___printf (">>> alloc_stack_ptr = %p\n", alloc_stack_ptr);
2565 ___printf (">>> alloc_stack_limit = %p\n", alloc_stack_limit);
2566 ___printf (">>> alloc_heap_limit = %p\n", alloc_heap_limit);
2567 ___printf (">>> alloc_heap_ptr = %p\n", alloc_heap_ptr);
2568 ___printf (">>> scan_ptr = %p\n", scan_ptr);
2569 }
2570
/*
 * Print a diagnostic for an inconsistent object detected by the GC:
 * the memory map, a dump of the object's first words (unzapped with
 * `shift`, see zap_section), and a description of where the
 * offending reference was found.  `reference_location` and
 * `container_body` are GC state variables (accessed through the
 * ___PSGET macros) that the marking code sets before reporting --
 * presumably before every call of bug; confirm against callers.
 */

___HIDDEN void explain_problem
   ___P((___PSD
         ___WORD obj,
         int shift,
         char *msg),
        (___PSV
         obj,
         shift,
         msg)
___PSDKR
___WORD obj;
int shift;
char *msg;)
{
  ___PSGET

  dump_memory_map (___PSPNC);

  ___printf (">>> The object 0x%" ___PRIxWORD " %s\n", obj, msg);

  {
    int j;
    ___WORD head = ___BODY0(obj)[-1]>>shift;
    ___SIZE_TS words = ___HD_WORDS(head);
    if (words > 10) /* dump at most the first 10 body words */
      words = 10;
    for (j=-1; j<words; j++)
      {
        ___printf (">>> body[%2d] = 0x%" ___PRIxWORD "\n", j, ___BODY0(obj)[j]>>shift);
        print_object (___BODY0(obj)[j]>>shift, 1, ">>> ", 0);
      }
  }

  switch (reference_location)
    {
    case IN_OBJECT:
      {
        ___WORD container;
        ___WORD head = container_body[-1];
        ___SIZE_TS words = ___HD_WORDS(head);
        int subtype = ___HD_SUBTYPE(head);
        int i;

        /* recover the tagged container object from its body pointer */
        if (subtype == ___sPAIR)
          container = ___PAIR_FROM_BODY(container_body);
        else
          container = ___SUBTYPED_FROM_BODY(container_body);

        ___printf (">>> The reference was found in ");
        if (___HD_TYP(head) == ___PERM)
          ___printf ("___PERM ");
        else if (___HD_TYP(head) == ___STILL)
          ___printf ("___STILL ");
        else if (___HD_TYP(head) == ___MOVABLE0)
          ___printf ("___MOVABLE0 ");
        else if (___TYP(head) == ___FORW)
          ___printf ("___FORW ");
        else
          ___printf ("UNKNOWN ");
        ___printf ("object with body at %p:\n", container_body);

        ___printf (">>> subtype = %d\n", subtype);
        ___printf (">>> length = %ld words\n", words);
        if (words <= 100)
          {
            /* small container: dump header and every body word */
            for (i=-1; i<words; i++)
              ___printf (">>> body[%2d] = 0x%" ___PRIxWORD "\n", i, container_body[i]);
          }
        else
          {
            /* large container: dump the first and last 50 words only */
            for (i=0; i<50; i++)
              ___printf (">>> body[%2d] = 0x%" ___PRIxWORD "\n", i, container_body[i]);
            ___printf ("...\n");
            for (i=words-50; i<words; i++)
              ___printf (">>> body[%2d] = 0x%" ___PRIxWORD "\n", i, container_body[i]);
          }
        ___printf (">>> container =\n");
        print_object (container, 4, ">>> ", 0);
        break;
      }

    case IN_REGISTER:
      ___printf (">>> The reference was found in a register\n");
      break;

    case IN_SAVED:
      ___printf (">>> The reference was found in the saved objects\n");
      break;

    case IN_PROCESSOR_SCMOBJ:
      ___printf (">>> The reference was found in the processor object\n");
      break;

    case IN_VM_SCMOBJ:
      ___printf (">>> The reference was found in the VM object\n");
      break;

    case IN_SYMKEY_TABLE:
      ___printf (">>> The reference was found in the symbol or keyword table\n");
      break;

    case IN_GLOBAL_VAR:
      ___printf (">>> The reference was found in a global variable\n");
      break;

    case IN_WILL_LIST:
      ___printf (">>> The reference was found in a will list\n");
      break;

    case IN_CONTINUATION:
      ___printf (">>> The reference was found in a continuation\n");
      break;

    case IN_RC:
      ___printf (">>> The reference was found in a reference counted object\n");
      break;
    }
}
2689
2690
2691 ___HIDDEN void bug
2692 ___P((___PSD
2693 ___WORD obj,
2694 int shift,
2695 char *msg),
2696 (___PSV
2697 obj,
2698 shift,
2699 msg)
2700 ___PSDKR
2701 ___WORD obj;
2702 int shift;
2703 char *msg;)
2704 {
2705 ___PSGET
2706 char *msgs[2];
2707
2708 ___printf (">>> P%d: the GC has detected the following inconsistency\n",
2709 ___PROCESSOR_ID(___ps,___VMSTATE_FROM_PSTATE(___ps)));
2710 ___printf (">>> during call of mark_array on line %d of mem.c:\n",
2711 mark_array_call_line);
2712 explain_problem (___PSP obj, shift, msg);
2713 msgs[0] = "GC inconsistency detected";
2714 msgs[1] = 0;
2715 ___fatal_error (msgs);
2716 }
2717
2718
/*
 * Consistency check on a reference `obj` seen by the marking code.
 * An object whose header lies inside an msection must either be
 * ___MOVABLE0 in fromspace, or already forwarded (___FORW header)
 * into tospace with a ___MOVABLE0 copy.  An object outside the
 * msections must be ___PERM or ___STILL.  Any other state reports a
 * fatal inconsistency through bug.
 */

___HIDDEN void validate_old_obj
   ___P((___PSD
         ___WORD obj),
        (___PSV
         obj)
___PSDKR
___WORD obj;)
{
  ___PSGET
  ___WORD *hd_ptr = ___BODY0(obj)-1;
  ___WORD head;
  int i = find_msection (the_msections, hd_ptr);
  if (i >= 0 && i < the_msections->nb_sections)
    {
      /* the header is inside msection i */
      ___PTRDIFF_T pos = hd_ptr - the_msections->sections[i]->base;
      if (pos >= 0 && pos < ___MSECTION_SIZE)
        {
          head = *hd_ptr;
          if (___TYP(head) == ___FORW)
            {
              /* already forwarded: the copy must be a ___MOVABLE0 in tospace */
              ___WORD *hd_ptr2 = ___BODY0(head)-1;
              int i2 = find_msection (the_msections, hd_ptr2);
              if (i2 >= 0 && i2 < the_msections->nb_sections)
                {
                  ___PTRDIFF_T pos2 = hd_ptr2 -
                                      (the_msections->sections[i2]->base +
                                       tospace_offset);
                  if (pos2 < 0 || pos2 >= ___MSECTION_SIZE>>1)
                    bug (___PSP obj, 0, "was copied outside of tospace");
                  else if (___HD_TYP((*hd_ptr2)) != ___MOVABLE0)
                    bug (___PSP obj, 0, "was copied and copy is not ___MOVABLE0");
                }
              else
                bug (___PSP obj, 0, "was copied outside of tospace");
            }
          else if (___HD_TYP(head) != ___MOVABLE0)
            /* shift of ___HTB: msection words are zapped (see zap_section) */
            bug (___PSP obj, ___HTB, "should be ___MOVABLE0");
          else
            {
              /* a not-yet-forwarded movable object must be in fromspace */
              pos -= tospace_offset;
              if (pos >= 0 && pos < ___MSECTION_SIZE>>1)
                bug (___PSP obj, 0, "is in tospace");
            }
          return;
        }
    }
  head = *hd_ptr; /* this dereference will likely bomb if there is a bug */
  if (___HD_TYP(head) != ___PERM && ___HD_TYP(head) != ___STILL)
    bug (___PSP obj, 0, "is not ___PERM or ___STILL");
}
2769
2770
2771 ___HIDDEN void zap_section
2772 ___P((___WORD *start,
2773 ___WORD *end),
2774 (start,
2775 end)
2776 ___WORD *start;
2777 ___WORD *end;)
2778 {
2779 while (start < end)
2780 {
2781 #ifdef ZAP_USING_INVALID_HEAD_TAG
2782 *start = (*start << ___HTB) | INVALID_HEAD_TAG;
2783 #else
2784 *start = ZAP_PATTERN;
2785 #endif
2786 start++;
2787 }
2788 }
2789
2790
2791 ___HIDDEN int unzapped_words
2792 ___P((___WORD *start,
2793 int words),
2794 (start,
2795 words)
2796 ___WORD *start;
2797 int words;)
2798 {
2799 ___WORD *ptr = start;
2800
2801 while (words > 0 && *ptr++ == ZAP_PATTERN)
2802 words--;
2803
2804 return words;
2805 }
2806
2807
/*
 * Update the high-water marks of how much of the stack and heap
 * "fudge" (overflow margin) areas were actually used.  The stack
 * fudge usage is measured by counting how many words of the zapped
 * fudge area below the stack limit were overwritten; the heap fudge
 * usage is how far the heap pointer went past the heap limit.
 */

___HIDDEN void check_fudge_used
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  int s;
  int h;

  /* words of the stack fudge area no longer holding ZAP_PATTERN */
  s = unzapped_words (___ps->stack_limit - ___MSECTION_FUDGE,
                      ___MSECTION_FUDGE);

  if (s > stack_fudge_used)
    stack_fudge_used = s;

  /* words allocated beyond the heap limit */
  h = ___ps->hp - ___ps->heap_limit;

  if (h > heap_fudge_used)
    heap_fudge_used = h;

#ifdef ___DEBUG_GARBAGE_COLLECT
  ___printf ("********* used fudge: stack = %d heap = %d\n", s, h);
#endif
}
2832
2833
2834 #endif
2835
2836
2837 /*---------------------------------------------------------------------------*/
2838
#ifdef ___DEBUG_GARBAGE_COLLECT

/*
 * In debug builds, fatal_heap_overflow is a macro that forwards the
 * caller's mem.c line number to fatal_heap_overflow_debug so the
 * overflow site can be reported.
 */

#define fatal_heap_overflow() fatal_heap_overflow_debug (__LINE__)

___HIDDEN void fatal_heap_overflow_debug
   ___P((int line),
        (line)
int line;)

#else

___HIDDEN void fatal_heap_overflow ___PVOID

#endif
{
  /* Report a heap overflow and terminate the program. */

  char *msgs[2];

#ifdef ___DEBUG_GARBAGE_COLLECT
  ___printf ("fatal_heap_overflow called at mem.c:%d\n", line);
#endif

  msgs[0] = "Heap overflow";
  msgs[1] = 0;

  ___fatal_error (msgs);
}
2865
2866
/*
 * Take the next msection from the free list and return it; `ms` is
 * the fallback msection (the caller's other reserved msection) that
 * is returned when the free list is exhausted.  Updates
 * alloc_msection and nb_msections_assigned when a free msection is
 * taken.  The caller must hold the allocation lock (see comment
 * below).
 */

___HIDDEN ___msection *next_msection_without_locking
   ___P((___processor_state ___ps,
         ___msection *ms),
        (___ps,
         ms)
___processor_state ___ps;
___msection *ms;)
{
  ___msection *result;

  /*
   * This function allocates an msection from the list of free
   * msections.  This is done when a processor has used up all of the
   * space in its current reserved msection (either heap_msection for
   * a heap allocation or stack_msection for a stack allocation).
   * This operation must be done in a critical section because
   * multiple processors may exhaust their space concurrently, either
   * when doing a GC or when ___stack_limit or ___heap_limit are
   * called.  However, this mutual exclusion is the responsibility of
   * the caller of next_msection_without_locking.
   *
   * A spinlock is appropriate for this critical section because the
   * critical section takes very little time and the requests for new
   * msections happen relatively much less frequently.  An experiment
   * on a 2.6 GHz Intel Core i7 using a tight allocation loop of pairs
   * and ___MSECTION_SIZE=131072, indicates that the critical section
   * lasts 0.2 microseconds and next_msection_without_locking
   * is called every 300 microseconds.
   */

  if (nb_msections_assigned == 0)
    result = the_msections->head; /* start at head of free msections */
  else
    result = alloc_msection->next; /* move to next free msection */

  if (result == 0)
    {
      /*
       * If there are no free msections to allocate the next heap or
       * stack msection, it is possible to use ms for both the heap
       * allocations and stack allocations.  But if it is currently
       * the case that both use the same msection, then it is an error
       * because the garbage collector should have been called to free
       * some space.
       */

      if (stack_msection == heap_msection)
        fatal_heap_overflow ();

      result = ms;
    }
  else
    {
      alloc_msection = result;
      nb_msections_assigned++;
    }

  return result;
}
2926
2927
2928 ___HIDDEN void set_stack_msection
2929 ___P((___processor_state ___ps,
2930 ___msection *ms),
2931 (___ps,
2932 ms)
2933 ___processor_state ___ps;
2934 ___msection *ms;)
2935 {
2936 stack_msection = ms;
2937
2938 alloc_stack_limit = start_of_tospace(ms);
2939 alloc_stack_start = alloc_stack_limit + (___MSECTION_SIZE>>1);
2940 alloc_stack_ptr = alloc_stack_start;
2941 }
2942
2943
/*
 * Make ms the current stack msection, handling the case where ms is
 * also the current heap msection.  When the msection is shared, the
 * usable space left between the heap and stack pointers is split
 * 1/4 for the heap and 3/4 for the stack, with a fudge margin on
 * each side; a fatal heap overflow is reported when less than two
 * fudge margins remain.
 */

___HIDDEN void set_stack_msection_possibly_sharing_with_heap
   ___P((___processor_state ___ps,
         ___msection *ms),
        (___ps,
         ms)
___processor_state ___ps;
___msection *ms;)
{
  set_stack_msection (___ps, ms);

  if (ms == heap_msection)
    {
      /*
       * The same msection will be used for the stack and the heap, so
       * adjust the heap and stack limits accordingly.  3/4 of the
       * remaining usable space is assigned to the stack.
       */

      ___SIZE_TS space = alloc_heap_limit - alloc_heap_ptr;

      if (space < 2*___MSECTION_FUDGE)
        fatal_heap_overflow ();

      space = (space - 2*___MSECTION_FUDGE) >> 2; /* 1/4 of usable space */

      alloc_heap_limit = alloc_heap_ptr + (space + ___MSECTION_FUDGE);
      alloc_stack_limit = alloc_stack_ptr - (3*space + ___MSECTION_FUDGE);
    }

#ifdef ENABLE_CONSISTENCY_CHECKS
  if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) >= 1)
    zap_section (alloc_stack_limit, alloc_stack_ptr);
#endif
}
2978
2979
2980 ___HIDDEN void stack_msection_stop_using
2981 ___P((___processor_state ___ps,
2982 ___WORD *stack_start,
2983 ___WORD *stack_ptr),
2984 (___ps,
2985 stack_start,
2986 stack_ptr)
2987 ___processor_state ___ps;
2988 ___WORD *stack_start;
2989 ___WORD *stack_ptr;)
2990 {
2991 words_prev_msections += stack_start - stack_ptr;
2992 }
2993
2994
2995 ___HIDDEN void stack_msection_resume_using
2996 ___P((___processor_state ___ps,
2997 ___WORD *stack_start,
2998 ___WORD *stack_ptr),
2999 (___ps,
3000 stack_start,
3001 stack_ptr)
3002 ___processor_state ___ps;
3003 ___WORD *stack_start;
3004 ___WORD *stack_ptr;)
3005 {
3006 words_prev_msections -= stack_start - stack_ptr;
3007 }
3008
3009
3010 ___HIDDEN void next_stack_msection_without_locking
3011 ___P((___processor_state ___ps),
3012 (___ps)
3013 ___processor_state ___ps;)
3014 {
3015 ___msection *ms;
3016 ms = next_msection_without_locking (___ps, heap_msection);
3017 set_stack_msection_possibly_sharing_with_heap (___ps, ms);
3018 }
3019
3020
3021 ___HIDDEN void next_stack_msection
3022 ___P((___processor_state ___ps),
3023 (___ps)
3024 ___processor_state ___ps;)
3025 {
3026 ___msection *ms;
3027 ALLOC_MEM_LOCK();
3028 ms = next_msection_without_locking (___ps, heap_msection);
3029 ALLOC_MEM_UNLOCK();
3030 set_stack_msection_possibly_sharing_with_heap (___ps, ms);
3031 }
3032
3033
3034 ___HIDDEN void start_heap_chunk
3035 ___P((___processor_state ___ps),
3036 (___ps)
3037 ___processor_state ___ps;)
3038 {
3039 alloc_heap_chunk_start = alloc_heap_ptr;
3040 alloc_heap_chunk_limit = alloc_heap_ptr + ___MSECTION_CHUNK;
3041 }
3042
3043
3044 #define NULL_CHUNK_LINK ___TAG(0, ___FORW)
3045
3046
/*
 * Terminate the current heap chunk: write an end of chunk marker
 * and, when the chunk is non-empty and not currently being scanned
 * by this processor, append it to the shared heap chunk FIFO so
 * that a scanning worker can pick it up.
 */

___HIDDEN void end_heap_chunk
   ___P((___processor_state ___ps),
        (___ps)
___processor_state ___ps;)
{
  /*
   * Add the end of chunk marker.
   *
   * This is done even when the chunk is empty, in case the chunk is
   * currently being scanned.
   */

  *alloc_heap_ptr++ = NULL_CHUNK_LINK; /* leave space for next chunk's link */

  if (alloc_heap_ptr != alloc_heap_chunk_start &&
      !(scan_ptr >= alloc_heap_chunk_start && scan_ptr < alloc_heap_ptr))
    {
      /*
       * The chunk being ended is not empty and it isn't currently
       * being scanned, so add it to the heap chunk FIFO and wake any
       * idle processor.
       */

      ___WORD *new_tail = alloc_heap_chunk_start-1; /* the chunk's link word */

#ifndef ___SINGLE_THREADED_VMS
      ___SPINLOCK_LOCK(heap_chunks_to_scan_lock);
#endif

      *heap_chunks_to_scan_tail = ___TAG(new_tail, ___FORW);

      /*
       * A memory barrier is needed to ensure that the content of the
       * chunk, including the end of chunk and the link to the chunk,
       * is visible to other processors.
       */

      ___SHARED_MEMORY_BARRIER();

      heap_chunks_to_scan_tail = new_tail;

      /*
       * A memory barrier is needed to ensure that the new tail of the
       * heap chunk FIFO are visible to other processors.  This can't
       * be combined with the previous memory barrier because the
       * ordering is important (as soon as the modification of
       * heap_chunks_to_scan_tail is observed by another processor, it
       * may attempt to read the content of the chunk).
       */

      ___SHARED_MEMORY_BARRIER();

#ifndef ___SINGLE_THREADED_VMS
      ___SPINLOCK_UNLOCK(heap_chunks_to_scan_lock);
      ___CONDVAR_SIGNAL(scan_termination_condvar);
#endif
    }
}
3105
3106
3107 ___HIDDEN void set_heap_msection
3108 ___P((___processor_state ___ps,
3109 ___msection *ms),
3110 (___ps,
3111 ms)
3112 ___processor_state ___ps;
3113 ___msection *ms;)
3114 {
3115 heap_msection = ms;
3116
3117 alloc_heap_start = start_of_tospace(ms);
3118 alloc_heap_limit = alloc_heap_start + (___MSECTION_SIZE>>1);
3119 alloc_heap_ptr = alloc_heap_start;
3120 }
3121
3122
/*
 * Make ms the current heap msection, handling the case where ms is
 * also the current stack msection.  When the msection is shared,
 * the usable space left between the stack and heap pointers is
 * split 1/4 for the stack and 3/4 for the heap, mirroring
 * set_stack_msection_possibly_sharing_with_heap.
 */

___HIDDEN void set_heap_msection_possibly_sharing_with_stack
   ___P((___processor_state ___ps,
         ___msection *ms),
        (___ps,
         ms)
___processor_state ___ps;
___msection *ms;)
{
  set_heap_msection (___ps, ms);

  if (ms == stack_msection)
    {
      /*
       * The same msection will be used for the stack and the heap, so
       * adjust the heap and stack limits accordingly.  3/4 of the
       * remaining usable space is assigned to the heap.
       */

      ___SIZE_TS space = alloc_stack_ptr - alloc_stack_limit;

      if (space < 2*___MSECTION_FUDGE)
        fatal_heap_overflow ();

      space = (space - 2*___MSECTION_FUDGE) >> 2; /* 1/4 of usable space */

      alloc_stack_limit = alloc_stack_ptr - (space + ___MSECTION_FUDGE);
      alloc_heap_limit = alloc_heap_ptr + (3*space + ___MSECTION_FUDGE);
    }

#ifdef ENABLE_CONSISTENCY_CHECKS
  /* NOTE(review): heap zapping is disabled here, unlike the stack
     counterpart which zaps its section -- presumably intentional
     (the heap area is about to be overwritten by copied objects);
     confirm before re-enabling. */
  // if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) >= 1)
  //   zap_section (alloc_heap_ptr, alloc_heap_limit);
#endif
}
3157
3158
3159 ___HIDDEN void next_heap_msection_without_locking
3160 ___P((___processor_state ___ps),
3161 (___ps)
3162 ___processor_state ___ps;)
3163 {
3164 ___msection *ms;
3165 words_prev_msections += alloc_heap_ptr - alloc_heap_start;
3166 ms = next_msection_without_locking (___ps, stack_msection);
3167 set_heap_msection_possibly_sharing_with_stack (___ps, ms);
3168 }
3169
3170
3171 ___HIDDEN void next_heap_msection
3172 ___P((___processor_state ___ps),
3173 (___ps)
3174 ___processor_state ___ps;)
3175 {
3176 ___msection *ms;
3177 words_prev_msections += alloc_heap_ptr - alloc_heap_start;
3178 ALLOC_MEM_LOCK();
3179 ms = next_msection_without_locking (___ps, stack_msection);
3180 ALLOC_MEM_UNLOCK();
3181 set_heap_msection_possibly_sharing_with_stack (___ps, ms);
3182 }
3183
3184
/*
 * Prepare a freshly assigned heap msection for chunked allocation:
 * reserve room at the end for the final end of chunk markers, write
 * the link word of the msection's first chunk, and start that chunk.
 */

___HIDDEN void prepare_heap_msection
   ___P((___processor_state ___ps),
        (___ps)
___processor_state ___ps;)
{
  /*
   * Heap chunks are non-empty, except possibly for the last chunk in
   * the msection.  Consequently, an msection may end with 2
   * consecutive end of chunk markers.  One for the last non-empty
   * chunk and one for an empty chunk.  The heap allocation limit must
   * be adjusted to handle this case.
   */

  alloc_heap_limit -= 2; /* leave space for 2 end of chunk markers */

  *alloc_heap_ptr++ = NULL_CHUNK_LINK; /* link of msection's first chunk */

  start_heap_chunk (___ps);
}
3204
3205
/*
 * Initialize the stack and heap allocation state of every processor
 * of the VM at the start of a GC: flip fromspace and tospace, assign
 * each processor one stack msection and one heap msection from the
 * free list, and reset the per-processor heap chunk scan state.
 * The ___VMSTATE_MEM macro is temporarily redefined so that the
 * state-variable macros resolve through ___vms instead of a
 * processor state.
 */

___HIDDEN void setup_stack_heap_vmstate
   ___P((___virtual_machine_state ___vms),
        (___vms)
___virtual_machine_state ___vms;)
{
#undef ___VMSTATE_MEM
#define ___VMSTATE_MEM(var) ___vms->mem.var

  ___msection *alloc = the_msections->head;
  int np = ___vms->processor_count;
  int i;

  for (i=0; i<np; i++)
    {
      ___processor_state ___ps = ___PSTATE_FROM_PROCESSOR_ID(i,___vms);

      tospace_offset = fromspace_offset; /* Flip fromspace and tospace */

      msection_free_list = 0;

      words_prev_msections = 0;

      stack_msection = 0;
      heap_msection = 0;

      /* assign the next two free msections to this processor */

      set_stack_msection (___ps, alloc);
      alloc = alloc->next;

      alloc_msection = alloc;

      set_heap_msection (___ps, alloc);
      alloc = alloc->next;

      prepare_heap_msection (___ps);
      scan_ptr = alloc_heap_chunk_start; /* scanning starts at the first chunk */

      /* empty heap chunk FIFO */
      heap_chunks_to_scan = NULL_CHUNK_LINK;
      heap_chunks_to_scan_head = &heap_chunks_to_scan;
      heap_chunks_to_scan_tail = &heap_chunks_to_scan;
    }

  nb_msections_assigned = np*2; /* one stack + one heap msection per processor */

#ifndef ___SINGLE_THREADED_VMS

  /* Initialize the active scanning workers count */

  scan_workers_count[0] = np;
  scan_workers_count[1] = np;

#endif

#undef ___VMSTATE_MEM
#define ___VMSTATE_MEM(var) ___VMSTATE_FROM_PSTATE(___ps)->mem.var
}
3261
3262
3263 /*---------------------------------------------------------------------------*/
3264
#ifdef ENABLE_CONSISTENCY_CHECKS

/* in debug builds, record the caller's line number for bug reports */
#define mark_array(start,n) mark_array_debug (start,n,__LINE__)

/*
 * Mark the n object references stored at start[0..n-1].  For each
 * memory allocated reference: a ___MOVABLE0 object is copied to
 * tospace and its header is replaced by a forwarding pointer (with a
 * compare-and-swap when multiple processors may race); a ___STILL
 * object is pushed on the still_objs_to_scan list (once); a ___FORW
 * header means the object was already copied, so the reference is
 * updated to the copy.  The last copied compound object is "tail
 * marked" by looping back to `again` on its first field.
 */

___HIDDEN void mark_array_debug
   ___P((___PSD
         ___WORD *start,
         ___WORD n,
         int line),
        (___PSV
         start,
         n,
         line)
___PSDKR
___WORD *start;
___WORD n;
int line;)

#else

___HIDDEN void mark_array
   ___P((___PSD
         ___WORD *start,
         ___WORD n),
        (___PSV
         start,
         n)
___PSDKR
___WORD *start;
___WORD n;)

#endif
{
  ___PSGET
  ___WORD *alloc = alloc_heap_ptr;   /* cached allocation pointer */
  ___WORD *limit = alloc_heap_limit; /* cached allocation limit */

#ifdef ENABLE_CONSISTENCY_CHECKS
  mark_array_call_line = line;
#endif

  while (n-- > 0)
    {
      ___WORD *cell = start++;

    again: /* looping back here is possible when tail marking */
      {
        ___WORD obj = *cell;

        if (___MEM_ALLOCATED(obj))
          {
            ___WORD *body;
            ___WORD head;
            int head_typ;
            int subtype;

#ifdef ENABLE_CONSISTENCY_CHECKS
            if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) >= 1)
              validate_old_obj (___PSP obj);
#endif

            body = ___BODY0(obj);
            head = body[-1];
            subtype = ___HD_SUBTYPE(head);
            head_typ = ___HD_TYP(head);

            if (head_typ == ___MOVABLE0)
              {
                ___SIZE_TS words = ___HD_WORDS(head);
                /*TODO: add allocation of handle if using handles*/
#if ___WS == 4
                ___BOOL pad = 0;
                /* on 32 bit words, 64-bit-aligned subtypes may need an
                   extra padding word (see alignment note below) */
                while (alloc + words + (subtype >= ___sS64VECTOR ? 2 : 1) >
                       limit)
#else
                while (alloc + words + 1 > limit)
#endif
                  {
                    /* not enough room in the current heap msection */
                    alloc_heap_ptr = alloc;
                    end_heap_chunk (___ps);
                    next_heap_msection (___ps);
                    prepare_heap_msection (___ps);
                    alloc = alloc_heap_ptr;
                    limit = alloc_heap_limit;
                  }
#if ___WS != 8
                /*
                 * ___sS64VECTOR, ___sU64VECTOR, ___sF64VECTOR,
                 * ___sFLONUM and ___sBIGNUM need to be aligned on a
                 * multiple of 8.
                 */
                if (subtype >= ___sS64VECTOR)
                  {
                    if ((___CAST(___WORD,alloc) & (8-1)) == 0)
                      *alloc++ = ___MAKE_HD_WORDS(0, ___sVECTOR);
                    else
                      pad = 1;
                  }
#endif
                *alloc++ = head; /* copy the header to tospace */

#ifdef ___SINGLE_THREADED_VMS

                body[-1] = ___TAG(alloc - ___REFERENCE_TO_BODY, ___FORW);

#else

                {
                  /* atomically install the forwarding pointer */
                  ___WORD head_now =
                    ___COMPARE_AND_SWAP_WORD(&body[-1],
                                             head,
                                             ___TAG(alloc - ___REFERENCE_TO_BODY, ___FORW));

                  if (head_now != head)
                    {
                      /*
                       * Other processor forwarded the object first so
                       * the allocation must be undone and head_now is
                       * the correct forwarding pointer.
                       */

                      alloc--;
                      *cell = ___TAG(___UNTAG_AS(head_now, ___FORW), ___TYP(obj));
                      continue;
                    }
                }

#endif

                *cell = ___TAG(alloc - ___REFERENCE_TO_BODY, ___TYP(obj));

                /* tail mark compound objects (subtypes up to
                   ___sBOXVALUES contain object references) */
                if (words > 0 && subtype <= ___sBOXVALUES)
                  cell = alloc;
                else
                  cell = 0;

                /* copy the body words to tospace */
                while (words > 0)
                  {
                    *alloc++ = *body++;
                    words--;
                  }
#if ___WS == 4
                if (pad)
                  *alloc++ = ___MAKE_HD_WORDS(0, ___sVECTOR);
#endif
                if (alloc >= alloc_heap_chunk_limit)
                  {
                    /* current chunk is full; hand it to the scanners */
                    alloc_heap_ptr = alloc;
                    end_heap_chunk (___ps);
                    start_heap_chunk (___ps);
                    alloc = alloc_heap_ptr;
                  }

                if (cell != 0) goto again;
              }
            else if (head_typ == ___STILL)
              {
#ifdef ___SINGLE_THREADED_VMS

                /* a mark field of -1 means "not yet on the scan list" */
                if (body[___STILL_MARK - ___STILL_BODY] == -1)
                  {
                    body[___STILL_MARK - ___STILL_BODY]
                      = ___CAST(___WORD,still_objs_to_scan);
                    still_objs_to_scan
                      = ___CAST(___WORD,body - ___STILL_BODY);
                  }

#else

                /* atomically claim the still object for this processor */
                if (___COMPARE_AND_SWAP_WORD(&body[___STILL_MARK - ___STILL_BODY],
                                             -1,
                                             ___CAST(___WORD,still_objs_to_scan))
                    == -1)
                  {
                    still_objs_to_scan
                      = ___CAST(___WORD,body - ___STILL_BODY);
                  }
#endif
              }
            else if (___TYP(head_typ) == ___FORW)
              {
                /* already forwarded: redirect the reference to the copy */
                *cell = ___TAG(___UNTAG_AS(head, ___FORW), ___TYP(obj));
              }
#ifdef ENABLE_CONSISTENCY_CHECKS
            else if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) >= 1 &&
                     head_typ != ___PERM)
              bug (___PSP obj, 0, "was not ___PERM, ___STILL, ___MOVABLE0 or ___FORW");
#endif
          }
      }
    }

  alloc_heap_ptr = alloc; /* write back the cached allocation pointer */
}
3459
3460
/*
 * Mark a captured continuation referenced by *orig_ptr.  When the
 * continuation's frames still live in a stack section (the reference
 * is a ___tFIXNUM-tagged frame pointer), each frame is copied to the
 * heap as a ___sFRAME object, chaining through the frames via their
 * saved return addresses and leaving forwarding pointers in the
 * stack; finally *orig_ptr is retagged as ___tSUBTYPED.  Otherwise
 * the continuation is already a heap object and is marked with
 * mark_array.
 */

___HIDDEN void mark_captured_continuation
   ___P((___PSD
         ___WORD *orig_ptr),
        (___PSV
         orig_ptr)
___PSDKR
___WORD *orig_ptr;)
{
  ___PSGET
  ___WORD *ptr = orig_ptr;
  int fs, link, i;
  ___WORD *fp;
  ___WORD ra1;
  ___WORD ra2;
  ___WORD cf;

  cf = *ptr;

#ifdef SHOW_FRAMES
  ___printf ("mark_captured_continuation cf=%p\n", ___CAST(void*,cf));
#endif

  if (___TYP(cf) == ___tFIXNUM && cf != ___END_OF_CONT_MARKER)
    {
      /* continuation frame is in the stack */

      ___WORD *alloc = alloc_heap_ptr;   /* cached allocation pointer */
      ___WORD *limit = alloc_heap_limit; /* cached allocation limit */

      MISC_MEM_LOCK();

    next_frame:

      fp = ___CAST(___WORD*,cf);

      ra1 = ___FP_STK(fp,-___FRAME_STACK_RA);

#ifdef SHOW_FRAMES
      ___printf ("  frame [ra=0x%" ___PRIxWORD "] ", ra1);
#endif

      /* recover the frame size and link slot from the return address */
      if (ra1 == ___GSTATE->internal_return)
        {
          ___WORD actual_ra = ___FP_STK(fp,___RETI_RA);
          ___RETI_GET_FS_LINK(actual_ra,fs,link)
          ___COVER_MARK_CAPTURED_CONTINUATION_RETI;
        }
      else
        {
          ___RETN_GET_FS_LINK(ra1,fs,link)
          ___COVER_MARK_CAPTURED_CONTINUATION_RETN;
        }

#ifdef SHOW_FRAMES
      ___printf ("fs=%d link=%d fp=%p ra=", fs, link, fp);
      print_value (ra1);
      ___printf ("\n");
#endif

      ___FP_ADJFP(fp,-___FRAME_SPACE(fs)) /* get base of frame */

      ra2 = ___FP_STK(fp,link+1);

      if (___TYP(ra2) == ___tFIXNUM)
        {
          ___COVER_MARK_CAPTURED_CONTINUATION_ALREADY_COPIED;
          *ptr = ra2; /* already copied, replace by forwarding pointer */
        }
      else
        {
          ___WORD forw;
          ___SIZE_TS words;

          ___COVER_MARK_CAPTURED_CONTINUATION_COPY;

          words = fs + ___FRAME_EXTRA_SLOTS;

          while (alloc + words + ___SUBTYPED_BODY > limit)
            {
              /* not enough room in the current heap msection */
              alloc_heap_ptr = alloc;
              end_heap_chunk (___ps);
              next_heap_msection (___ps);
              prepare_heap_msection (___ps);
              alloc = alloc_heap_ptr;
              limit = alloc_heap_limit;
            }

          /*TODO: add allocation of handle if using handles*/

          *alloc++ = ___MAKE_HD_WORDS(words, ___sFRAME);
#if ___SUBTYPED_BODY != 1
#error "___SUBTYPED_BODY != 1"
#endif
          forw = ___TAG(alloc - ___REFERENCE_TO_BODY, ___tFIXNUM);
          *alloc++ = ra1;
#if ___FRAME_EXTRA_SLOTS != 1
#error "___FRAME_EXTRA_SLOTS != 1"
#endif

          /* copy the frame's slots to the heap object */
          for (i=fs; i>0; i--)
            *alloc++ = ___FP_STK(fp,i);

          if (ra2 == ___GSTATE->handler_break)
            {
              /* first frame of that section */

              ___COVER_MARK_CAPTURED_CONTINUATION_FIRST_FRAME;

              cf = ___FP_STK(fp,-___BREAK_FRAME_NEXT);
            }
          else
            {
              /* not the first frame of that section */

              ___COVER_MARK_CAPTURED_CONTINUATION_NOT_FIRST_FRAME;

              ___FP_SET_STK(fp,-___FRAME_STACK_RA,ra2)
              cf = ___CAST(___WORD,fp);
            }

          ___FP_SET_STK(alloc,link+1,cf)
          ___FP_SET_STK(fp,link+1,forw) /* leave a forwarding pointer */

          *ptr = forw;

          ptr = &___FP_STK(alloc,link+1);

          /* NOTE(review): this compares the global alloc_heap_ptr,
             which is stale here (allocation above advanced the local
             `alloc`); the analogous check in mark_array uses the
             local `alloc`.  Verify this is intended. */
          if (alloc_heap_ptr >= alloc_heap_chunk_limit)
            {
              alloc_heap_ptr = alloc;
              end_heap_chunk (___ps);
              start_heap_chunk (___ps);
              alloc = alloc_heap_ptr;
            }

          if (___TYP(cf) == ___tFIXNUM && cf != ___END_OF_CONT_MARKER)
            goto next_frame;
        }

      /* the continuation now lives in the heap */
      *orig_ptr = ___TAG(___UNTAG_AS(*orig_ptr, ___tFIXNUM), ___tSUBTYPED);

      alloc_heap_ptr = alloc; /* write back the cached allocation pointer */

      MISC_MEM_UNLOCK();
    }
  else
    mark_array (___PSP orig_ptr, 1);
}
3609
3610
/*
 * Mark the live slots of the stack frame at fp.
 *
 * fs is the frame size in words.  gcmap is a bitmap identifying which
 * slots contain live Scheme objects; it is examined one bit at a time
 * (shifted right) and, every ___WORD_WIDTH slots, the next bitmap
 * word is fetched from nextgcmap.  Runs of consecutive live slots are
 * passed to mark_array in a single call.
 */

___HIDDEN void mark_frame
   ___P((___PSD
         ___WORD *fp,
         int fs,
         ___WORD gcmap,
         ___WORD *nextgcmap),
        (___PSV
         fp,
         fs,
         gcmap,
         nextgcmap)
___PSDKR
___WORD *fp;
int fs;
___WORD gcmap;
___WORD *nextgcmap;)
{
  int i = 1;

  for (;;)
    {
      if (gcmap & 1)
        {
          /* slot i begins a run of live slots; j remembers its start */
          int j = i;
          do
            {
              if (i == fs)
                {
                  /* run extends to the last slot of the frame */
#ifdef SHOW_FRAMES
                  {
                    int k = j;
                    while (k <= i)
                      {
                        ___WORD obj = ___FP_STK(fp,k);
                        ___printf (" %2d: ", k);
                        print_value (obj);
                        ___printf ("\n");
                        k++;
                      }
                  }
#endif
                  mark_array (___PSP &___FP_STK(fp,i), i-j+1);
                  return;
                }
              if ((i & (___WORD_WIDTH-1)) == 0)
                gcmap = *nextgcmap++;
              else
                gcmap >>= 1;
              i++;
            } while (gcmap & 1);
#ifdef SHOW_FRAMES
          {
            int k = j;
            while (k < i)
              {
                ___WORD obj = ___FP_STK(fp,k);
                ___printf (" %2d: ", k);
                print_value (obj);
                ___printf ("\n");
                k++;
              }
          }
#endif
          /* mark the run of live slots j .. i-1 */
          mark_array (___PSP &___FP_STK(fp,i-1), i-j);
        }
      if (i == fs)
        return;
      if ((i & (___WORD_WIDTH-1)) == 0)
        {
          /* fetch the next word of the GC map */
          gcmap = *nextgcmap++;
#ifdef SHOW_FRAMES
          ___printf ("gcmap = 0x%" ___PRIxWORD "\n", gcmap);
#endif
        }
      else
        gcmap >>= 1;
      i++;
    }
}
3690
3691
/*
 * Mark the continuation frames in the current stack section, from
 * ___ps->fp down to the stack break frame, then pass the capture link
 * stored in the break frame to mark_captured_continuation.
 */

___HIDDEN void mark_continuation
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  int fs, link;            /* frame size and link slot index */
  ___WORD *fp;
  ___WORD ra1;             /* return address of the current frame */
  ___WORD ra2;             /* return address stored in the link slot */
  ___WORD gcmap;
  ___WORD *nextgcmap = 0;

#ifdef ENABLE_CONSISTENCY_CHECKS
  reference_location = IN_CONTINUATION;
#endif

  fp = ___ps->fp;

#ifdef SHOW_FRAMES
  ___printf ("mark_continuation fp=%p\n", fp);
#endif

  if (fp != ___ps->stack_break)
    for (;;)
      {
        ra1 = ___FP_STK(fp,-___FRAME_STACK_RA);

#ifdef SHOW_FRAMES
        ___printf (" frame [ra=0x%" ___PRIxWORD "] ", ra1);
#endif

        if (ra1 == ___GSTATE->internal_return)
          {
            /* frame layout is described by the actual return address */
            ___WORD actual_ra = ___FP_STK(fp,___RETI_RA);
            ___RETI_GET_FS_LINK_GCMAP(actual_ra,fs,link,gcmap,nextgcmap)
            ___COVER_MARK_CONTINUATION_RETI;
          }
        else
          {
            ___RETN_GET_FS_LINK_GCMAP(ra1,fs,link,gcmap,nextgcmap)
            ___COVER_MARK_CONTINUATION_RETN;
          }

#ifdef SHOW_FRAMES
        ___printf ("fs=%d link=%d fp=%p ra=", fs, link, fp);
        print_value (ra1);
        ___printf ("\n");
#endif

        ___FP_ADJFP(fp,-___FRAME_SPACE(fs)) /* get base of frame */

        ra2 = ___FP_STK(fp,link+1);

#ifdef SHOW_FRAMES
        if (fp == ___ps->stack_break)
          ___printf (" (first frame above break frame)\n");
#endif

        mark_frame (___PSP fp, fs, gcmap, nextgcmap);

        if (fp == ___ps->stack_break)
          break;

        /* install the return address of the frame below so the next
           iteration can decode it */
        ___FP_SET_STK(fp,-___FRAME_STACK_RA,ra2)
      }

  mark_captured_continuation (___PSP &___FP_STK(fp,-___BREAK_FRAME_NEXT));
}
3761
3762
3763 ___HIDDEN void mark_rc
3764 ___P((___PSDNC),
3765 (___PSVNC)
3766 ___PSDKR)
3767 {
3768 ___PSGET
3769 ___rc_header *h = rc_head.next;
3770
3771 #ifdef ENABLE_CONSISTENCY_CHECKS
3772 reference_location = IN_RC;
3773 #endif
3774
3775 while (h != &rc_head)
3776 {
3777 ___rc_header *next = h->next;
3778 mark_array (___PSP &h->data, 1);
3779 h = next;
3780 }
3781 }
3782
3783
/*
 * The UNMARKED* macros test whether obj has not yet been reached by
 * the current GC.  They read and write the locals 'unmarked_body' and
 * 'unmarked_typ', which the caller must declare.
 */

/* True iff obj is a movable object whose header type is still
   ___MOVABLE0 (not yet replaced by a ___FORW forwarding pointer). */
#define UNMARKED_MOVABLE(obj) \
((unmarked_typ = ___HD_TYP((unmarked_body=___BODY0(obj))[-1])) == ___MOVABLE0)

/* True iff obj is a still object whose mark field is -1, the
   "unreached" state (see setup_still_objs_to_scan).  Relies on
   unmarked_typ/unmarked_body set by a prior UNMARKED_MOVABLE. */
#define UNMARKED_STILL(obj) \
(unmarked_typ == ___STILL && \
 unmarked_body[___STILL_MARK - ___STILL_BODY] == -1)

/* True iff obj is an unmarked movable or unmarked still object. */
#define UNMARKED(obj) \
(UNMARKED_MOVABLE(obj) || UNMARKED_STILL(obj))
3793
3794
/*
 * Scan the body of the object whose header word is 'head', marking
 * the memory allocated objects it references.  Returns the length of
 * the object's body in words, which the caller uses to advance to the
 * next object.  Subtypes that contain only raw data are not scanned.
 */

___HIDDEN ___SIZE_TS scan
   ___P((___PSD
         ___WORD *body,
         ___WORD head),
        (___PSV
         body,
         head)
___PSDKR
___WORD *body;
___WORD head;)
{
  ___PSGET
  ___SIZE_TS words = ___HD_WORDS(head);
  int subtype = ___HD_SUBTYPE(head);

#ifdef ENABLE_CONSISTENCY_CHECKS
  reference_location = IN_OBJECT;
  container_body = body;
#endif

  switch (subtype)
    {
    /* subtypes containing only raw data; nothing to scan */
    case ___sFOREIGN:
    case ___sSTRING:
    case ___sS8VECTOR:
    case ___sU8VECTOR:
    case ___sS16VECTOR:
    case ___sU16VECTOR:
    case ___sS32VECTOR:
    case ___sU32VECTOR:
    case ___sS64VECTOR:
    case ___sU64VECTOR:
    case ___sF32VECTOR:
    case ___sF64VECTOR:
    case ___sFLONUM:
    case ___sBIGNUM:
      break;

    case ___sWEAK:
      if (words == ___WILL_SIZE)
        {
          /* Object is a will */

          /*
           * The will contains a weak reference to its testator object
           * and a strong reference to the action procedure.
           * Consequently, the action procedure must be marked and,
           * only if traverse_weak_refs is true, the testator object
           * is also marked.  The link field is never scanned.
           */

          if (traverse_weak_refs)
            mark_array (___PSP body+1, 2); /* scan action and testator */
          else
            {
              mark_array (___PSP body+2, 1); /* scan action only */

              /*
               * Remember that this will's testator object remains to
               * be marked by the process_wills function.
               */

              body[0] = body[0] | ___UNMARKED_TESTATOR_WILL;
            }
        }
      else
        {
          /* Object is a GC hash table */

          int flags = ___INT(body[___GCHASHTABLE_FLAGS]);
          int i;

          if ((flags & ___GCHASHTABLE_FLAG_WEAK_KEYS) == 0 &&
              (flags & ___GCHASHTABLE_FLAG_MEM_ALLOC_KEYS))
            {
              /* non-weak memory allocated keys are strong references */
              for (i=words-2; i>=___GCHASHTABLE_KEY0; i-=2)
                mark_array (___PSP body+i, 1); /* mark objects in key fields */
            }

          if ((flags & ___GCHASHTABLE_FLAG_WEAK_VALS) == 0)
            {
              for (i=words-1; i>=___GCHASHTABLE_VAL0; i-=2)
                mark_array (___PSP body+i, 1); /* mark objects in value fields */
            }

          /* chain the table onto the list consumed by
             process_gc_hash_tables */
          body[0] = reached_gc_hash_tables;
          reached_gc_hash_tables = ___CAST(___WORD,body);
        }
      break;

    case ___sSYMBOL:
    case ___sKEYWORD:
      mark_array (___PSP body, 1); /* only scan name of symbols & keywords */
      break;

    case ___sCONTINUATION:
      mark_captured_continuation (___PSP &body[___CONTINUATION_FRAME]);
      mark_array (___PSP body+1, words-1); /* skip the frame pointer */
      break;

    case ___sFRAME:
      {
        int fs, link;
        ___WORD *fp = body + ___FRAME_EXTRA_SLOTS;
        ___WORD ra = body[0];
        ___WORD gcmap;
        ___WORD *nextgcmap = 0;
        ___WORD frame;

#ifdef SHOW_FRAMES
        ___printf ("___sFRAME object\n");
        ___printf (" frame [ra=0x%" ___PRIxWORD "] ", ra);
#endif

        if (ra == ___GSTATE->internal_return)
          {
            ___WORD actual_ra = body[___FRAME_RETI_RA];
            ___RETI_GET_FS_LINK_GCMAP(actual_ra,fs,link,gcmap,nextgcmap)
            ___COVER_SCAN_FRAME_RETI;
          }
        else
          {
            ___RETN_GET_FS_LINK_GCMAP(ra,fs,link,gcmap,nextgcmap)
            ___COVER_SCAN_FRAME_RETN;
          }

#ifdef SHOW_FRAMES
        ___printf ("fs=%d link=%d fp=%p ra=", fs, link, fp);
        print_value (ra);
        ___printf ("\n");
#endif

        fp += fs;

        frame = ___FP_STK(fp,link+1);

        /* the fixnum-tagged frame link is replaced by ___FAL around
           the mark_frame call, then restored retagged ___tSUBTYPED */
        if (___TYP(frame) == ___tFIXNUM && frame != ___END_OF_CONT_MARKER)
          ___FP_SET_STK(fp,link+1,___FAL)

        mark_frame (___PSP fp, fs, gcmap, nextgcmap);

        if (___TYP(frame) == ___tFIXNUM && frame != ___END_OF_CONT_MARKER)
          ___FP_SET_STK(fp,link+1,___TAG(___UNTAG_AS(frame, ___tFIXNUM), ___tSUBTYPED))

        mark_array (___PSP &body[0], 1); /* mark the return address */
      }
      break;

    case ___sPROCEDURE:

      /*
       * The object can only be a closure (nonclosures are permanent objects).
       */

#ifdef ___SUPPORT_LOWLEVEL_EXEC

      /* update the closure's lowlevel code trampoline */

      ___CLO_LOWLEVEL_TRAMPOLINE_SETUP(body,
                                       body[___LABEL_ENTRY_OR_DESCR]);

#endif

      /* only need to scan the free variables */

      mark_array (___PSP body+___CLO_FREEVARS, words-___CLO_FREEVARS);

      break;

    default:

      /* for ___MOVABLE0 objects with subtype <= ___sBOXVALUES the
         first body word is not scanned (the pair fast path in
         scan_and_advance mirrors this behavior) */
      if (___HD_TYP(head) == ___MOVABLE0 && subtype <= ___sBOXVALUES)
        mark_array (___PSP body+1, words-1);
      else
        mark_array (___PSP body, words);

      break;
    }

  return words;
}
3976
3977
/*
 * scan_no_fast_path scans the object whose header word is at ptr and
 * advances ptr past the object (header plus body).
 */

#define scan_no_fast_path(ptr, head) \
do { \
     ptr += scan (___PSP (ptr)+1, head) + 1; \
   } while (0)


#ifdef ___USE_SCAN_NO_FAST_PATH

#define scan_and_advance(ptr, head) scan_no_fast_path(ptr, head)

#else

/*
 * scan_and_advance adds a fast path for the pair header: it inlines
 * exactly what scan's default case does for a movable pair (mark the
 * body starting at its second word) without the call to scan.
 */

#define scan_and_advance(ptr, head) \
do { \
     if ((head) == ___MAKE_HD_WORDS(___PAIR_SIZE,___sPAIR)) \
       { \
         mark_array (___PSP (ptr)+2, ___PAIR_SIZE-1); \
         ptr += ___PAIR_SIZE+1; \
       } \
     else \
       scan_no_fast_path(ptr, head); \
   } while (0)

#endif
4002
4003
4004 ___HIDDEN void setup_still_objs_to_scan
4005 ___P((___PSDNC),
4006 (___PSVNC)
4007 ___PSDKR)
4008 {
4009 ___PSGET
4010 ___WORD *base = ___CAST(___WORD*,still_objs);
4011 ___WORD *to_scan = 0;
4012
4013 while (base != 0)
4014 {
4015 if (base[___STILL_REFCOUNT] == 0)
4016 base[___STILL_MARK] = -1;
4017 else
4018 {
4019 base[___STILL_MARK] = ___CAST(___WORD,to_scan);
4020 to_scan = base;
4021 }
4022 base = ___CAST(___WORD*,base[___STILL_LINK]);
4023 }
4024
4025 still_objs_to_scan = ___CAST(___WORD,to_scan);
4026 }
4027
4028
4029 ___HIDDEN void scan_still_objs_to_scan
4030 ___P((___PSDNC),
4031 (___PSVNC)
4032 ___PSDKR)
4033 {
4034 ___PSGET
4035 ___WORD *base;
4036
4037 while ((base = ___CAST(___WORD*,still_objs_to_scan)) != 0)
4038 {
4039 ___WORD *body = base + ___STILL_BODY;
4040 still_objs_to_scan = base[___STILL_MARK];
4041 scan (___PSP body, body[-1]);
4042 }
4043 }
4044
4045
4046 ___HIDDEN void scan_complete_heap_chunk
4047 ___P((___PSD
4048 ___WORD *start),
4049 (___PSV
4050 start)
4051 ___PSDKR
4052 ___WORD *start;)
4053 {
4054 ___PSGET
4055 ___WORD *ptr = start;
4056 ___WORD head;
4057
4058 #ifdef ENABLE_GC_ACTLOG_SCAN_COMPLETE_HEAP_CHUNK
4059 ___ACTLOG_BEGIN_PS(scan_complete_heap_chunk,_);
4060 #endif
4061
4062 while (___TYP((head = *ptr)) != ___FORW) /* not end of complete chunk? */
4063 {
4064 scan_and_advance(ptr, head); /* note: this advances ptr */
4065 }
4066
4067 #ifdef ENABLE_GC_ACTLOG_SCAN_COMPLETE_HEAP_CHUNK
4068 ___ACTLOG_END_PS();
4069 #endif
4070 }
4071
4072
/*
 * Scan the movable objects that were copied to the heap but not yet
 * scanned, starting at scan_ptr.  Complete heap chunks queued on the
 * heap chunk FIFO (heap_chunks_to_scan_head/tail, protected by
 * heap_chunks_to_scan_lock) are scanned first; when the FIFO is empty
 * the incomplete chunk currently being filled is scanned.  Scanning
 * may itself copy more objects, so the outer loop runs until scan_ptr
 * catches up with alloc_heap_ptr.
 */

___HIDDEN void scan_movable_objs_to_scan
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET

  /*

    SITUATION #1: scanning a complete chunk

     chunk 1   chunk 2   chunk 3   incomplete chunk
    +-------+---------+---------+------------------+-----
    |L      |L        |L        |L                 |
    +-------+---------+---------+------------------+-----
        ^                              ^
        scan_ptr                       alloc_heap_ptr


    SITUATION #2: scanning the incomplete chunk

     chunk 1   chunk 2   chunk 3   incomplete chunk
    +-------+---------+---------+------------------+-----
    |L      |L        |L        |L                 |
    +-------+---------+---------+------------------+-----
                                    ^          ^
                                    scan_ptr   alloc_heap_ptr


    SITUATION #3: done scanning all movable objects

     chunk 1   chunk 2   chunk 3   incomplete chunk
    +-------+---------+---------+------------------+-----
    |L      |L        |L        |L                 |
    +-------+---------+---------+------------------+-----
                                               ^
                                    scan_ptr   alloc_heap_ptr

    L = link to next chunk tagged with ___FORW

  */

  ___WORD *ptr = scan_ptr;
  ___VOLATILE ___WORD *hcsh;

  while (ptr != alloc_heap_ptr) /* SITUATION #1 or #2 ? */
    {
      ___WORD head;
      while (___TYP((head = *ptr)) != ___FORW) /* not end of complete chunk? */
        {
          scan_and_advance(ptr, head); /* note: this advances ptr */
          if (ptr == alloc_heap_ptr) /* end of incomplete chunk? */
            {
              /* SITUATION #3, done scanning all movable objects */
              scan_ptr = ptr;
              return;
            }
        }

      scan_ptr = ptr; /* remember where scan ended */

      /*
       * SITUATION #1, at end of complete chunk.
       */

      ___SPINLOCK_LOCK(heap_chunks_to_scan_lock);

      while ((hcsh=heap_chunks_to_scan_head) != heap_chunks_to_scan_tail)
        {
          /*
           * Get the next complete heap chunk from heap chunk FIFO and
           * scan it.
           */

          ptr = ___UNTAG_AS(*hcsh, ___FORW);

          heap_chunks_to_scan_head = ptr;

          ___SHARED_MEMORY_BARRIER(); /* share heap_chunks_to_scan_head */

          /* the lock is released while the chunk is scanned so other
             processors can access the FIFO */
          ___SPINLOCK_UNLOCK(heap_chunks_to_scan_lock);

          scan_complete_heap_chunk (___PSP ptr+1);

          ___SPINLOCK_LOCK(heap_chunks_to_scan_lock);
        }

      ___SPINLOCK_UNLOCK(heap_chunks_to_scan_lock);

      /*
       * Scan the incomplete heap chunk currently being created.
       */

      ptr = alloc_heap_chunk_start;

      scan_ptr = ptr;
    }
}
4171
4172
/*
 * Sweep the processor's still object list: free every still object
 * whose mark field is still -1 (not reached by the GC) and relink the
 * survivors, accumulating their total length into words_still_objs.
 * Unreached foreign objects are released before their memory is
 * freed.
 */

___HIDDEN void free_unmarked_still_objs
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___WORD *last = &still_objs; /* where to store the link to the next survivor */
  ___WORD *base = ___CAST(___WORD*,*last);
  ___SIZE_TS live_words_still = 0;

  while (base != 0)
    {
      ___WORD link = base[___STILL_LINK];
      if (base[___STILL_MARK] == -1)
        {
          /* object was not reached; release and free it */
          ___WORD head = base[___STILL_BODY-1];
          if (___HD_SUBTYPE(head) == ___sFOREIGN)
            ___release_foreign
              (___TAG(base + ___STILL_BODY - ___REFERENCE_TO_BODY, ___tSUBTYPED));
          free_mem_aligned_heap (base);
        }
      else
        {
          /* object survives; keep it on the still object list */
          live_words_still += base[___STILL_LENGTH];
          *last = ___CAST(___WORD,base);
          last = base + ___STILL_LINK;
        }
      base = ___CAST(___WORD*,link);
    }

  *last = 0; /* terminate the list of survivors */

  words_still_objs = live_words_still;
  words_still_objs_deferred = 0;

  /*
   * In principle the occupied_words_still could be updated here but
   * this would require acquiring a lock, so instead the
   * occupied_words_still will be recomputed from all of the
   * processor's words_still_objs when computing the space occupied by
   * live objects prior to resizing the heap.
   */
}
4216
4217
4218 ___HIDDEN void free_still_objs
4219 ___P((___processor_state ___ps),
4220 (___ps)
4221 ___processor_state ___ps;)
4222 {
4223 ___WORD *base = ___CAST(___WORD*,still_objs);
4224
4225 still_objs = 0;
4226
4227 while (base != 0)
4228 {
4229 ___WORD link = base[___STILL_LINK];
4230 ___WORD head = base[___STILL_BODY-1];
4231 if (___HD_SUBTYPE(head) == ___sFOREIGN)
4232 ___release_foreign
4233 (___TAG(base + ___STILL_BODY - ___REFERENCE_TO_BODY, ___tSUBTYPED));
4234 free_mem_aligned_heap (base);
4235 base = ___CAST(___WORD*,link);
4236 }
4237 }
4238
4239
/*
 * SET_MAX/SET_MIN clamp 'var' from below/above by 'val'.
 *
 * NOTE: 'val' may be evaluated twice, so arguments must be free of
 * side effects.  Both macro parameters are fully parenthesized in the
 * expansion (including the assignment) so that expression arguments
 * associate as intended.
 */
#define SET_MAX(var,val) do { if ((var) < (val)) (var) = (val); } while (0)
#define SET_MIN(var,val) do { if ((var) > (val)) (var) = (val); } while (0)
4242
4243
/*
 * Compute the target heap size, in words, for a heap containing
 * 'live' words of live objects.  A user supplied adjust_heap_hook
 * overrides the computation.  Otherwise the target keeps the live
 * data at live_percent of the heap (the division is performed before
 * the multiplication -- presumably to avoid overflow of live*100;
 * note that this truncates), and the result is clamped to the
 * configured min_heap and (if set) max_heap.
 */

___HIDDEN ___SIZE_TS adjust_heap
   ___P((___SIZE_TS live),
        (live)
___SIZE_TS live;)
{
  ___SIZE_TS target;

  if (___GSTATE->setup_params.adjust_heap_hook != 0)
    return ___GSTATE->setup_params.adjust_heap_hook (live);

  if (___GSTATE->setup_params.live_percent < 100)
    target = live / ___GSTATE->setup_params.live_percent * 100;
  else
    target = live + ___MSECTION_BIGGEST;

  /* never shrink below the configured minimum heap size (in words) */
  SET_MAX(target,
          ___CAST(___SIZE_TS,(___GSTATE->setup_params.min_heap >> ___LWS)));

  if (___GSTATE->setup_params.max_heap > 0)
    SET_MIN(target,
            ___CAST(___SIZE_TS,(___GSTATE->setup_params.max_heap >> ___LWS)));

  return target;
}
4268
4269
/*
 * Set up the processor state's stack and heap allocation limits.
 * Half of the available free heap space is given to the stack and
 * half to the heap, each bounded by the space actually left in the
 * current stack/heap msection (less the fudge space).  Also rearms
 * the stack overflow and interrupt detection limit.
 */

___HIDDEN void prepare_mem_pstate
   ___P((___processor_state ___ps),
        (___ps)
___processor_state ___ps;)
{
  ___SIZE_TS avail;
  ___SIZE_TS stack_avail;
  ___SIZE_TS stack_left_before_fudge;
  ___SIZE_TS heap_avail;
  ___SIZE_TS heap_left_before_fudge;

#ifdef ___CALL_GC_FREQUENTLY
  avail = 0;
  ___ps->mem.gc_calls_to_punt_ = 2000;
#else
  avail = compute_free_heap_space()/2;
  SET_MAX(avail,0); /* guard against a negative free space estimate */
#endif

  stack_avail = avail/2;
  stack_left_before_fudge = (alloc_stack_ptr - alloc_stack_limit)
                            - ___MSECTION_FUDGE;

  ___ps->fp = alloc_stack_ptr;
  /* the stack grows downward: the limit is below alloc_stack_ptr */
  ___ps->stack_limit = alloc_stack_ptr
                       - ((stack_avail < stack_left_before_fudge)
                          ? stack_avail
                          : stack_left_before_fudge);

  heap_avail = avail - stack_avail;
  heap_left_before_fudge = (alloc_heap_limit - alloc_heap_ptr)
                           - ___MSECTION_FUDGE;

  ___ps->hp = alloc_heap_ptr;
  /* the heap grows upward: the limit is above alloc_heap_ptr */
  ___ps->heap_limit = alloc_heap_ptr
                      + ((heap_avail < heap_left_before_fudge)
                         ? heap_avail
                         : heap_left_before_fudge);

  /* set stack overflow and interrupt detection limit */

  ___begin_interrupt_service_pstate (___ps);
  ___end_interrupt_service_pstate (___ps, 0);

#ifdef ENABLE_CONSISTENCY_CHECKS
  if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) >= 1)
    {
      /* poison the unused zone just below the stack limit */
      ___WORD *end = ___ps->stack_limit;
      ___WORD *start = end - ___MSECTION_FUDGE;
      if (end > alloc_stack_ptr)
        end = alloc_stack_ptr;
      zap_section (start, end);
      if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) == 3)
        {
          ___printf ("heap_size = %d\n", heap_size);
          ___printf ("avail = %d\n", avail);
          ___printf ("stack_avail = %d\n", stack_avail);
          ___printf ("heap_avail = %d\n", heap_avail);
          ___printf ("stack_msection = %p\n", stack_msection);
          ___printf ("heap_msection = %p\n", heap_msection);
          ___printf ("___ps->stack_start = %p\n", ___ps->stack_start);
          ___printf ("___ps->stack_break = %p\n", ___ps->stack_break);
          ___printf ("___ps->fp = %p\n", ___ps->fp);
          ___printf ("alloc_stack_ptr = %p\n", alloc_stack_ptr);
          ___printf ("___ps->stack_limit = %p\n", ___ps->stack_limit);
          ___printf ("alloc_stack_limit = %p\n", alloc_stack_limit);
          ___printf ("alloc_heap_limit = %p\n", alloc_heap_limit);
          ___printf ("___ps->heap_limit = %p\n", ___ps->heap_limit);
          ___printf ("___ps->hp = %p\n", ___ps->hp);
          ___printf ("alloc_heap_ptr = %p\n", alloc_heap_ptr);
          ___printf ("alloc_heap_start = %p\n", alloc_heap_start);
        }
    }
#endif
}
4345
4346
/*
 * Initialize the memory management part of a processor state:
 * activity log, tospace location, first stack and heap msections,
 * still object list, byte allocation accounting, reference counting,
 * the initial "break frame", the will lists, and assorted
 * debugging/heartbeat fields.  Returns ___FIX(___NO_ERR) on success
 * or an error code from the activity log setup.
 */

___SCMOBJ ___setup_mem_pstate
   ___P((___processor_state ___ps),
        (___ps)
___processor_state ___ps;)
{
  ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);
  ___SCMOBJ err;

  /*
   * Setup processor's activity log.
   */

  if ((err = ___setup_actlog_pstate (___ps)) != ___FIX(___NO_ERR))
    return err;

  /*
   * Setup location of tospace.
   */

  /* all processors use processor 0's tospace offset */
  tospace_offset = ___PSTATE_FROM_PROCESSOR_ID(0,___vms)->mem.tospace_offset_;

  ___SPINLOCK_INIT(heap_chunks_to_scan_lock);

  /*
   * Allocate processor's stack and heap.
   */

  msection_free_list = 0;

  words_prev_msections = 0;

  stack_msection = 0;
  alloc_stack_start = 0;
  alloc_stack_ptr = 0;

  heap_msection = 0;
  alloc_heap_start = 0;
  alloc_heap_ptr = 0;

  next_stack_msection (___ps); /* allocate one msection for stack */
  next_heap_msection (___ps); /* allocate one msection for local heap */

  /* Setup list of still objects. */

  still_objs = 0;
  words_still_objs = 0;
  words_still_objs_deferred = 0;

  /* Keep track of bytes allocated */

  bytes_allocated_minus_occupied = 0.0;

  /*
   * Setup reference counted memory management.
   */

  setup_rc (___ps);

  /*
   * Create "break frame" of initial top section.
   */

  ___ps->stack_start = alloc_stack_start;
  alloc_stack_ptr = alloc_stack_start;

  ___FP_ADJFP(alloc_stack_ptr,___FIRST_BREAK_FRAME_SPACE)
  ___FP_SET_STK(alloc_stack_ptr,-___BREAK_FRAME_NEXT,___END_OF_CONT_MARKER)

  ___ps->stack_break = alloc_stack_ptr;

  /*
   * Setup will lists.
   */

  nonexecutable_wills = ___TAG(0,0); /* tagged empty list */
  executable_wills = ___TAG(0,___EXECUTABLE_WILL); /* tagged empty list */

#ifdef ___DEBUG_CTRL_FLOW_HISTORY

  {
    int i;
    ___ps->ctrl_flow_history_index = 0;
    for (i=___CTRL_FLOW_HISTORY_LENGTH-1; i>=0; i--)
      ___ps->ctrl_flow_history[i].line = 0;
  }

#endif

#ifdef ___DEBUG_STACK_LIMIT
  ___ps->poll_location.line = 0;
  ___ps->stack_limit_location.line = 0;
#endif

#ifdef ___DEBUG_HEAP_LIMIT
  ___ps->check_heap_location.line = 0;
  ___ps->heap_limit_location.line = 0;
#endif

#ifdef ___HEARTBEAT_USING_POLL_COUNTDOWN
  ___ps->heartbeat_interval = ___HEARTBEAT_USING_POLL_COUNTDOWN;
  ___ps->heartbeat_countdown = ___ps->heartbeat_interval;
#endif

  prepare_mem_pstate (___ps);

  /* err is ___FIX(___NO_ERR) here (errors returned earlier) */
  return err;
}
4454
4455
/*
 * Initialize the memory management part of a virtual machine state:
 * VM level locks and scan-termination condition variable, membership
 * in the global circular VM list, the global variable array, the
 * msections, the overflow reserve and the GC statistics.  Returns
 * ___FIX(___NO_ERR) on success or ___FIX(___HEAP_OVERFLOW_ERR) if the
 * initial msections could not be allocated.
 */

___SCMOBJ ___setup_mem_vmstate
   ___P((___virtual_machine_state ___vms),
        (___vms)
___virtual_machine_state ___vms;)
{
#undef ___VMSTATE_MEM
#define ___VMSTATE_MEM(var) ___vms->mem.var

  int init_nb_sections;

#ifndef ___SINGLE_THREADED_VMS

  /*
   * Initialize spinlock for VM level memory allocation.
   */

  ___SPINLOCK_INIT(misc_mem_lock);
  ___SPINLOCK_INIT(alloc_mem_lock);

  /*
   * Initialize condition variable to determine end of scan at VM level.
   */

  ___MUTEX_INIT(scan_termination_mutex);
  ___CONDVAR_INIT(scan_termination_condvar);

#endif

#ifndef ___SINGLE_VM

  /*
   * Add to tail of virtual machine circular list.
   */

  ___MUTEX_LOCK(___GSTATE->vm_list_mut);

  {
    ___virtual_machine_state head = &___GSTATE->vmstate0;
    ___virtual_machine_state tail = head->prev;

    ___vms->prev = tail;
    ___vms->next = head;
    head->prev = ___vms;
    tail->next = ___vms;
  }

  ___MUTEX_UNLOCK(___GSTATE->vm_list_mut);

  /* TODO: implement expansion of glos array when number of globals grows beyond 20000 */

  { int n = 20000;
    /* NOTE(review): the ___ALLOC_MEM result is not checked for
       failure before the initialization loop below -- confirm
       allocation cannot fail here or report an error. */
    ___vms->glos = ___CAST(___SCMOBJ*,___ALLOC_MEM(n * sizeof (___SCMOBJ)));
    while (--n>=0) { ___vms->glos[n] = ___UNB1; }
  }

#endif

  /*
   * It is important to initialize the_msections first so
   * that if the program terminates early the procedure
   * ___cleanup_mem_vmstate will not access dangling pointers.
   */

  the_msections = 0;

  /*
   * Setup location of tospace.
   */

  ___PSTATE_FROM_PROCESSOR_ID(0,___vms)->mem.tospace_offset_ = 0;

  /*
   * Set the overflow reserve so that the rest parameter handler can
   * construct the rest parameter list without having to call the
   * garbage collector.
   */

  normal_overflow_reserve = 2*((___MAX_NB_PARMS+___SUBTYPED_BODY) +
                               ___MAX_NB_ARGS*(___PAIR_SIZE+___PAIR_BODY));
  overflow_reserve = normal_overflow_reserve;

  /* Setup GC statistics */

  nb_gcs = 0.0;
  gc_user_time = 0.0;
  gc_sys_time = 0.0;
  gc_real_time = 0.0;

  latest_gc_real_time = 0.0;
  latest_gc_heap_size = ___CAST(___F64,heap_size) * ___WS;
  latest_gc_live = 0.0;
  latest_gc_movable = 0.0;
  latest_gc_still = 0.0;

  /* Allocate msections of VM */

  init_nb_sections =
    ___MIN_NB_MSECTIONS_PER_PROCESSOR * ___vms->processor_count +
    ___CEILING_DIV((___GSTATE->setup_params.min_heap >> ___LWS) +
                   normal_overflow_reserve,
                   ___MSECTION_SIZE - 2*___MSECTION_FUDGE);

  adjust_msections (&the_msections, init_nb_sections);

  /* fail if fewer msections than requested could be allocated */
  if (the_msections == 0 ||
      the_msections->nb_sections != init_nb_sections)
    return ___FIX(___HEAP_OVERFLOW_ERR);

  occupied_words_movable = 0;
  occupied_words_still = 0;

  nb_msections_assigned = 0;

  heap_size = compute_heap_space();

  return ___FIX(___NO_ERR);

#undef ___VMSTATE_MEM
#define ___VMSTATE_MEM(var) ___VMSTATE_FROM_PSTATE(___ps)->mem.var
}
4576
4577
/*
 * One-time memory setup shared by all VMs: choose default min_heap
 * and live_percent settings when unspecified or out of range,
 * initialize the psections, and create the global variable list and
 * the symbol and keyword tables.  Returns ___FIX(___NO_ERR) or an
 * error fixnum from table allocation.
 */

___SCMOBJ ___setup_mem ___PVOID
{
  if (___GSTATE->setup_params.min_heap == 0)
    {
      /*
       * Choose a reasonable minimum heap size.
       */

      /* half of the CPU cache, but at least ___DEFAULT_MIN_HEAP */
      ___GSTATE->setup_params.min_heap = ___cpu_cache_size (0, 0) / 2;

      SET_MAX(___GSTATE->setup_params.min_heap, ___DEFAULT_MIN_HEAP);
    }

  if (___GSTATE->setup_params.live_percent <= 0 ||
      ___GSTATE->setup_params.live_percent > 100)
    {
      /*
       * Choose a reasonable minimum live percent.
       */

      ___GSTATE->setup_params.live_percent = ___DEFAULT_LIVE_PERCENT;
    }

  /*
   * Setup psections.
   */

  ___GSTATE->mem.psections = 0;
  ___GSTATE->mem.palloc_ptr = 0;

  /*
   * Create empty global variable list, symbol table and keyword
   * table.
   */

  ___glo_list_setup ();

  {
    ___SCMOBJ t = alloc_symkey_table (___sSYMBOL, INIT_SYMBOL_TABLE_LENGTH);

    /* a fixnum result is an error code */
    if (___FIXNUMP(t))
      return t;

    ___GSTATE->symbol_table = t;
  }

  {
    ___SCMOBJ t = alloc_symkey_table (___sKEYWORD, INIT_KEYWORD_TABLE_LENGTH);

    if (___FIXNUMP(t))
      return t;

    ___GSTATE->keyword_table = t;
  }

  return ___FIX(___NO_ERR);
}
4635
4636
/*
 * Release the memory owned by a processor state: all of its still
 * objects and its reference counting structures.
 */

void ___cleanup_mem_pstate
   ___P((___processor_state ___ps),
        (___ps)
___processor_state ___ps;)
{
  free_still_objs (___ps);
  cleanup_rc (___ps);
}
4645
4646
/*
 * Tear down the memory management part of a virtual machine state:
 * destroy the VM level locks and condition variable, clean up
 * processor 0's memory, free the msections, and unlink the VM from
 * the global circular VM list (___GSTATE->vm_list_mut is assumed to
 * be held by the caller).
 */

void ___cleanup_mem_vmstate
   ___P((___virtual_machine_state ___vms),
        (___vms)
___virtual_machine_state ___vms;)
{
#undef ___VMSTATE_MEM
#define ___VMSTATE_MEM(var) ___vms->mem.var

#ifndef ___SINGLE_THREADED_VMS

  /*
   * Destroy spinlock for VM level memory allocation.
   */

  ___SPINLOCK_DESTROY(misc_mem_lock);
  ___SPINLOCK_DESTROY(alloc_mem_lock);

  /*
   * Destroy condition variable to determine end of scan at VM level.
   */

  ___CONDVAR_DESTROY(scan_termination_condvar);
  ___MUTEX_DESTROY(scan_termination_mutex);

#endif

  ___cleanup_mem_pstate (___PSTATE_FROM_PROCESSOR_ID(0,___vms));/*TODO: other processors?*/

  free_msections (&the_msections);

#ifndef ___SINGLE_VM

  /*
   * Remove from virtual machine circular list.
   */

  /* It is assumed that ___GSTATE->vm_list_mut is currently locked */

  {
    ___virtual_machine_state prev = ___vms->prev;
    ___virtual_machine_state next = ___vms->next;

    next->prev = prev;
    prev->next = next;
  }

#endif

#undef ___VMSTATE_MEM
#define ___VMSTATE_MEM(var) ___VMSTATE_FROM_PSTATE(___ps)->mem.var
}
4698
4699
/*
 * Release the process-wide memory: the psections.
 */

void ___cleanup_mem ___PVOID
{
  free_psections ();
}
4704
4705
/*
 * Walk a list of wills and flag as executable each will whose
 * testator is a memory allocated object that was not marked by the
 * GC, i.e. an object now only reachable through weak references.
 * Wills that were themselves forwarded by the GC are followed to
 * their new location before being examined.
 */

___HIDDEN void determine_will_executability
   ___P((___WORD list),
        (list)
___WORD list;)
{
  while (___UNTAG(list) != 0)
    {
      ___WORD* will_body = ___UNTAG(list) + ___SUBTYPED_BODY;
      ___WORD will_head = will_body[-1];
      ___WORD testator;

      ___WORD *unmarked_body; /* used by the UNMARKED macro */
      int unmarked_typ;

      if (___TYP(will_head) == ___FORW) /* was will forwarded? */
        will_body = ___BODY0_AS(will_head,___FORW);

      list = will_body[___WILL_NEXT];

      testator = will_body[___WILL_TESTATOR];

      if (___MEM_ALLOCATED(testator) &&
          UNMARKED(testator)) /* testator was not marked? */
        {
          /*
           * All paths to testator object from roots pass through
           * weak references, so mark will as executable.
           */

          will_body[___WILL_NEXT] = list | ___EXECUTABLE_WILL;
        }
    }
}
4739
4740
/*
 * Process the will lists at the end of marking: determine which
 * nonexecutable wills have become executable (testator only weakly
 * reachable), mark the will objects themselves, mark the testators
 * whose marking was deferred during scanning (flagged with
 * ___UNMARKED_TESTATOR_WILL by scan), and rebuild both lists with the
 * newly executable wills moved to the executable list.  tail_exec and
 * tail_nonexec track the cell holding the next link of each list
 * being rebuilt.
 */

___HIDDEN void process_wills
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___WORD* tail_exec;
  ___WORD* tail_nonexec;
  ___WORD curr;

#ifdef ENABLE_CONSISTENCY_CHECKS
  reference_location = IN_WILL_LIST;
#endif

  determine_will_executability (nonexecutable_wills);

  /*
   * Finish scanning the wills whose testator object remains to be
   * marked.
   *
   * The wills that have become executable are also transferred from
   * the nonexecutable wills list to the executable wills list.
   */

  tail_exec = &executable_wills;
  curr = *tail_exec;

  while (___UNTAG(curr) != 0)
    {
      ___WORD will = ___SUBTYPED_FROM_START(___UNTAG(curr));

      mark_array (___PSP &will, 1); /* the will object itself is live */

      /* relink, preserving the ___EXECUTABLE_WILL tag */
      *tail_exec = ___TAG(___SUBTYPED_TO_START(will),___EXECUTABLE_WILL);
      tail_exec = &___BODY0_AS(will,___tSUBTYPED)[___WILL_NEXT];
      curr = *tail_exec;
      if (curr & ___UNMARKED_TESTATOR_WILL)
        mark_array (___PSP tail_exec+___WILL_TESTATOR, 1); /* mark testator object */
    }

  tail_nonexec = &nonexecutable_wills;
  curr = *tail_nonexec;

  while (___UNTAG(curr) != 0)
    {
      ___WORD will = ___SUBTYPED_FROM_START(___UNTAG(curr));

      mark_array (___PSP &will, 1); /* the will object itself is live */

      if (___BODY0_AS(will,___tSUBTYPED)[___WILL_NEXT] & ___EXECUTABLE_WILL)
        {
          /* move will to executable will list */

          *tail_exec = ___TAG(___SUBTYPED_TO_START(will),___EXECUTABLE_WILL);
          tail_exec = &___BODY0_AS(will,___tSUBTYPED)[___WILL_NEXT];
          curr = *tail_exec;
          if (curr & ___UNMARKED_TESTATOR_WILL)
            mark_array (___PSP tail_exec+___WILL_TESTATOR, 1); /* mark testator object */
        }
      else
        {
          /* leave will in nonexecutable will list */

          *tail_nonexec = ___TAG(___SUBTYPED_TO_START(will),0);
          tail_nonexec = &___BODY0_AS(will,___tSUBTYPED)[___WILL_NEXT];
          curr = *tail_nonexec;
          if (curr & ___UNMARKED_TESTATOR_WILL)
            mark_array (___PSP tail_nonexec+___WILL_TESTATOR, 1); /* mark testator object */
        }
    }

  /* terminate both rebuilt lists with tagged empty list markers */
  *tail_exec = ___TAG(0,___EXECUTABLE_WILL);
  *tail_nonexec = ___TAG(0,0);
}
4815
4816
/*
 * process_gc_hash_tables walks the list of GC hash tables that were
 * reached during the GC (the list starts at reached_gc_hash_tables
 * and is linked through the ___GCHASHTABLE_NEXT field).  For each
 * table, entries whose weakly held key and/or value was not marked
 * by the GC are deleted, and entries whose key or value object was
 * forwarded by the GC are updated to the object's new location.
 * When a key object has moved, the ___GCHASHTABLE_FLAG_KEY_MOVED
 * flag is set so that the table will be rehashed the next time it is
 * accessed.
 */

___HIDDEN void process_gc_hash_tables
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___WORD curr = reached_gc_hash_tables;

  while (curr != ___TAG(0,0))
    {
      ___WORD* body = ___CAST(___WORD*,curr);
      ___SIZE_TS words = ___HD_WORDS(body[-1]);
      int flags = ___INT(body[___GCHASHTABLE_FLAGS]);
      int i;

      curr = body[___GCHASHTABLE_NEXT];

      body[___GCHASHTABLE_NEXT] = ___FIX(0); /* unlink from reached list */

      if (((___GCHASHTABLE_FLAG_WEAK_KEYS | ___GCHASHTABLE_FLAG_MEM_ALLOC_KEYS)
           & flags) ==
          (___GCHASHTABLE_FLAG_WEAK_KEYS | ___GCHASHTABLE_FLAG_MEM_ALLOC_KEYS))
        {
          if (flags & ___GCHASHTABLE_FLAG_WEAK_VALS)
            {
              /*
               * GC hash table is weak on keys and on values.
               */

              /*
               * Eliminate GC hash table entries with an unmarked key
               * or an unmarked value.
               */

              for (i=words-2; i>=___GCHASHTABLE_KEY0; i-=2)
                {
                  ___WORD *unmarked_body; /* used by the UNMARKED macro */
                  int unmarked_typ;

                  ___WORD key = body[i];
                  ___WORD val = body[i+1];

                  if (___MEM_ALLOCATED(key))
                    {
                      /* head word of the key object (forwarding
                         pointer when the object was moved) */
                      ___WORD key_head = ___BODY0(key)[-1];

                      if (___TYP(key_head) == ___FORW)
                        {
                          /*
                           * The key is movable and has been
                           * forwarded.
                           */

                          if (___MEM_ALLOCATED(val))
                            {
                              ___WORD val_head = ___BODY0(val)[-1];

                              if (___TYP(val_head) == ___FORW)
                                {
                                  /*
                                   * The key is movable and has been
                                   * forwarded and the value is
                                   * movable and has been forwarded,
                                   * so update key field and value
                                   * field and remember to rehash next
                                   * time the GC hash table is
                                   * accessed.
                                   */

                                  body[i] =
                                    ___TAG(___UNTAG_AS(key_head, ___FORW),
                                           ___TYP(key));
                                  body[i+1] =
                                    ___TAG(___UNTAG_AS(val_head, ___FORW),
                                           ___TYP(val));
                                  flags |= ___GCHASHTABLE_FLAG_KEY_MOVED;
                                }
                              else if (UNMARKED(val))
                                {
                                  /*
                                   * Change the entry to indicate it
                                   * has been deleted.
                                   */

                                  body[i] = ___DELETED;
                                  body[i+1] = ___UNUSED;
                                  body[___GCHASHTABLE_COUNT] =
                                    ___FIXSUB(body[___GCHASHTABLE_COUNT],
                                              ___FIX(1));
                                  flags |= ___GCHASHTABLE_FLAG_ENTRY_DELETED;
                                }
                              else
                                {
                                  /*
                                   * The key is movable and has been
                                   * forwarded and the value is not
                                   * movable and is reachable, so
                                   * update key field and remember to
                                   * rehash next time the GC hash
                                   * table is accessed.
                                   */

                                  body[i] =
                                    ___TAG(___UNTAG_AS(key_head, ___FORW),
                                           ___TYP(key));
                                  flags |= ___GCHASHTABLE_FLAG_KEY_MOVED;
                                }
                            }
                          else
                            {
                              /*
                               * The key is movable and has been
                               * forwarded, and the value is not
                               * memory allocated, so update key field
                               * and remember to rehash next time the
                               * GC hash table is accessed.
                               */

                              body[i] =
                                ___TAG(___UNTAG_AS(key_head, ___FORW),
                                       ___TYP(key));
                              flags |= ___GCHASHTABLE_FLAG_KEY_MOVED;
                            }
                        }
                      else if (UNMARKED(key))
                        {
                          /*
                           * Change the entry to indicate it has been
                           * deleted.
                           */

                          body[i] = ___DELETED;
                          body[i+1] = ___UNUSED;
                          body[___GCHASHTABLE_COUNT] =
                            ___FIXSUB(body[___GCHASHTABLE_COUNT],___FIX(1));
                          flags |= ___GCHASHTABLE_FLAG_ENTRY_DELETED;
                        }
                      else
                        {
                          /*
                           * The key is not movable and is reachable.
                           */

                          if (___MEM_ALLOCATED(val))
                            {
                              ___WORD val_head = ___BODY0(val)[-1];

                              if (___TYP(val_head) == ___FORW)
                                {
                                  /*
                                   * The key is not movable and is
                                   * reachable and the value is
                                   * movable and has been forwarded,
                                   * so update value field.
                                   */

                                  body[i+1] =
                                    ___TAG(___UNTAG_AS(val_head, ___FORW),
                                           ___TYP(val));
                                }
                              else if (UNMARKED(val))
                                {
                                  /*
                                   * Change the entry to indicate it
                                   * has been deleted.
                                   */

                                  body[i] = ___DELETED;
                                  body[i+1] = ___UNUSED;
                                  body[___GCHASHTABLE_COUNT] =
                                    ___FIXSUB(body[___GCHASHTABLE_COUNT],
                                              ___FIX(1));
                                  flags |= ___GCHASHTABLE_FLAG_ENTRY_DELETED;
                                }
                              else
                                {
                                  /*
                                   * The key is not movable and is
                                   * reachable and the value is not
                                   * movable and is reachable, so
                                   * leave fields untouched.
                                   */
                                }
                            }
                          else
                            {
                              /*
                               * The key is not movable and is
                               * reachable and the value is not memory
                               * allocated, so leave fields untouched.
                               */
                            }
                        }
                    }
                  else
                    {
                      /*
                       * The key is not memory allocated.
                       */

                      if (___MEM_ALLOCATED(val))
                        {
                          ___WORD val_head = ___BODY0(val)[-1];

                          if (___TYP(val_head) == ___FORW)
                            {
                              /*
                               * The key is not memory allocated and
                               * the value is movable and has been
                               * forwarded, so update value field.
                               */

                              body[i+1] =
                                ___TAG(___UNTAG_AS(val_head, ___FORW),
                                       ___TYP(val));
                            }
                          else if (UNMARKED(val))
                            {
                              /*
                               * Change the entry to indicate it
                               * has been deleted.
                               */

                              body[i] = ___DELETED;
                              body[i+1] = ___UNUSED;
                              body[___GCHASHTABLE_COUNT] =
                                ___FIXSUB(body[___GCHASHTABLE_COUNT],
                                          ___FIX(1));
                              flags |= ___GCHASHTABLE_FLAG_ENTRY_DELETED;
                            }
                          else
                            {
                              /*
                               * The key is not memory allocated and
                               * the value is not movable and is
                               * reachable, so leave fields untouched.
                               */
                            }
                        }
                      else
                        {
                          /*
                           * The key is not memory allocated and the
                           * value is not memory allocated, so leave
                           * fields untouched.
                           */
                        }
                    }
                }
            }
          else
            {
              /*
               * GC hash table is weak on keys only.
               */

              /*
               * Eliminate GC hash table entries with an unmarked key.
               */

              for (i=words-2; i>=___GCHASHTABLE_KEY0; i-=2)
                {
                  ___WORD *unmarked_body; /* used by the UNMARKED macro */
                  int unmarked_typ;

                  ___WORD key = body[i];

                  if (___MEM_ALLOCATED(key))
                    {
                      ___WORD head = ___BODY0(key)[-1];

                      if (___TYP(head) == ___FORW)
                        {
                          /*
                           * The key is movable and has been
                           * forwarded, so update key field and
                           * remember to rehash next time the
                           * GC hash table is accessed.
                           */

                          body[i] = ___TAG(___UNTAG_AS(head, ___FORW),
                                           ___TYP(key));
                          flags |= ___GCHASHTABLE_FLAG_KEY_MOVED;
                        }
                      else if (UNMARKED(key))
                        {
                          /*
                           * Change the entry to indicate it has been
                           * deleted.
                           */

                          body[i] = ___DELETED;
                          body[i+1] = ___UNUSED;
                          body[___GCHASHTABLE_COUNT] =
                            ___FIXSUB(body[___GCHASHTABLE_COUNT],___FIX(1));
                          flags |= ___GCHASHTABLE_FLAG_ENTRY_DELETED;
                        }
                    }
                }
            }
        }
      else
        {
          if (flags & ___GCHASHTABLE_FLAG_WEAK_VALS)
            {
              /*
               * GC hash table is weak on values only.
               */

              /*
               * Eliminate GC hash table entries with an unmarked value.
               */

              for (i=words-2; i>=___GCHASHTABLE_KEY0; i-=2)
                {
                  ___WORD *unmarked_body; /* used by the UNMARKED macro */
                  int unmarked_typ;

                  ___WORD val = body[i+1];

                  if (___MEM_ALLOCATED(val))
                    {
                      ___WORD head = ___BODY0(val)[-1];

                      if (___TYP(head) == ___FORW)
                        {
                          /*
                           * The value is movable and has been
                           * forwarded, so update value field.
                           */

                          body[i+1] = ___TAG(___UNTAG_AS(head, ___FORW),
                                             ___TYP(val));
                        }
                      else if (UNMARKED(val))
                        {
                          /*
                           * Change the entry to indicate it has been
                           * deleted.
                           */

                          body[i] = ___DELETED;
                          body[i+1] = ___UNUSED;
                          body[___GCHASHTABLE_COUNT] =
                            ___FIXSUB(body[___GCHASHTABLE_COUNT],___FIX(1));
                          flags |= ___GCHASHTABLE_FLAG_ENTRY_DELETED;
                        }
                    }
                }
            }

          /* keys were not examined individually in this case, so
             conservatively assume some key object was moved */
          if (flags & ___GCHASHTABLE_FLAG_MEM_ALLOC_KEYS)
            flags |= ___GCHASHTABLE_FLAG_KEY_MOVED; /* assume worst case */
        }

      body[___GCHASHTABLE_FLAGS] = ___FIX(flags);
    }
}
5175
5176
/*
 * gc_hash_table_rehash_in_situ rehashes the GC hash table ht in
 * place after its keys have been moved by the GC.  The
 * ___GCHASHTABLE_FLAG_KEY_MOVED flag is cleared, union-find parent
 * links (which are entry indexes that would be invalidated by the
 * rehash) are first replaced by the parent entry's key, deleted
 * entries are freed, and then every active entry is moved to the
 * location determined by its key's new hash.  While entries are
 * being moved, the ___MEM_ALLOCATED bit of each key is temporarily
 * toggled so that entries that have already been moved can be
 * distinguished from those that haven't.
 */

___HIDDEN void gc_hash_table_rehash_in_situ
   ___P((___SCMOBJ ht),
        (ht)
___SCMOBJ ht;)
{
  ___WORD* body = ___BODY_AS(ht,___tSUBTYPED);
  ___SIZE_TS words = ___HD_WORDS(body[-1]);
  int size2 = words - ___GCHASHTABLE_KEY0;
  int i;

  body[___GCHASHTABLE_FLAGS] =
    ___FIXAND(body[___GCHASHTABLE_FLAGS],
              ___FIXNOT(___FIX(___GCHASHTABLE_FLAG_KEY_MOVED)));

  if (!___FIXZEROP(___FIXAND(body[___GCHASHTABLE_FLAGS],
                             ___FIX(___GCHASHTABLE_FLAG_UNION_FIND))))
    {
#if 0

      /* disabled experimental path-compression pass */

      /*
       * Compress paths.
       */

      for (i=size2-2; i>=0; i-=2)
        {
          ___SCMOBJ val = body[i+___GCHASHTABLE_VAL0];
          if (___FIXNUMP(val)) /* parent links are encoded as fixnums */
            {
              if (!___FIXODDP(val)) { /* not compressed yet */
                int probe2 = ___INT(val);
                int prev2 = i;
                ___SCMOBJ x;
                for (;;) {
                  ___SCMOBJ v = body[probe2+___GCHASHTABLE_VAL0];
                  if (___FIXNUMP(v)) { /* link to parent? */
                    if (___FIXODDP(v)) { /* compressed path? */
                      x = v;
                      break;
                    }
                    body[probe2+___GCHASHTABLE_VAL0] = prev2;
                    prev2 = probe2;
                    probe2 = ___INT(v);
                  } else { /* reached root of class */
                    x = ___FIX(probe2+1);
                    break;
                  }
                }
                while (prev2 != i) {
                  probe2 = body[prev2+___GCHASHTABLE_VAL0];
                  body[prev2+___GCHASHTABLE_VAL0] = x;
                  prev2 = probe2;
                }
                body[i+___GCHASHTABLE_VAL0] = x;
              }
            }
        }

      for (i=size2-2; i>=0; i-=2)
        {
          ___SCMOBJ val = body[i+___GCHASHTABLE_VAL0];
          if (___FIXNUMP(val))
            body[i+___GCHASHTABLE_VAL0] = ___FIX(___INT(val)&~1);
        }

#endif

      /*
       * Replace entry values that are parent links by the key of
       * their parent.
       */

      for (i=size2-2; i>=0; i-=2)
        {
          ___SCMOBJ val = body[i+___GCHASHTABLE_VAL0];
          if (___FIXNUMP(val)) /* parent links are encoded as fixnums */
            body[i+___GCHASHTABLE_VAL0] = body[___INT(val)+___GCHASHTABLE_KEY0];
        }
    }

  if (___FIXZEROP(___FIXAND(body[___GCHASHTABLE_FLAGS],
                            ___FIX(___GCHASHTABLE_FLAG_MEM_ALLOC_KEYS))))
    {
      /*
       * Keys are not memory allocated; an active entry is marked by
       * SETTING the ___MEM_ALLOCATED bit of its key.
       */

      /*
       * Free deleted entries and mark key field of all active
       * entries.
       */

      for (i=size2-2; i>=0; i-=2)
        {
          ___WORD key = body[i+___GCHASHTABLE_KEY0];
          if (key == ___DELETED)
            {
              body[i+___GCHASHTABLE_KEY0] = ___UNUSED;
              body[___GCHASHTABLE_FREE] =
                ___FIXADD(body[___GCHASHTABLE_FREE], ___FIX(1));
            }
          else if (key != ___UNUSED)
            body[i+___GCHASHTABLE_KEY0] = ___MEM_ALLOCATED_SET(key);
        }

      /*
       * Move the active entries.
       */

      for (i=size2-2; i>=0; i-=2)
        {
          ___WORD key = body[i+___GCHASHTABLE_KEY0];

          if (___MEM_ALLOCATED(key))
            {
              /* this is an active entry that has not been moved yet */

              ___SCMOBJ val = body[i+___GCHASHTABLE_VAL0];
              ___SCMOBJ obj;
              int probe2;
              int step2;

              body[i+___GCHASHTABLE_KEY0] = ___UNUSED;
              body[i+___GCHASHTABLE_VAL0] = ___UNUSED;

            chain_non_mem_alloc:
              key = ___MEM_ALLOCATED_CLEAR(key); /* recover true encoding */
              ___GCHASHTABLE_HASH_STEP(probe2, step2, key, size2>>1);
              probe2 <<= 1;
              step2 <<= 1;

            next_non_mem_alloc:
              obj = body[probe2+___GCHASHTABLE_KEY0];

              if (obj == ___UNUSED)
                {
                  /* storing into an unused entry */

                  body[probe2+___GCHASHTABLE_KEY0] = key;
                  body[probe2+___GCHASHTABLE_VAL0] = val;
                }
              else if (___MEM_ALLOCATED(obj))
                {
                  /* storing into an active entry */

                  body[probe2+___GCHASHTABLE_KEY0] = key;
                  key = obj;
                  obj = body[probe2+___GCHASHTABLE_VAL0];
                  body[probe2+___GCHASHTABLE_VAL0] = val;
                  val = obj;
                  goto chain_non_mem_alloc; /* now move overwritten entry */
                }
              else
                {
                  /* an entry has been moved here, so keep looking */

                  probe2 -= step2;
                  if (probe2 < 0)
                    probe2 += size2;
                  goto next_non_mem_alloc;
                }
            }
        }
    }
  else
    {
      /*
       * Keys are memory allocated; an active entry is marked by
       * CLEARING the ___MEM_ALLOCATED bit of its key.
       */

      /*
       * Free deleted entries and mark key field of all active
       * entries.
       */

      for (i=size2-2; i>=0; i-=2)
        {
          ___WORD key = body[i+___GCHASHTABLE_KEY0];
          if (key == ___DELETED)
            {
              body[i+___GCHASHTABLE_KEY0] = ___UNUSED;
              body[___GCHASHTABLE_FREE] =
                ___FIXADD(body[___GCHASHTABLE_FREE], ___FIX(1));
            }
          else if (key != ___UNUSED)
            body[i+___GCHASHTABLE_KEY0] = ___MEM_ALLOCATED_CLEAR(key);
        }

      /*
       * Move the active entries.
       */

      for (i=size2-2; i>=0; i-=2)
        {
          ___WORD key = body[i+___GCHASHTABLE_KEY0];

          if (key != ___UNUSED && !___MEM_ALLOCATED(key))
            {
              /* this is an active entry that has not been moved yet */

              ___SCMOBJ val = body[i+___GCHASHTABLE_VAL0];
              ___SCMOBJ obj;
              int probe2;
              int step2;

              body[i+___GCHASHTABLE_KEY0] = ___UNUSED;
              body[i+___GCHASHTABLE_VAL0] = ___UNUSED;

            chain_mem_alloc:
              key = ___MEM_ALLOCATED_SET(key); /* recover true encoding */
              ___GCHASHTABLE_HASH_STEP(probe2, step2, key, size2>>1);
              probe2 <<= 1;
              step2 <<= 1;

            next_mem_alloc:
              obj = body[probe2+___GCHASHTABLE_KEY0];

              if (obj == ___UNUSED)
                {
                  /* storing into an unused entry */

                  body[probe2+___GCHASHTABLE_KEY0] = key;
                  body[probe2+___GCHASHTABLE_VAL0] = val;
                }
              else if (!___MEM_ALLOCATED(obj))
                {
                  /* storing into an active entry */

                  body[probe2+___GCHASHTABLE_KEY0] = key;
                  key = obj;
                  obj = body[probe2+___GCHASHTABLE_VAL0];
                  body[probe2+___GCHASHTABLE_VAL0] = val;
                  val = obj;
                  goto chain_mem_alloc; /* now move overwritten entry */
                }
              else
                {
                  /* an entry has been moved here, so keep looking */

                  probe2 -= step2;
                  if (probe2 < 0)
                    probe2 += size2;
                  goto next_mem_alloc;
                }
            }
        }
    }
}
5416
5417
5418 ___SCMOBJ ___gc_hash_table_ref
5419 ___P((___SCMOBJ ht,
5420 ___SCMOBJ key),
5421 (ht,
5422 key)
5423 ___SCMOBJ ht;
5424 ___SCMOBJ key;)
5425 {
5426 int size2;
5427 int probe2;
5428 int step2;
5429 ___SCMOBJ obj;
5430
5431 if (!___FIXZEROP(___FIXAND(___FIELD(ht, ___GCHASHTABLE_FLAGS),
5432 ___FIX(___GCHASHTABLE_FLAG_KEY_MOVED))))
5433 gc_hash_table_rehash_in_situ (ht);
5434
5435 size2 = ___INT(___VECTORLENGTH(ht)) - ___GCHASHTABLE_KEY0;
5436 ___GCHASHTABLE_HASH_STEP(probe2, step2, key, size2>>1);
5437 probe2 <<= 1;
5438 step2 <<= 1;
5439 obj = ___FIELD(ht, probe2+___GCHASHTABLE_KEY0);
5440
5441 if (___EQP(obj,key))
5442 return ___FIELD(ht, probe2+___GCHASHTABLE_VAL0);
5443 else if (!___EQP(obj,___UNUSED))
5444 {
5445 for (;;)
5446 {
5447 probe2 -= step2;
5448 if (probe2 < 0)
5449 probe2 += size2;
5450 obj = ___FIELD(ht, probe2+___GCHASHTABLE_KEY0);
5451
5452 if (___EQP(obj,key))
5453 return ___FIELD(ht, probe2+___GCHASHTABLE_VAL0);
5454 else if (___EQP(obj,___UNUSED))
5455 break;
5456 }
5457 }
5458
5459 return ___UNUSED; /* key was not found */
5460 }
5461
5462
/*
 * ___gc_hash_table_set adds or replaces the entry for key in the GC
 * hash table ht, or deletes it when val is ___ABSENT.  The return
 * value is ___TRU when the table needs to be resized (no free entry
 * left after an addition, or count fell below the minimum after a
 * deletion) and ___FAL otherwise.  The table is first rehashed in
 * place if some key was moved by the GC since the last access.
 */

___SCMOBJ ___gc_hash_table_set
   ___P((___SCMOBJ ht,
         ___SCMOBJ key,
         ___SCMOBJ val),
        (ht,
         key,
         val)
___SCMOBJ ht;
___SCMOBJ key;
___SCMOBJ val;)
{
  int size2;
  int probe2;
  int step2;
  ___SCMOBJ obj;

  if (!___FIXZEROP(___FIXAND(___FIELD(ht, ___GCHASHTABLE_FLAGS),
                             ___FIX(___GCHASHTABLE_FLAG_KEY_MOVED))))
    gc_hash_table_rehash_in_situ (ht);

  size2 = ___INT(___VECTORLENGTH(ht)) - ___GCHASHTABLE_KEY0;
  ___GCHASHTABLE_HASH_STEP(probe2, step2, key, size2>>1);
  probe2 <<= 1;
  step2 <<= 1;
  obj = ___FIELD(ht, probe2+___GCHASHTABLE_KEY0);

  if (!___EQP(val,___ABSENT))
    {
      /* trying to add or replace an entry */

      if (___EQP(obj,key))
        {
        replace_entry:
          ___FIELD(ht, probe2+___GCHASHTABLE_VAL0) = val;
        }
      else if (___EQP(obj,___UNUSED))
        {
        add_entry:
          ___FIELD(ht, probe2+___GCHASHTABLE_KEY0) = key;
          ___FIELD(ht, probe2+___GCHASHTABLE_VAL0) = val;
          ___FIELD(ht, ___GCHASHTABLE_COUNT) =
            ___FIXADD(___FIELD(ht, ___GCHASHTABLE_COUNT), ___FIX(1));
          if (___FIXNEGATIVEP(___FIELD(ht, ___GCHASHTABLE_FREE) =
                              ___FIXSUB(___FIELD(ht, ___GCHASHTABLE_FREE),
                                        ___FIX(1))))
            return ___TRU; /* table must grow */
        }
      else
        {
          /* remembers the first deleted entry seen, so it can be
             reused if the key turns out not to be in the table */
          int deleted2 = -1;

          for (;;)
            {
              if (deleted2 < 0 && ___EQP(obj,___DELETED))
                deleted2 = probe2;

              probe2 -= step2;
              if (probe2 < 0)
                probe2 += size2;
              obj = ___FIELD(ht, probe2+___GCHASHTABLE_KEY0);

              if (___EQP(obj,key))
                goto replace_entry;

              if (___EQP(obj,___UNUSED))
                {
                  if (deleted2 < 0)
                    goto add_entry;

                  /* reuse the deleted entry (does not consume a free
                     entry, so no resize check is needed) */
                  ___FIELD(ht, deleted2+___GCHASHTABLE_KEY0) = key;
                  ___FIELD(ht, deleted2+___GCHASHTABLE_VAL0) = val;
                  ___FIELD(ht, ___GCHASHTABLE_COUNT) =
                    ___FIXADD(___FIELD(ht, ___GCHASHTABLE_COUNT), ___FIX(1));

                  break;
                }
            }
        }
    }
  else
    {
      /* trying to delete an entry */

      if (___EQP(obj,key))
        {
        delete_entry:
          ___FIELD(ht, probe2+___GCHASHTABLE_KEY0) = ___DELETED;
          ___FIELD(ht, probe2+___GCHASHTABLE_VAL0) = ___UNUSED;
          ___FIELD(ht, ___GCHASHTABLE_COUNT) =
            ___FIXSUB(___FIELD(ht, ___GCHASHTABLE_COUNT),
                      ___FIX(1));
          if (___FIXLT(___FIELD(ht, ___GCHASHTABLE_COUNT),
                       ___FIELD(ht, ___GCHASHTABLE_MIN_COUNT)))
            return ___TRU; /* table must shrink */
        }
      else if (!___EQP(obj,___UNUSED))
        {
          for (;;)
            {
              probe2 -= step2;
              if (probe2 < 0)
                probe2 += size2;
              obj = ___FIELD(ht, probe2+___GCHASHTABLE_KEY0);

              if (___EQP(obj,key))
                goto delete_entry;

              if (___EQP(obj,___UNUSED))
                break;
            }
        }
    }

  /*
   * Hash table does not need to be resized.
   */

  return ___FAL;
}
5582
5583
/*
 * FIND_COMPRESS_KEY(key,...) searches the GC hash table ht (in scope
 * at the expansion site, together with size2) for key using double
 * hashing.  On exit, obj is the key found (or ___UNUSED when the key
 * is absent), key_probe2 is the index of the entry probed, k is the
 * value stored at the root of key's union-find equivalence class (a
 * ___SPECIAL value when the key was found) and k_probe2 is the index
 * of the root entry.  As a side effect, the chain of parent links
 * from the entry to the root is compressed: each visited entry's
 * value is replaced by a direct fixnum link to the root.  The
 * parameters k_prev2, k_step2, o and k_p2 are the names used for the
 * temporaries declared inside the macro (they must not clash with
 * names at the expansion site).
 */

#define FIND_COMPRESS_KEY(key,key_probe2,key_step2,obj,k,k_probe2,k_prev2,k_step2,o,k_p2) \
do { \
  ___GCHASHTABLE_HASH_STEP(key_probe2,key_step2,key,size2>>1); \
  key_probe2 <<= 1; \
  key_step2 <<= 1; \
  obj = ___FIELD(ht, key_probe2+___GCHASHTABLE_KEY0); \
 \
  while (!(___EQP(obj,key) || ___EQP(obj,___UNUSED))) \
    { \
      key_probe2 -= key_step2; \
      if (key_probe2 < 0) \
        key_probe2 += size2; \
      obj = ___FIELD(ht, key_probe2+___GCHASHTABLE_KEY0); \
    } \
 \
  if (___EQP(obj,key)) \
    { \
      /* \
       * key was found, compress its path. \
       */ \
 \
      k = ___FIELD(ht, key_probe2+___GCHASHTABLE_VAL0); \
 \
      if (___SPECIALP(k)) \
        { \
          k_probe2 = key_probe2; \
        } \
      else \
        { \
          ___SCMOBJ k_prev2 = key_probe2; \
 \
          for (;;) \
            { \
              if (___FIXNUMP(k)) \
                { \
                  k_probe2 = ___INT(k); \
                } \
              else \
                { \
                  ___SCMOBJ o; \
                  ___SCMOBJ k_step2; \
                  ___GCHASHTABLE_HASH_STEP(k_probe2,k_step2,k,size2>>1); \
                  k_probe2 <<= 1; \
                  k_step2 <<= 1; \
                  o = ___FIELD(ht, k_probe2+___GCHASHTABLE_KEY0); \
 \
                  while (!___EQP(o,k)) \
                    { \
                      k_probe2 -= k_step2; \
                      if (k_probe2 < 0) \
                        k_probe2 += size2; \
                      o = ___FIELD(ht, k_probe2+___GCHASHTABLE_KEY0); \
                    } \
                } \
 \
              k = ___FIELD(ht, k_probe2+___GCHASHTABLE_VAL0); \
 \
              if (___SPECIALP(k)) \
                break; \
 \
              ___FIELD(ht, k_probe2+___GCHASHTABLE_VAL0) = ___FIX(k_prev2); \
              k_prev2 = k_probe2; \
            } \
 \
          for (;;) \
            { \
              ___SCMOBJ k_p2 = ___INT(___FIELD(ht, k_prev2+___GCHASHTABLE_VAL0)); \
              ___FIELD(ht, k_prev2+___GCHASHTABLE_VAL0) = ___FIX(k_probe2); \
              if (k_prev2 == key_probe2) \
                break; \
              k_prev2 = k_p2; \
            } \
        } \
    } \
} while (0)
5659
5660
___SCMOBJ ___gc_hash_table_union_find
   ___P((___SCMOBJ ht,
         ___SCMOBJ key1,
         ___SCMOBJ key2,
         ___BOOL find),
        (ht,
         key1,
         key2,
         find)
___SCMOBJ ht;
___SCMOBJ key1;
___SCMOBJ key2;
___BOOL find;)
{
  /*
   * This function takes a GC hash table "ht", which must have its
   * ___GCHASHTABLE_FLAG_UNION_FIND flag set, and two memory allocated
   * objects "key1" and "key2", and determines if these objects are
   * part of the same equivalence class.  If "find" is false, the hash
   * table is modified to force these objects to be in the same
   * equivalence class (union operation).  The returned value
   * indicates which keys were found in the table, if they are part of
   * the same equivalence class and if the GC hash table needs to
   * grow.  The possible return values are:
   *
   *  0      key1 and key2 found in ht, and in same equiv class
   *  1      key1 and key2 found in ht, but not in same equiv class
   *  2 or 3 only one of key1 and key2 found in ht (2 = need to grow ht)
   *  4 or 5 neither key1 or key2 found in ht (4 = need to grow ht)
   */

  int size2;
  ___SCMOBJ key1_probe2;
  ___SCMOBJ key1_step2;
  ___SCMOBJ key2_probe2;
  ___SCMOBJ key2_step2;
  ___SCMOBJ allocated;  /* number of entries added to the table (1 or 2) */
  ___SCMOBJ obj1;
  ___SCMOBJ obj2;
  ___SCMOBJ k1;         /* root value of key1's class (___SPECIAL = class size) */
  ___SCMOBJ k1_probe2;  /* index of the root entry of key1's class */
  ___SCMOBJ k2;         /* root value of key2's class (___SPECIAL = class size) */
  ___SCMOBJ k2_probe2;  /* index of the root entry of key2's class */

  if (!___FIXZEROP(___FIXAND(___FIELD(ht, ___GCHASHTABLE_FLAGS),
                             ___FIX(___GCHASHTABLE_FLAG_KEY_MOVED))))
    gc_hash_table_rehash_in_situ (ht);

  size2 = ___INT(___VECTORLENGTH(ht)) - ___GCHASHTABLE_KEY0;

  /* Search for key1 */

  FIND_COMPRESS_KEY(key1,
                    key1_probe2,
                    key1_step2,
                    obj1,
                    k1,
                    k1_probe2,
                    k1_prev2,
                    k1_step2,
                    o1,
                    k1_p2);

  /* Search for key2 */

  FIND_COMPRESS_KEY(key2,
                    key2_probe2,
                    key2_step2,
                    obj2,
                    k2,
                    k2_probe2,
                    k2_prev2,
                    k2_step2,
                    o2,
                    k2_p2);

  /* What needs to be done depends on which keys were found */

  if (___EQP(obj1,key1))
    {
      if (___EQP(obj2,key2))
        {
          /* both key1 and key2 were found in the table */

          if (k1_probe2 == k2_probe2)
            return ___FIX(0); /* keys are in the same equiv class */

          if (find)
            return ___FIX(1); /* keys are not in the same equiv class */

          /* union: k1 and k2 are the sizes of the two classes */

          k1 = ___INT(k1);
          k2 = ___INT(k2);

          if (k1 > k2) /* choose biggest equivalence class */
            {
              ___FIELD(ht, k1_probe2+___GCHASHTABLE_VAL0) = ___SPECIAL(k1+k2);
              ___FIELD(ht, k2_probe2+___GCHASHTABLE_VAL0) = ___FIX(k1_probe2);
            }
          else
            {
              ___FIELD(ht, k2_probe2+___GCHASHTABLE_VAL0) = ___SPECIAL(k1+k2);
              ___FIELD(ht, k1_probe2+___GCHASHTABLE_VAL0) = ___FIX(k2_probe2);
            }

          return ___FIX(1);
        }
      else
        {
          /* key1 was found in the table, but key2 was not found */

          if (find)
            return ___FIX(3); /* keys are not in the same equiv class */

          /* add key2 to key1's class (class size grows by 1) */

          k1 = ___INT(k1);

          ___FIELD(ht, k1_probe2+___GCHASHTABLE_VAL0) = ___SPECIAL(k1+1);
          ___FIELD(ht, key2_probe2+___GCHASHTABLE_KEY0) = key2;
          ___FIELD(ht, key2_probe2+___GCHASHTABLE_VAL0) = ___FIX(k1_probe2);
          allocated = 1;
        }
    }
  else
    {
      /* key1 was not found */

      if (___EQP(obj2,key2))
        {
          /* key2 was found in the table, but key1 was not found */

          if (find)
            return ___FIX(3); /* keys are not in the same equiv class */

          /* add key1 to key2's class (class size grows by 1) */

          k2 = ___INT(k2);

          ___FIELD(ht, k2_probe2+___GCHASHTABLE_VAL0) = ___SPECIAL(k2+1);
          ___FIELD(ht, key1_probe2+___GCHASHTABLE_KEY0) = key1;
          ___FIELD(ht, key1_probe2+___GCHASHTABLE_VAL0) = ___FIX(k2_probe2);
          allocated = 1;
        }
      else
        {
          /* key1 and key2 were not found in the table */

          if (find)
            return ___FIX(5); /* keys are not in the same equiv class */

          /* create a new class of size 2 rooted at key1's entry */

          ___FIELD(ht, key1_probe2+___GCHASHTABLE_KEY0) = key1;
          ___FIELD(ht, key1_probe2+___GCHASHTABLE_VAL0) = ___SPECIAL(2);

          if (key1_probe2 == key2_probe2)
            {
              /*
               * Both keys hash to the same entry so search for other
               * free entry.  This will succeed because GC hash tables
               * are guaranteed to have 2 free entries.
               */
              do
                {
                  key2_probe2 -= key2_step2;
                  if (key2_probe2 < 0)
                    key2_probe2 += size2;
                } while (!___EQP(___FIELD(ht, key2_probe2+___GCHASHTABLE_KEY0),
                                 ___UNUSED));
            }

          ___FIELD(ht, key2_probe2+___GCHASHTABLE_KEY0) = key2;
          ___FIELD(ht, key2_probe2+___GCHASHTABLE_VAL0) = ___FIX(key1_probe2);
          allocated = 2;
        }
    }

  ___FIELD(ht, ___GCHASHTABLE_COUNT) =
    ___FIXADD(___FIELD(ht, ___GCHASHTABLE_COUNT), ___FIX(allocated));

  if (___FIXNEGATIVEP(___FIELD(ht, ___GCHASHTABLE_FREE) =
                      ___FIXSUB(___FIELD(ht, ___GCHASHTABLE_FREE),
                                ___FIX(allocated))))
    return ___FIX(allocated*2); /* signal that table needs to grow */
  else
    return ___FIX(allocated*2+1); /* signal that table doesn't need to grow */
}
5842
5843
5844 ___SCMOBJ ___gc_hash_table_rehash
5845 ___P((___SCMOBJ ht_src,
5846 ___SCMOBJ ht_dst),
5847 (ht_src,
5848 ht_dst)
5849 ___SCMOBJ ht_src;
5850 ___SCMOBJ ht_dst;)
5851 {
5852 ___SCMOBJ* body_src = ___BODY_AS(ht_src,___tSUBTYPED);
5853 ___SIZE_TS words = ___HD_WORDS(body_src[-1]);
5854 int size2 = words - ___GCHASHTABLE_KEY0;
5855 int i;
5856
5857 if (___FIXZEROP(___FIXAND(body_src[___GCHASHTABLE_FLAGS],
5858 ___FIX(___GCHASHTABLE_FLAG_UNION_FIND))))
5859 {
5860 for (i=size2-2; i>=0; i-=2)
5861 {
5862 ___SCMOBJ key = body_src[i+___GCHASHTABLE_KEY0];
5863
5864 if (key != ___UNUSED &&
5865 key != ___DELETED)
5866 {
5867 ___SCMOBJ val = body_src[i+___GCHASHTABLE_VAL0];
5868 ___gc_hash_table_set (ht_dst, key, val);
5869 }
5870 }
5871 }
5872 else
5873 {
5874 for (i=size2-2; i>=0; i-=2)
5875 {
5876 ___SCMOBJ key = body_src[i+___GCHASHTABLE_KEY0];
5877
5878 if (key != ___UNUSED)
5879 {
5880 ___SCMOBJ val = body_src[i+___GCHASHTABLE_VAL0];
5881 if (___FIXNUMP(val)) {
5882 val = body_src[___INT(val)+___GCHASHTABLE_KEY0];
5883 }
5884 ___gc_hash_table_set (ht_dst, key, val);
5885 }
5886 }
5887 }
5888
5889 return ht_dst;
5890 }
5891
5892
/*
 * move_continuation copies the active portion of the stack (the
 * "length" words starting at ___ps->fp, which extend up to the first
 * break frame) so that it ends at alloc_stack_ptr, then updates
 * stack_start, stack_break and alloc_stack_ptr to refer to the new
 * location.  The copy proceeds word by word from high addresses to
 * low addresses.
 */

___HIDDEN void move_continuation
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET

  ___WORD *start;
  ___SIZE_TS length;
  ___WORD *p1;  /* source cursor (one past last word to copy) */
  ___WORD *p2;  /* destination cursor (one past copy destination) */

  start = ___ps->fp;
  length = (___ps->stack_break + ___FIRST_BREAK_FRAME_SPACE) - start;

  p1 = start + length;
  p2 = alloc_stack_ptr;

  ___ps->stack_start = alloc_stack_start;
  ___ps->stack_break = p2 - ___FIRST_BREAK_FRAME_SPACE;

  while (p1 != start)
    *--p2 = *--p1;

  alloc_stack_ptr = p2; /* new fp is at the base of the copied frames */
}
5919
5920
5921 ___HIDDEN void determine_occupied_words
5922 ___P((___virtual_machine_state ___vms),
5923 (___vms)
5924 ___virtual_machine_state ___vms;)
5925 {
5926 #undef ___VMSTATE_MEM
5927 #define ___VMSTATE_MEM(var) ___vms->mem.var
5928
5929 /*
5930 * Compute space occupied by live objects.
5931 */
5932
5933 int p;
5934 int np = ___vms->processor_count;
5935 ___SIZE_TS movable = 0;
5936 ___SIZE_TS still = 0;
5937 ___F64 bytes_allocated = 0.0;
5938
5939 for (p=0; p<np; p++)
5940 {
5941 ___processor_state ps = ___PSTATE_FROM_PROCESSOR_ID(p,___vms);
5942
5943 movable += words_movable_objs(ps);
5944
5945 still += ps->mem.words_still_objs_;
5946
5947 /*
5948 * Note that at this point bytes_allocated_minus_occupied is
5949 * actually the number of bytes allocated by the processor.
5950 */
5951
5952 bytes_allocated += ps->mem.bytes_allocated_minus_occupied_;
5953 }
5954
5955 occupied_words_movable = movable;
5956 occupied_words_still = still;
5957
5958 latest_gc_alloc = bytes_allocated;
5959
5960 #undef ___VMSTATE_MEM
5961 #define ___VMSTATE_MEM(var) ___VMSTATE_FROM_PSTATE(___ps)->mem.var
5962 }
5963
5964
/*
 * resize_heap resizes the heap at the end of a GC so that there is
 * room for the live objects plus requested_words_still words for a
 * pending still object allocation.  It returns a boolean indicating
 * whether a recoverable heap overflow occurred; in that case part of
 * the overflow reserve is made usable and the still object
 * allocation is cancelled.  The msections are grown or shrunk to the
 * target size and the GC statistics are updated.
 */

___HIDDEN ___BOOL resize_heap
   ___P((___virtual_machine_state ___vms,
         ___SIZE_TS requested_words_still),
        (___vms,
         requested_words_still)
___virtual_machine_state ___vms;
___SIZE_TS requested_words_still;)
{
#undef ___VMSTATE_MEM
#define ___VMSTATE_MEM(var) ___vms->mem.var

  ___BOOL overflow = 0;
  ___SIZE_TS target_heap_space;
  ___SIZE_TS target_movable_space;
  int target_nb_sections;
  ___SIZE_TS live;

  determine_occupied_words (___vms);

  occupied_words_still += requested_words_still; /* pretend requested space is live */

  live = occupied_words_movable + occupied_words_still;

  /*
   * Determine the target size of the heap in msections given the
   * space requested for the still object.
   */

  target_heap_space = adjust_heap (live);

  if (live > target_heap_space)
    {
      /*
       * Trigger a recoverable heap overflow.
       */

      overflow = 1;

      /*
       * Take some space from the overflow reserve.
       */

      overflow_reserve >>= 5; /* make 96.875% of reserve usable */

      if (overflow_reserve == 0)
        fatal_heap_overflow ();

      /*
       * Cancel allocation of still object.
       */

      occupied_words_still -= requested_words_still;
      live -= requested_words_still;

      target_heap_space = adjust_heap (live);
    }

  if (live + normal_overflow_reserve <= target_heap_space)
    {
      /*
       * Now that there is enough free space, reset the overflow
       * reserve to its normal value.
       */

      overflow_reserve = normal_overflow_reserve;
    }

  target_movable_space = target_heap_space - occupied_words_still;

  SET_MAX(target_movable_space, 0);

  /*
   * Compute the number of msections required after the GC.  The code
   * reserves ___MIN_NB_MSECTIONS_PER_PROCESSOR per processor taking
   * the target number of processors into account in case the GC was
   * called as part of the resizing of the VM.
   */

  target_nb_sections =
    ___MIN_NB_MSECTIONS_PER_PROCESSOR * target_processor_count +
    ___CEILING_DIV(target_movable_space + normal_overflow_reserve,
                   ___MSECTION_SIZE - 2*___MSECTION_FUDGE);

  SET_MAX(target_nb_sections,
          nb_msections_assigned);

  adjust_msections (&the_msections, target_nb_sections);

  heap_size = compute_heap_space();

  /*
   * Maintain GC statistics.
   */

  latest_gc_heap_size = ___CAST(___F64,heap_size) * ___WS;

  latest_gc_live = ___CAST(___F64,live) * ___WS;
  latest_gc_movable = ___CAST(___F64,occupied_words_movable) * ___WS;
  latest_gc_still = ___CAST(___F64,occupied_words_still) * ___WS;

  return overflow;

#undef ___VMSTATE_MEM
#define ___VMSTATE_MEM(var) ___VMSTATE_FROM_PSTATE(___ps)->mem.var
}
6070
6071
6072 ___HIDDEN void mark_registers
6073 ___P((___PSDNC),
6074 (___PSVNC)
6075 ___PSDKR)
6076 {
6077 ___PSGET
6078
6079 #ifdef ENABLE_CONSISTENCY_CHECKS
6080 reference_location = IN_REGISTER;
6081 #endif
6082
6083 mark_array (___PSP ___ps->r, sizeof(___ps->r)/sizeof(*___ps->r));
6084 }
6085
6086
6087 ___HIDDEN void mark_saved
6088 ___P((___PSDNC),
6089 (___PSVNC)
6090 ___PSDKR)
6091 {
6092 ___PSGET
6093
6094 #ifdef ENABLE_CONSISTENCY_CHECKS
6095 reference_location = IN_SAVED;
6096 #endif
6097
6098 mark_array (___PSP ___ps->saved, sizeof(___ps->saved)/sizeof(*___ps->saved));
6099 }
6100
6101
6102 ___HIDDEN void mark_processor_scmobj
6103 ___P((___PSDNC),
6104 (___PSVNC)
6105 ___PSDKR)
6106 {
6107 ___PSGET
6108
6109 #ifdef ENABLE_CONSISTENCY_CHECKS
6110 reference_location = IN_PROCESSOR_SCMOBJ;
6111 #endif
6112
6113 mark_array (___PSP
6114 ___BODY0_AS(___PROCESSOR_SCMOBJ(___ps),___tSUBTYPED),
6115 ___PROCESSOR_SIZE);
6116 }
6117
6118
6119 ___HIDDEN void mark_vm_scmobj
6120 ___P((___PSDNC),
6121 (___PSVNC)
6122 ___PSDKR)
6123 {
6124 ___PSGET
6125 ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);
6126
6127 #ifdef ENABLE_CONSISTENCY_CHECKS
6128 reference_location = IN_VM_SCMOBJ;
6129 #endif
6130
6131 mark_array (___PSP
6132 ___BODY0_AS(___VM_SCMOBJ(___vms),___tSUBTYPED),
6133 ___VM_SIZE);
6134 }
6135
6136
/*
 * mark_global_variables marks, as GC roots, the portion of the
 * global variables assigned to this processor.  The set of global
 * variables is partitioned evenly among the processors of the VM
 * (using the processor id) so that the marking work is divided
 * between them.
 */

___HIDDEN void mark_global_variables
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);
  int id = ___PROCESSOR_ID(___ps,___vms); /* id of this processor */

#ifdef ENABLE_CONSISTENCY_CHECKS
  reference_location = IN_GLOBAL_VAR;
#endif

  /*
   * Mark a portion of the global variables.
   */

#ifdef ___SINGLE_VM

  {
    /* partition the global variable sublists among the processors */

    int np = ___vms->processor_count;
    int lo = (id * ___GLO_SUBLIST_COUNT) / np;
    int hi = ((id+1) * ___GLO_SUBLIST_COUNT) / np;

    while (lo < hi)
      {
        ___glo_sublist_struct *sl = &___GSTATE->mem.glo_list.sublist[lo];
        ___glo_struct *glo = sl->head;

        while (glo != 0)
          {
#ifdef ___DEBUG_GARBAGE_COLLECT_globals
            print_global_var_name (glo);
            ___printf ("\n");
#endif
            mark_array (___PSP &___GLOCELL(glo->val), 1);
            glo = glo->next;
          }

        lo++;
      }
  }

#else

  {
    /* partition the VM's global variable cell array among the
       processors and mark this processor's slice */

    int n = ___GSTATE->mem.glo_list.count;
    int np = ___vms->processor_count;
    int lo = (id * n) / np;
    int hi = ((id+1) * n) / np;

    mark_array (___PSP
                ___VMSTATE_FROM_PSTATE(___ps)->glos+lo,
                hi-lo);
  }

#endif
}
6195
6196
/*
 * Mark the global symbol table and keyword table as GC roots.  Each
 * table is a single Scheme object, so each is marked as a one element
 * array.
 */

___HIDDEN void mark_symkey_tables
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET

#ifdef ENABLE_CONSISTENCY_CHECKS
  reference_location = IN_SYMKEY_TABLE;
#endif

  mark_array (___PSP &___GSTATE->symbol_table, 1);
  mark_array (___PSP &___GSTATE->keyword_table, 1);
}
6211
6212
6213 ___HIDDEN void mark_reachable_from_marked
6214 ___P((___PSDNC),
6215 (___PSVNC)
6216 ___PSDKR)
6217 {
6218 ___PSGET
6219
6220 #ifndef ___SINGLE_THREADED_VMS
6221
6222 ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);
6223 ___VOLATILE ___WORD *workers_count = &scan_workers_count[traverse_weak_refs];
6224 int np = ___vms->processor_count;
6225 int id = ___PROCESSOR_ID(___ps,___vms); /* id of this processor */
6226 int i;
6227
6228 continue_local_scan:
6229
6230 #endif
6231
6232 do
6233 {
6234 scan_still_objs_to_scan (___PSPNC);
6235 scan_movable_objs_to_scan (___PSPNC);
6236 } while (___CAST(___WORD*,still_objs_to_scan) != 0);
6237
6238 #ifndef ___SINGLE_THREADED_VMS
6239
6240 #define ___GC_SCAN_STEAL_WORK_CYCLES 1000
6241
6242 for (;;)
6243 {
6244 /* Try stealing a queued chunk from another processor */
6245
6246 for (i = (np-1) * ___GC_SCAN_STEAL_WORK_CYCLES - 1; i>=0; i--)
6247 {
6248 ___processor_state ps = ___PSTATE_FROM_PROCESSOR_ID((i + i%(np-1) + 1) % np,___vms);
6249 ___VOLATILE ___WORD *hcsh;
6250
6251 if (ps->mem.heap_chunks_to_scan_head_ !=
6252 ps->mem.heap_chunks_to_scan_tail_)
6253 {
6254 /* only try stealing when chunk FIFO is non-empty */
6255
6256 ___SPINLOCK_LOCK(ps->mem.heap_chunks_to_scan_lock_);
6257
6258 if ((hcsh=ps->mem.heap_chunks_to_scan_head_) !=
6259 ps->mem.heap_chunks_to_scan_tail_)
6260 {
6261 /* chunk FIFO is really non-empty */
6262
6263 ___WORD *ptr = ___UNTAG_AS(*hcsh, ___FORW);
6264
6265 ps->mem.heap_chunks_to_scan_head_ = ptr;
6266
6267 ___SHARED_MEMORY_BARRIER(); /* share heap_chunks_to_scan_head */
6268
6269 ___SPINLOCK_UNLOCK(ps->mem.heap_chunks_to_scan_lock_);
6270
6271 scan_complete_heap_chunk (___PSP ptr+1);
6272
6273 goto continue_local_scan;
6274 }
6275 else
6276 {
6277 ___SPINLOCK_UNLOCK(ps->mem.heap_chunks_to_scan_lock_);
6278 }
6279 }
6280 }
6281
6282 /* Signal being idle and wait for more work or termination */
6283
6284 ___MUTEX_LOCK(scan_termination_mutex);
6285
6286 if (--(*workers_count) == 0)
6287 {
6288 /* Scan has terminated */
6289
6290 for (i=np-1; i>=0; i--)
6291 ___CONDVAR_SIGNAL(scan_termination_condvar);
6292
6293 ___MUTEX_UNLOCK(scan_termination_mutex);
6294
6295 break;
6296 }
6297 else
6298 {
6299 /* Other processors are still actively scanning chunks */
6300
6301 ___ACTLOG_BEGIN_PS(gc_wait,lightgray);
6302 ___CONDVAR_WAIT(scan_termination_condvar,scan_termination_mutex);
6303 ___ACTLOG_END_PS();
6304
6305 if (*workers_count == 0)
6306 {
6307 ___MUTEX_UNLOCK(scan_termination_mutex);
6308 break;
6309 }
6310
6311 (*workers_count)++;
6312
6313 ___MUTEX_UNLOCK(scan_termination_mutex);
6314 }
6315 }
6316
6317 #endif
6318 }
6319
6320
/*
 * GC setup phase: executed by every processor of the VM.  Processor 0
 * performs the VM-wide stack/heap msection setup; every processor
 * builds its own list of externally referenced still objects and
 * settles its deferred still object accounting.
 */

___HIDDEN void garbage_collect_setup_phase
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);

#ifdef ENABLE_GC_ACTLOG_PHASES
  ___ACTLOG_BEGIN_PS(setup_phase,_);
#endif

#ifdef ENABLE_GC_TRACE_PHASES
  if (___PROCESSOR_ID(___ps,___vms) == 0)
    ___printf ("garbage_collect_setup_phase\n");
  BARRIER();
#endif

  /* Assign initial stack and heap msections to each processor */

  if (___PROCESSOR_ID(___ps,___vms) == 0)
    setup_stack_heap_vmstate (___vms);

  /* Create list of externally referenced still objects to trace */

  setup_still_objs_to_scan (___PSPNC);

  /* Account for deferred accounting of still object allocation */

  words_still_objs += words_still_objs_deferred;
  words_still_objs_deferred = 0;

#ifdef ENABLE_GC_ACTLOG_PHASES
  ___ACTLOG_END_PS();
#endif
}
6357
6358
/*
 * GC strong mark phase: mark all objects reachable from the roots
 * without traversing weak references.  VM-wide roots (VM object,
 * symbol/keyword tables, reference counted objects) are marked by
 * processor 0 only; per-processor roots (continuation, registers,
 * saved slots, processor object) and a share of the global variables
 * are marked by every processor, then everything reachable from the
 * marked roots is scanned.
 */

___HIDDEN void garbage_collect_mark_strong_phase
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);

#ifdef ENABLE_GC_ACTLOG_PHASES
  ___ACTLOG_BEGIN_PS(mark_strong_phase,_);
#endif

#ifdef ENABLE_GC_TRACE_PHASES
  if (___PROCESSOR_ID(___ps,___vms) == 0)
    ___printf ("garbage_collect_mark_strong_phase\n");
  BARRIER();
#endif

#ifdef ENABLE_CONSISTENCY_CHECKS
  if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) >= 1)
    {
      stack_fudge_used = 0;
      heap_fudge_used = 0;
    }
#endif

  /* maintain list of GC hash tables reached by GC */

  reached_gc_hash_tables = 0;

  traverse_weak_refs = 0; /* don't traverse weak references in this phase */

  /* VM-wide roots are marked by processor 0 only */

  if (___PROCESSOR_ID(___ps,___vms) == 0)
    {
      mark_vm_scmobj (___PSPNC);
      mark_symkey_tables (___PSPNC);
      mark_rc (___PSPNC);
    }

  /* per-processor roots (and a share of the globals) */

  mark_global_variables (___PSPNC);

  mark_continuation (___PSPNC);

  mark_registers (___PSPNC);

  mark_saved (___PSPNC);

  mark_processor_scmobj (___PSPNC);

  /* transitively scan everything reachable from the marked roots */

  mark_reachable_from_marked (___PSPNC);

#ifdef ENABLE_GC_ACTLOG_PHASES
  ___ACTLOG_END_PS();
#endif
}
6414
6415
/*
 * GC weak mark phase: now that everything strongly reachable has been
 * scanned, traverse weak references too, process wills, and scan
 * whatever additional objects become reachable, then move this
 * processor's continuation.
 */

___HIDDEN void garbage_collect_mark_weak_phase
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);

  /*
   * At this point all of the objects accessible from the roots
   * without having to traverse a weak reference have been scanned
   * by the GC.
   */

#ifdef ENABLE_GC_ACTLOG_PHASES
  ___ACTLOG_BEGIN_PS(mark_weak_phase,_);
#endif

#ifdef ENABLE_GC_TRACE_PHASES
  if (___PROCESSOR_ID(___ps,___vms) == 0)
    ___printf ("garbage_collect_mark_weak_phase\n");
  BARRIER();
#endif

  traverse_weak_refs = 1; /* traverse weak references in this phase */

  process_wills (___PSPNC);

  mark_reachable_from_marked (___PSPNC);

  move_continuation (___PSPNC);

#ifdef ENABLE_GC_ACTLOG_PHASES
  ___ACTLOG_END_PS();
#endif
}
6452
6453
/*
 * GC cleanup phase: update the GC hash tables reached during marking
 * and free the still objects that were not marked.
 */

___HIDDEN void garbage_collect_cleanup_phase
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);

#ifdef ENABLE_GC_ACTLOG_PHASES
  ___ACTLOG_BEGIN_PS(cleanup_phase,_);
#endif

#ifdef ENABLE_GC_TRACE_PHASES
  if (___PROCESSOR_ID(___ps,___vms) == 0)
    ___printf ("garbage_collect_cleanup_phase\n");
  BARRIER();
#endif

  process_gc_hash_tables (___PSPNC);

  free_unmarked_still_objs (___PSPNC);

#ifdef ENABLE_GC_ACTLOG_PHASES
  ___ACTLOG_END_PS();
#endif
}
6480
6481
/*
 * Garbage collect the heap of the VM that ___ps belongs to.  All
 * processors of the VM execute this function, proceeding in lockstep
 * through the GC phases separated by BARRIER()s; processor 0 alone
 * performs the single-threaded steps (heap setup, heap resizing,
 * statistics).  requested_words_still is the size in words of a
 * pending still object allocation that must fit after the collection
 * (passed through to resize_heap).  Returns nonzero on heap overflow.
 */

___BOOL ___garbage_collect_pstate
   ___P((___processor_state ___ps,
         ___SIZE_TS requested_words_still),
        (___ps,
         requested_words_still)
___processor_state ___ps;
___SIZE_TS requested_words_still;)
{
  ___virtual_machine_state ___vms = ___VMSTATE_FROM_PSTATE(___ps);
  ___BOOL overflow = 0;
  /* timing variables are only written and read by processor 0 */
  ___F64 user_time_start, sys_time_start, real_time_start;
  ___F64 user_time_end, sys_time_end, real_time_end;
  ___F64 user_time, sys_time, real_time;

  ___ACTLOG_BEGIN_PS(gc,red);

  /* Start measuring GC statistics */

  if (___PROCESSOR_ID(___ps,___vms) == 0)
    {
#ifdef ENABLE_GC_TRACE_PHASES
      ___printf ("----------------------------------------- GC START #%d\n", ___CAST(int,nb_gcs)+1);
#endif
      ___process_times (&user_time_start, &sys_time_start, &real_time_start);
    }

  /* Print debugging info */

#ifdef ___DEBUG_GARBAGE_COLLECT

  {
    int p;
    int np = ___vms->processor_count;
    for (p=0; p<np; p++)
      {
        if (___PROCESSOR_ID(___ps,___vms) == p)
          {
            ___printf ("processor #%d\n", p);
            ___printf ("heap_size = %d\n", heap_size);
            ___printf ("tospace_offset = %d\n", ___ps->mem.tospace_offset_);
            ___printf ("___ps->stack_start = %p\n", ___ps->stack_start);
            ___printf ("___ps->stack_break = %p\n", ___ps->stack_break);
            ___printf ("___ps->fp = %p\n", ___ps->fp);
            ___printf ("___ps->stack_limit = %p\n", ___ps->stack_limit);
            ___printf ("___ps->heap_limit = %p\n", ___ps->heap_limit);
            ___printf ("___ps->hp = %p\n", ___ps->hp);
          }
        BARRIER(); /* print one processor at a time, in id order */
      }
  }

#endif


  /* Recover processor's stack and heap pointers */

  alloc_stack_ptr = ___ps->fp;
  alloc_heap_ptr = ___ps->hp;


  /* Keep track of bytes allocated by this processor */

  bytes_allocated_minus_occupied += bytes_occupied(___ps);

  BARRIER();


  /* Setup the stacks and heaps of all the processors */

  garbage_collect_setup_phase (___PSPNC);

  BARRIER();


  /* Mark the objects that are reachable strongly */

  garbage_collect_mark_strong_phase (___PSPNC);

  BARRIER();


  /* Mark the objects that are reachable weakly */

  garbage_collect_mark_weak_phase (___PSPNC);

  BARRIER();


  /* Process gc hash tables and free unreachable still objects */

  garbage_collect_cleanup_phase (___PSPNC);

  BARRIER();


  /* Resize heap */

  if (___PROCESSOR_ID(___ps,___vms) == 0)
    overflow = resize_heap (___vms, requested_words_still);

  BARRIER();


  /* Guarantee heap fudge */

  if (alloc_heap_ptr > alloc_heap_limit - ___MSECTION_FUDGE)
    next_heap_msection (___ps);


  /* Keep track of bytes allocated by this processor */

  bytes_allocated_minus_occupied -= bytes_occupied(___ps);


  /* Finalize measuring GC statistics */

  if (___PROCESSOR_ID(___ps,___vms) == 0)
    {
      ___process_times (&user_time_end, &sys_time_end, &real_time_end);

      user_time = user_time_end - user_time_start;
      sys_time = sys_time_end - sys_time_start;
      real_time = real_time_end - real_time_start;

      nb_gcs = nb_gcs + 1.0;
      gc_user_time += user_time;
      gc_sys_time += sys_time;
      gc_real_time += real_time;

      latest_gc_user_time = user_time;
      latest_gc_sys_time = sys_time;
      latest_gc_real_time = real_time;

      ___raise_interrupt_pstate (___ps, ___INTR_GC); /* raise gc interrupt */

#ifdef ENABLE_GC_TRACE_PHASES
      ___printf ("----------------------------------------- GC END #%d\n", ___CAST(int,nb_gcs));
#endif
    }

  /* Prepare to continue executing program */

  prepare_mem_pstate (___ps);


  ___ACTLOG_END_PS();

  return overflow;
}
6631
6632
6633 #ifdef ___DEBUG_STACK_LIMIT
6634
6635 ___BOOL ___stack_limit_debug
6636 ___P((___PSD
6637 int line,
6638 char *file),
6639 (___PSV
6640 line,
6641 file)
6642 ___PSDKR
6643 int line;
6644 char *file;)
6645
6646 #else
6647
6648 ___BOOL ___stack_limit
6649 ___P((___PSDNC),
6650 (___PSVNC)
6651 ___PSDKR)
6652
6653 #endif
6654 {
6655 ___PSGET
6656
6657 /*
6658 * In a multithreaded VM, the function ___stack_limit can be called
6659 * concurrently by multiple processors. The VM level memory allocation
6660 * lock is used to implement a critical section.
6661 */
6662
6663 /* Recover processor's stack and heap pointers */
6664
6665 alloc_stack_ptr = ___ps->fp;
6666 alloc_heap_ptr = ___ps->hp;
6667
6668 #ifdef ___DEBUG_STACK_LIMIT
6669 ___ps->stack_limit_line = line;
6670 ___ps->stack_limit_file = file;
6671 ___printf ("___POLL caused ___stack_limit call at %s:%d\n",
6672 ___ps->poll_file,
6673 ___ps->poll_line);
6674 #endif
6675
6676 #ifdef ENABLE_CONSISTENCY_CHECKS
6677 if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) >= 1)
6678 check_fudge_used (___PSPNC);
6679 #endif
6680
6681 if (alloc_stack_ptr < alloc_stack_limit + ___MSECTION_FUDGE)
6682 {
6683 /*
6684 * There isn't enough free space in the current stack msection.
6685 */
6686
6687 ___msection *prev_stack_msection = stack_msection;
6688 ___WORD *prev_alloc_stack_start = alloc_stack_start;
6689 ___WORD *prev_alloc_stack_ptr = alloc_stack_ptr;
6690 ___WORD *prev_stack_break = ___ps->stack_break;
6691 ___WORD *fp;
6692 int frame_count;
6693 int words;
6694
6695 /*
6696 * Get a new stack msection.
6697 */
6698
6699 ___msection *ms = msection_free_list;
6700
6701 if (stack_msection != heap_msection &&
6702 ms != NULL)
6703 {
6704 /* we can reuse an existing one */
6705
6706 msection_free_list = ___CAST(___msection*,ms->base[0]);
6707
6708 set_stack_msection (___ps, ms);
6709 }
6710 else
6711 {
6712 /* we need to allocate a new msection */
6713
6714 ALLOC_MEM_LOCK();
6715
6716 if (
6717 #ifdef CALL_GC_FREQUENTLY
6718 --___gc_calls_to_punt < 0 ||
6719 #endif
6720 compute_free_heap_space() < ___MSECTION_SIZE)
6721 {
6722 ALLOC_MEM_UNLOCK();
6723
6724 /*
6725 * Because the GC preserves the topmost contiguous
6726 * frames in the stack msection (the frames between the
6727 * stack pointer and the latest break frame), the
6728 * occupation of the stack msection would increase
6729 * gradually with subsequent calls to ___stack_limit and
6730 * this would eventually cause an uncontrolled overflow
6731 * of the stack msection.
6732 *
6733 * To avoid this, a break frame is added at the top of
6734 * the stack when the topmost contiguous frames in the
6735 * stack msection take too much space. This will cause
6736 * the GC to move all the frames to the heap.
6737 */
6738
6739 if ((___ps->stack_break - alloc_stack_ptr) >
6740 (___ps->stack_start - ___ps->stack_limit)*2/3)
6741 {
6742 /*
6743 * At the top of the current stack msection there are
6744 * contiguous frames that occupy more than 2/3 of the
6745 * available space.
6746 */
6747
6748 /*
6749 * Add a break frame.
6750 */
6751
6752 ___FP_ADJFP(alloc_stack_ptr,___BREAK_FRAME_SPACE)
6753 ___FP_SET_STK(alloc_stack_ptr,
6754 -___BREAK_FRAME_NEXT,
6755 ___CAST(___WORD,prev_alloc_stack_ptr))
6756 ___ps->stack_break = alloc_stack_ptr;
6757 }
6758
6759 prepare_mem_pstate (___ps);
6760
6761 return 1; /* trigger GC */
6762 }
6763
6764 next_stack_msection_without_locking (___ps);
6765
6766 ALLOC_MEM_UNLOCK();
6767 }
6768
6769 /*
6770 * Move to the new stack msection.
6771 */
6772
6773 ___ps->stack_start = alloc_stack_start;
6774 alloc_stack_ptr = alloc_stack_start;
6775
6776 /*
6777 * Create a "break frame" in the new stack msection. The break
6778 * frame is used by the break handler (see _kernel.scm).
6779 */
6780
6781 ___FP_ADJFP(alloc_stack_ptr,___FIRST_BREAK_FRAME_SPACE)
6782
6783 /*
6784 * Because ___stack_limit is only called by the stack-limit
6785 * handler in _kernel.scm and it has pushed an internal
6786 * return frame to the top of the stack, the frame pointer
6787 * can't be pointing to a break frame,
6788 * i.e. prev_alloc_stack_ptr != prev_stack_break.
6789 *
6790 * If the topmost frame were to be left on the
6791 * stack, when ___stack_overflow_undo would return to it, an
6792 * uncontrolled overflow of the stack would be possible
6793 * (because the generated code assumes that after returning
6794 * from a call to ___stack_limit it is OK to perform a
6795 * function call without checking the stack limit).
6796 *
6797 * For this reason, is is necessary for correctness to
6798 * transfer the topmost frame to the new stack msection.
6799 * However, only transferring the topmost frame may cause
6800 * the stack limit handler to be called frequently if there
6801 * are many shallow function calls in a row (calls to
6802 * ___stack_limit followed by ___stack_overflow_undo in a
6803 * tight loop). To avoid this performance issue, it is best
6804 * to transfer a certain number of frames, but no more than
6805 * up to the break frame.
6806 */
6807
6808 fp = prev_alloc_stack_ptr;
6809 frame_count = 0; /* count of frames to transfer */
6810
6811 for (;;)
6812 {
6813 int fs, link;
6814 ___WORD ra1 = ___FP_STK(fp,-___FRAME_STACK_RA);
6815 ___WORD ra2;
6816
6817 frame_count++;
6818
6819 if (ra1 == ___GSTATE->internal_return)
6820 {
6821 ___WORD actual_ra = ___FP_STK(fp,___RETI_RA);
6822 ___RETI_GET_FS_LINK(actual_ra,fs,link)
6823 }
6824 else
6825 {
6826 ___RETN_GET_FS_LINK(ra1,fs,link)
6827 }
6828
6829 ___FP_ADJFP(fp,-___FRAME_SPACE(fs)) /* get base of frame */
6830
6831 ra2 = ___FP_STK(fp,link+1);
6832
6833 words = fp - prev_alloc_stack_ptr;
6834
6835 if (fp == prev_stack_break)
6836 {
6837 /* reached the break frame */
6838
6839 /* best to not transfer any frames to new stack msection */
6840
6841 /*
6842 * Add a break frame.
6843 */
6844
6845 ___FP_ADJFP(alloc_stack_ptr,___BREAK_FRAME_SPACE)
6846 ___FP_SET_STK(alloc_stack_ptr,
6847 -___BREAK_FRAME_NEXT,
6848 ___CAST(___WORD,prev_alloc_stack_ptr))
6849 ___ps->stack_break = alloc_stack_ptr;
6850
6851 fp = alloc_stack_ptr;
6852
6853 break;
6854 }
6855
6856 ___FP_SET_STK(fp,-___FRAME_STACK_RA,ra2)
6857
6858 words = fp - prev_alloc_stack_ptr;
6859
6860 if (frame_count >= 5 || words >= 100)
6861 {
6862 /* reached max frame count or max volume */
6863
6864 /*
6865 * Save state of previous stack msection in the
6866 * first break frame of new stack msection to allow
6867 * returning to the previous stack msection when
6868 * ___stack_overflow_undo is called.
6869 */
6870
6871 ___FP_SET_STK(alloc_stack_ptr,
6872 -___FIRST_BREAK_FRAME_STACK_MSECTION,
6873 ___CAST(___WORD,prev_stack_msection))
6874
6875 ___FP_SET_STK(alloc_stack_ptr,
6876 -___FIRST_BREAK_FRAME_STACK_BREAK,
6877 ___CAST(___WORD,prev_stack_break))
6878
6879 ___FP_SET_STK(alloc_stack_ptr,
6880 -___BREAK_FRAME_NEXT,
6881 ___CAST(___WORD,fp))
6882
6883 ___ps->stack_break = alloc_stack_ptr;
6884
6885 memmove (alloc_stack_ptr - words,
6886 prev_alloc_stack_ptr,
6887 words << ___LWS);
6888
6889 ___FP_SET_STK(alloc_stack_ptr,
6890 link+1,
6891 ___GSTATE->handler_break)
6892
6893 ___FP_ADJFP(alloc_stack_ptr, words)
6894
6895 break;
6896 }
6897 }
6898
6899 stack_msection_stop_using (___ps, prev_alloc_stack_start, fp);
6900 }
6901
6902 prepare_mem_pstate (___ps);
6903
6904 return 0;
6905 }
6906
6907
/*
 * Try to undo a stack msection switch performed by ___stack_limit.
 * If the current stack msection is not shared with the heap and only
 * contains the first break frame, the msection is returned to the
 * free list and the processor resumes execution in the previous stack
 * msection (whose state was saved in the first break frame).  Returns
 * the return address to jump to on success, or ___FAL if the undo is
 * not possible.
 */

___WORD ___stack_overflow_undo_if_possible
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET
  ___D_FP
  ___R_FP

  if (stack_msection != heap_msection &&
      ___ps->stack_start == &___STK(-___FIRST_BREAK_FRAME_SPACE))
    {
      /*
       * The current stack msection is not shared with the heap and
       * the break frame is at the start of the stack msection, so
       * the stack can be moved to the previous stack msection.
       */

      /* previous stack msection, saved by ___stack_limit */
      ___msection *ms = ___CAST(___msection*,
                                ___STK(-___FIRST_BREAK_FRAME_STACK_MSECTION));

      /* return the current msection to the free list */
      stack_msection->base[0] = ___CAST(___WORD,msection_free_list);
      msection_free_list = stack_msection;

      set_stack_msection (___ps, ms);

      /* restore the stack break and frame pointer saved in the break frame */

      ___ps->stack_break =
        ___CAST(___WORD*,___STK(-___FIRST_BREAK_FRAME_STACK_BREAK));

      alloc_stack_ptr =
        ___CAST(___WORD*,___STK(-___BREAK_FRAME_NEXT));

      ___ps->stack_start = alloc_stack_start;

      stack_msection_resume_using (___ps, alloc_stack_start, alloc_stack_ptr);

      prepare_mem_pstate (___ps);

      ___SET_FP(alloc_stack_ptr)

      return ___FRAME_FETCH_RA;
    }

  return ___FAL;
}
6953
6954
/*
 * Handle a failed heap limit check.  The processor gets a new heap
 * msection, either by reusing one from the free list or by allocating
 * one under the VM allocation lock.  Returns 1 when a GC must be
 * triggered (not enough free heap space) and 0 when allocation can
 * continue.
 */

#ifdef ___DEBUG_HEAP_LIMIT

___BOOL ___heap_limit_debug
   ___P((___PSD
         int line,
         char *file),
        (___PSV
         line,
         file)
___PSDKR
int line;
char *file;)

#else

___BOOL ___heap_limit
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)

#endif
{
  ___PSGET

  /*
   * In a multithreaded VM, the function ___heap_limit can be called
   * concurrently by multiple processors. The VM level memory allocation
   * lock is used to implement a critical section.
   */

  /* Recover processor's stack and heap pointers */

  alloc_stack_ptr = ___ps->fp;
  alloc_heap_ptr = ___ps->hp;

#ifdef ___DEBUG_HEAP_LIMIT
  ___ps->heap_limit_line = line;
  ___ps->heap_limit_file = file;
#endif

#ifdef ENABLE_CONSISTENCY_CHECKS
  if (___DEBUG_SETTINGS_LEVEL(___GSTATE->setup_params.debug_settings) >= 1)
    check_fudge_used (___PSPNC);
#endif

  {
    /*
     * Get a new heap msection.
     */

    ___msection *ms = msection_free_list;

    if (stack_msection != heap_msection &&
        ms != NULL)
      {
        /* we can reuse an existing one */

        msection_free_list = ___CAST(___msection*,ms->base[0]);

        set_heap_msection (___ps, ms);
      }
    else
      {
        /* we need to allocate a new msection */

        ALLOC_MEM_LOCK();

        if (
#ifdef CALL_GC_FREQUENTLY
            --___gc_calls_to_punt >= 0 &&
#endif
            compute_free_heap_space() >= ___MSECTION_SIZE)
          {
            /* enough free space; extend the heap if the fudge is used up */

            if (alloc_heap_ptr > alloc_heap_limit - ___MSECTION_FUDGE)
              next_heap_msection_without_locking (___ps);

            ALLOC_MEM_UNLOCK();
          }
        else
          {
            ALLOC_MEM_UNLOCK();

            return 1; /* trigger GC */
          }
      }
  }

  prepare_mem_pstate (___ps);

  return 0;
}
7046
7047
7048 /*---------------------------------------------------------------------------*/
7049
7050
/*
 * Return the cumulative number of bytes allocated by this processor.
 * bytes_allocated_minus_occupied tracks allocation minus currently
 * occupied space (it is adjusted around each GC), so adding the bytes
 * currently occupied gives the running total.  The processor's stack
 * and heap pointers must be synced first since bytes_occupied depends
 * on them.
 */

___F64 ___bytes_allocated
   ___P((___PSDNC),
        (___PSVNC)
___PSDKR)
{
  ___PSGET

  /* Recover processor's stack and heap pointers */

  alloc_stack_ptr = ___ps->fp;
  alloc_heap_ptr = ___ps->hp;

  return bytes_allocated_minus_occupied + bytes_occupied(___ps);
}
7065
7066
7067 /*---------------------------------------------------------------------------*/
7068