1 
2 /*
3    Provides:
4       GC_malloc_weak_array
5       size_weak_array, mark_weak_array, fixup_weak_array
6       init_weak_arrays zero_weak_arrays
7       GC_malloc_weak_box
8       size_weak_box, mark_weak_box, fixup_weak_box
9       init_weak_boxes zero_weak_boxes
10       GC_malloc_ephemeron
11       size_ephemeron, mark_ephemeron, fixup_ephemeron
      BTC_ephemeron_mark [ifdef NEWGC_BTC_ACCOUNT]
13       init_ephemerons mark_ready_ephemerons zero_remaining_ephemerons
14       num_last_seen_ephemerons
15    Requires:
16       weak_array_tag
17       weak_box_tag
18       ephemeron_tag
19       is_marked(p)
20       is_in_generation_half(p)
21       Type_Tag
22 */
23 
24 #define WEAK_INCREMENTAL_DONE_1 ((void *)0x1)
25 #define WEAK_INCREMENTAL_DONE_2 ((void *)0x3)
26 
27 /******************************************************************************/
28 /*                               weak arrays                                  */
29 /******************************************************************************/
30 
size_weak_array(void * p,struct NewGC * gc)31 static int size_weak_array(void *p, struct NewGC *gc)
32 {
33   GC_Weak_Array *a = (GC_Weak_Array *)p;
34 
35   return gcBYTES_TO_WORDS(sizeof(GC_Weak_Array)
36 			  + ((a->count - 1 + 1) * sizeof(void *)));
37 }
38 
/* Mark proc for weak arrays: marks `replace_val` strongly, then
   queues the array on the list matching the current collection phase.
   The weak elements themselves are NOT marked here; they are examined
   later by zero_weak_arrays. Returns the array's size in words
   (including the trailing inc_next word). */
static int mark_weak_array(void *p, struct NewGC *gc)
{
  GC_Weak_Array *a = (GC_Weak_Array *)p;

  gcMARK2(a->replace_val, gc);

  if (gc->doing_memory_accounting) {
    /* skip */
  } else if (gc->inc_gen1) {
    /* inc_next field is at the end of the `data` array: */
    a->data[a->count] = gc->inc_weak_arrays;
    gc->inc_weak_arrays = a;
  } else if (gc->during_backpointer) {
    if (!gc->gc_full
        || (gc->started_incremental
            /* `a` must have been marked and must be in the old
               generation, or we wouldn't get here; `a` may have been
               fully processed in incremental mode, though */
            && (a->data[a->count] == gc->weak_incremental_done))) {
      /* Keep backpointered weak arrays separate, because we
         should not merge them to the incremental list
         in incremental mode. */
      a->next = gc->bp_weak_arrays;
      gc->bp_weak_arrays = a;
    }
  } else {
    a->next = gc->weak_arrays;
    gc->weak_arrays = a;
    if (gc->gc_full)
      a->data[a->count] = NULL; /* ensure not a future weak_incremental_done */
  }

#if CHECKS
  /* For now, weak arrays only used for symbols, keywords, and falses: */
  {
    void **data;
    int i;
    data = a->data;
    for (i = a->count; i--; ) {
      if (data[i]
          /* NOTE(review): 48/49/58 are presumably the type tags for
             symbol, keyword, and false — confirm against Type_Tag table */
          && (*(short *)(data[i]) != 48)
          && (*(short *)(data[i]) != 49)
          && (*(short *)(data[i]) != 58)) {
        CRASH(1);
      }
    }
  }
#endif

  return gcBYTES_TO_WORDS(sizeof(GC_Weak_Array)
                          + ((a->count - 1 + 1) * sizeof(void *)));
}
91 
fixup_weak_array(void * p,struct NewGC * gc)92 static int fixup_weak_array(void *p, struct NewGC *gc)
93 {
94   GC_Weak_Array *a = (GC_Weak_Array *)p;
95   int i;
96   void **data;
97 
98   gcFIXUP2(a->replace_val, gc);
99 
100   data = a->data;
101   for (i = a->count; i--; ) {
102     if (data[i]) {
103       gcFIXUP2(data[i], gc);
104     }
105   }
106 
107   return gcBYTES_TO_WORDS(sizeof(GC_Weak_Array)
108 			  + ((a->count - 1) * sizeof(void *)));
109 }
110 
/* Allocates a weak array with `size_in_bytes` of payload (i.e.,
   size_in_bytes >> LOG_WORD_SIZE element slots). `replace_val` is the
   value installed in place of an element whose referent is collected. */
void *GC_malloc_weak_array(size_t size_in_bytes, void *replace_val)
{
  GCTYPE *gc = GC_get_GC();
  GC_Weak_Array *arr;
  size_t alloc_bytes;

  /* The allocation below may trigger a collection, so protect
     `replace_val` in the parking space: */
  CHECK_PARK_UNUSED(gc);
  gc->park[0] = replace_val;

  /* Struct (minus its built-in data[1] slot) + payload + one trailing
     word used as the incremental chain link: */
  alloc_bytes = size_in_bytes
    + sizeof(GC_Weak_Array)
    - sizeof(void *)
    + sizeof(GC_Weak_Array *);
  arr = (GC_Weak_Array *)GC_malloc_one_tagged(alloc_bytes);

  replace_val = gc->park[0];
  gc->park[0] = NULL;

  arr->type = gc->weak_array_tag;
  arr->replace_val = replace_val;
  arr->count = (size_in_bytes >> LOG_WORD_SIZE);

  return arr;
}
134 
/* Called at the start of a collection cycle: resets the list of weak
   arrays discovered by marking. The backpointer list must already be
   empty (it is consumed by zero_weak_arrays). */
static void init_weak_arrays(GCTYPE *gc)
{
  GC_ASSERT(!gc->bp_weak_arrays);
  gc->weak_arrays = NULL;
}
140 
/* Destructively appends `bp_wa` to the end of `wa` and returns the
   combined list; *_num_gen0 is set to the length of `wa` (0 when `wa`
   is empty). */
static GC_Weak_Array *append_weak_arrays(GC_Weak_Array *wa, GC_Weak_Array *bp_wa, int *_num_gen0)
{
  GC_Weak_Array *tail;
  int n = 0;

  if (!wa) {
    *_num_gen0 = 0;
    return bp_wa;
  }

  for (tail = wa; tail->next; tail = tail->next)
    n++;
  n++; /* count the final node as well */

  tail->next = bp_wa;
  *_num_gen0 = n;
  return wa;
}
157 
/* Post-mark processing for weak arrays: replaces each element whose
   referent was not marked with the array's replace_val.
     force_zero   => replace every element unconditionally
     from_inc     => walk the incremental old-generation list (chained
                     through data[count]) instead of the per-collection
                     lists
     need_resolve => forward surviving elements to their relocated
                     addresses via GC_resolve2
     fuel         => work budget; negative presumably means unlimited
                     (only positive values are decremented) — TODO confirm
   Returns the leftover fuel (0 if exhausted). */
static int zero_weak_arrays(GCTYPE *gc, int force_zero, int from_inc, int need_resolve, int fuel)
{
  GC_Weak_Array *wa;
  int i, num_gen0;

  if (!fuel) return 0;

  if (from_inc) {
    wa = gc->inc_weak_arrays;
    num_gen0 = 0;
  } else
    wa = append_weak_arrays(gc->weak_arrays, gc->bp_weak_arrays, &num_gen0);

  /* Incremental bookkeeping below only applies while an incremental
     (non-full) collection is in progress: */
  if (gc->gc_full || !gc->started_incremental)
    num_gen0 = 0;

  while (wa) {
    void **data;

    data = wa->data;
    for (i = wa->count; i--; ) {
      void *p = data[i];
      if (p && (force_zero || !is_marked(gc, p)))
        data[i] = wa->replace_val; /* referent died: install replacement */
      else if (need_resolve)
        data[i] = GC_resolve2(p, gc); /* referent moved: forward pointer */
    }
    /* Charge fuel proportional to the elements scanned: */
    if (fuel > 0) {
      fuel -= (4 * wa->count);
      if (fuel < 0) fuel = 0;
    }

    /* The first `num_gen0` entries came from the non-backpointer list
       (see append_weak_arrays); decide whether each must be
       re-checked by a later incremental pass: */
    if (num_gen0 > 0) {
      if (!is_in_generation_half(gc, wa)) {
        if (!gc->all_marked_incremental) {
          /* For incremental mode, preserve this weak array
             in the incremental list for re-checking later. */
          wa->data[wa->count] = gc->inc_weak_arrays;
          gc->inc_weak_arrays = wa;
        } else {
          /* Count as incremental-done: */
          wa->data[wa->count] = gc->weak_incremental_done;
        }
      }
    }

    if (from_inc) {
      /* The incremental list is chained through the word just past
         the elements; mark each visited array as done: */
      GC_Weak_Array *next;
      next = (GC_Weak_Array *)wa->data[wa->count];
      wa->data[wa->count] = gc->weak_incremental_done;
      wa = next;
    } else
      wa = wa->next;
    num_gen0--;
  }
  if (from_inc)
    gc->inc_weak_arrays = NULL;
  else {
    gc->weak_arrays = NULL;
    gc->bp_weak_arrays = NULL;
  }

  return fuel;
}
222 
223 /******************************************************************************/
224 /*                                weak boxes                                  */
225 /******************************************************************************/
226 
/* Debug helpers: flip the `#if 0` to `#if 1` to enable sanity checks
   that a weak box is not already present on a list before it is
   pushed (guards against cycles in the chained lists). Normal builds
   compile the empty stubs below so call sites cost nothing. */
#if 0
static void check_weak_box_not_already_in_inc_chain(GC_Weak_Box *wb, GC_Weak_Box *wbc)
{
  while (wbc) {
    GC_ASSERT(wb != wbc);
    wbc = wbc->inc_next;
  }
}
static void check_weak_box_not_already_in_chain(GC_Weak_Box *wb, GC_Weak_Box *wbc)
{
  while (wbc) {
    GC_ASSERT(wb != wbc);
    wbc = wbc->next;
  }
}
#else
static void check_weak_box_not_already_in_inc_chain(GC_Weak_Box *wb, GC_Weak_Box *wbc) { }
static void check_weak_box_not_already_in_chain(GC_Weak_Box *wb, GC_Weak_Box *wbc) { }
#endif
246 
size_weak_box(void * p,struct NewGC * gc)247 static int size_weak_box(void *p, struct NewGC *gc)
248 {
249   return gcBYTES_TO_WORDS(sizeof(GC_Weak_Box));
250 }
251 
/* Mark proc for weak boxes: marks `secondary_erase` strongly and
   queues the box — keyed by its `is_late` flag — on the list for the
   current collection phase. The boxed `val` is left weak; it is
   inspected later by zero_weak_boxes. Returns the box's size in
   words. */
static int mark_weak_box(void *p, struct NewGC *gc)
{
  GC_Weak_Box *wb = (GC_Weak_Box *)p;

  gcMARK2(wb->secondary_erase, gc);

  if (gc->doing_memory_accounting) {
    /* skip */
  } else if (gc->inc_gen1) {
    /* old-generation marking during an incremental pass: */
    check_weak_box_not_already_in_inc_chain(wb, gc->inc_weak_boxes[wb->is_late]);
    wb->inc_next = gc->inc_weak_boxes[wb->is_late];
    gc->inc_weak_boxes[wb->is_late] = wb;
  } else if (gc->during_backpointer) {
    if ((!gc->gc_full
         || (gc->started_incremental
             /* see note with `gc->weak_incremental_done` for weak arrays */
             && (wb->inc_next == gc->weak_incremental_done)
             && wb->val))
        && (wb->val || gc->started_incremental)) {
      /* Keep backpointered weak arrays separate, because we
         should not merge them to the incremental list
         in incremental mode. */
      check_weak_box_not_already_in_chain(wb, gc->bp_weak_boxes[wb->is_late]);
      check_weak_box_not_already_in_chain(wb, gc->weak_boxes[wb->is_late]);
      wb->next = gc->bp_weak_boxes[wb->is_late];
      gc->bp_weak_boxes[wb->is_late] = wb;
    }
  } else if (wb->val || gc->started_incremental) {
    check_weak_box_not_already_in_chain(wb, gc->weak_boxes[wb->is_late]);
    check_weak_box_not_already_in_chain(wb, gc->bp_weak_boxes[wb->is_late]);
    wb->next = gc->weak_boxes[wb->is_late];
    gc->weak_boxes[wb->is_late] = wb;
    if (gc->gc_full)
      wb->inc_next = NULL; /* ensure not a future weak_incremental_done */
  }

  return gcBYTES_TO_WORDS(sizeof(GC_Weak_Box));
}
290 
fixup_weak_box(void * p,struct NewGC * gc)291 static int fixup_weak_box(void *p, struct NewGC *gc)
292 {
293   GC_Weak_Box *wb = (GC_Weak_Box *)p;
294 
295   gcFIXUP2(wb->secondary_erase, gc);
296   gcFIXUP2(wb->val, gc);
297 
298   return gcBYTES_TO_WORDS(sizeof(GC_Weak_Box));
299 }
300 
/* Allocates a weak box holding `p`. If `secondary` is non-NULL, the
   word at *(secondary + soffset) is also cleared when `p` is
   collected. `is_late` selects the late-weak variant (presumably
   cleared in a later pass than ordinary weak boxes — see the separate
   zero_weak_boxes calls per is_late). May return NULL when a
   future-local allocation fails. */
void *GC_malloc_weak_box(void *p, void **secondary, int soffset, int is_late)
{
  GCTYPE *gc = GC_get_GC();
  GC_Weak_Box *w;

  if (!GC_gen0_alloc_only) {
    /* Allocation might trigger GC, so we use park: */
    CHECK_PARK_UNUSED(gc);
    gc->park[0] = p;
    gc->park[1] = secondary;
  }

  w = (GC_Weak_Box *)GC_malloc_one_tagged(sizeof(GC_Weak_Box));

  /* Future-local allocation may fail: */
  if (!w) return NULL;

  if (!GC_gen0_alloc_only) {
    /* Recover possibly-moved pointers and release the parking space: */
    p = gc->park[0];
    secondary = (void **)gc->park[1];
    gc->park[0] = NULL;
    gc->park[1] = NULL;
  }

  w->type = gc->weak_box_tag;
  w->val = p;
  w->secondary_erase = secondary;
  w->is_late = is_late;
  w->soffset = soffset;

  return w;
}
333 
/* Called at the start of a collection cycle: resets both weak-box
   lists (index 0 = ordinary, 1 = late). The backpointer lists must
   already be empty (they are consumed by zero_weak_boxes). */
static void init_weak_boxes(GCTYPE *gc)
{
  GC_ASSERT(!gc->bp_weak_boxes[0]);
  GC_ASSERT(!gc->bp_weak_boxes[1]);
  gc->weak_boxes[0] = NULL;
  gc->weak_boxes[1] = NULL;
}
341 
/* Destructively appends `bp_wb` to the end of `wb` and returns the
   combined list; *_num_gen0 is set to the length of `wb` (0 when `wb`
   is empty). */
static GC_Weak_Box *append_weak_boxes(GC_Weak_Box *wb, GC_Weak_Box *bp_wb, int *_num_gen0)
{
  GC_Weak_Box *tail;
  int n = 0;

  if (!wb) {
    *_num_gen0 = 0;
    return bp_wb;
  }

  for (tail = wb; tail->next; tail = tail->next)
    n++;
  n++; /* count the final node as well */

  tail->next = bp_wb;
  *_num_gen0 = n;
  return wb;
}
358 
/* Post-mark processing for weak boxes of the given `is_late` class:
   clears each box whose value was not marked, also clearing the
   registered secondary location if any. Arguments parallel
   zero_weak_arrays: force_zero clears unconditionally, from_inc walks
   the incremental list (chained via inc_next), need_resolve forwards
   surviving values, and fuel is a work budget (negative presumably
   means unlimited — TODO confirm). Returns the leftover fuel; an
   exhausted incremental pass saves its resume point and returns 0. */
static int zero_weak_boxes(GCTYPE *gc, int is_late, int force_zero, int from_inc, int need_resolve, int fuel)
{
  GC_Weak_Box *wb;
  int num_gen0;

  if (!fuel) return 0;

  if (from_inc) {
    wb = gc->inc_weak_boxes[is_late];
    num_gen0 = 0;
  } else {
    wb = append_weak_boxes(gc->weak_boxes[is_late],
                           gc->bp_weak_boxes[is_late],
                           &num_gen0);
    if (gc->gc_full || !gc->started_incremental)
      num_gen0 = 0;
  }

  while (wb) {
    GC_ASSERT(is_marked(gc, wb));
    if (!wb->val) {
      /* nothing to do */
    } else if (force_zero || !is_marked(gc, wb->val)) {
      /* The boxed value died: clear the box and its secondary slot. */
      wb->val = NULL;
      if (wb->secondary_erase) {
        void **p;
        mpage *page;

        /* it's possible for the secondary to be in an old generation
           and therefore on an mprotected page: */
        page = pagemap_find_page(gc->page_maps, wb->secondary_erase);
        if (page->mprotected) {
          page->mprotected = 0;
          mmu_write_unprotect_page(gc->mmu, page->addr, APAGE_SIZE, page_mmu_type(page), &page->mmu_src_block);
          /* schedule the page to be re-protected after this phase: */
          page->reprotect_next = gc->reprotect_next;
          gc->reprotect_next = page;
          page->reprotect = 1;
        }
        p = (void **)GC_resolve2(wb->secondary_erase, gc);
        *(p + wb->soffset) = NULL;
        wb->secondary_erase = NULL;
      }
    } else if (need_resolve)
      wb->val = GC_resolve2(wb->val, gc); /* value moved: forward pointer */

    /* The first `num_gen0` entries came from the non-backpointer list
       (see append_weak_boxes); decide whether each must be re-checked
       by a later incremental pass: */
    if (num_gen0 > 0) {
      if (!is_in_generation_half(gc, wb)) {
        if (!gc->all_marked_incremental) {
          /* For incremental mode, preserve this weak box
             in the incremental list for re-checking later. */
          check_weak_box_not_already_in_inc_chain(wb, gc->inc_weak_boxes[wb->is_late]);
          wb->inc_next = gc->inc_weak_boxes[is_late];
          gc->inc_weak_boxes[is_late] = wb;
        } else {
          /* Count as incremental-done: */
          wb->inc_next = gc->weak_incremental_done;
        }
      }
    }

    if (from_inc) {
      /* advance along the incremental chain, marking each box done: */
      GC_Weak_Box *next;
      next = wb->inc_next;
      wb->inc_next = gc->weak_incremental_done;
      wb = next;
    } else
      wb = wb->next;

    num_gen0--;

    /* Fuel accounting; unprotecting a page is charged extra: */
    if (fuel >= 0) {
      if (fuel > 0) {
        if (gc->unprotected_page) {
          fuel -= 100;
          gc->unprotected_page = 0;
        } else
          fuel -= 4;
        if (fuel < 0) fuel = 0;
      } else {
        /* Out of fuel: save the resume point (only an incremental
           pass can stop mid-list) and report exhaustion. */
        GC_ASSERT(from_inc);
        gc->inc_weak_boxes[is_late] = wb;
        return 0;
      }
    }
  }

  /* reset, in case we have a second round */
  if (from_inc) {
    gc->inc_weak_boxes[is_late] = NULL;
  } else {
    gc->weak_boxes[is_late] = NULL;
    gc->bp_weak_boxes[is_late] = NULL;
  }

  return fuel;
}
455 
456 /******************************************************************************/
457 /*                                 ephemeron                                  */
458 /******************************************************************************/
459 
size_ephemeron(void * p,struct NewGC * gc)460 static int size_ephemeron(void *p, struct NewGC *gc)
461 {
462   return gcBYTES_TO_WORDS(sizeof(GC_Ephemeron));
463 }
464 
/* Mark proc for ephemerons: neither key nor val is marked here. If
   the ephemeron still has a value, it is queued on the list for the
   current phase so that mark_ready_ephemerons can mark the value once
   the key's reachability is known. Returns the size in words. */
static int mark_ephemeron(void *p, struct NewGC *gc)
{
  GC_Ephemeron *eph = (GC_Ephemeron *)p;

  if (eph->val) {
    GC_ASSERT(!gc->doing_memory_accounting);
    if (gc->inc_gen1) {
      /* old-generation marking during an incremental pass: */
      eph->inc_next = gc->inc_ephemerons;
      gc->inc_ephemerons = eph;
    } else if (gc->during_backpointer) {
      if (!gc->gc_full
          /* If this old-generation object is not yet marked
             and we're finishing an incremental pass, then
             it won't get marked (and it can only refer to
             other old-generation objects), so ignore in that case */
          && (gc->mark_gen1
              || !gc->started_incremental
              || !gc->all_marked_incremental)) {
        eph->next = gc->bp_ephemerons;
        gc->bp_ephemerons = eph;
      }
    } else {
      eph->next = gc->ephemerons;
      gc->ephemerons = eph;
    }
  }

  return gcBYTES_TO_WORDS(sizeof(GC_Ephemeron));
}
494 
#ifdef NEWGC_BTC_ACCOUNT
/* Mark proc used during BTC memory accounting: both key and value are
   traversed strongly (accounting charges an ephemeron's referents to
   its owner). Outside of accounting, defers to the normal
   mark_ephemeron. */
static int BTC_ephemeron_mark(void *p, struct NewGC *gc)
{
  if (gc->doing_memory_accounting) {

    GC_Ephemeron *eph = (GC_Ephemeron *)p;

    gcMARK2(eph->key, gc);
    gcMARK2(eph->val, gc);

    return gcBYTES_TO_WORDS(sizeof(GC_Ephemeron));
  }
  return mark_ephemeron(p, gc);
}
#endif
510 
511 
fixup_ephemeron(void * p,struct NewGC * gc)512 static int fixup_ephemeron(void *p, struct NewGC *gc)
513 {
514   GC_Ephemeron *eph = (GC_Ephemeron *)p;
515 
516   gcFIXUP2(eph->key, gc);
517   gcFIXUP2(eph->val, gc);
518 
519   return gcBYTES_TO_WORDS(sizeof(GC_Ephemeron));
520 }
521 
GC_malloc_ephemeron(void * k,void * v)522 void *GC_malloc_ephemeron(void *k, void *v)
523 {
524   GCTYPE *gc = GC_get_GC();
525   GC_Ephemeron *eph;
526 
527   /* Allcation might trigger GC, so we use park: */
528   CHECK_PARK_UNUSED(gc);
529   gc->park[0] = k;
530   gc->park[1] = v;
531 
532   eph = (GC_Ephemeron *)GC_malloc_one_tagged(sizeof(GC_Ephemeron));
533 
534   k = gc->park[0];
535   v = gc->park[1];
536   gc->park[0] = NULL;
537   gc->park[1] = NULL;
538 
539   eph->type = gc->ephemeron_tag;
540   eph->key = k;
541   eph->val = v;
542 
543   return eph;
544 }
545 
/* Called at the start of a collection cycle: resets the ephemeron
   lists and the count of ephemerons seen. The backpointer list must
   already be empty. */
void init_ephemerons(GCTYPE *gc) {
  GC_ASSERT(!gc->bp_ephemerons);
  gc->ephemerons = NULL;
  gc->bp_ephemerons = NULL;
  gc->num_last_seen_ephemerons = 0;
}
552 
553 #define EPHEMERON_COMPLETED ((GC_Ephemeron *)0x1)
554 
/* Registers `eph` on the page holding its key, so that marking any
   object on that page notifies us (via trigger_ephemerons) that this
   ephemeron should be re-checked. */
static void add_ephemeron_trigger(GCTYPE *gc, GC_Ephemeron *eph)
{
  mpage *page;

  page = pagemap_find_page(gc->page_maps, eph->key);
  if (!page)
    return;

  GC_ASSERT(!page->triggers || (page->triggers->type == scheme_ephemeron_type));
  eph->trigger_next = page->triggers;
  page->triggers = eph;
}
564 
/* Undoes add_ephemeron_trigger for `eph`. If the ephemeron was
   already resolved through its trigger, it is simply dropped;
   otherwise the page's trigger chain is cleared and `eph` is pushed
   onto `waiting`. Returns the (possibly new) head of the waiting
   list. */
static GC_Ephemeron *remove_ephemeron_trigger(GCTYPE *gc, GC_Ephemeron *eph, GC_Ephemeron *waiting)
{
  mpage *page;

  if (eph->trigger_next == EPHEMERON_COMPLETED) {
    /* drop from waiting list; there are no triggers on the key's
       page, because the ephemeron was triggered that way */
    eph->trigger_next = NULL;
    return waiting;
  }

  page = pagemap_find_page(gc->page_maps, eph->key);
  if (page)
    page->triggers = NULL;
  eph->trigger_next = NULL;
  eph->next = waiting;
  return eph;
}
581 
/* Moves all ephemeron triggers registered on `page` onto the global
   triggered list, so they are re-checked by mark_ready_ephemerons. */
static void trigger_ephemerons(GCTYPE *gc, mpage *page)
{
  GC_Ephemeron *e, *rest;

  e = page->triggers;
  if (!e)
    return;
  page->triggers = NULL;

  while (e) {
    GC_ASSERT(e->type == scheme_ephemeron_type);
    rest = e->trigger_next;
    e->trigger_next = gc->triggered_ephemerons;
    gc->triggered_ephemerons = e;
    e = rest;
  }
}
596 
/* Scans the pending ephemeron lists, marking the value of each
   ephemeron whose key has become reachable. Page triggers plus mark
   propagation between rounds avoid rescanning the whole list per
   discovery, which would be O(N^2) on an ephemeron chain. For
   inc_gen1, only the incremental list is scanned; otherwise `j`
   iterates over the regular list (j=0), the backpointer list (j=1),
   and — on a full GC — the incremental list (j=2). Returns 1 if any
   value was newly marked. */
static int mark_ready_ephemerons(GCTYPE *gc, int inc_gen1)
{
  GC_Ephemeron *waiting, *next, *eph;
  int did_one = 0, j, follow_triggers;

  GC_mark_no_recur(gc, 1);

  for (j = 0; j < (inc_gen1 ? 1 : (gc->gc_full ? 3 : 2)); j++) {
    follow_triggers = 0;
    waiting = NULL;

    /* select the list for this round: */
    if (inc_gen1)
      eph = gc->inc_ephemerons;
    else if (j == 0) {
      eph = gc->ephemerons;
      gc->ephemerons = NULL; /* more may be added here */
    } else if (j == 1)
      eph = gc->bp_ephemerons;
    else {
      eph = gc->inc_ephemerons;
      gc->inc_ephemerons = NULL;
      waiting = gc->ephemerons;
    }

    for (; eph; eph = next) {
      GC_ASSERT(eph->type == scheme_ephemeron_type);
      /* pick the successor through whichever link field chains the
         list currently being walked: */
      if (inc_gen1 || (j == 2))
        next = eph->inc_next;
      else if (follow_triggers) {
        next = eph->trigger_next;
        eph->trigger_next = NULL;
      } else
        next = eph->next;
      if (is_marked(gc, eph->key)) {
        /* key reachable => value becomes reachable */
        if (!inc_gen1)
          eph->key = GC_resolve2(eph->key, gc);
        gcMARK2(eph->val, gc);
        gc->num_last_seen_ephemerons++;
        did_one = 1;
        if (!inc_gen1 && (j == 0) && !gc->gc_full
            && gc->started_incremental && !gc->all_marked_incremental) {
          /* Need to preserve the ephemeron in the incremental list,
             unless it's kept in generation 1/2 instead of promoted to
             generation 1. */
          if (!is_in_generation_half(gc, eph)) {
            eph->inc_next = gc->inc_ephemerons;
            gc->inc_ephemerons = eph;
          }
        }
        if (follow_triggers)
          eph->trigger_next = EPHEMERON_COMPLETED; /* => don't move back to waiting */
      } else {
        /* key not (yet) reachable: keep the ephemeron waiting */
        if (inc_gen1) {
          /* Ensure that we can write to the page containing the ephemeron: */
          check_incremental_unprotect(gc, pagemap_find_page(gc->page_maps, eph));
          eph->inc_next = waiting;
          waiting = eph;
        } else {
          if (j == 0) {
            /* Add a trigger to make GC_mark2() notify us if this
               ephemeron should be checked again: */
            add_ephemeron_trigger(gc, eph);
            if (!follow_triggers) {
              eph->next = waiting;
              waiting = eph;
            }
          } else {
            eph->next = waiting;
            waiting = eph;
          }
        }
      }

      if (!next && !inc_gen1 && (j == 0)) {
        /* Propagate newly discovered marks, and triggers can
           reschedule some ephemerons for checking again. Otherwise, a
           chain of ephemerons can make our loop discover only one
           ephemeron each time around, leading to O(N^2) time to
           handle a chain of N ephemerons. */
        GC_mark_no_recur(gc, 0);
        propagate_marks(gc);
        GC_mark_no_recur(gc, 1);
        next = gc->triggered_ephemerons;
        GC_ASSERT(!next || (next->type == scheme_ephemeron_type));
        gc->triggered_ephemerons = NULL;
        follow_triggers = 1;
        /* If no triggers, double-check for newly discovered ephemerons
           on the plain waiting list, since we propagated marks */
        if (!next) {
          follow_triggers = 0;
          next = gc->ephemerons;
          gc->ephemerons = NULL;
        }
      }
    }

    GC_ASSERT(!gc->triggered_ephemerons);

    if (!inc_gen1 && (j == 0)) {
      /* Remove any triggers, and remove any completed-via-trigger
         ephemerons from the waiting list */
      eph = waiting;
      waiting = gc->ephemerons;
      for (; eph; eph = next) {
        next = eph->next;
        waiting = remove_ephemeron_trigger(gc, eph, waiting);
      }
    }

    /* put whatever is still waiting back on the list it came from: */
    if (inc_gen1)
      gc->inc_ephemerons = waiting;
    else if ((j == 0) || (j == 2))
      gc->ephemerons = waiting;
    else
      gc->bp_ephemerons = waiting;
  }

  GC_mark_no_recur(gc, 0);

  return did_one;
}
718 
/* After level-1 finalization, any ephemeron still on the pending list
   has an unreachable key, so its key and value are cleared.
   `from_inc` selects the incremental list (chained via inc_next)
   instead of the regular list. */
static void zero_remaining_ephemerons(GCTYPE *gc, int from_inc)
{
  GC_Ephemeron *e;

  GC_ASSERT(from_inc || !gc->gc_full || !gc->inc_ephemerons);

  if (from_inc) {
    for (e = gc->inc_ephemerons; e; e = e->inc_next) {
      e->key = NULL;
      e->val = NULL;
    }
    gc->inc_ephemerons = NULL;
  } else {
    for (e = gc->ephemerons; e; e = e->next) {
      e->key = NULL;
      e->val = NULL;
    }
    gc->ephemerons = NULL;
  }
}
741