1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_HEAP_MARK_COMPACT_INL_H_
6 #define V8_HEAP_MARK_COMPACT_INL_H_
7
8 #include "src/base/bits.h"
9 #include "src/heap/mark-compact.h"
10 #include "src/heap/objects-visiting-inl.h"
11 #include "src/heap/remembered-set.h"
12
13 namespace v8 {
14 namespace internal {
15
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
MarkingVisitor<fixed_array_mode, retaining_path_mode,
               MarkingState>::MarkingVisitor(MarkCompactCollector* collector,
                                             MarkingState* marking_state)
    // Cache the heap from the collector so visitation methods can reach it
    // without an extra indirection through |collector_|.
    : heap_(collector->heap()),
      collector_(collector),
      marking_state_(marking_state) {}
24
25 template <FixedArrayVisitationMode fixed_array_mode,
26 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
27 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitAllocationSite(Map * map,AllocationSite * object)28 MarkingState>::VisitAllocationSite(Map* map,
29 AllocationSite* object) {
30 int size = AllocationSite::BodyDescriptorWeak::SizeOf(map, object);
31 AllocationSite::BodyDescriptorWeak::IterateBody(map, object, size, this);
32 return size;
33 }
34
35 template <FixedArrayVisitationMode fixed_array_mode,
36 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
37 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitBytecodeArray(Map * map,BytecodeArray * array)38 MarkingState>::VisitBytecodeArray(Map* map,
39 BytecodeArray* array) {
40 int size = BytecodeArray::BodyDescriptor::SizeOf(map, array);
41 BytecodeArray::BodyDescriptor::IterateBody(map, array, size, this);
42 array->MakeOlder();
43 return size;
44 }
45
46 template <FixedArrayVisitationMode fixed_array_mode,
47 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
48 int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
VisitCodeDataContainer(Map * map,CodeDataContainer * object)49 VisitCodeDataContainer(Map* map, CodeDataContainer* object) {
50 int size = CodeDataContainer::BodyDescriptorWeak::SizeOf(map, object);
51 CodeDataContainer::BodyDescriptorWeak::IterateBody(map, object, size, this);
52 return size;
53 }
54
55 template <FixedArrayVisitationMode fixed_array_mode,
56 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
57 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitFixedArray(Map * map,FixedArray * object)58 MarkingState>::VisitFixedArray(Map* map,
59 FixedArray* object) {
60 return (fixed_array_mode == FixedArrayVisitationMode::kRegular)
61 ? Parent::VisitFixedArray(map, object)
62 : VisitFixedArrayIncremental(map, object);
63 }
64
65 template <FixedArrayVisitationMode fixed_array_mode,
66 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
67 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitJSApiObject(Map * map,JSObject * object)68 MarkingState>::VisitJSApiObject(Map* map, JSObject* object) {
69 if (heap_->local_embedder_heap_tracer()->InUse()) {
70 DCHECK(object->IsJSObject());
71 heap_->TracePossibleWrapper(object);
72 }
73 int size = JSObject::BodyDescriptor::SizeOf(map, object);
74 JSObject::BodyDescriptor::IterateBody(map, object, size, this);
75 return size;
76 }
77
78 template <FixedArrayVisitationMode fixed_array_mode,
79 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
80 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitJSFunction(Map * map,JSFunction * object)81 MarkingState>::VisitJSFunction(Map* map,
82 JSFunction* object) {
83 int size = JSFunction::BodyDescriptorWeak::SizeOf(map, object);
84 JSFunction::BodyDescriptorWeak::IterateBody(map, object, size, this);
85 return size;
86 }
87
// Visits a JSWeakCollection. The collection is linked into the heap's list of
// encountered weak collections for post-processing, and its backing table is
// marked without being pushed on the worklist so its entries are not visited
// strongly here.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitJSWeakCollection(Map* map, JSWeakCollection* weak_collection) {
  // Enqueue weak collection in linked list of encountered weak collections.
  // A collection whose |next| field is undefined has not been enqueued yet.
  if (weak_collection->next() == heap_->undefined_value()) {
    weak_collection->set_next(heap_->encountered_weak_collections());
    heap_->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  int size = JSWeakCollection::BodyDescriptorWeak::SizeOf(map, weak_collection);
  JSWeakCollection::BodyDescriptorWeak::IterateBody(map, weak_collection, size,
                                                    this);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return size;

  // Mark the backing hash table without pushing it on the marking stack, so
  // that its body (the weak mappings) is never iterated by the marker.
  Object** slot =
      HeapObject::RawField(weak_collection, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  collector_->RecordSlot(weak_collection, slot, obj);
  MarkObjectWithoutPush(weak_collection, obj);
  return size;
}
115
116 template <FixedArrayVisitationMode fixed_array_mode,
117 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
118 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitMap(Map * map,Map * object)119 MarkingState>::VisitMap(Map* map, Map* object) {
120 // When map collection is enabled we have to mark through map's transitions
121 // and back pointers in a special way to make these links weak.
122 int size = Map::BodyDescriptor::SizeOf(map, object);
123 if (object->CanTransition()) {
124 MarkMapContents(object);
125 } else {
126 Map::BodyDescriptor::IterateBody(map, object, size, this);
127 }
128 return size;
129 }
130
131 template <FixedArrayVisitationMode fixed_array_mode,
132 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
133 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitNativeContext(Map * map,Context * context)134 MarkingState>::VisitNativeContext(Map* map,
135 Context* context) {
136 int size = Context::BodyDescriptorWeak::SizeOf(map, context);
137 Context::BodyDescriptorWeak::IterateBody(map, context, size, this);
138 return size;
139 }
140
141 template <FixedArrayVisitationMode fixed_array_mode,
142 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
143 int MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitTransitionArray(Map * map,TransitionArray * array)144 MarkingState>::VisitTransitionArray(Map* map,
145 TransitionArray* array) {
146 int size = TransitionArray::BodyDescriptor::SizeOf(map, array);
147 TransitionArray::BodyDescriptor::IterateBody(map, array, size, this);
148 collector_->AddTransitionArray(array);
149 return size;
150 }
151
// Visits a WeakCell. Cells with live values get their value slot recorded
// immediately; cells whose value's liveness is not yet known are handed to
// the collector for processing once the transitive closure is complete.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode,
                   MarkingState>::VisitWeakCell(Map* map, WeakCell* weak_cell) {
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (!weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (marking_state()->IsBlackOrGrey(value)) {
      // Weak cells with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      collector_->RecordSlot(weak_cell, slot, *slot);
    } else {
      // If we do not know about liveness of values of weak cells, we have to
      // process them when we know the liveness of the whole transitive
      // closure.
      collector_->AddWeakCell(weak_cell);
    }
  }
  return WeakCell::BodyDescriptor::SizeOf(map, weak_cell);
}
175
176 template <FixedArrayVisitationMode fixed_array_mode,
177 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
178 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitPointer(HeapObject * host,Object ** p)179 MarkingState>::VisitPointer(HeapObject* host, Object** p) {
180 if (!(*p)->IsHeapObject()) return;
181 HeapObject* target_object = HeapObject::cast(*p);
182 collector_->RecordSlot(host, p, target_object);
183 MarkObject(host, target_object);
184 }
185
// Visits a slot that may hold a strong or a weak reference. Strong targets
// are recorded and marked; weak targets are either recorded (if already
// known live) or deferred to the collector's weak-reference processing.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::VisitPointer(HeapObject* host,
                                                MaybeObject** p) {
  HeapObject* target_object;
  if ((*p)->ToStrongHeapObject(&target_object)) {
    // Strong reference: record the slot and mark the target.
    collector_->RecordSlot(host, reinterpret_cast<HeapObjectReference**>(p),
                           target_object);
    MarkObject(host, target_object);
  } else if ((*p)->ToWeakHeapObject(&target_object)) {
    if (marking_state()->IsBlackOrGrey(target_object)) {
      // Weak references with live values are directly processed here to reduce
      // the processing time of weak references during the main GC pause.
      collector_->RecordSlot(host, reinterpret_cast<HeapObjectReference**>(p),
                             target_object);
    } else {
      // If we do not know about liveness of the value of the weak reference,
      // we have to process it when we know the liveness of the whole
      // transitive closure.
      collector_->AddWeakReference(host,
                                   reinterpret_cast<HeapObjectReference**>(p));
    }
  }
}
211
212 template <FixedArrayVisitationMode fixed_array_mode,
213 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
214 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitPointers(HeapObject * host,Object ** start,Object ** end)215 MarkingState>::VisitPointers(HeapObject* host,
216 Object** start, Object** end) {
217 for (Object** p = start; p < end; p++) {
218 VisitPointer(host, p);
219 }
220 }
221
222 template <FixedArrayVisitationMode fixed_array_mode,
223 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
224 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitPointers(HeapObject * host,MaybeObject ** start,MaybeObject ** end)225 MarkingState>::VisitPointers(HeapObject* host,
226 MaybeObject** start,
227 MaybeObject** end) {
228 for (MaybeObject** p = start; p < end; p++) {
229 VisitPointer(host, p);
230 }
231 }
232
233 template <FixedArrayVisitationMode fixed_array_mode,
234 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
235 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitEmbeddedPointer(Code * host,RelocInfo * rinfo)236 MarkingState>::VisitEmbeddedPointer(Code* host,
237 RelocInfo* rinfo) {
238 DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
239 HeapObject* object = HeapObject::cast(rinfo->target_object());
240 collector_->RecordRelocSlot(host, rinfo, object);
241 if (!host->IsWeakObject(object)) {
242 MarkObject(host, object);
243 } else if (!marking_state()->IsBlackOrGrey(object)) {
244 collector_->AddWeakObjectInCode(object, host);
245 }
246 }
247
248 template <FixedArrayVisitationMode fixed_array_mode,
249 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
250 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
VisitCodeTarget(Code * host,RelocInfo * rinfo)251 MarkingState>::VisitCodeTarget(Code* host,
252 RelocInfo* rinfo) {
253 DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
254 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
255 collector_->RecordRelocSlot(host, rinfo, target);
256 MarkObject(host, target);
257 }
258
259 template <FixedArrayVisitationMode fixed_array_mode,
260 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
261 bool MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkObjectWithoutPush(HeapObject * host,HeapObject * object)262 MarkingState>::MarkObjectWithoutPush(HeapObject* host,
263 HeapObject* object) {
264 if (marking_state()->WhiteToBlack(object)) {
265 if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
266 V8_UNLIKELY(FLAG_track_retaining_path)) {
267 heap_->AddRetainer(host, object);
268 }
269 return true;
270 }
271 return false;
272 }
273
274 template <FixedArrayVisitationMode fixed_array_mode,
275 TraceRetainingPathMode retaining_path_mode, typename MarkingState>
276 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
MarkObject(HeapObject * host,HeapObject * object)277 MarkingState>::MarkObject(HeapObject* host,
278 HeapObject* object) {
279 if (marking_state()->WhiteToGrey(object)) {
280 marking_worklist()->Push(object);
281 if (retaining_path_mode == TraceRetainingPathMode::kEnabled &&
282 V8_UNLIKELY(FLAG_track_retaining_path)) {
283 heap_->AddRetainer(host, object);
284 }
285 }
286 }
287
// Incrementally visits a FixedArray. Arrays on pages with a progress bar
// (large-object space) are scanned one kProgressBarScanningChunk at a time,
// re-pushing the array onto the worklist so scanning can resume later; all
// other arrays are scanned in one pass.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
int MarkingVisitor<fixed_array_mode, retaining_path_mode, MarkingState>::
    VisitFixedArrayIncremental(Map* map, FixedArray* object) {
  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
  int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
  if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
    DCHECK(!FLAG_use_marking_progress_bar ||
           chunk->owner()->identity() == LO_SPACE);
    // When using a progress bar for large fixed arrays, scan only a chunk of
    // the array and try to push it onto the marking deque again until it is
    // fully scanned. Fall back to scanning it through to the end in case this
    // fails because of a full deque.
    int start_offset =
        Max(FixedArray::BodyDescriptor::kStartOffset, chunk->progress_bar());
    if (start_offset < object_size) {
      // Ensure that the object is either grey or black before pushing it
      // into marking worklist.
      marking_state()->WhiteToGrey(object);
      if (FLAG_concurrent_marking) {
        marking_worklist()->PushBailout(object);
      } else {
        marking_worklist()->Push(object);
      }
      DCHECK(marking_state()->IsGrey(object) ||
             marking_state()->IsBlack(object));

      // Scan one chunk of the array, then advance the progress bar so the
      // next visit resumes where this one left off.
      int end_offset =
          Min(object_size, start_offset + kProgressBarScanningChunk);
      int already_scanned_offset = start_offset;
      VisitPointers(object, HeapObject::RawField(object, start_offset),
                    HeapObject::RawField(object, end_offset));
      start_offset = end_offset;
      end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
      chunk->set_progress_bar(start_offset);
      if (start_offset < object_size) {
        // Not done yet: tell incremental marking how much of the object is
        // still unscanned so it can schedule more work.
        heap_->incremental_marking()->NotifyIncompleteScanOfObject(
            object_size - (start_offset - already_scanned_offset));
      }
    }
  } else {
    // No progress bar: scan the whole array in one go.
    FixedArray::BodyDescriptor::IterateBody(map, object, object_size, this);
  }
  return object_size;
}
333
// Marks the contents of a transitioning Map, taking care to mark only the
// descriptors owned by this map in the (potentially shared) descriptor array.
template <FixedArrayVisitationMode fixed_array_mode,
          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
void MarkingVisitor<fixed_array_mode, retaining_path_mode,
                    MarkingState>::MarkMapContents(Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields of
  // this map are visited. Prototype maps don't keep track of transitions, so
  // just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    // MarkObjectWithoutPush returns true only on the first marking of the
    // array, which is when its header slots must be visited.
    if (MarkObjectWithoutPush(map, descriptors) && descriptors->length() > 0) {
      VisitPointers(descriptors, descriptors->GetFirstElementAddress(),
                    descriptors->GetDescriptorEndSlot(0));
    }
    // Visit only the descriptors this map owns.
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      VisitPointers(descriptors, descriptors->GetDescriptorStartSlot(start),
                    descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  Map::BodyDescriptor::IterateBody(
      map->map(), map, Map::BodyDescriptor::SizeOf(map->map(), map), this);
}
364
MarkObject(HeapObject * host,HeapObject * obj)365 void MarkCompactCollector::MarkObject(HeapObject* host, HeapObject* obj) {
366 if (marking_state()->WhiteToGrey(obj)) {
367 marking_worklist()->Push(obj);
368 if (V8_UNLIKELY(FLAG_track_retaining_path)) {
369 heap_->AddRetainer(host, obj);
370 }
371 }
372 }
373
MarkRootObject(Root root,HeapObject * obj)374 void MarkCompactCollector::MarkRootObject(Root root, HeapObject* obj) {
375 if (marking_state()->WhiteToGrey(obj)) {
376 marking_worklist()->Push(obj);
377 if (V8_UNLIKELY(FLAG_track_retaining_path)) {
378 heap_->AddRetainingRoot(root, obj);
379 }
380 }
381 }
382
383 #ifdef ENABLE_MINOR_MC
384
MarkRootObject(HeapObject * obj)385 void MinorMarkCompactCollector::MarkRootObject(HeapObject* obj) {
386 if (heap_->InNewSpace(obj) && non_atomic_marking_state_.WhiteToGrey(obj)) {
387 worklist_->Push(kMainThread, obj);
388 }
389 }
390
391 #endif
392
MarkExternallyReferencedObject(HeapObject * obj)393 void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
394 if (marking_state()->WhiteToGrey(obj)) {
395 marking_worklist()->Push(obj);
396 if (V8_UNLIKELY(FLAG_track_retaining_path)) {
397 heap_->AddRetainingRoot(Root::kWrapperTracing, obj);
398 }
399 }
400 }
401
RecordSlot(HeapObject * object,Object ** slot,Object * target)402 void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
403 Object* target) {
404 RecordSlot(object, reinterpret_cast<HeapObjectReference**>(slot), target);
405 }
406
RecordSlot(HeapObject * object,HeapObjectReference ** slot,Object * target)407 void MarkCompactCollector::RecordSlot(HeapObject* object,
408 HeapObjectReference** slot,
409 Object* target) {
410 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
411 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
412 if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
413 !source_page->ShouldSkipEvacuationSlotRecording<AccessMode::ATOMIC>()) {
414 RememberedSet<OLD_TO_OLD>::Insert(source_page,
415 reinterpret_cast<Address>(slot));
416 }
417 }
418
template <LiveObjectIterationMode mode>
LiveObjectRange<mode>::iterator::iterator(MemoryChunk* chunk, Bitmap* bitmap,
                                          Address start)
    : chunk_(chunk),
      // Filler maps are cached so AdvanceToNextValidObject can filter filler
      // objects without touching instance types (avoids a data race, see
      // comment there).
      one_word_filler_map_(chunk->heap()->one_pointer_filler_map()),
      two_word_filler_map_(chunk->heap()->two_pointer_filler_map()),
      free_space_map_(chunk->heap()->free_space_map()),
      it_(chunk, bitmap) {
  // Position the cell iterator at the mark-bit cell covering |start| and
  // advance to the first valid live object, if any.
  it_.Advance(Bitmap::IndexToCell(
      Bitmap::CellAlignIndex(chunk_->AddressToMarkbitIndex(start))));
  if (!it_.Done()) {
    cell_base_ = it_.CurrentCellBase();
    current_cell_ = *it_.CurrentCell();
    AdvanceToNextValidObject();
  } else {
    // No cells at or after |start|: this is an end iterator.
    current_object_ = nullptr;
  }
}
437
// Pre-increment: steps to the next live object on the chunk.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator& LiveObjectRange<mode>::iterator::
operator++() {
  AdvanceToNextValidObject();
  return *this;
}
444
445 template <LiveObjectIterationMode mode>
446 typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::iterator::
447 operator++(int) {
448 iterator retval = *this;
449 ++(*this);
450 return retval;
451 }
452
// Walks the mark bitmap from the current position to the next live object
// matching |mode| (black, grey, or either), skipping filler objects. Sets
// current_object_/current_size_ on success, or current_object_ = nullptr when
// the chunk is exhausted. Relies on the two-mark-bit encoding: a set second
// bit distinguishes black objects from grey ones.
template <LiveObjectIterationMode mode>
void LiveObjectRange<mode>::iterator::AdvanceToNextValidObject() {
  while (!it_.Done()) {
    HeapObject* object = nullptr;
    int size = 0;
    while (current_cell_ != 0) {
      uint32_t trailing_zeros = base::bits::CountTrailingZeros(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kPointerSize;

      // Clear the first bit of the found object.
      current_cell_ &= ~(1u << trailing_zeros);

      uint32_t second_bit_index = 0;
      if (trailing_zeros >= Bitmap::kBitIndexMask) {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (!it_.Advance()) {
          DCHECK(HeapObject::FromAddress(addr)->map() == one_word_filler_map_);
          current_object_ = nullptr;
          return;
        }
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      } else {
        second_bit_index = 1u << (trailing_zeros + 1);
      }

      Map* map = nullptr;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject* black_object = HeapObject::FromAddress(addr);
        map =
            base::AsAtomicPointer::Relaxed_Load(reinterpret_cast<Map**>(addr));
        size = black_object->SizeFromMap(map);
        Address end = addr + size - kPointerSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kPointerSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (mode == kBlackObjects || mode == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((mode == kGreyObjects || mode == kAllLiveObjects)) {
        // Grey object: only its first mark bit is set.
        map =
            base::AsAtomicPointer::Relaxed_Load(reinterpret_cast<Map**>(addr));
        object = HeapObject::FromAddress(addr);
        size = object->SizeFromMap(map);
      }

      // We found a live object.
      if (object != nullptr) {
        // Do not use IsFiller() here. This may cause a data race for reading
        // out the instance type when a new map concurrently is written into
        // this object while iterating over the object.
        if (map == one_word_filler_map_ || map == two_word_filler_map_ ||
            map == free_space_map_) {
          // There are two reasons why we can get black or grey fillers:
          // 1) Black areas together with slack tracking may result in black one
          // word filler objects.
          // 2) Left trimming may leave black or grey fillers behind because we
          // do not clear the old location of the object start.
          // We filter these objects out in the iterator.
          object = nullptr;
        } else {
          break;
        }
      }
    }

    // Current cell exhausted: move on to the next cell, if any.
    if (current_cell_ == 0) {
      if (it_.Advance()) {
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (object != nullptr) {
      current_object_ = object;
      current_size_ = size;
      return;
    }
  }
  // Bitmap exhausted without finding a matching live object.
  current_object_ = nullptr;
}
558
// Returns an iterator positioned at the first live object at or after
// |start_| on the chunk.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::begin() {
  return iterator(chunk_, bitmap_, start_);
}
563
// Returns the past-the-end iterator, constructed at |end_|.
template <LiveObjectIterationMode mode>
typename LiveObjectRange<mode>::iterator LiveObjectRange<mode>::end() {
  return iterator(chunk_, bitmap_, end_);
}
568
569 } // namespace internal
570 } // namespace v8
571
572 #endif // V8_HEAP_MARK_COMPACT_INL_H_
573