1 // Copyright 2019 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/heap/basic-memory-chunk.h"
6 
7 #include <cstdlib>
8 
9 #include "src/heap/heap-write-barrier-inl.h"
10 #include "src/heap/incremental-marking.h"
11 #include "src/objects/heap-object.h"
12 
13 namespace v8 {
14 namespace internal {
15 
// Verify that the write-barrier offsets match the real offsets.
// The write barrier's fast path (heap_internals::MemoryChunk) re-declares a
// minimal view of the chunk header; these asserts keep the two declarations
// in sync so the barrier reads flags/heap from the correct addresses.
STATIC_ASSERT(BasicMemoryChunk::Flag::INCREMENTAL_MARKING ==
              heap_internals::MemoryChunk::kMarkingBit);
STATIC_ASSERT(BasicMemoryChunk::Flag::FROM_PAGE ==
              heap_internals::MemoryChunk::kFromPageBit);
STATIC_ASSERT(BasicMemoryChunk::Flag::TO_PAGE ==
              heap_internals::MemoryChunk::kToPageBit);
STATIC_ASSERT(BasicMemoryChunk::kFlagsOffset ==
              heap_internals::MemoryChunk::kFlagsOffset);
STATIC_ASSERT(BasicMemoryChunk::kHeapOffset ==
              heap_internals::MemoryChunk::kHeapOffset);
27 
// Out-of-line definitions for the static constexpr flag masks declared in
// basic-memory-chunk.h. NOTE(review): under C++17 static constexpr data
// members are implicitly inline, which would make these redundant —
// presumably they are retained for toolchains that still require an
// out-of-class definition when the members are ODR-used; confirm before
// removing.
// static
constexpr BasicMemoryChunk::MainThreadFlags BasicMemoryChunk::kAllFlagsMask;
// static
constexpr BasicMemoryChunk::MainThreadFlags
    BasicMemoryChunk::kPointersToHereAreInterestingMask;
// static
constexpr BasicMemoryChunk::MainThreadFlags
    BasicMemoryChunk::kPointersFromHereAreInterestingMask;
// static
constexpr BasicMemoryChunk::MainThreadFlags
    BasicMemoryChunk::kEvacuationCandidateMask;
// static
constexpr BasicMemoryChunk::MainThreadFlags
    BasicMemoryChunk::kIsInYoungGenerationMask;
// static
constexpr BasicMemoryChunk::MainThreadFlags BasicMemoryChunk::kIsLargePageMask;
// static
constexpr BasicMemoryChunk::MainThreadFlags
    BasicMemoryChunk::kSkipEvacuationSlotsRecordingMask;
47 
BasicMemoryChunk(size_t size,Address area_start,Address area_end)48 BasicMemoryChunk::BasicMemoryChunk(size_t size, Address area_start,
49                                    Address area_end) {
50   size_ = size;
51   area_start_ = area_start;
52   area_end_ = area_end;
53 }
54 
55 // static
Initialize(Heap * heap,Address base,size_t size,Address area_start,Address area_end,BaseSpace * owner,VirtualMemory reservation)56 BasicMemoryChunk* BasicMemoryChunk::Initialize(Heap* heap, Address base,
57                                                size_t size, Address area_start,
58                                                Address area_end,
59                                                BaseSpace* owner,
60                                                VirtualMemory reservation) {
61   BasicMemoryChunk* chunk = FromAddress(base);
62   DCHECK_EQ(base, chunk->address());
63   new (chunk) BasicMemoryChunk(size, area_start, area_end);
64 
65   chunk->heap_ = heap;
66   chunk->set_owner(owner);
67   chunk->reservation_ = std::move(reservation);
68   chunk->high_water_mark_ = static_cast<intptr_t>(area_start - base);
69   chunk->allocated_bytes_ = chunk->area_size();
70   chunk->wasted_memory_ = 0;
71   chunk->marking_bitmap<AccessMode::NON_ATOMIC>()->Clear();
72 
73   return chunk;
74 }
75 
InOldSpace() const76 bool BasicMemoryChunk::InOldSpace() const {
77   return owner()->identity() == OLD_SPACE;
78 }
79 
InLargeObjectSpace() const80 bool BasicMemoryChunk::InLargeObjectSpace() const {
81   return owner()->identity() == LO_SPACE;
82 }
83 
#ifdef THREAD_SANITIZER
// Acquire-loads heap_ so that ThreadSanitizer observes the release/acquire
// pairing when a chunk is handed between threads. The const_cast is needed
// only to form a non-const pointer for base::Acquire_Load; the field itself
// is not modified. Per the CHECK below, heap_ may legitimately be null only
// for chunks in read-only space.
void BasicMemoryChunk::SynchronizedHeapLoad() const {
  CHECK(reinterpret_cast<Heap*>(
            base::Acquire_Load(reinterpret_cast<base::AtomicWord*>(
                &(const_cast<BasicMemoryChunk*>(this)->heap_)))) != nullptr ||
        InReadOnlySpaceRaw());
}
#endif
92 
// Compile-time-only helper: never instantiated. Exists solely so offsetof()
// can be applied to BasicMemoryChunk's members here, verifying that the
// hand-maintained offset constants match the compiler-generated layout.
class BasicMemoryChunkValidator {
  // Computed offsets should match the compiler generated ones.
  STATIC_ASSERT(BasicMemoryChunk::kSizeOffset ==
                offsetof(BasicMemoryChunk, size_));
  STATIC_ASSERT(BasicMemoryChunk::kFlagsOffset ==
                offsetof(BasicMemoryChunk, main_thread_flags_));
  STATIC_ASSERT(BasicMemoryChunk::kHeapOffset ==
                offsetof(BasicMemoryChunk, heap_));
  STATIC_ASSERT(offsetof(BasicMemoryChunk, size_) ==
                MemoryChunkLayout::kSizeOffset);
  STATIC_ASSERT(offsetof(BasicMemoryChunk, heap_) ==
                MemoryChunkLayout::kHeapOffset);
  STATIC_ASSERT(offsetof(BasicMemoryChunk, area_start_) ==
                MemoryChunkLayout::kAreaStartOffset);
  STATIC_ASSERT(offsetof(BasicMemoryChunk, area_end_) ==
                MemoryChunkLayout::kAreaEndOffset);
  STATIC_ASSERT(offsetof(BasicMemoryChunk, allocated_bytes_) ==
                MemoryChunkLayout::kAllocatedBytesOffset);
  STATIC_ASSERT(offsetof(BasicMemoryChunk, wasted_memory_) ==
                MemoryChunkLayout::kWastedMemoryOffset);
  STATIC_ASSERT(offsetof(BasicMemoryChunk, high_water_mark_) ==
                MemoryChunkLayout::kHighWaterMarkOffset);
  STATIC_ASSERT(offsetof(BasicMemoryChunk, owner_) ==
                MemoryChunkLayout::kOwnerOffset);
  STATIC_ASSERT(offsetof(BasicMemoryChunk, reservation_) ==
                MemoryChunkLayout::kReservationOffset);
};
120 
121 }  // namespace internal
122 }  // namespace v8
123