// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/cppgc/page-memory.h"

#include <cstddef>
#include <cstdint>
#include <memory>

#include "src/base/page-allocator.h"
#include "src/heap/cppgc/platform.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace cppgc {
namespace internal {

// A MemoryRegion reports exactly the base, size, and (exclusive) end it was
// constructed with.
TEST(MemoryRegionTest, Construct) {
  constexpr size_t kSize = 17;
  uint8_t dummy[kSize];
  const MemoryRegion region(dummy, kSize);
  EXPECT_EQ(dummy, region.base());
  EXPECT_EQ(kSize, region.size());
  EXPECT_EQ(dummy + kSize, region.end());
}

23 namespace {
24
AtOffset(uint8_t * base,intptr_t offset)25 Address AtOffset(uint8_t* base, intptr_t offset) {
26 return reinterpret_cast<Address>(reinterpret_cast<intptr_t>(base) + offset);
27 }
28
29 } // namespace
30
// Address containment is inclusive at base and exclusive at end.
TEST(MemoryRegionTest, ContainsAddress) {
  constexpr size_t kSize = 7;
  uint8_t dummy[kSize];
  const MemoryRegion region(dummy, kSize);
  EXPECT_FALSE(region.Contains(AtOffset(dummy, -1)));
  EXPECT_TRUE(region.Contains(dummy));
  EXPECT_TRUE(region.Contains(dummy + kSize - 1));
  EXPECT_FALSE(region.Contains(AtOffset(dummy, kSize)));
}

// A region contains another region only if the latter lies fully within the
// former; one byte of overhang on either side breaks containment.
TEST(MemoryRegionTest, ContainsMemoryRegion) {
  constexpr size_t kSize = 7;
  uint8_t dummy[kSize];
  const MemoryRegion region(dummy, kSize);
  const MemoryRegion contained_region1(dummy, kSize - 1);
  EXPECT_TRUE(region.Contains(contained_region1));
  const MemoryRegion contained_region2(dummy + 1, kSize - 1);
  EXPECT_TRUE(region.Contains(contained_region2));
  const MemoryRegion not_contained_region1(AtOffset(dummy, -1), kSize);
  EXPECT_FALSE(region.Contains(not_contained_region1));
  const MemoryRegion not_contained_region2(AtOffset(dummy, kSize), 1);
  EXPECT_FALSE(region.Contains(not_contained_region2));
}

// PageMemory pairs an overall region with a writeable sub-region; both must be
// reported exactly as constructed.
TEST(PageMemoryTest, Construct) {
  constexpr size_t kOverallSize = 17;
  uint8_t dummy[kOverallSize];
  const MemoryRegion overall_region(dummy, kOverallSize);
  const MemoryRegion writeable_region(dummy + 1, kOverallSize - 2);
  const PageMemory page_memory(overall_region, writeable_region);
  EXPECT_EQ(dummy, page_memory.overall_region().base());
  EXPECT_EQ(dummy + kOverallSize, page_memory.overall_region().end());
  EXPECT_EQ(dummy + 1, page_memory.writeable_region().base());
  EXPECT_EQ(dummy + kOverallSize - 1, page_memory.writeable_region().end());
}

#if DEBUG

// Constructing a PageMemory whose writeable region is not contained in the
// overall region must hit a debug-only check and die.
TEST(PageMemoryDeathTest, ConstructNonContainedRegions) {
  constexpr size_t kOverallSize = 17;
  uint8_t dummy[kOverallSize];
  const MemoryRegion overall_region(dummy, kOverallSize);
  // One byte longer than the overall region, so it overhangs at the end.
  const MemoryRegion writeable_region(dummy + 1, kOverallSize);
  EXPECT_DEATH_IF_SUPPORTED(PageMemory(overall_region, writeable_region), "");
}

#endif  // DEBUG

// A NormalPageMemoryRegion consists of kNumPageRegions contiguous pages, each
// with committed, zeroed writeable memory framed by guard pages.
TEST(PageMemoryRegionTest, NormalPageMemoryRegion) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
  pmr->UnprotectForTesting();
  MemoryRegion prev_overall;
  for (size_t i = 0; i < NormalPageMemoryRegion::kNumPageRegions; ++i) {
    const PageMemory pm = pmr->GetPageMemory(i);
    // Previous PageMemory aligns with the current one.
    if (prev_overall.base()) {
      EXPECT_EQ(prev_overall.end(), pm.overall_region().base());
    }
    prev_overall =
        MemoryRegion(pm.overall_region().base(), pm.overall_region().size());
    // Writeable region is contained in overall region.
    EXPECT_TRUE(pm.overall_region().Contains(pm.writeable_region()));
    // First and last writeable bytes are readable and zero-initialized.
    EXPECT_EQ(0u, pm.writeable_region().base()[0]);
    EXPECT_EQ(0u, pm.writeable_region().end()[-1]);
    // Front guard page.
    EXPECT_EQ(pm.writeable_region().base(),
              pm.overall_region().base() + kGuardPageSize);
    // Back guard page.
    EXPECT_EQ(pm.overall_region().end(),
              pm.writeable_region().end() + kGuardPageSize);
  }
}

// A LargePageMemoryRegion provides a zeroed writeable region at least as large
// as the requested payload size.
TEST(PageMemoryRegionTest, LargePageMemoryRegion) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  auto pmr =
      std::make_unique<LargePageMemoryRegion>(allocator, oom_handler, 1024);
  pmr->UnprotectForTesting();
  const PageMemory pm = pmr->GetPageMemory();
  EXPECT_LE(1024u, pm.writeable_region().size());
  // First and last writeable bytes are readable and zero-initialized.
  EXPECT_EQ(0u, pm.writeable_region().base()[0]);
  EXPECT_EQ(0u, pm.writeable_region().end()[-1]);
}

TEST(PageMemoryRegionTest, PlatformUsesGuardPages) {
  // This tests that the testing allocator actually uses protected guard
  // regions.
  v8::base::PageAllocator allocator;
#if defined(V8_HOST_ARCH_PPC64) && !defined(_AIX)
  EXPECT_FALSE(SupportsCommittingGuardPages(allocator));
#elif defined(V8_HOST_ARCH_ARM64)
  if (allocator.CommitPageSize() == 4096) {
    EXPECT_TRUE(SupportsCommittingGuardPages(allocator));
  } else {
    // Arm64 supports both 16k and 64k OS pages.
    EXPECT_FALSE(SupportsCommittingGuardPages(allocator));
  }
#else  // Regular case.
  EXPECT_TRUE(SupportsCommittingGuardPages(allocator));
#endif
}

136 namespace {
137
access(volatile const uint8_t & u)138 V8_NOINLINE uint8_t access(volatile const uint8_t& u) { return u; }
139
140 } // namespace
141
// Destroying a PageMemoryRegion must return its reservation to the OS, so a
// subsequent access to the old base address crashes.
TEST(PageMemoryRegionDeathTest, ReservationIsFreed) {
  // Full sequence as part of the death test macro as otherwise, the macro
  // may expand to statements that re-purpose the previously freed memory
  // and thus not crash.
  EXPECT_DEATH_IF_SUPPORTED(
      v8::base::PageAllocator allocator; FatalOutOfMemoryHandler oom_handler;
      Address base; {
        auto pmr = std::make_unique<LargePageMemoryRegion>(allocator,
                                                           oom_handler, 1024);
        base = pmr->reserved_region().base();
      } access(base[0]);
      , "");
}

// Touching the front guard page (the first byte of the overall region) must
// crash on platforms that support committing guard pages.
TEST(PageMemoryRegionDeathTest, FrontGuardPageAccessCrashes) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
  if (SupportsCommittingGuardPages(allocator)) {
    EXPECT_DEATH_IF_SUPPORTED(
        access(pmr->GetPageMemory(0).overall_region().base()[0]), "");
  }
}

// Touching the back guard page (the first byte past the writeable region) must
// crash on platforms that support committing guard pages.
TEST(PageMemoryRegionDeathTest, BackGuardPageAccessCrashes) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
  if (SupportsCommittingGuardPages(allocator)) {
    EXPECT_DEATH_IF_SUPPORTED(
        access(pmr->GetPageMemory(0).writeable_region().end()[0]), "");
  }
}

// Adding a normal region makes every address in its reservation (and nothing
// outside it) resolvable via the tree; removal makes it unresolvable again.
TEST(PageMemoryRegionTreeTest, AddNormalLookupRemove) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
  PageMemoryRegionTree tree;
  tree.Add(pmr.get());
  ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
  ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().end() - 1));
  ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base() - 1));
  ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end()));
  tree.Remove(pmr.get());
  ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base()));
  ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end() - 1));
}

// Same as AddNormalLookupRemove, but for a large region with an odd-sized
// payload.
TEST(PageMemoryRegionTreeTest, AddLargeLookupRemove) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  constexpr size_t kLargeSize = 5012;
  auto pmr = std::make_unique<LargePageMemoryRegion>(allocator, oom_handler,
                                                     kLargeSize);
  PageMemoryRegionTree tree;
  tree.Add(pmr.get());
  ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().base()));
  ASSERT_EQ(pmr.get(), tree.Lookup(pmr->reserved_region().end() - 1));
  ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base() - 1));
  ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end()));
  tree.Remove(pmr.get());
  ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().base()));
  ASSERT_EQ(nullptr, tree.Lookup(pmr->reserved_region().end() - 1));
}

// The tree keeps multiple regions independent: removing one must not affect
// lookups of the other.
TEST(PageMemoryRegionTreeTest, AddLookupRemoveMultiple) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  auto pmr1 = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
  constexpr size_t kLargeSize = 3127;
  auto pmr2 = std::make_unique<LargePageMemoryRegion>(allocator, oom_handler,
                                                      kLargeSize);
  PageMemoryRegionTree tree;
  tree.Add(pmr1.get());
  tree.Add(pmr2.get());
  ASSERT_EQ(pmr1.get(), tree.Lookup(pmr1->reserved_region().base()));
  ASSERT_EQ(pmr1.get(), tree.Lookup(pmr1->reserved_region().end() - 1));
  ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().base()));
  ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().end() - 1));
  tree.Remove(pmr1.get());
  ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().base()));
  ASSERT_EQ(pmr2.get(), tree.Lookup(pmr2->reserved_region().end() - 1));
  tree.Remove(pmr2.get());
  ASSERT_EQ(nullptr, tree.Lookup(pmr2->reserved_region().base()));
  ASSERT_EQ(nullptr, tree.Lookup(pmr2->reserved_region().end() - 1));
}

// A freshly constructed pool has nothing to hand out.
// (Also drops a previously unused PageAllocator local.)
TEST(NormalPageMemoryPool, ConstructorEmpty) {
  NormalPageMemoryPool pool;
  constexpr size_t kBucket = 0;
  EXPECT_EQ(NormalPageMemoryPool::Result(nullptr, nullptr), pool.Take(kBucket));
}

// Taking from the bucket an entry was added to yields that exact entry.
TEST(NormalPageMemoryPool, AddTakeSameBucket) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
  const PageMemory pm = pmr->GetPageMemory(0);
  NormalPageMemoryPool pool;
  constexpr size_t kBucket = 0;
  pool.Add(kBucket, pmr.get(), pm.writeable_region().base());
  EXPECT_EQ(
      NormalPageMemoryPool::Result(pmr.get(), pm.writeable_region().base()),
      pool.Take(kBucket));
}

// Buckets are isolated: taking from a different bucket yields nothing, while
// the original bucket still returns the stored entry.
TEST(NormalPageMemoryPool, AddTakeNotFoundDifferentBucket) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  auto pmr = std::make_unique<NormalPageMemoryRegion>(allocator, oom_handler);
  const PageMemory pm = pmr->GetPageMemory(0);
  NormalPageMemoryPool pool;
  constexpr size_t kFirstBucket = 0;
  constexpr size_t kSecondBucket = 1;
  pool.Add(kFirstBucket, pmr.get(), pm.writeable_region().base());
  EXPECT_EQ(NormalPageMemoryPool::Result(nullptr, nullptr),
            pool.Take(kSecondBucket));
  EXPECT_EQ(
      NormalPageMemoryPool::Result(pmr.get(), pm.writeable_region().base()),
      pool.Take(kFirstBucket));
}

// Freed normal page memory is pooled: a subsequent allocation from the same
// bucket reuses the same writeable base.
TEST(PageBackendTest, AllocateNormalUsesPool) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  PageBackend backend(allocator, oom_handler);
  constexpr size_t kBucket = 0;
  Address writeable_base1 = backend.AllocateNormalPageMemory(kBucket);
  EXPECT_NE(nullptr, writeable_base1);
  backend.FreeNormalPageMemory(kBucket, writeable_base1);
  Address writeable_base2 = backend.AllocateNormalPageMemory(kBucket);
  EXPECT_NE(nullptr, writeable_base2);
  EXPECT_EQ(writeable_base1, writeable_base2);
}

// Large allocations are distinct reservations and can be freed independently.
TEST(PageBackendTest, AllocateLarge) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  PageBackend backend(allocator, oom_handler);
  Address writeable_base1 = backend.AllocateLargePageMemory(13731);
  EXPECT_NE(nullptr, writeable_base1);
  Address writeable_base2 = backend.AllocateLargePageMemory(9478);
  EXPECT_NE(nullptr, writeable_base2);
  EXPECT_NE(writeable_base1, writeable_base2);
  backend.FreeLargePageMemory(writeable_base1);
  backend.FreeLargePageMemory(writeable_base2);
}

// Lookup resolves only payload addresses of a normal page; guard pages and
// out-of-page addresses resolve to nullptr.
TEST(PageBackendTest, LookupNormal) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  PageBackend backend(allocator, oom_handler);
  constexpr size_t kBucket = 0;
  Address writeable_base = backend.AllocateNormalPageMemory(kBucket);
  EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
  EXPECT_EQ(nullptr, backend.Lookup(writeable_base - 1));
  EXPECT_EQ(writeable_base, backend.Lookup(writeable_base));
  EXPECT_EQ(writeable_base, backend.Lookup(writeable_base + kPageSize -
                                           2 * kGuardPageSize - 1));
  EXPECT_EQ(nullptr,
            backend.Lookup(writeable_base + kPageSize - 2 * kGuardPageSize));
  EXPECT_EQ(nullptr,
            backend.Lookup(writeable_base - kGuardPageSize + kPageSize - 1));
}

// Lookup resolves only payload addresses of a large page.
TEST(PageBackendTest, LookupLarge) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  PageBackend backend(allocator, oom_handler);
  constexpr size_t kSize = 7934;
  Address writeable_base = backend.AllocateLargePageMemory(kSize);
  EXPECT_EQ(nullptr, backend.Lookup(writeable_base - kGuardPageSize));
  EXPECT_EQ(nullptr, backend.Lookup(writeable_base - 1));
  EXPECT_EQ(writeable_base, backend.Lookup(writeable_base));
  EXPECT_EQ(writeable_base, backend.Lookup(writeable_base + kSize - 1));
}

// Destroying the backend must release all page memory, so touching a
// previously allocated page afterwards crashes.
TEST(PageBackendDeathTest, DestructingBackendDestroysPageMemory) {
  v8::base::PageAllocator allocator;
  FatalOutOfMemoryHandler oom_handler;
  Address base;
  {
    PageBackend backend(allocator, oom_handler);
    constexpr size_t kBucket = 0;
    base = backend.AllocateNormalPageMemory(kBucket);
  }
  EXPECT_DEATH_IF_SUPPORTED(access(base[0]), "");
}

}  // namespace internal
}  // namespace cppgc