1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <stdlib.h>
29
30 #ifdef __linux__
31 #include <errno.h>
32 #include <fcntl.h>
33 #include <sys/stat.h>
34 #include <sys/types.h>
35 #include <unistd.h>
36 #endif
37
38 #include <utility>
39
40 #include "include/v8-locker.h"
41 #include "src/handles/global-handles.h"
42 #include "src/heap/mark-compact-inl.h"
43 #include "src/heap/mark-compact.h"
44 #include "src/init/v8.h"
45 #include "src/objects/objects-inl.h"
46 #include "test/cctest/cctest.h"
47 #include "test/cctest/heap/heap-tester.h"
48 #include "test/cctest/heap/heap-utils.h"
49
50 namespace v8 {
51 namespace internal {
52 namespace heap {
53
TEST(Promotion) {
  // Young-to-old promotion is meaningless without a young generation.
  if (FLAG_single_generation) return;
  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  {
    v8::HandleScope handle_scope(CcTest::isolate());
    Heap* heap = isolate->heap();

    heap::SealCurrentObjects(heap);

    // Allocate the largest regular-sized FixedArray; it starts out young.
    int length = heap::FixedArrayLenFromSize(kMaxRegularHeapObjectSize);
    Handle<FixedArray> fixed_array = isolate->factory()->NewFixedArray(length);
    CHECK(heap->InSpace(*fixed_array, NEW_SPACE));

    // Two full GCs: the surviving array must end up promoted to old space.
    CcTest::CollectAllGarbage();
    CcTest::CollectAllGarbage();
    CHECK(heap->InSpace(*fixed_array, OLD_SPACE));
  }
}
75
HEAP_TEST(NoPromotion) {
  if (FLAG_always_promote_young_mc) return;
  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
  // Page promotion allows pages to be moved to old space even in the case of
  // OOM scenarios.
  FLAG_page_promotion = false;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  {
    v8::HandleScope handle_scope(CcTest::isolate());
    Heap* heap = isolate->heap();

    heap::SealCurrentObjects(heap);

    int length = heap::FixedArrayLenFromSize(kMaxRegularHeapObjectSize);
    Handle<FixedArray> young_array = isolate->factory()->NewFixedArray(length);

    // With old-space allocation forced to fail, full GCs must leave the
    // surviving array in the new space instead of promoting it.
    heap->set_force_oom(true);
    CHECK(heap->InSpace(*young_array, NEW_SPACE));
    CcTest::CollectAllGarbage();
    CcTest::CollectAllGarbage();
    CHECK(heap->InSpace(*young_array, NEW_SPACE));
  }
}
102
103 // This is the same as Factory::NewMap, except it doesn't retry on
104 // allocation failure.
AllocateMapForTest(Isolate * isolate)105 AllocationResult HeapTester::AllocateMapForTest(Isolate* isolate) {
106 Heap* heap = isolate->heap();
107 HeapObject obj;
108 AllocationResult alloc = heap->AllocateRaw(Map::kSize, AllocationType::kMap);
109 if (!alloc.To(&obj)) return alloc;
110 obj.set_map_after_allocation(ReadOnlyRoots(heap).meta_map(),
111 SKIP_WRITE_BARRIER);
112 return isolate->factory()->InitializeMap(Map::cast(obj), JS_OBJECT_TYPE,
113 JSObject::kHeaderSize,
114 TERMINAL_FAST_ELEMENTS_KIND, 0);
115 }
116
117 // This is the same as Factory::NewFixedArray, except it doesn't retry
118 // on allocation failure.
AllocateFixedArrayForTest(Heap * heap,int length,AllocationType allocation)119 AllocationResult HeapTester::AllocateFixedArrayForTest(
120 Heap* heap, int length, AllocationType allocation) {
121 DCHECK(length >= 0 && length <= FixedArray::kMaxLength);
122 int size = FixedArray::SizeFor(length);
123 HeapObject obj;
124 {
125 AllocationResult result = heap->AllocateRaw(size, allocation);
126 if (!result.To(&obj)) return result;
127 }
128 obj.set_map_after_allocation(ReadOnlyRoots(heap).fixed_array_map(),
129 SKIP_WRITE_BARRIER);
130 FixedArray array = FixedArray::cast(obj);
131 array.set_length(length);
132 MemsetTagged(array.data_start(), ReadOnlyRoots(heap).undefined_value(),
133 length);
134 return array;
135 }
136
// End-to-end exercise of the mark-compact collector: GC on an empty heap,
// GC after exhausting new space and map space, and survival of objects and
// properties reachable from the global object across repeated full GCs.
HEAP_TEST(MarkCompactCollector) {
  FLAG_incremental_marking = false;
  FLAG_retain_maps_for_n_gc = 0;  // Do not artificially keep dead maps alive.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Heap* heap = CcTest::heap();
  Factory* factory = isolate->factory();

  v8::HandleScope sc(CcTest::isolate());
  Handle<JSGlobalObject> global(isolate->context().global_object(), isolate);

  // call mark-compact when heap is empty
  CcTest::CollectGarbage(OLD_SPACE);

  AllocationResult allocation;
  if (!FLAG_single_generation) {
    // keep allocating garbage in new space until it fails
    const int arraysize = 100;
    do {
      allocation =
          AllocateFixedArrayForTest(heap, arraysize, AllocationType::kYoung);
    } while (!allocation.IsRetry());
    CcTest::CollectGarbage(NEW_SPACE);
    // After the new-space GC, a young allocation must succeed again.
    AllocateFixedArrayForTest(heap, arraysize, AllocationType::kYoung)
        .ToObjectChecked();
  }

  // keep allocating maps until it fails
  do {
    allocation = AllocateMapForTest(isolate);
  } while (!allocation.IsRetry());
  CcTest::CollectGarbage(MAP_SPACE);
  // Collecting the map space must make map allocation possible again.
  AllocateMapForTest(isolate).ToObjectChecked();

  { HandleScope scope(isolate);
    // allocate a garbage
    Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
    Handle<JSFunction> function = factory->NewFunctionForTesting(func_name);
    Object::SetProperty(isolate, global, func_name, function).Check();

    // Unreferenced after this scope closes; the next GC may reclaim it.
    factory->NewJSObject(function);
  }

  CcTest::CollectGarbage(OLD_SPACE);

  { HandleScope scope(isolate);
    // The function installed on the global object must survive the GC.
    Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
    CHECK(Just(true) == JSReceiver::HasOwnProperty(global, func_name));
    Handle<Object> func_value =
        Object::GetProperty(isolate, global, func_name).ToHandleChecked();
    CHECK(func_value->IsJSFunction());
    Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
    Handle<JSObject> obj = factory->NewJSObject(function);

    // Root an object on the global and give it a Smi-valued property.
    Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
    Object::SetProperty(isolate, global, obj_name, obj).Check();
    Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
    Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
  }

  CcTest::CollectGarbage(OLD_SPACE);

  { HandleScope scope(isolate);
    // Both the object and its property value must survive another full GC.
    Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
    CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
    Handle<Object> object =
        Object::GetProperty(isolate, global, obj_name).ToHandleChecked();
    CHECK(object->IsJSObject());
    Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
    CHECK_EQ(*Object::GetProperty(isolate, object, prop_name).ToHandleChecked(),
             Smi::FromInt(23));
  }
}
211
// Verifies that the PINNED page flag prevents compaction from evacuating a
// page, and that clearing the flag makes the page movable again.
HEAP_TEST(DoNotEvacuatePinnedPages) {
  if (FLAG_never_compact || !FLAG_single_generation) return;

  // Make every full GC a compacting GC so the page below is considered for
  // evacuation.
  FLAG_always_compact = true;

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();

  v8::HandleScope sc(CcTest::isolate());
  Heap* heap = isolate->heap();

  heap::SealCurrentObjects(heap);

  // Fill exactly one old-space data page with padding objects.
  auto handles = heap::CreatePadding(
      heap, static_cast<int>(MemoryChunkLayout::AllocatableMemoryInDataPage()),
      AllocationType::kOld);

  Page* page = Page::FromHeapObject(*handles.front());

  CHECK(heap->InSpace(*handles.front(), OLD_SPACE));
  page->SetFlag(MemoryChunk::PINNED);

  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();

  // The pinned flag should prevent the page from moving.
  for (Handle<FixedArray> object : handles) {
    CHECK_EQ(page, Page::FromHeapObject(*object));
  }

  page->ClearFlag(MemoryChunk::PINNED);

  CcTest::CollectAllGarbage();
  heap->mark_compact_collector()->EnsureSweepingCompleted();

  // always_compact ensures that this page is an evacuation candidate, so with
  // the pin flag cleared compaction should now move it.
  for (Handle<FixedArray> object : handles) {
    CHECK_NE(page, Page::FromHeapObject(*object));
  }
}
253
// Checks that the object-start bitmap (used for conservative stack scanning)
// records object start addresses and resolves inner pointers back to the
// object base, both before and after a full GC.
HEAP_TEST(ObjectStartBitmap) {
  if (!FLAG_single_generation || !FLAG_conservative_stack_scanning) return;

#if V8_ENABLE_CONSERVATIVE_STACK_SCANNING

  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope sc(CcTest::isolate());

  Heap* heap = isolate->heap();
  heap::SealCurrentObjects(heap);

  auto* factory = isolate->factory();
  HeapObject obj = *factory->NewStringFromStaticChars("hello");
  HeapObject obj2 = *factory->NewStringFromStaticChars("world");
  // NOTE(review): both strings are assumed to land on the same page —
  // confirm if allocation behavior changes.
  Page* page = Page::FromAddress(obj.ptr());

  // The bitmap must have a start bit for each freshly allocated object.
  CHECK(page->object_start_bitmap()->CheckBit(obj.address()));
  CHECK(page->object_start_bitmap()->CheckBit(obj2.address()));

  // An inner pointer (base + 2) must resolve back to the object's base.
  Address obj_inner_ptr = obj.ptr() + 2;
  CHECK(page->object_start_bitmap()->FindBasePtr(obj_inner_ptr) ==
        obj.address());

  Address obj2_inner_ptr = obj2.ptr() + 2;
  CHECK(page->object_start_bitmap()->FindBasePtr(obj2_inner_ptr) ==
        obj2.address());

  CcTest::CollectAllGarbage();

  // The objects survive (stack-held HeapObject values) and their start bits
  // must still be set after the GC.
  CHECK((obj).IsString());
  CHECK((obj2).IsString());
  CHECK(page->object_start_bitmap()->CheckBit(obj.address()));
  CHECK(page->object_start_bitmap()->CheckBit(obj2.address()));

#endif
}
291
292 // TODO(1600): compaction of map space is temporary removed from GC.
293 #if 0
// Helper for the disabled MapCompact test below: allocates a plain
// JS-object map. (Dead code: guarded by the surrounding #if 0.)
static Handle<Map> CreateMap(Isolate* isolate) {
  return isolate->factory()->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
}
297
298
// Dead code: guarded by the surrounding #if 0 since map-space compaction was
// removed from the GC. NOTE(review): as written it would not compile —
// CreateMap() is called without its Isolate* argument; fix if re-enabled.
TEST(MapCompact) {
  FLAG_max_map_space_pages = 16;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();

  {
    v8::HandleScope sc;
    // keep allocating maps while pointers are still encodable and thus
    // mark compact is permitted.
    Handle<JSObject> root = factory->NewJSObjectFromMap(CreateMap());
    do {
      Handle<Map> map = CreateMap();
      map->set_prototype(*root);
      root = factory->NewJSObjectFromMap(map);
    } while (CcTest::heap()->map_space()->MapPointersEncodable());
  }
  // Now, as we don't have any handles to just allocated maps, we should
  // be able to trigger map compaction.
  // To give an additional chance to fail, try to force compaction which
  // should be impossible right now.
  CcTest::CollectAllGarbage(Heap::kForceCompactionMask);
  // And now map pointers should be encodable again.
  CHECK(CcTest::heap()->map_space()->MapPointersEncodable());
}
324 #endif
325
326 #if defined(__has_feature)
327 #if __has_feature(address_sanitizer)
328 #define V8_WITH_ASAN 1
329 #endif
330 #endif
331
332 // Here is a memory use test that uses /proc, and is therefore Linux-only. We
333 // do not care how much memory the simulator uses, since it is only there for
334 // debugging purposes. Testing with ASAN doesn't make sense, either.
335 #if defined(__linux__) && !defined(USE_SIMULATOR) && !defined(V8_WITH_ASAN)
336
337
ReadLong(char * buffer,intptr_t * position,int base)338 static uintptr_t ReadLong(char* buffer, intptr_t* position, int base) {
339 char* end_address = buffer + *position;
340 uintptr_t result = strtoul(buffer + *position, &end_address, base);
341 CHECK(result != ULONG_MAX || errno != ERANGE);
342 CHECK(end_address > buffer + *position);
343 *position = end_address - buffer;
344 return result;
345 }
346
347
348 // The memory use computed this way is not entirely accurate and depends on
349 // the way malloc allocates memory. That's why the memory use may seem to
350 // increase even though the sum of the allocated object sizes decreases. It
351 // also means that the memory use depends on the kernel and stdlib.
// The memory use computed this way is not entirely accurate and depends on
// the way malloc allocates memory. That's why the memory use may seem to
// increase even though the sum of the allocated object sizes decreases. It
// also means that the memory use depends on the kernel and stdlib.
//
// Sums the sizes of all accessible, private, anonymous (inode 0) mappings
// listed in /proc/self/maps. Returns -1 if the file cannot be opened.
static intptr_t MemoryInUse() {
  intptr_t memory_use = 0;

  int fd = open("/proc/self/maps", O_RDONLY);
  if (fd < 0) return -1;

  const int kBufSize = 20000;
  char buffer[kBufSize];
  ssize_t length = read(fd, buffer, kBufSize);
  intptr_t line_start = 0;
  CHECK_LT(length, kBufSize);  // Make the buffer bigger.
  CHECK_GT(length, 0);  // We have to find some data in the file.
  while (line_start < length) {
    if (buffer[line_start] == '\n') {
      line_start++;
      continue;
    }
    intptr_t position = line_start;
    // Each line has the form:
    //   start-end perms offset major:minor inode [path]
    uintptr_t start = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], '-');
    uintptr_t end = ReadLong(buffer, &position, 16);
    CHECK_EQ(buffer[position++], ' ');
    // Permission field is exactly four characters: rwx then s/p.
    CHECK(buffer[position] == '-' || buffer[position] == 'r');
    bool read_permission = (buffer[position++] == 'r');
    CHECK(buffer[position] == '-' || buffer[position] == 'w');
    bool write_permission = (buffer[position++] == 'w');
    CHECK(buffer[position] == '-' || buffer[position] == 'x');
    bool execute_permission = (buffer[position++] == 'x');
    CHECK(buffer[position] == 's' || buffer[position] == 'p');
    bool private_mapping = (buffer[position++] == 'p');
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t offset = ReadLong(buffer, &position, 16);
    USE(offset);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t major = ReadLong(buffer, &position, 16);
    USE(major);
    CHECK_EQ(buffer[position++], ':');
    uintptr_t minor = ReadLong(buffer, &position, 16);
    USE(minor);
    CHECK_EQ(buffer[position++], ' ');
    uintptr_t inode = ReadLong(buffer, &position, 10);
    // Skip the rest of the line (the optional pathname).
    while (position < length && buffer[position] != '\n') position++;
    // Count only accessible, private, anonymous (inode == 0) mappings.
    if ((read_permission || write_permission || execute_permission) &&
        private_mapping && inode == 0) {
      memory_use += (end - start);
    }

    line_start = position;
  }
  close(fd);
  return memory_use;
}
404
405
ShortLivingIsolate()406 intptr_t ShortLivingIsolate() {
407 v8::Isolate::CreateParams create_params;
408 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
409 v8::Isolate* isolate = v8::Isolate::New(create_params);
410 { v8::Isolate::Scope isolate_scope(isolate);
411 v8::Locker lock(isolate);
412 v8::HandleScope handle_scope(isolate);
413 v8::Local<v8::Context> context = v8::Context::New(isolate);
414 CHECK(!context.IsEmpty());
415 }
416 isolate->Dispose();
417 return MemoryInUse();
418 }
419
// Regression test: repeated isolate create/dispose cycles must not leak
// memory (e.g. via unjoined threads); use twice the first measurement as
// the acceptable upper bound.
UNINITIALIZED_TEST(RegressJoinThreadsOnIsolateDeinit) {
  // Memory is measured, do not allocate in background thread.
  FLAG_stress_concurrent_allocation = false;
  const intptr_t size_limit = 2 * ShortLivingIsolate();
  for (int iteration = 0; iteration < 10; iteration++) {
    CHECK_GT(size_limit, ShortLivingIsolate());
  }
}
428
// Regression test for crbug issue 5829: right-trimming an array during black
// allocation must not leave a filler object that looks live (grey) to the
// incremental marker.
TEST(Regress5829) {
  if (!FLAG_incremental_marking) return;
  FLAG_stress_concurrent_allocation = false;  // For SealCurrentObjects.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope sc(CcTest::isolate());
  Heap* heap = isolate->heap();
  heap::SealCurrentObjects(heap);
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  i::IncrementalMarking* marking = heap->incremental_marking();
  // Finish any leftover sweeping so we can start marking from a clean state.
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  CHECK(marking->IsMarking() || marking->IsStopped());
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  // Under black allocation, new objects are allocated already marked black.
  marking->StartBlackAllocationForTesting();
  Handle<FixedArray> array =
      isolate->factory()->NewFixedArray(10, AllocationType::kOld);
  Address old_end = array->address() + array->Size();
  // Right trim the array without clearing the mark bits.
  array->set_length(9);
  heap->CreateFillerObjectAt(old_end - kTaggedSize, kTaggedSize,
                             ClearRecordedSlots::kNo);
  heap->old_space()->FreeLinearAllocationArea();
  Page* page = Page::FromAddress(array->address());
  IncrementalMarking::MarkingState* marking_state = marking->marking_state();
  // No free-space/filler object on the page may appear grey (i.e. pending
  // marking work); that was the bug this test guards against.
  for (auto object_and_size :
       LiveObjectRange<kGreyObjects>(page, marking_state->bitmap(page))) {
    CHECK(!object_and_size.first.IsFreeSpaceOrFiller());
  }
}
464
#endif  // defined(__linux__) && !defined(USE_SIMULATOR) && !defined(V8_WITH_ASAN)
466
467 } // namespace heap
468 } // namespace internal
469 } // namespace v8
470