1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include <stdlib.h>
29
30 #include <utility>
31
32 #include "include/v8-function.h"
33 #include "src/api/api-inl.h"
34 #include "src/base/strings.h"
35 #include "src/codegen/assembler-inl.h"
36 #include "src/codegen/compilation-cache.h"
37 #include "src/codegen/macro-assembler-inl.h"
38 #include "src/codegen/script-details.h"
39 #include "src/common/globals.h"
40 #include "src/debug/debug.h"
41 #include "src/deoptimizer/deoptimizer.h"
42 #include "src/execution/execution.h"
43 #include "src/handles/global-handles-inl.h"
44 #include "src/heap/combined-heap.h"
45 #include "src/heap/factory.h"
46 #include "src/heap/gc-tracer.h"
47 #include "src/heap/heap-inl.h"
48 #include "src/heap/incremental-marking.h"
49 #include "src/heap/large-spaces.h"
50 #include "src/heap/mark-compact.h"
51 #include "src/heap/memory-chunk.h"
52 #include "src/heap/memory-reducer.h"
53 #include "src/heap/parked-scope.h"
54 #include "src/heap/remembered-set-inl.h"
55 #include "src/heap/safepoint.h"
56 #include "src/ic/ic.h"
57 #include "src/numbers/hash-seed-inl.h"
58 #include "src/objects/elements.h"
59 #include "src/objects/field-type.h"
60 #include "src/objects/heap-number-inl.h"
61 #include "src/objects/js-array-inl.h"
62 #include "src/objects/js-collection-inl.h"
63 #include "src/objects/managed-inl.h"
64 #include "src/objects/objects-inl.h"
65 #include "src/objects/slots.h"
66 #include "src/objects/stack-frame-info-inl.h"
67 #include "src/objects/transitions.h"
68 #include "src/regexp/regexp.h"
69 #include "src/snapshot/snapshot.h"
70 #include "src/tracing/tracing-category-observer.h"
71 #include "src/utils/ostreams.h"
72 #include "test/cctest/cctest.h"
73 #include "test/cctest/heap/heap-tester.h"
74 #include "test/cctest/heap/heap-utils.h"
75 #include "test/cctest/test-feedback-vector.h"
76 #include "test/cctest/test-transitions.h"
77
78 namespace v8 {
79 namespace internal {
80 namespace heap {
81
82 // We only start allocation-site tracking with the second instantiation.
83 static const int kPretenureCreationCount =
84 AllocationSite::kPretenureMinimumCreated + 1;
85
CheckMap(Map map,int type,int instance_size)86 static void CheckMap(Map map, int type, int instance_size) {
87 CHECK(map.IsHeapObject());
88 DCHECK(IsValidHeapObject(CcTest::heap(), map));
89 CHECK_EQ(ReadOnlyRoots(CcTest::heap()).meta_map(), map.map());
90 CHECK_EQ(type, map.instance_type());
91 CHECK_EQ(instance_size, map.instance_size());
92 }
93
94
TEST(HeapMaps)95 TEST(HeapMaps) {
96 CcTest::InitializeVM();
97 ReadOnlyRoots roots(CcTest::heap());
98 CheckMap(roots.meta_map(), MAP_TYPE, Map::kSize);
99 CheckMap(roots.heap_number_map(), HEAP_NUMBER_TYPE, HeapNumber::kSize);
100 CheckMap(roots.fixed_array_map(), FIXED_ARRAY_TYPE, kVariableSizeSentinel);
101 CheckMap(roots.hash_table_map(), HASH_TABLE_TYPE, kVariableSizeSentinel);
102 CheckMap(roots.string_map(), STRING_TYPE, kVariableSizeSentinel);
103 }
104
VerifyStoredPrototypeMap(Isolate * isolate,int stored_map_context_index,int stored_ctor_context_index)105 static void VerifyStoredPrototypeMap(Isolate* isolate,
106 int stored_map_context_index,
107 int stored_ctor_context_index) {
108 Handle<Context> context = isolate->native_context();
109
110 Handle<Map> this_map(Map::cast(context->get(stored_map_context_index)),
111 isolate);
112
113 Handle<JSFunction> fun(
114 JSFunction::cast(context->get(stored_ctor_context_index)), isolate);
115 Handle<JSObject> proto(JSObject::cast(fun->initial_map().prototype()),
116 isolate);
117 Handle<Map> that_map(proto->map(), isolate);
118
119 CHECK(proto->HasFastProperties());
120 CHECK_EQ(*this_map, *that_map);
121 }
122
123 // Checks that critical maps stored on the context (mostly used for fast-path
124 // checks) are unchanged after initialization.
TEST(ContextMaps)125 TEST(ContextMaps) {
126 CcTest::InitializeVM();
127 Isolate* isolate = CcTest::i_isolate();
128 HandleScope handle_scope(isolate);
129
130 VerifyStoredPrototypeMap(isolate,
131 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX,
132 Context::STRING_FUNCTION_INDEX);
133 VerifyStoredPrototypeMap(isolate, Context::REGEXP_PROTOTYPE_MAP_INDEX,
134 Context::REGEXP_FUNCTION_INDEX);
135 }
136
TEST(InitialObjects)137 TEST(InitialObjects) {
138 LocalContext env;
139 HandleScope scope(CcTest::i_isolate());
140 Handle<Context> context = v8::Utils::OpenHandle(*env);
141 // Initial ArrayIterator prototype.
142 CHECK_EQ(
143 context->initial_array_iterator_prototype(),
144 *v8::Utils::OpenHandle(*CompileRun("[][Symbol.iterator]().__proto__")));
145 // Initial Array prototype.
146 CHECK_EQ(context->initial_array_prototype(),
147 *v8::Utils::OpenHandle(*CompileRun("Array.prototype")));
148 // Initial Generator prototype.
149 CHECK_EQ(context->initial_generator_prototype(),
150 *v8::Utils::OpenHandle(
151 *CompileRun("(function*(){}).__proto__.prototype")));
152 // Initial Iterator prototype.
153 CHECK_EQ(context->initial_iterator_prototype(),
154 *v8::Utils::OpenHandle(
155 *CompileRun("[][Symbol.iterator]().__proto__.__proto__")));
156 // Initial Object prototype.
157 CHECK_EQ(context->initial_object_prototype(),
158 *v8::Utils::OpenHandle(*CompileRun("Object.prototype")));
159 }
160
CheckOddball(Isolate * isolate,Object obj,const char * string)161 static void CheckOddball(Isolate* isolate, Object obj, const char* string) {
162 CHECK(obj.IsOddball());
163 Handle<Object> handle(obj, isolate);
164 Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
165 CHECK(String::cast(print_string).IsOneByteEqualTo(base::CStrVector(string)));
166 }
167
CheckSmi(Isolate * isolate,int value,const char * string)168 static void CheckSmi(Isolate* isolate, int value, const char* string) {
169 Handle<Object> handle(Smi::FromInt(value), isolate);
170 Object print_string = *Object::ToString(isolate, handle).ToHandleChecked();
171 CHECK(String::cast(print_string).IsOneByteEqualTo(base::CStrVector(string)));
172 }
173
174
CheckNumber(Isolate * isolate,double value,const char * string)175 static void CheckNumber(Isolate* isolate, double value, const char* string) {
176 Handle<Object> number = isolate->factory()->NewNumber(value);
177 CHECK(number->IsNumber());
178 Handle<Object> print_string =
179 Object::ToString(isolate, number).ToHandleChecked();
180 CHECK(String::cast(*print_string).IsOneByteEqualTo(base::CStrVector(string)));
181 }
182
CheckEmbeddedObjectsAreEqual(Handle<Code> lhs,Handle<Code> rhs)183 void CheckEmbeddedObjectsAreEqual(Handle<Code> lhs, Handle<Code> rhs) {
184 int mode_mask = RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT);
185 RelocIterator lhs_it(*lhs, mode_mask);
186 RelocIterator rhs_it(*rhs, mode_mask);
187 while (!lhs_it.done() && !rhs_it.done()) {
188 CHECK(lhs_it.rinfo()->target_object() == rhs_it.rinfo()->target_object());
189
190 lhs_it.next();
191 rhs_it.next();
192 }
193 CHECK(lhs_it.done() == rhs_it.done());
194 }
195
HEAP_TEST(TestNewSpaceRefsInCopiedCode)196 HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
197 if (FLAG_single_generation) return;
198 CcTest::InitializeVM();
199 Isolate* isolate = CcTest::i_isolate();
200 Factory* factory = isolate->factory();
201 HandleScope sc(isolate);
202
203 Handle<HeapNumber> value = factory->NewHeapNumber(1.000123);
204 CHECK(Heap::InYoungGeneration(*value));
205
206 i::byte buffer[i::Assembler::kDefaultBufferSize];
207 MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
208 ExternalAssemblerBuffer(buffer, sizeof(buffer)));
209 // Add a new-space reference to the code.
210 #if V8_TARGET_ARCH_ARM64
211 // Arm64 requires stack alignment.
212 UseScratchRegisterScope temps(&masm);
213 Register tmp = temps.AcquireX();
214 masm.Mov(tmp, Operand(value));
215 masm.Push(tmp, padreg);
216 #else
217 masm.Push(value);
218 #endif
219
220 CodeDesc desc;
221 masm.GetCode(isolate, &desc);
222 Handle<Code> code =
223 Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build();
224
225 Handle<Code> copy;
226 {
227 CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
228 copy = factory->CopyCode(code);
229 }
230
231 CheckEmbeddedObjectsAreEqual(code, copy);
232 CcTest::CollectAllAvailableGarbage();
233 CheckEmbeddedObjectsAreEqual(code, copy);
234 }
235
CheckFindCodeObject(Isolate * isolate)236 static void CheckFindCodeObject(Isolate* isolate) {
237 // Test FindCodeObject
238 #define __ assm.
239
240 Assembler assm(AssemblerOptions{});
241
242 __ nop(); // supported on all architectures
243
244 CodeDesc desc;
245 assm.GetCode(isolate, &desc);
246 Handle<Code> code =
247 Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build();
248 CHECK(code->IsCode());
249
250 HeapObject obj = HeapObject::cast(*code);
251 Address obj_addr = obj.address();
252
253 for (int i = 0; i < obj.Size(); i += kTaggedSize) {
254 Object found = isolate->FindCodeObject(obj_addr + i);
255 CHECK_EQ(*code, found);
256 }
257
258 Handle<Code> copy =
259 Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build();
260 HeapObject obj_copy = HeapObject::cast(*copy);
261 Object not_right =
262 isolate->FindCodeObject(obj_copy.address() + obj_copy.Size() / 2);
263 CHECK(not_right != *code);
264 }
265
266
TEST(HandleNull)267 TEST(HandleNull) {
268 CcTest::InitializeVM();
269 Isolate* isolate = CcTest::i_isolate();
270 HandleScope outer_scope(isolate);
271 LocalContext context;
272 Handle<Object> n(Object(0), isolate);
273 CHECK(!n.is_null());
274 }
275
276
TEST(HeapObjects)277 TEST(HeapObjects) {
278 CcTest::InitializeVM();
279 Isolate* isolate = CcTest::i_isolate();
280 Factory* factory = isolate->factory();
281 Heap* heap = isolate->heap();
282
283 HandleScope sc(isolate);
284 Handle<Object> value = factory->NewNumber(1.000123);
285 CHECK(value->IsHeapNumber());
286 CHECK(value->IsNumber());
287 CHECK_EQ(1.000123, value->Number());
288
289 value = factory->NewNumber(1.0);
290 CHECK(value->IsSmi());
291 CHECK(value->IsNumber());
292 CHECK_EQ(1.0, value->Number());
293
294 value = factory->NewNumberFromInt(1024);
295 CHECK(value->IsSmi());
296 CHECK(value->IsNumber());
297 CHECK_EQ(1024.0, value->Number());
298
299 value = factory->NewNumberFromInt(Smi::kMinValue);
300 CHECK(value->IsSmi());
301 CHECK(value->IsNumber());
302 CHECK_EQ(Smi::kMinValue, Handle<Smi>::cast(value)->value());
303
304 value = factory->NewNumberFromInt(Smi::kMaxValue);
305 CHECK(value->IsSmi());
306 CHECK(value->IsNumber());
307 CHECK_EQ(Smi::kMaxValue, Handle<Smi>::cast(value)->value());
308
309 #if !defined(V8_TARGET_ARCH_64_BIT)
310 // TODO(lrn): We need a NumberFromIntptr function in order to test this.
311 value = factory->NewNumberFromInt(Smi::kMinValue - 1);
312 CHECK(value->IsHeapNumber());
313 CHECK(value->IsNumber());
314 CHECK_EQ(static_cast<double>(Smi::kMinValue - 1), value->Number());
315 #endif
316
317 value = factory->NewNumberFromUint(static_cast<uint32_t>(Smi::kMaxValue) + 1);
318 CHECK(value->IsHeapNumber());
319 CHECK(value->IsNumber());
320 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(Smi::kMaxValue) + 1),
321 value->Number());
322
323 value = factory->NewNumberFromUint(static_cast<uint32_t>(1) << 31);
324 CHECK(value->IsHeapNumber());
325 CHECK(value->IsNumber());
326 CHECK_EQ(static_cast<double>(static_cast<uint32_t>(1) << 31),
327 value->Number());
328
329 // nan oddball checks
330 CHECK(factory->nan_value()->IsNumber());
331 CHECK(std::isnan(factory->nan_value()->Number()));
332
333 Handle<String> s = factory->NewStringFromStaticChars("fisk hest ");
334 CHECK(s->IsString());
335 CHECK_EQ(10, s->length());
336
337 Handle<String> object_string = Handle<String>::cast(factory->Object_string());
338 Handle<JSGlobalObject> global(CcTest::i_isolate()->context().global_object(),
339 isolate);
340 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, object_string));
341
342 // Check ToString for oddballs
343 ReadOnlyRoots roots(heap);
344 CheckOddball(isolate, roots.true_value(), "true");
345 CheckOddball(isolate, roots.false_value(), "false");
346 CheckOddball(isolate, roots.null_value(), "null");
347 CheckOddball(isolate, roots.undefined_value(), "undefined");
348
349 // Check ToString for Smis
350 CheckSmi(isolate, 0, "0");
351 CheckSmi(isolate, 42, "42");
352 CheckSmi(isolate, -42, "-42");
353
354 // Check ToString for Numbers
355 CheckNumber(isolate, 1.1, "1.1");
356
357 CheckFindCodeObject(isolate);
358 }
359
TEST(Tagging)360 TEST(Tagging) {
361 CcTest::InitializeVM();
362 int request = 24;
363 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
364 CHECK(Smi::FromInt(42).IsSmi());
365 CHECK(Smi::FromInt(Smi::kMinValue).IsSmi());
366 CHECK(Smi::FromInt(Smi::kMaxValue).IsSmi());
367 }
368
369
TEST(GarbageCollection)370 TEST(GarbageCollection) {
371 if (FLAG_single_generation) return;
372
373 CcTest::InitializeVM();
374 Isolate* isolate = CcTest::i_isolate();
375 Factory* factory = isolate->factory();
376
377 HandleScope sc(isolate);
378 // Check GC.
379 CcTest::CollectGarbage(NEW_SPACE);
380
381 Handle<JSGlobalObject> global(CcTest::i_isolate()->context().global_object(),
382 isolate);
383 Handle<String> name = factory->InternalizeUtf8String("theFunction");
384 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
385 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
386 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
387 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
388 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
389
390 {
391 HandleScope inner_scope(isolate);
392 // Allocate a function and keep it in global object's property.
393 Handle<JSFunction> function = factory->NewFunctionForTesting(name);
394 Object::SetProperty(isolate, global, name, function).Check();
395 // Allocate an object. Unrooted after leaving the scope.
396 Handle<JSObject> obj = factory->NewJSObject(function);
397 Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
398 Object::SetProperty(isolate, obj, prop_namex, twenty_four).Check();
399
400 CHECK_EQ(Smi::FromInt(23),
401 *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
402 CHECK_EQ(Smi::FromInt(24),
403 *Object::GetProperty(isolate, obj, prop_namex).ToHandleChecked());
404 }
405
406 CcTest::CollectGarbage(NEW_SPACE);
407
408 // Function should be alive.
409 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
410 // Check function is retained.
411 Handle<Object> func_value =
412 Object::GetProperty(isolate, global, name).ToHandleChecked();
413 CHECK(func_value->IsJSFunction());
414 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
415
416 {
417 HandleScope inner_scope(isolate);
418 // Allocate another object, make it reachable from global.
419 Handle<JSObject> obj = factory->NewJSObject(function);
420 Object::SetProperty(isolate, global, obj_name, obj).Check();
421 Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
422 }
423
424 // After gc, it should survive.
425 CcTest::CollectGarbage(NEW_SPACE);
426
427 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
428 Handle<Object> obj =
429 Object::GetProperty(isolate, global, obj_name).ToHandleChecked();
430 CHECK(obj->IsJSObject());
431 CHECK_EQ(Smi::FromInt(23),
432 *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
433 }
434
435
VerifyStringAllocation(Isolate * isolate,const char * string)436 static void VerifyStringAllocation(Isolate* isolate, const char* string) {
437 HandleScope scope(isolate);
438 Handle<String> s = isolate->factory()
439 ->NewStringFromUtf8(base::CStrVector(string))
440 .ToHandleChecked();
441 CHECK_EQ(strlen(string), s->length());
442 for (int index = 0; index < s->length(); index++) {
443 CHECK_EQ(static_cast<uint16_t>(string[index]), s->Get(index));
444 }
445 }
446
447
TEST(String)448 TEST(String) {
449 CcTest::InitializeVM();
450 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
451
452 VerifyStringAllocation(isolate, "a");
453 VerifyStringAllocation(isolate, "ab");
454 VerifyStringAllocation(isolate, "abc");
455 VerifyStringAllocation(isolate, "abcd");
456 VerifyStringAllocation(isolate, "fiskerdrengen er paa havet");
457 }
458
459
TEST(LocalHandles)460 TEST(LocalHandles) {
461 CcTest::InitializeVM();
462 Isolate* isolate = CcTest::i_isolate();
463 Factory* factory = isolate->factory();
464
465 v8::HandleScope scope(CcTest::isolate());
466 const char* name = "Kasper the spunky";
467 Handle<String> string = factory->NewStringFromAsciiChecked(name);
468 CHECK_EQ(strlen(name), string->length());
469 }
470
471
TEST(GlobalHandles)472 TEST(GlobalHandles) {
473 CcTest::InitializeVM();
474 Isolate* isolate = CcTest::i_isolate();
475 Factory* factory = isolate->factory();
476 GlobalHandles* global_handles = isolate->global_handles();
477
478 Handle<Object> h1;
479 Handle<Object> h2;
480 Handle<Object> h3;
481 Handle<Object> h4;
482
483 {
484 HandleScope scope(isolate);
485
486 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
487 Handle<Object> u = factory->NewNumber(1.12344);
488
489 h1 = global_handles->Create(*i);
490 h2 = global_handles->Create(*u);
491 h3 = global_handles->Create(*i);
492 h4 = global_handles->Create(*u);
493 }
494
495 // after gc, it should survive
496 CcTest::CollectGarbage(NEW_SPACE);
497
498 CHECK((*h1).IsString());
499 CHECK((*h2).IsHeapNumber());
500 CHECK((*h3).IsString());
501 CHECK((*h4).IsHeapNumber());
502
503 CHECK_EQ(*h3, *h1);
504 GlobalHandles::Destroy(h1.location());
505 GlobalHandles::Destroy(h3.location());
506
507 CHECK_EQ(*h4, *h2);
508 GlobalHandles::Destroy(h2.location());
509 GlobalHandles::Destroy(h4.location());
510 }
511
512
513 static bool WeakPointerCleared = false;
514
TestWeakGlobalHandleCallback(const v8::WeakCallbackInfo<void> & data)515 static void TestWeakGlobalHandleCallback(
516 const v8::WeakCallbackInfo<void>& data) {
517 std::pair<v8::Persistent<v8::Value>*, int>* p =
518 reinterpret_cast<std::pair<v8::Persistent<v8::Value>*, int>*>(
519 data.GetParameter());
520 if (p->second == 1234) WeakPointerCleared = true;
521 p->first->Reset();
522 }
523
TEST(WeakGlobalUnmodifiedApiHandlesScavenge)524 TEST(WeakGlobalUnmodifiedApiHandlesScavenge) {
525 CcTest::InitializeVM();
526 Isolate* isolate = CcTest::i_isolate();
527 LocalContext context;
528 Factory* factory = isolate->factory();
529 GlobalHandles* global_handles = isolate->global_handles();
530
531 WeakPointerCleared = false;
532
533 Handle<Object> h1;
534 Handle<Object> h2;
535
536 {
537 HandleScope scope(isolate);
538
539 // Create an Api object that is unmodified.
540 Local<v8::Function> function = FunctionTemplate::New(context->GetIsolate())
541 ->GetFunction(context.local())
542 .ToLocalChecked();
543 Local<v8::Object> i =
544 function->NewInstance(context.local()).ToLocalChecked();
545 Handle<Object> u = factory->NewNumber(1.12344);
546
547 h1 = global_handles->Create(*u);
548 h2 = global_handles->Create(*(reinterpret_cast<internal::Address*>(*i)));
549 }
550
551 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
552 GlobalHandles::MakeWeak(
553 h2.location(), reinterpret_cast<void*>(&handle_and_id),
554 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
555
556 FLAG_single_generation ? CcTest::CollectGarbage(OLD_SPACE)
557 : CcTest::CollectGarbage(NEW_SPACE);
558 CHECK((*h1).IsHeapNumber());
559 CHECK(WeakPointerCleared);
560 GlobalHandles::Destroy(h1.location());
561 }
562
TEST(WeakGlobalHandlesMark)563 TEST(WeakGlobalHandlesMark) {
564 ManualGCScope manual_gc_scope;
565 CcTest::InitializeVM();
566 Isolate* isolate = CcTest::i_isolate();
567 Factory* factory = isolate->factory();
568 GlobalHandles* global_handles = isolate->global_handles();
569
570 WeakPointerCleared = false;
571
572 Handle<Object> h1;
573 Handle<Object> h2;
574
575 {
576 HandleScope scope(isolate);
577
578 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
579 Handle<Object> u = factory->NewNumber(1.12344);
580
581 h1 = global_handles->Create(*i);
582 h2 = global_handles->Create(*u);
583 }
584
585 // Make sure the objects are promoted.
586 CcTest::CollectGarbage(OLD_SPACE);
587 CcTest::CollectGarbage(NEW_SPACE);
588 CHECK(!Heap::InYoungGeneration(*h1) && !Heap::InYoungGeneration(*h2));
589
590 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
591 GlobalHandles::MakeWeak(
592 h2.location(), reinterpret_cast<void*>(&handle_and_id),
593 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
594
595 // Incremental marking potentially marked handles before they turned weak.
596 CcTest::CollectAllGarbage();
597 CHECK((*h1).IsString());
598 CHECK(WeakPointerCleared);
599 GlobalHandles::Destroy(h1.location());
600 }
601
602
TEST(DeleteWeakGlobalHandle)603 TEST(DeleteWeakGlobalHandle) {
604 FLAG_stress_compaction = false;
605 FLAG_stress_incremental_marking = false;
606 CcTest::InitializeVM();
607 Isolate* isolate = CcTest::i_isolate();
608 Factory* factory = isolate->factory();
609 GlobalHandles* global_handles = isolate->global_handles();
610
611 WeakPointerCleared = false;
612 Handle<Object> h;
613 {
614 HandleScope scope(isolate);
615
616 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
617 h = global_handles->Create(*i);
618 }
619
620 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
621 GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
622 &TestWeakGlobalHandleCallback,
623 v8::WeakCallbackType::kParameter);
624 CHECK(!WeakPointerCleared);
625 CcTest::CollectGarbage(OLD_SPACE);
626 CHECK(WeakPointerCleared);
627 }
628
TEST(BytecodeArray)629 TEST(BytecodeArray) {
630 if (FLAG_never_compact) return;
631 static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
632 static const int kRawBytesSize = sizeof(kRawBytes);
633 static const int32_t kFrameSize = 32;
634 static const int32_t kParameterCount = 2;
635
636 ManualGCScope manual_gc_scope;
637 FLAG_manual_evacuation_candidates_selection = true;
638 CcTest::InitializeVM();
639 Isolate* isolate = CcTest::i_isolate();
640 Heap* heap = isolate->heap();
641 Factory* factory = isolate->factory();
642 HandleScope scope(isolate);
643
644 heap::SimulateFullSpace(heap->old_space());
645 Handle<FixedArray> constant_pool =
646 factory->NewFixedArray(5, AllocationType::kOld);
647 for (int i = 0; i < 5; i++) {
648 Handle<Object> number = factory->NewHeapNumber(i);
649 constant_pool->set(i, *number);
650 }
651
652 // Allocate and initialize BytecodeArray
653 Handle<BytecodeArray> array = factory->NewBytecodeArray(
654 kRawBytesSize, kRawBytes, kFrameSize, kParameterCount, constant_pool);
655
656 CHECK(array->IsBytecodeArray());
657 CHECK_EQ(array->length(), (int)sizeof(kRawBytes));
658 CHECK_EQ(array->frame_size(), kFrameSize);
659 CHECK_EQ(array->parameter_count(), kParameterCount);
660 CHECK_EQ(array->constant_pool(), *constant_pool);
661 CHECK_LE(array->address(), array->GetFirstBytecodeAddress());
662 CHECK_GE(array->address() + array->BytecodeArraySize(),
663 array->GetFirstBytecodeAddress() + array->length());
664 for (int i = 0; i < kRawBytesSize; i++) {
665 CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
666 kRawBytes[i]);
667 CHECK_EQ(array->get(i), kRawBytes[i]);
668 }
669
670 FixedArray old_constant_pool_address = *constant_pool;
671
672 // Perform a full garbage collection and force the constant pool to be on an
673 // evacuation candidate.
674 Page* evac_page = Page::FromHeapObject(*constant_pool);
675 heap::ForceEvacuationCandidate(evac_page);
676 CcTest::CollectAllGarbage();
677
678 // BytecodeArray should survive.
679 CHECK_EQ(array->length(), kRawBytesSize);
680 CHECK_EQ(array->frame_size(), kFrameSize);
681 for (int i = 0; i < kRawBytesSize; i++) {
682 CHECK_EQ(array->get(i), kRawBytes[i]);
683 CHECK_EQ(Memory<uint8_t>(array->GetFirstBytecodeAddress() + i),
684 kRawBytes[i]);
685 }
686
687 // Constant pool should have been migrated.
688 CHECK_EQ(array->constant_pool(), *constant_pool);
689 CHECK_NE(array->constant_pool(), old_constant_pool_address);
690 }
691
TEST(BytecodeArrayAging)692 TEST(BytecodeArrayAging) {
693 static const uint8_t kRawBytes[] = {0xC3, 0x7E, 0xA5, 0x5A};
694 static const int kRawBytesSize = sizeof(kRawBytes);
695 static const int32_t kFrameSize = 32;
696 static const int32_t kParameterCount = 2;
697 CcTest::InitializeVM();
698 Isolate* isolate = CcTest::i_isolate();
699 Factory* factory = isolate->factory();
700 HandleScope scope(isolate);
701
702 Handle<BytecodeArray> array =
703 factory->NewBytecodeArray(kRawBytesSize, kRawBytes, kFrameSize,
704 kParameterCount, factory->empty_fixed_array());
705
706 CHECK_EQ(BytecodeArray::kFirstBytecodeAge, array->bytecode_age());
707 array->MakeOlder();
708 CHECK_EQ(BytecodeArray::kQuadragenarianBytecodeAge, array->bytecode_age());
709 array->set_bytecode_age(BytecodeArray::kLastBytecodeAge);
710 array->MakeOlder();
711 CHECK_EQ(BytecodeArray::kLastBytecodeAge, array->bytecode_age());
712 }
713
714 static const char* not_so_random_string_table[] = {
715 "abstract", "boolean", "break", "byte", "case",
716 "catch", "char", "class", "const", "continue",
717 "debugger", "default", "delete", "do", "double",
718 "else", "enum", "export", "extends", "false",
719 "final", "finally", "float", "for", "function",
720 "goto", "if", "implements", "import", "in",
721 "instanceof", "int", "interface", "long", "native",
722 "new", "null", "package", "private", "protected",
723 "public", "return", "short", "static", "super",
724 "switch", "synchronized", "this", "throw", "throws",
725 "transient", "true", "try", "typeof", "var",
726 "void", "volatile", "while", "with", nullptr};
727
CheckInternalizedStrings(const char ** strings)728 static void CheckInternalizedStrings(const char** strings) {
729 Isolate* isolate = CcTest::i_isolate();
730 Factory* factory = isolate->factory();
731 for (const char* string = *strings; *strings != nullptr;
732 string = *strings++) {
733 HandleScope scope(isolate);
734 Handle<String> a =
735 isolate->factory()->InternalizeUtf8String(base::CStrVector(string));
736 // InternalizeUtf8String may return a failure if a GC is needed.
737 CHECK(a->IsInternalizedString());
738 Handle<String> b = factory->InternalizeUtf8String(string);
739 CHECK_EQ(*b, *a);
740 CHECK(b->IsOneByteEqualTo(base::CStrVector(string)));
741 b = isolate->factory()->InternalizeUtf8String(base::CStrVector(string));
742 CHECK_EQ(*b, *a);
743 CHECK(b->IsOneByteEqualTo(base::CStrVector(string)));
744 }
745 }
746
747
TEST(StringTable)748 TEST(StringTable) {
749 CcTest::InitializeVM();
750
751 v8::HandleScope sc(CcTest::isolate());
752 CheckInternalizedStrings(not_so_random_string_table);
753 CheckInternalizedStrings(not_so_random_string_table);
754 }
755
756
TEST(FunctionAllocation)757 TEST(FunctionAllocation) {
758 CcTest::InitializeVM();
759 Isolate* isolate = CcTest::i_isolate();
760 Factory* factory = isolate->factory();
761
762 v8::HandleScope sc(CcTest::isolate());
763 Handle<String> name = factory->InternalizeUtf8String("theFunction");
764 Handle<JSFunction> function = factory->NewFunctionForTesting(name);
765
766 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
767 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
768
769 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
770 Handle<JSObject> obj = factory->NewJSObject(function);
771 Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
772 CHECK_EQ(Smi::FromInt(23),
773 *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
774 // Check that we can add properties to function objects.
775 Object::SetProperty(isolate, function, prop_name, twenty_four).Check();
776 CHECK_EQ(
777 Smi::FromInt(24),
778 *Object::GetProperty(isolate, function, prop_name).ToHandleChecked());
779 }
780
781
TEST(ObjectProperties)782 TEST(ObjectProperties) {
783 CcTest::InitializeVM();
784 Isolate* isolate = CcTest::i_isolate();
785 Factory* factory = isolate->factory();
786
787 v8::HandleScope sc(CcTest::isolate());
788 Handle<String> object_string(
789 String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
790 Handle<Object> object =
791 Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
792 object_string)
793 .ToHandleChecked();
794 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
795 Handle<JSObject> obj = factory->NewJSObject(constructor);
796 Handle<String> first = factory->InternalizeUtf8String("first");
797 Handle<String> second = factory->InternalizeUtf8String("second");
798
799 Handle<Smi> one(Smi::FromInt(1), isolate);
800 Handle<Smi> two(Smi::FromInt(2), isolate);
801
802 // check for empty
803 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
804
805 // add first
806 Object::SetProperty(isolate, obj, first, one).Check();
807 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
808
809 // delete first
810 CHECK(Just(true) ==
811 JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
812 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
813
814 // add first and then second
815 Object::SetProperty(isolate, obj, first, one).Check();
816 Object::SetProperty(isolate, obj, second, two).Check();
817 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
818 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
819
820 // delete first and then second
821 CHECK(Just(true) ==
822 JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
823 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
824 CHECK(Just(true) ==
825 JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
826 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
827 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
828
829 // add first and then second
830 Object::SetProperty(isolate, obj, first, one).Check();
831 Object::SetProperty(isolate, obj, second, two).Check();
832 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
833 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, second));
834
835 // delete second and then first
836 CHECK(Just(true) ==
837 JSReceiver::DeleteProperty(obj, second, LanguageMode::kSloppy));
838 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, first));
839 CHECK(Just(true) ==
840 JSReceiver::DeleteProperty(obj, first, LanguageMode::kSloppy));
841 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, first));
842 CHECK(Just(false) == JSReceiver::HasOwnProperty(obj, second));
843
844 // check string and internalized string match
845 const char* string1 = "fisk";
846 Handle<String> s1 = factory->NewStringFromAsciiChecked(string1);
847 Object::SetProperty(isolate, obj, s1, one).Check();
848 Handle<String> s1_string = factory->InternalizeUtf8String(string1);
849 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s1_string));
850
851 // check internalized string and string match
852 const char* string2 = "fugl";
853 Handle<String> s2_string = factory->InternalizeUtf8String(string2);
854 Object::SetProperty(isolate, obj, s2_string, one).Check();
855 Handle<String> s2 = factory->NewStringFromAsciiChecked(string2);
856 CHECK(Just(true) == JSReceiver::HasOwnProperty(obj, s2));
857 }
858
859
TEST(JSObjectMaps)860 TEST(JSObjectMaps) {
861 CcTest::InitializeVM();
862 Isolate* isolate = CcTest::i_isolate();
863 Factory* factory = isolate->factory();
864
865 v8::HandleScope sc(CcTest::isolate());
866 Handle<String> name = factory->InternalizeUtf8String("theFunction");
867 Handle<JSFunction> function = factory->NewFunctionForTesting(name);
868
869 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
870 Handle<JSObject> obj = factory->NewJSObject(function);
871 Handle<Map> initial_map(function->initial_map(), isolate);
872
873 // Set a propery
874 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
875 Object::SetProperty(isolate, obj, prop_name, twenty_three).Check();
876 CHECK_EQ(Smi::FromInt(23),
877 *Object::GetProperty(isolate, obj, prop_name).ToHandleChecked());
878
879 // Check the map has changed
880 CHECK(*initial_map != obj->map());
881 }
882
883
TEST(JSArray)884 TEST(JSArray) {
885 CcTest::InitializeVM();
886 Isolate* isolate = CcTest::i_isolate();
887 Factory* factory = isolate->factory();
888
889 v8::HandleScope sc(CcTest::isolate());
890 Handle<String> name = factory->InternalizeUtf8String("Array");
891 Handle<Object> fun_obj =
892 Object::GetProperty(isolate, CcTest::i_isolate()->global_object(), name)
893 .ToHandleChecked();
894 Handle<JSFunction> function = Handle<JSFunction>::cast(fun_obj);
895
896 // Allocate the object.
897 Handle<Object> element;
898 Handle<JSObject> object = factory->NewJSObject(function);
899 Handle<JSArray> array = Handle<JSArray>::cast(object);
900 // We just initialized the VM, no heap allocation failure yet.
901 JSArray::Initialize(array, 0);
902
903 // Set array length to 0.
904 JSArray::SetLength(array, 0);
905 CHECK_EQ(Smi::zero(), array->length());
906 // Must be in fast mode.
907 CHECK(array->HasSmiOrObjectElements());
908
909 // array[length] = name.
910 Object::SetElement(isolate, array, 0, name, ShouldThrow::kDontThrow).Check();
911 CHECK_EQ(Smi::FromInt(1), array->length());
912 element = i::Object::GetElement(isolate, array, 0).ToHandleChecked();
913 CHECK_EQ(*element, *name);
914
915 // Set array length with larger than smi value.
916 JSArray::SetLength(array, static_cast<uint32_t>(Smi::kMaxValue) + 1);
917
918 uint32_t int_length = 0;
919 CHECK(array->length().ToArrayIndex(&int_length));
920 CHECK_EQ(static_cast<uint32_t>(Smi::kMaxValue) + 1, int_length);
921 CHECK(array->HasDictionaryElements()); // Must be in slow mode.
922
923 // array[length] = name.
924 Object::SetElement(isolate, array, int_length, name, ShouldThrow::kDontThrow)
925 .Check();
926 uint32_t new_int_length = 0;
927 CHECK(array->length().ToArrayIndex(&new_int_length));
928 CHECK_EQ(static_cast<double>(int_length), new_int_length - 1);
929 element = Object::GetElement(isolate, array, int_length).ToHandleChecked();
930 CHECK_EQ(*element, *name);
931 element = Object::GetElement(isolate, array, 0).ToHandleChecked();
932 CHECK_EQ(*element, *name);
933 }
934
935
TEST(JSObjectCopy)936 TEST(JSObjectCopy) {
937 CcTest::InitializeVM();
938 Isolate* isolate = CcTest::i_isolate();
939 Factory* factory = isolate->factory();
940
941 v8::HandleScope sc(CcTest::isolate());
942 Handle<String> object_string(
943 String::cast(ReadOnlyRoots(CcTest::heap()).Object_string()), isolate);
944 Handle<Object> object =
945 Object::GetProperty(isolate, CcTest::i_isolate()->global_object(),
946 object_string)
947 .ToHandleChecked();
948 Handle<JSFunction> constructor = Handle<JSFunction>::cast(object);
949 Handle<JSObject> obj = factory->NewJSObject(constructor);
950 Handle<String> first = factory->InternalizeUtf8String("first");
951 Handle<String> second = factory->InternalizeUtf8String("second");
952
953 Handle<Smi> one(Smi::FromInt(1), isolate);
954 Handle<Smi> two(Smi::FromInt(2), isolate);
955
956 Object::SetProperty(isolate, obj, first, one).Check();
957 Object::SetProperty(isolate, obj, second, two).Check();
958
959 Object::SetElement(isolate, obj, 0, first, ShouldThrow::kDontThrow).Check();
960 Object::SetElement(isolate, obj, 1, second, ShouldThrow::kDontThrow).Check();
961
962 // Make the clone.
963 Handle<Object> value1, value2;
964 Handle<JSObject> clone = factory->CopyJSObject(obj);
965 CHECK(!clone.is_identical_to(obj));
966
967 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
968 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
969 CHECK_EQ(*value1, *value2);
970 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
971 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
972 CHECK_EQ(*value1, *value2);
973
974 value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
975 value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
976 CHECK_EQ(*value1, *value2);
977 value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
978 value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
979 CHECK_EQ(*value1, *value2);
980
981 // Flip the values.
982 Object::SetProperty(isolate, clone, first, two).Check();
983 Object::SetProperty(isolate, clone, second, one).Check();
984
985 Object::SetElement(isolate, clone, 0, second, ShouldThrow::kDontThrow)
986 .Check();
987 Object::SetElement(isolate, clone, 1, first, ShouldThrow::kDontThrow).Check();
988
989 value1 = Object::GetElement(isolate, obj, 1).ToHandleChecked();
990 value2 = Object::GetElement(isolate, clone, 0).ToHandleChecked();
991 CHECK_EQ(*value1, *value2);
992 value1 = Object::GetElement(isolate, obj, 0).ToHandleChecked();
993 value2 = Object::GetElement(isolate, clone, 1).ToHandleChecked();
994 CHECK_EQ(*value1, *value2);
995
996 value1 = Object::GetProperty(isolate, obj, second).ToHandleChecked();
997 value2 = Object::GetProperty(isolate, clone, first).ToHandleChecked();
998 CHECK_EQ(*value1, *value2);
999 value1 = Object::GetProperty(isolate, obj, first).ToHandleChecked();
1000 value2 = Object::GetProperty(isolate, clone, second).ToHandleChecked();
1001 CHECK_EQ(*value1, *value2);
1002 }
1003
1004
TEST(StringAllocation)1005 TEST(StringAllocation) {
1006 CcTest::InitializeVM();
1007 Isolate* isolate = CcTest::i_isolate();
1008 Factory* factory = isolate->factory();
1009
1010 const unsigned char chars[] = {0xE5, 0xA4, 0xA7};
1011 for (int length = 0; length < 100; length++) {
1012 v8::HandleScope scope(CcTest::isolate());
1013 char* non_one_byte = NewArray<char>(3 * length + 1);
1014 char* one_byte = NewArray<char>(length + 1);
1015 non_one_byte[3 * length] = 0;
1016 one_byte[length] = 0;
1017 for (int i = 0; i < length; i++) {
1018 one_byte[i] = 'a';
1019 non_one_byte[3 * i] = chars[0];
1020 non_one_byte[3 * i + 1] = chars[1];
1021 non_one_byte[3 * i + 2] = chars[2];
1022 }
1023 Handle<String> non_one_byte_sym = factory->InternalizeUtf8String(
1024 base::Vector<const char>(non_one_byte, 3 * length));
1025 CHECK_EQ(length, non_one_byte_sym->length());
1026 Handle<String> one_byte_sym =
1027 factory->InternalizeString(base::OneByteVector(one_byte, length));
1028 CHECK_EQ(length, one_byte_sym->length());
1029 CHECK(one_byte_sym->HasHashCode());
1030 Handle<String> non_one_byte_str =
1031 factory
1032 ->NewStringFromUtf8(
1033 base::Vector<const char>(non_one_byte, 3 * length))
1034 .ToHandleChecked();
1035 CHECK_EQ(length, non_one_byte_str->length());
1036 Handle<String> one_byte_str =
1037 factory->NewStringFromUtf8(base::Vector<const char>(one_byte, length))
1038 .ToHandleChecked();
1039 CHECK_EQ(length, one_byte_str->length());
1040 DeleteArray(non_one_byte);
1041 DeleteArray(one_byte);
1042 }
1043 }
1044
1045
ObjectsFoundInHeap(Heap * heap,Handle<Object> objs[],int size)1046 static int ObjectsFoundInHeap(Heap* heap, Handle<Object> objs[], int size) {
1047 // Count the number of objects found in the heap.
1048 int found_count = 0;
1049 HeapObjectIterator iterator(heap);
1050 for (HeapObject obj = iterator.Next(); !obj.is_null();
1051 obj = iterator.Next()) {
1052 for (int i = 0; i < size; i++) {
1053 if (*objs[i] == obj) {
1054 found_count++;
1055 }
1056 }
1057 }
1058 return found_count;
1059 }
1060
1061
TEST(Iteration)1062 TEST(Iteration) {
1063 CcTest::InitializeVM();
1064 Isolate* isolate = CcTest::i_isolate();
1065 Factory* factory = isolate->factory();
1066 v8::HandleScope scope(CcTest::isolate());
1067
1068 // Array of objects to scan haep for.
1069 const int objs_count = 6;
1070 Handle<Object> objs[objs_count];
1071 int next_objs_index = 0;
1072
1073 // Allocate a JS array to OLD_SPACE and NEW_SPACE
1074 objs[next_objs_index++] = factory->NewJSArray(10);
1075 objs[next_objs_index++] =
1076 factory->NewJSArray(10, HOLEY_ELEMENTS, AllocationType::kOld);
1077
1078 // Allocate a small string to OLD_DATA_SPACE and NEW_SPACE
1079 objs[next_objs_index++] = factory->NewStringFromStaticChars("abcdefghij");
1080 objs[next_objs_index++] =
1081 factory->NewStringFromStaticChars("abcdefghij", AllocationType::kOld);
1082
1083 // Allocate a large string (for large object space).
1084 int large_size = kMaxRegularHeapObjectSize + 1;
1085 char* str = new char[large_size];
1086 for (int i = 0; i < large_size - 1; ++i) str[i] = 'a';
1087 str[large_size - 1] = '\0';
1088 objs[next_objs_index++] =
1089 factory->NewStringFromAsciiChecked(str, AllocationType::kOld);
1090 delete[] str;
1091
1092 // Add a Map object to look for.
1093 objs[next_objs_index++] =
1094 Handle<Map>(HeapObject::cast(*objs[0]).map(), isolate);
1095
1096 CHECK_EQ(objs_count, next_objs_index);
1097 CHECK_EQ(objs_count, ObjectsFoundInHeap(CcTest::heap(), objs, objs_count));
1098 }
1099
TEST(TestBytecodeFlushing)1100 TEST(TestBytecodeFlushing) {
1101 #ifndef V8_LITE_MODE
1102 FLAG_opt = false;
1103 FLAG_always_opt = false;
1104 i::FLAG_optimize_for_size = false;
1105 #endif // V8_LITE_MODE
1106 #if ENABLE_SPARKPLUG
1107 FLAG_always_sparkplug = false;
1108 #endif // ENABLE_SPARKPLUG
1109 i::FLAG_flush_bytecode = true;
1110 i::FLAG_allow_natives_syntax = true;
1111
1112 CcTest::InitializeVM();
1113 v8::Isolate* isolate = CcTest::isolate();
1114 Isolate* i_isolate = CcTest::i_isolate();
1115 Factory* factory = i_isolate->factory();
1116
1117 {
1118 v8::HandleScope scope(isolate);
1119 v8::Context::New(isolate)->Enter();
1120 const char* source =
1121 "function foo() {"
1122 " var x = 42;"
1123 " var y = 42;"
1124 " var z = x + y;"
1125 "};"
1126 "foo()";
1127 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1128
1129 // This compile will add the code to the compilation cache.
1130 {
1131 v8::HandleScope scope(isolate);
1132 CompileRun(source);
1133 }
1134
1135 // Check function is compiled.
1136 Handle<Object> func_value =
1137 Object::GetProperty(i_isolate, i_isolate->global_object(), foo_name)
1138 .ToHandleChecked();
1139 CHECK(func_value->IsJSFunction());
1140 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1141 CHECK(function->shared().is_compiled());
1142
1143 // The code will survive at least two GCs.
1144 CcTest::CollectAllGarbage();
1145 CcTest::CollectAllGarbage();
1146 CHECK(function->shared().is_compiled());
1147
1148 // Simulate several GCs that use full marking.
1149 const int kAgingThreshold = 6;
1150 for (int i = 0; i < kAgingThreshold; i++) {
1151 CcTest::CollectAllGarbage();
1152 }
1153
1154 // foo should no longer be in the compilation cache
1155 CHECK(!function->shared().is_compiled());
1156 CHECK(!function->is_compiled());
1157 // Call foo to get it recompiled.
1158 CompileRun("foo()");
1159 CHECK(function->shared().is_compiled());
1160 CHECK(function->is_compiled());
1161 }
1162 }
1163
HEAP_TEST(Regress10560)1164 HEAP_TEST(Regress10560) {
1165 i::FLAG_flush_bytecode = true;
1166 i::FLAG_allow_natives_syntax = true;
1167 // Disable flags that allocate a feedback vector eagerly.
1168 i::FLAG_opt = false;
1169 i::FLAG_always_opt = false;
1170 #if ENABLE_SPARKPLUG
1171 FLAG_always_sparkplug = false;
1172 #endif // ENABLE_SPARKPLUG
1173 i::FLAG_lazy_feedback_allocation = true;
1174
1175 ManualGCScope manual_gc_scope;
1176 CcTest::InitializeVM();
1177 v8::Isolate* isolate = CcTest::isolate();
1178 Isolate* i_isolate = CcTest::i_isolate();
1179 Factory* factory = i_isolate->factory();
1180 Heap* heap = i_isolate->heap();
1181
1182 {
1183 v8::HandleScope scope(isolate);
1184 const char* source =
1185 "function foo() {"
1186 " var x = 42;"
1187 " var y = 42;"
1188 " var z = x + y;"
1189 "};"
1190 "foo()";
1191 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1192 CompileRun(source);
1193
1194 // Check function is compiled.
1195 Handle<Object> func_value =
1196 Object::GetProperty(i_isolate, i_isolate->global_object(), foo_name)
1197 .ToHandleChecked();
1198 CHECK(func_value->IsJSFunction());
1199 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1200 CHECK(function->shared().is_compiled());
1201 CHECK(!function->has_feedback_vector());
1202
1203 // Pre-age bytecode so it will be flushed on next run.
1204 CHECK(function->shared().HasBytecodeArray());
1205 const int kAgingThreshold = 6;
1206 for (int i = 0; i < kAgingThreshold; i++) {
1207 function->shared().GetBytecodeArray(i_isolate).MakeOlder();
1208 if (function->shared().GetBytecodeArray(i_isolate).IsOld()) break;
1209 }
1210
1211 CHECK(function->shared().GetBytecodeArray(i_isolate).IsOld());
1212
1213 heap::SimulateFullSpace(heap->old_space());
1214
1215 // Just check bytecode isn't flushed still
1216 CHECK(function->shared().GetBytecodeArray(i_isolate).IsOld());
1217 CHECK(function->shared().is_compiled());
1218
1219 heap->set_force_gc_on_next_allocation();
1220
1221 // Allocate feedback vector.
1222 IsCompiledScope is_compiled_scope(
1223 function->shared().is_compiled_scope(i_isolate));
1224 JSFunction::EnsureFeedbackVector(function, &is_compiled_scope);
1225
1226 CHECK(function->has_feedback_vector());
1227 CHECK(function->shared().is_compiled());
1228 CHECK(function->is_compiled());
1229 }
1230 }
1231
UNINITIALIZED_TEST(Regress10843)1232 UNINITIALIZED_TEST(Regress10843) {
1233 FLAG_max_semi_space_size = 2;
1234 FLAG_min_semi_space_size = 2;
1235 FLAG_max_old_space_size = 8;
1236 FLAG_always_compact = true;
1237 v8::Isolate::CreateParams create_params;
1238 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
1239 v8::Isolate* isolate = v8::Isolate::New(create_params);
1240 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
1241 Factory* factory = i_isolate->factory();
1242 Heap* heap = i_isolate->heap();
1243 bool callback_was_invoked = false;
1244
1245 heap->AddNearHeapLimitCallback(
1246 [](void* data, size_t current_heap_limit,
1247 size_t initial_heap_limit) -> size_t {
1248 *reinterpret_cast<bool*>(data) = true;
1249 return current_heap_limit * 2;
1250 },
1251 &callback_was_invoked);
1252
1253 {
1254 HandleScope scope(i_isolate);
1255 std::vector<Handle<FixedArray>> arrays;
1256 for (int i = 0; i < 140; i++) {
1257 arrays.push_back(factory->NewFixedArray(10000));
1258 }
1259 CcTest::CollectAllGarbage(i_isolate);
1260 CcTest::CollectAllGarbage(i_isolate);
1261 for (int i = 0; i < 40; i++) {
1262 arrays.push_back(factory->NewFixedArray(10000));
1263 }
1264 CcTest::CollectAllGarbage(i_isolate);
1265 for (int i = 0; i < 100; i++) {
1266 arrays.push_back(factory->NewFixedArray(10000));
1267 }
1268 CHECK(callback_was_invoked);
1269 }
1270 isolate->Dispose();
1271 }
1272
1273 // Tests that spill slots from optimized code don't have weak pointers.
TEST(Regress10774)1274 TEST(Regress10774) {
1275 if (FLAG_single_generation) return;
1276 i::FLAG_allow_natives_syntax = true;
1277 i::FLAG_turboprop = true;
1278 i::FLAG_turbo_dynamic_map_checks = true;
1279 #ifdef VERIFY_HEAP
1280 i::FLAG_verify_heap = true;
1281 #endif
1282
1283 ManualGCScope manual_gc_scope;
1284 CcTest::InitializeVM();
1285 v8::Isolate* isolate = CcTest::isolate();
1286 Isolate* i_isolate = CcTest::i_isolate();
1287 Factory* factory = i_isolate->factory();
1288 Heap* heap = i_isolate->heap();
1289
1290 {
1291 v8::HandleScope scope(isolate);
1292 // We want to generate optimized code with dynamic map check operator that
1293 // migrates deprecated maps. To force this, we want the IC state to be
1294 // monomorphic and the map in the feedback should be a migration target.
1295 const char* source =
1296 "function f(o) {"
1297 " return o.b;"
1298 "}"
1299 "var o = {a:10, b:20};"
1300 "var o1 = {a:10, b:20};"
1301 "var o2 = {a:10, b:20};"
1302 "%PrepareFunctionForOptimization(f);"
1303 "f(o);"
1304 "o1.b = 10.23;" // Deprecate O's map.
1305 "f(o1);" // Install new map in IC
1306 "f(o);" // Mark o's map as migration target
1307 "%OptimizeFunctionOnNextCall(f);"
1308 "f(o);";
1309 CompileRun(source);
1310
1311 Handle<String> foo_name = factory->InternalizeUtf8String("f");
1312 Handle<Object> func_value =
1313 Object::GetProperty(i_isolate, i_isolate->global_object(), foo_name)
1314 .ToHandleChecked();
1315 CHECK(func_value->IsJSFunction());
1316 Handle<JSFunction> fun = Handle<JSFunction>::cast(func_value);
1317
1318 Handle<String> obj_name = factory->InternalizeUtf8String("o2");
1319 Handle<Object> obj_value =
1320 Object::GetProperty(i_isolate, i_isolate->global_object(), obj_name)
1321 .ToHandleChecked();
1322
1323 heap::SimulateFullSpace(heap->new_space());
1324
1325 Handle<JSObject> global(i_isolate->context().global_object(), i_isolate);
1326 // O2 still has the deprecated map and the optimized code should migrate O2
1327 // successfully. This shouldn't crash.
1328 Execution::Call(i_isolate, fun, global, 1, &obj_value).ToHandleChecked();
1329 }
1330 }
1331
1332 #ifndef V8_LITE_MODE
1333
TEST(TestOptimizeAfterBytecodeFlushingCandidate)1334 TEST(TestOptimizeAfterBytecodeFlushingCandidate) {
1335 if (FLAG_single_generation) return;
1336 FLAG_opt = true;
1337 FLAG_always_opt = false;
1338 #if ENABLE_SPARKPLUG
1339 FLAG_always_sparkplug = false;
1340 #endif // ENABLE_SPARKPLUG
1341 i::FLAG_optimize_for_size = false;
1342 i::FLAG_incremental_marking = true;
1343 i::FLAG_flush_bytecode = true;
1344 i::FLAG_allow_natives_syntax = true;
1345 ManualGCScope manual_gc_scope;
1346
1347 CcTest::InitializeVM();
1348 Isolate* isolate = CcTest::i_isolate();
1349 Factory* factory = isolate->factory();
1350 v8::HandleScope scope(CcTest::isolate());
1351 const char* source =
1352 "function foo() {"
1353 " var x = 42;"
1354 " var y = 42;"
1355 " var z = x + y;"
1356 "};"
1357 "foo()";
1358 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1359
1360 // This compile will add the code to the compilation cache.
1361 {
1362 v8::HandleScope scope(CcTest::isolate());
1363 CompileRun(source);
1364 }
1365
1366 // Check function is compiled.
1367 Handle<Object> func_value =
1368 Object::GetProperty(isolate, isolate->global_object(), foo_name)
1369 .ToHandleChecked();
1370 CHECK(func_value->IsJSFunction());
1371 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1372 CHECK(function->shared().is_compiled());
1373
1374 // The code will survive at least two GCs.
1375 CcTest::CollectAllGarbage();
1376 CcTest::CollectAllGarbage();
1377 CHECK(function->shared().is_compiled());
1378
1379 // Simulate several GCs that use incremental marking.
1380 const int kAgingThreshold = 6;
1381 for (int i = 0; i < kAgingThreshold; i++) {
1382 heap::SimulateIncrementalMarking(CcTest::heap());
1383 CcTest::CollectAllGarbage();
1384 }
1385 CHECK(!function->shared().is_compiled());
1386 CHECK(!function->is_compiled());
1387
1388 // This compile will compile the function again.
1389 {
1390 v8::HandleScope scope(CcTest::isolate());
1391 CompileRun("foo();");
1392 }
1393
1394 // Simulate several GCs that use incremental marking but make sure
1395 // the loop breaks once the function is enqueued as a candidate.
1396 for (int i = 0; i < kAgingThreshold; i++) {
1397 heap::SimulateIncrementalMarking(CcTest::heap());
1398 if (function->shared().GetBytecodeArray(CcTest::i_isolate()).IsOld()) break;
1399 CcTest::CollectAllGarbage();
1400 }
1401
1402 // Force optimization while incremental marking is active and while
1403 // the function is enqueued as a candidate.
1404 {
1405 v8::HandleScope scope(CcTest::isolate());
1406 CompileRun(
1407 "%PrepareFunctionForOptimization(foo);"
1408 "%OptimizeFunctionOnNextCall(foo); foo();");
1409 }
1410
1411 // Simulate one final GC and make sure the candidate wasn't flushed.
1412 CcTest::CollectAllGarbage();
1413 CHECK(function->shared().is_compiled());
1414 CHECK(function->is_compiled());
1415 }
1416
1417 #endif // V8_LITE_MODE
1418
TEST(TestUseOfIncrementalBarrierOnCompileLazy)1419 TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
1420 if (!FLAG_incremental_marking) return;
1421 // Turn off always_opt because it interferes with running the built-in for
1422 // the last call to g().
1423 FLAG_always_opt = false;
1424 FLAG_allow_natives_syntax = true;
1425 CcTest::InitializeVM();
1426 Isolate* isolate = CcTest::i_isolate();
1427 Factory* factory = isolate->factory();
1428 Heap* heap = isolate->heap();
1429 v8::HandleScope scope(CcTest::isolate());
1430
1431 CompileRun(
1432 "function make_closure(x) {"
1433 " return function() { return x + 3 };"
1434 "}"
1435 "var f = make_closure(5);"
1436 "%PrepareFunctionForOptimization(f); f();"
1437 "var g = make_closure(5);");
1438
1439 // Check f is compiled.
1440 Handle<String> f_name = factory->InternalizeUtf8String("f");
1441 Handle<Object> f_value =
1442 Object::GetProperty(isolate, isolate->global_object(), f_name)
1443 .ToHandleChecked();
1444 Handle<JSFunction> f_function = Handle<JSFunction>::cast(f_value);
1445 CHECK(f_function->is_compiled());
1446
1447 // Check g is not compiled.
1448 Handle<String> g_name = factory->InternalizeUtf8String("g");
1449 Handle<Object> g_value =
1450 Object::GetProperty(isolate, isolate->global_object(), g_name)
1451 .ToHandleChecked();
1452 Handle<JSFunction> g_function = Handle<JSFunction>::cast(g_value);
1453 CHECK(!g_function->is_compiled());
1454
1455 heap::SimulateIncrementalMarking(heap);
1456 CompileRun("%OptimizeFunctionOnNextCall(f); f();");
1457
1458 // g should now have available an optimized function, unmarked by gc. The
1459 // CompileLazy built-in will discover it and install it in the closure, and
1460 // the incremental write barrier should be used.
1461 CompileRun("g();");
1462 CHECK(g_function->is_compiled());
1463 }
1464
TEST(CompilationCacheCachingBehavior)1465 TEST(CompilationCacheCachingBehavior) {
1466 // If we do not have the compilation cache turned off, this test is invalid.
1467 if (!FLAG_compilation_cache) {
1468 return;
1469 }
1470 CcTest::InitializeVM();
1471 Isolate* isolate = CcTest::i_isolate();
1472 Factory* factory = isolate->factory();
1473 CompilationCache* compilation_cache = isolate->compilation_cache();
1474 LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1475
1476 v8::HandleScope scope(CcTest::isolate());
1477 const char* raw_source =
1478 "function foo() {"
1479 " var x = 42;"
1480 " var y = 42;"
1481 " var z = x + y;"
1482 "};"
1483 "foo();";
1484 Handle<String> source = factory->InternalizeUtf8String(raw_source);
1485
1486 {
1487 v8::HandleScope scope(CcTest::isolate());
1488 CompileRun(raw_source);
1489 }
1490
1491 // The script should be in the cache now.
1492 {
1493 v8::HandleScope scope(CcTest::isolate());
1494 ScriptDetails script_details(Handle<Object>(),
1495 v8::ScriptOriginOptions(true, false));
1496 MaybeHandle<SharedFunctionInfo> cached_script =
1497 compilation_cache->LookupScript(source, script_details, language_mode);
1498 CHECK(!cached_script.is_null());
1499 }
1500
1501 // Check that the code cache entry survives at least one GC.
1502 {
1503 CcTest::CollectAllGarbage();
1504 v8::HandleScope scope(CcTest::isolate());
1505 ScriptDetails script_details(Handle<Object>(),
1506 v8::ScriptOriginOptions(true, false));
1507 MaybeHandle<SharedFunctionInfo> cached_script =
1508 compilation_cache->LookupScript(source, script_details, language_mode);
1509 CHECK(!cached_script.is_null());
1510
1511 // Progress code age until it's old and ready for GC.
1512 Handle<SharedFunctionInfo> shared = cached_script.ToHandleChecked();
1513 CHECK(shared->HasBytecodeArray());
1514 const int kAgingThreshold = 6;
1515 for (int i = 0; i < kAgingThreshold; i++) {
1516 shared->GetBytecodeArray(CcTest::i_isolate()).MakeOlder();
1517 }
1518 }
1519
1520 CcTest::CollectAllGarbage();
1521
1522 {
1523 v8::HandleScope scope(CcTest::isolate());
1524 // Ensure code aging cleared the entry from the cache.
1525 ScriptDetails script_details(Handle<Object>(),
1526 v8::ScriptOriginOptions(true, false));
1527 MaybeHandle<SharedFunctionInfo> cached_script =
1528 compilation_cache->LookupScript(source, script_details, language_mode);
1529 CHECK(cached_script.is_null());
1530 }
1531 }
1532
1533
1534 static void OptimizeEmptyFunction(const char* name) {
1535 HandleScope scope(CcTest::i_isolate());
1536 base::EmbeddedVector<char, 256> source;
1537 base::SNPrintF(source,
1538 "function %s() { return 0; }"
1539 "%%PrepareFunctionForOptimization(%s);"
1540 "%s(); %s();"
1541 "%%OptimizeFunctionOnNextCall(%s);"
1542 "%s();",
1543 name, name, name, name, name, name);
1544 CompileRun(source.begin());
1545 }
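// Illustration (not part of the test fixture): for name == "f1" the snippet
// above expands to roughly the following script, which warms f1 up and then
// forces its optimization:
//
//   function f1() { return 0; }
//   %PrepareFunctionForOptimization(f1);
//   f1(); f1();
//   %OptimizeFunctionOnNextCall(f1);
//   f1();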
1546
1547
1548 // Count the number of native contexts in the weak list of native contexts.
1549 int CountNativeContexts() {
1550 int count = 0;
1551 Object object = CcTest::heap()->native_contexts_list();
1552 while (!object.IsUndefined(CcTest::i_isolate())) {
1553 count++;
1554 object = Context::cast(object).next_context_link();
1555 }
1556 return count;
1557 }
1558
1559 TEST(TestInternalWeakLists) {
1560 FLAG_always_opt = false;
1561 FLAG_allow_natives_syntax = true;
1562 v8::V8::Initialize();
1563
1564 // Some flags turn Scavenge collections into Mark-sweep collections
1565 // and hence are incompatible with this test case.
1566 if (FLAG_gc_global || FLAG_stress_compaction ||
1567 FLAG_stress_incremental_marking || FLAG_single_generation)
1568 return;
1569 FLAG_retain_maps_for_n_gc = 0;
1570
1571 static const int kNumTestContexts = 10;
1572
1573 Isolate* isolate = CcTest::i_isolate();
1574 HandleScope scope(isolate);
1575 v8::Local<v8::Context> ctx[kNumTestContexts];
1576 if (!isolate->use_optimizer()) return;
1577
1578 CHECK_EQ(0, CountNativeContexts());
1579
1580   // Create a number of global contexts which get linked together.
1581 for (int i = 0; i < kNumTestContexts; i++) {
1582 ctx[i] = v8::Context::New(CcTest::isolate());
1583
1584 // Collect garbage that might have been created by one of the
1585 // installed extensions.
1586 isolate->compilation_cache()->Clear();
1587 CcTest::CollectAllGarbage();
1588
1589 CHECK_EQ(i + 1, CountNativeContexts());
1590
1591 ctx[i]->Enter();
1592
1593 // Create a handle scope so no function objects get stuck in the outer
1594 // handle scope.
1595 HandleScope scope(isolate);
1596 OptimizeEmptyFunction("f1");
1597 OptimizeEmptyFunction("f2");
1598 OptimizeEmptyFunction("f3");
1599 OptimizeEmptyFunction("f4");
1600 OptimizeEmptyFunction("f5");
1601
1602     // Remove function f1 by clearing the global reference to it.
1603 CompileRun("f1=null");
1604
1605 // Scavenge treats these references as strong.
1606 for (int j = 0; j < 10; j++) {
1607 CcTest::CollectGarbage(NEW_SPACE);
1608 }
1609
1610 // Mark compact handles the weak references.
1611 isolate->compilation_cache()->Clear();
1612 CcTest::CollectAllGarbage();
1613
1614 // Get rid of f3 and f5 in the same way.
1615 CompileRun("f3=null");
1616 for (int j = 0; j < 10; j++) {
1617 CcTest::CollectGarbage(NEW_SPACE);
1618 }
1619 CcTest::CollectAllGarbage();
1620 CompileRun("f5=null");
1621 for (int j = 0; j < 10; j++) {
1622 CcTest::CollectGarbage(NEW_SPACE);
1623 }
1624 CcTest::CollectAllGarbage();
1625
1626 ctx[i]->Exit();
1627 }
1628
1629 // Force compilation cache cleanup.
1630 CcTest::heap()->NotifyContextDisposed(true);
1631 CcTest::CollectAllGarbage();
1632
1633 // Dispose the native contexts one by one.
1634 for (int i = 0; i < kNumTestContexts; i++) {
1635 // TODO(dcarney): is there a better way to do this?
1636 i::Address* unsafe = reinterpret_cast<i::Address*>(*ctx[i]);
1637 *unsafe = ReadOnlyRoots(CcTest::heap()).undefined_value().ptr();
1638 ctx[i].Clear();
1639
1640 // Scavenge treats these references as strong.
1641 for (int j = 0; j < 10; j++) {
1642 CcTest::CollectGarbage(i::NEW_SPACE);
1643 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1644 }
1645
1646 // Mark compact handles the weak references.
1647 CcTest::CollectAllGarbage();
1648 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1649 }
1650
1651 CHECK_EQ(0, CountNativeContexts());
1652 }
1653
1654
1655 TEST(TestSizeOfRegExpCode) {
1656 if (!FLAG_regexp_optimization) return;
1657 FLAG_stress_concurrent_allocation = false;
1658
1659 v8::V8::Initialize();
1660
1661 Isolate* isolate = CcTest::i_isolate();
1662 HandleScope scope(isolate);
1663
1664 LocalContext context;
1665
1666 // Adjust source below and this check to match
1667 // RegExp::kRegExpTooLargeToOptimize.
1668 CHECK_EQ(i::RegExp::kRegExpTooLargeToOptimize, 20 * KB);
1669
1670 // Compile a regexp that is much larger if we are using regexp optimizations.
1671 CompileRun(
1672 "var reg_exp_source = '(?:a|bc|def|ghij|klmno|pqrstu)';"
1673 "var half_size_reg_exp;"
1674 "while (reg_exp_source.length < 20 * 1024) {"
1675 " half_size_reg_exp = reg_exp_source;"
1676 " reg_exp_source = reg_exp_source + reg_exp_source;"
1677 "}"
1678 // Flatten string.
1679 "reg_exp_source.match(/f/);");
1680
1681 // Get initial heap size after several full GCs, which will stabilize
1682 // the heap size and return with sweeping finished completely.
1683 CcTest::CollectAllAvailableGarbage();
1684 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1685 if (collector->sweeping_in_progress()) {
1686 collector->EnsureSweepingCompleted();
1687 }
1688 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1689
1690 CompileRun("'foo'.match(reg_exp_source);");
1691 CcTest::CollectAllAvailableGarbage();
1692 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1693
1694 CompileRun("'foo'.match(half_size_reg_exp);");
1695 CcTest::CollectAllAvailableGarbage();
1696 int size_with_optimized_regexp =
1697 static_cast<int>(CcTest::heap()->SizeOfObjects());
1698
1699 int size_of_regexp_code = size_with_regexp - initial_size;
1700
1701 // On some platforms the debug-code flag causes huge amounts of regexp code
1702 // to be emitted, breaking this test.
1703 if (!FLAG_debug_code) {
1704 CHECK_LE(size_of_regexp_code, 1 * MB);
1705 }
1706
1707 // Small regexp is half the size, but compiles to more than twice the code
1708 // due to the optimization steps.
1709 CHECK_GE(size_with_optimized_regexp,
1710 size_with_regexp + size_of_regexp_code * 2);
1711 }
1712
1713
1714 HEAP_TEST(TestSizeOfObjects) {
1715 FLAG_stress_concurrent_allocation = false;
1716 v8::V8::Initialize();
1717 Isolate* isolate = CcTest::i_isolate();
1718 Heap* heap = CcTest::heap();
1719   // Disable the LAB so that calculations with SizeOfObjects() and object
1720   // sizes are correct.
1721 heap->DisableInlineAllocation();
1722 MarkCompactCollector* collector = heap->mark_compact_collector();
1723
1724 // Get initial heap size after several full GCs, which will stabilize
1725 // the heap size and return with sweeping finished completely.
1726 CcTest::CollectAllAvailableGarbage();
1727 if (collector->sweeping_in_progress()) {
1728 collector->EnsureSweepingCompleted();
1729 }
1730 int initial_size = static_cast<int>(heap->SizeOfObjects());
1731
1732 {
1733 HandleScope scope(isolate);
1734 // Allocate objects on several different old-space pages so that
1735 // concurrent sweeper threads will be busy sweeping the old space on
1736 // subsequent GC runs.
1737 AlwaysAllocateScopeForTesting always_allocate(heap);
1738 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
1739 for (int i = 1; i <= 100; i++) {
1740 isolate->factory()->NewFixedArray(8192, AllocationType::kOld);
1741 CHECK_EQ(initial_size + i * filler_size,
1742 static_cast<int>(heap->SizeOfObjects()));
1743 }
1744 }
1745
1746 // The heap size should go back to initial size after a full GC, even
1747 // though sweeping didn't finish yet.
1748 CcTest::CollectAllGarbage();
1749 // Normally sweeping would not be complete here, but no guarantees.
1750 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
1751 // Waiting for sweeper threads should not change heap size.
1752 if (collector->sweeping_in_progress()) {
1753 collector->EnsureSweepingCompleted();
1754 }
1755 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
1756 }
1757
1758
1759 TEST(TestAlignmentCalculations) {
1760 // Maximum fill amounts are consistent.
1761 int maximum_double_misalignment = kDoubleSize - kTaggedSize;
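  // Worked example (configuration-dependent): with pointer compression or on
  // 32-bit targets kTaggedSize is 4 and kDoubleSize is 8, so the maximum
  // double misalignment is 4; on 64-bit targets without pointer compression
  // both are 8 and the misalignment is 0, which makes the double-alignment
  // checks below trivially satisfied.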
1762 int max_word_fill = Heap::GetMaximumFillToAlign(kWordAligned);
1763 CHECK_EQ(0, max_word_fill);
1764 int max_double_fill = Heap::GetMaximumFillToAlign(kDoubleAligned);
1765 CHECK_EQ(maximum_double_misalignment, max_double_fill);
1766 int max_double_unaligned_fill = Heap::GetMaximumFillToAlign(kDoubleUnaligned);
1767 CHECK_EQ(maximum_double_misalignment, max_double_unaligned_fill);
1768
1769 Address base = kNullAddress;
1770 int fill = 0;
1771
1772 // Word alignment never requires fill.
1773 fill = Heap::GetFillToAlign(base, kWordAligned);
1774 CHECK_EQ(0, fill);
1775 fill = Heap::GetFillToAlign(base + kTaggedSize, kWordAligned);
1776 CHECK_EQ(0, fill);
1777
1778 // No fill is required when address is double aligned.
1779 fill = Heap::GetFillToAlign(base, kDoubleAligned);
1780 CHECK_EQ(0, fill);
1781 // Fill is required if address is not double aligned.
1782 fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleAligned);
1783 CHECK_EQ(maximum_double_misalignment, fill);
1784 // kDoubleUnaligned has the opposite fill amounts.
1785 fill = Heap::GetFillToAlign(base, kDoubleUnaligned);
1786 CHECK_EQ(maximum_double_misalignment, fill);
1787 fill = Heap::GetFillToAlign(base + kTaggedSize, kDoubleUnaligned);
1788 CHECK_EQ(0, fill);
1789 }
1790
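// Helper (description added for clarity): allocates |size| bytes in new space
// with the requested alignment and immediately covers the memory with a filler
// object, so the otherwise uninitialized allocation keeps the heap iterable.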
1791 static HeapObject NewSpaceAllocateAligned(int size,
1792 AllocationAlignment alignment) {
1793 Heap* heap = CcTest::heap();
1794 AllocationResult allocation = heap->new_space()->AllocateRaw(size, alignment);
1795 HeapObject obj;
1796 allocation.To(&obj);
1797 heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
1798 return obj;
1799 }
1800
1801 // Get new space allocation into the desired alignment.
1802 static Address AlignNewSpace(AllocationAlignment alignment, int offset) {
1803 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1804 int fill = Heap::GetFillToAlign(*top_addr, alignment);
1805 int allocation = fill + offset;
1806 if (allocation) {
1807 NewSpaceAllocateAligned(allocation, kWordAligned);
1808 }
1809 return *top_addr;
1810 }
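// For example, AlignNewSpace(kDoubleAligned, kTaggedSize) leaves the new-space
// top exactly kTaggedSize past a double-aligned address, so the next
// kDoubleAligned allocation has to insert a kTaggedSize filler first; the
// aligned allocation tests below rely on this to exercise both the filler and
// the no-filler paths.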
1811
1812
1813 TEST(TestAlignedAllocation) {
1814 if (FLAG_single_generation) return;
1815 // Double misalignment is 4 on 32-bit platforms or when pointer compression
1816 // is enabled, 0 on 64-bit ones when pointer compression is disabled.
1817 const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
1818 Address* top_addr = CcTest::heap()->new_space()->allocation_top_address();
1819 Address start;
1820 HeapObject obj;
1821 HeapObject filler;
1822 if (double_misalignment) {
1823 // Allocate a pointer sized object that must be double aligned at an
1824 // aligned address.
1825 start = AlignNewSpace(kDoubleAligned, 0);
1826 obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1827 CHECK(IsAligned(obj.address(), kDoubleAlignment));
1828 // There is no filler.
1829 CHECK_EQ(kTaggedSize, *top_addr - start);
1830
1831 // Allocate a second pointer sized object that must be double aligned at an
1832 // unaligned address.
1833 start = AlignNewSpace(kDoubleAligned, kTaggedSize);
1834 obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1835 CHECK(IsAligned(obj.address(), kDoubleAlignment));
1836 // There is a filler object before the object.
1837 filler = HeapObject::FromAddress(start);
1838 CHECK(obj != filler && filler.IsFreeSpaceOrFiller() &&
1839 filler.Size() == kTaggedSize);
1840 CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
1841
1842 // Similarly for kDoubleUnaligned.
1843 start = AlignNewSpace(kDoubleUnaligned, 0);
1844 obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1845 CHECK(IsAligned(obj.address() + kTaggedSize, kDoubleAlignment));
1846 CHECK_EQ(kTaggedSize, *top_addr - start);
1847 start = AlignNewSpace(kDoubleUnaligned, kTaggedSize);
1848 obj = NewSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1849 CHECK(IsAligned(obj.address() + kTaggedSize, kDoubleAlignment));
1850 // There is a filler object before the object.
1851 filler = HeapObject::FromAddress(start);
1852 CHECK(obj != filler && filler.IsFreeSpaceOrFiller() &&
1853 filler.Size() == kTaggedSize);
1854 CHECK_EQ(kTaggedSize + double_misalignment, *top_addr - start);
1855 }
1856 }
1857
1858 static HeapObject OldSpaceAllocateAligned(int size,
1859 AllocationAlignment alignment) {
1860 Heap* heap = CcTest::heap();
1861 AllocationResult allocation =
1862 heap->old_space()->AllocateRawAligned(size, alignment);
1863 HeapObject obj;
1864 allocation.To(&obj);
1865 heap->CreateFillerObjectAt(obj.address(), size, ClearRecordedSlots::kNo);
1866 return obj;
1867 }
1868
1869 // Get old space allocation into the desired alignment.
1870 static Address AlignOldSpace(AllocationAlignment alignment, int offset) {
1871 Address* top_addr = CcTest::heap()->old_space()->allocation_top_address();
1872 int fill = Heap::GetFillToAlign(*top_addr, alignment);
1873 int allocation = fill + offset;
1874 if (allocation) {
1875 OldSpaceAllocateAligned(allocation, kWordAligned);
1876 }
1877 Address top = *top_addr;
1878 // Now force the remaining allocation onto the free list.
1879 CcTest::heap()->old_space()->FreeLinearAllocationArea();
1880 return top;
1881 }
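// Unlike the new-space helper above, this one also releases the linear
// allocation area, so the next aligned allocation has to be satisfied from the
// free list, which is exactly the situation TestAlignedOverAllocation wants to
// cover.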
1882
1883
1884 // Test the case where allocation must be done from the free list, so filler
1885 // may precede or follow the object.
1886 TEST(TestAlignedOverAllocation) {
1887 if (FLAG_stress_concurrent_allocation) return;
1888 ManualGCScope manual_gc_scope;
1889 Heap* heap = CcTest::heap();
1890 // Test checks for fillers before and behind objects and requires a fresh
1891 // page and empty free list.
1892 heap::AbandonCurrentlyFreeMemory(heap->old_space());
1893 // Allocate a dummy object to properly set up the linear allocation info.
1894 AllocationResult dummy = heap->old_space()->AllocateRawUnaligned(kTaggedSize);
1895 CHECK(!dummy.IsRetry());
1896 heap->CreateFillerObjectAt(dummy.ToObjectChecked().address(), kTaggedSize,
1897 ClearRecordedSlots::kNo);
1898
1899 // Double misalignment is 4 on 32-bit platforms or when pointer compression
1900 // is enabled, 0 on 64-bit ones when pointer compression is disabled.
1901 const intptr_t double_misalignment = kDoubleSize - kTaggedSize;
1902 Address start;
1903 HeapObject obj;
1904 HeapObject filler;
1905 if (double_misalignment) {
1906 start = AlignOldSpace(kDoubleAligned, 0);
1907 obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1908 // The object is aligned.
1909 CHECK(IsAligned(obj.address(), kDoubleAlignment));
1910 // Try the opposite alignment case.
1911 start = AlignOldSpace(kDoubleAligned, kTaggedSize);
1912 obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleAligned);
1913 CHECK(IsAligned(obj.address(), kDoubleAlignment));
1914 filler = HeapObject::FromAddress(start);
1915 CHECK(obj != filler);
1916 CHECK(filler.IsFreeSpaceOrFiller());
1917 CHECK_EQ(kTaggedSize, filler.Size());
1918 CHECK(obj != filler && filler.IsFreeSpaceOrFiller() &&
1919 filler.Size() == kTaggedSize);
1920
1921 // Similarly for kDoubleUnaligned.
1922 start = AlignOldSpace(kDoubleUnaligned, 0);
1923 obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1924 // The object is aligned.
1925 CHECK(IsAligned(obj.address() + kTaggedSize, kDoubleAlignment));
1926 // Try the opposite alignment case.
1927 start = AlignOldSpace(kDoubleUnaligned, kTaggedSize);
1928 obj = OldSpaceAllocateAligned(kTaggedSize, kDoubleUnaligned);
1929 CHECK(IsAligned(obj.address() + kTaggedSize, kDoubleAlignment));
1930 filler = HeapObject::FromAddress(start);
1931 CHECK(obj != filler && filler.IsFreeSpaceOrFiller() &&
1932 filler.Size() == kTaggedSize);
1933 }
1934 }
1935
1936 TEST(HeapNumberAlignment) {
1937 if (!FLAG_allocation_site_pretenuring) return;
1938 CcTest::InitializeVM();
1939 Isolate* isolate = CcTest::i_isolate();
1940 Factory* factory = isolate->factory();
1941 Heap* heap = isolate->heap();
1942 HandleScope sc(isolate);
1943
1944 const auto required_alignment =
1945 HeapObject::RequiredAlignment(*factory->heap_number_map());
1946 const int maximum_misalignment =
1947 Heap::GetMaximumFillToAlign(required_alignment);
1948
1949 for (int offset = 0; offset <= maximum_misalignment; offset += kTaggedSize) {
1950 if (!FLAG_single_generation) {
1951 AlignNewSpace(required_alignment, offset);
1952 Handle<Object> number_new = factory->NewNumber(1.000123);
1953 CHECK(number_new->IsHeapNumber());
1954 CHECK(Heap::InYoungGeneration(*number_new));
1955 CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_new).address(),
1956 required_alignment));
1957 }
1958
1959 AlignOldSpace(required_alignment, offset);
1960 Handle<Object> number_old =
1961 factory->NewNumber<AllocationType::kOld>(1.000321);
1962 CHECK(number_old->IsHeapNumber());
1963 CHECK(heap->InOldSpace(*number_old));
1964 CHECK_EQ(0, Heap::GetFillToAlign(HeapObject::cast(*number_old).address(),
1965 required_alignment));
1966 }
1967 }
1968
1969 TEST(TestSizeOfObjectsVsHeapObjectIteratorPrecision) {
1970 CcTest::InitializeVM();
1971   // Disable the LAB so that calculations with SizeOfObjects() and object
1972   // sizes are correct.
1973 CcTest::heap()->DisableInlineAllocation();
1974 HeapObjectIterator iterator(CcTest::heap());
1975 intptr_t size_of_objects_1 = CcTest::heap()->SizeOfObjects();
1976 intptr_t size_of_objects_2 = 0;
1977 for (HeapObject obj = iterator.Next(); !obj.is_null();
1978 obj = iterator.Next()) {
1979 if (!obj.IsFreeSpace()) {
1980 size_of_objects_2 += obj.Size();
1981 }
1982 }
1983 // Delta must be within 5% of the larger result.
1984 // TODO(gc): Tighten this up by distinguishing between byte
1985 // arrays that are real and those that merely mark free space
1986 // on the heap.
1987 if (size_of_objects_1 > size_of_objects_2) {
1988 intptr_t delta = size_of_objects_1 - size_of_objects_2;
1989 PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1990 ", "
1991 "Iterator: %" V8PRIdPTR
1992 ", "
1993 "delta: %" V8PRIdPTR "\n",
1994 size_of_objects_1, size_of_objects_2, delta);
1995 CHECK_GT(size_of_objects_1 / 20, delta);
1996 } else {
1997 intptr_t delta = size_of_objects_2 - size_of_objects_1;
1998 PrintF("Heap::SizeOfObjects: %" V8PRIdPTR
1999 ", "
2000 "Iterator: %" V8PRIdPTR
2001 ", "
2002 "delta: %" V8PRIdPTR "\n",
2003 size_of_objects_1, size_of_objects_2, delta);
2004 CHECK_GT(size_of_objects_2 / 20, delta);
2005 }
2006 }
2007
2008 TEST(GrowAndShrinkNewSpace) {
2009 if (FLAG_single_generation) return;
2010 // Avoid shrinking new space in GC epilogue. This can happen if allocation
2011 // throughput samples have been taken while executing the benchmark.
2012 FLAG_predictable = true;
2013 FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
2014 CcTest::InitializeVM();
2015 Heap* heap = CcTest::heap();
2016 NewSpace* new_space = heap->new_space();
2017
2018 if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2019 return;
2020 }
2021
2022 // Make sure we're in a consistent state to start out.
2023 CcTest::CollectAllGarbage();
2024 CcTest::CollectAllGarbage();
2025 new_space->Shrink();
2026
2027 // Explicitly growing should double the space capacity.
2028 size_t old_capacity, new_capacity;
2029 old_capacity = new_space->TotalCapacity();
2030 GrowNewSpace(heap);
2031 new_capacity = new_space->TotalCapacity();
2032 CHECK_EQ(2 * old_capacity, new_capacity);
2033
2034 old_capacity = new_space->TotalCapacity();
2035 {
2036 v8::HandleScope temporary_scope(CcTest::isolate());
2037 heap::SimulateFullSpace(new_space);
2038 }
2039 new_capacity = new_space->TotalCapacity();
2040 CHECK_EQ(old_capacity, new_capacity);
2041
2042 // Explicitly shrinking should not affect space capacity.
2043 old_capacity = new_space->TotalCapacity();
2044 new_space->Shrink();
2045 new_capacity = new_space->TotalCapacity();
2046 CHECK_EQ(old_capacity, new_capacity);
2047
2048 // Let the scavenger empty the new space.
2049 CcTest::CollectGarbage(NEW_SPACE);
2050 CHECK_LE(new_space->Size(), old_capacity);
2051
2052 // Explicitly shrinking should halve the space capacity.
2053 old_capacity = new_space->TotalCapacity();
2054 new_space->Shrink();
2055 new_capacity = new_space->TotalCapacity();
2056 CHECK_EQ(old_capacity, 2 * new_capacity);
2057
2058 // Consecutive shrinking should not affect space capacity.
2059 old_capacity = new_space->TotalCapacity();
2060 new_space->Shrink();
2061 new_space->Shrink();
2062 new_space->Shrink();
2063 new_capacity = new_space->TotalCapacity();
2064 CHECK_EQ(old_capacity, new_capacity);
2065 }
2066
2067 TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
2068 if (FLAG_single_generation) return;
2069 FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
2070 CcTest::InitializeVM();
2071 Heap* heap = CcTest::heap();
2072 if (heap->MaxSemiSpaceSize() == heap->InitialSemiSpaceSize()) {
2073 return;
2074 }
2075
2076 v8::HandleScope scope(CcTest::isolate());
2077 NewSpace* new_space = heap->new_space();
2078 size_t old_capacity, new_capacity;
2079 old_capacity = new_space->TotalCapacity();
2080 GrowNewSpace(heap);
2081 new_capacity = new_space->TotalCapacity();
2082 CHECK_EQ(2 * old_capacity, new_capacity);
2083 {
2084 v8::HandleScope temporary_scope(CcTest::isolate());
2085 heap::SimulateFullSpace(new_space);
2086 }
2087 CcTest::CollectAllAvailableGarbage();
2088 new_capacity = new_space->TotalCapacity();
2089 CHECK_EQ(old_capacity, new_capacity);
2090 }
2091
2092 static int NumberOfGlobalObjects() {
2093 int count = 0;
2094 HeapObjectIterator iterator(CcTest::heap());
2095 for (HeapObject obj = iterator.Next(); !obj.is_null();
2096 obj = iterator.Next()) {
2097 if (obj.IsJSGlobalObject()) count++;
2098 }
2099 return count;
2100 }
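// Each live native context contributes one JSGlobalObject, so the leak tests
// below expect this count to drop from two (both contexts alive) to one and
// finally to zero as the contexts are disposed and collected.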
2101
2102
2103 // Test that we don't embed maps from foreign contexts into
2104 // optimized code.
2105 TEST(LeakNativeContextViaMap) {
2106 FLAG_allow_natives_syntax = true;
2107 v8::Isolate* isolate = CcTest::isolate();
2108 v8::HandleScope outer_scope(isolate);
2109 v8::Persistent<v8::Context> ctx1p;
2110 v8::Persistent<v8::Context> ctx2p;
2111 {
2112 v8::HandleScope scope(isolate);
2113 ctx1p.Reset(isolate, v8::Context::New(isolate));
2114 ctx2p.Reset(isolate, v8::Context::New(isolate));
2115 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2116 }
2117
2118 CcTest::CollectAllAvailableGarbage();
2119 CHECK_EQ(2, NumberOfGlobalObjects());
2120
2121 {
2122 v8::HandleScope inner_scope(isolate);
2123 CompileRun("var v = {x: 42}");
2124 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2125 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2126 v8::Local<v8::Value> v =
2127 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2128 ctx2->Enter();
2129 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2130 v8::Local<v8::Value> res = CompileRun(
2131 "function f() { return o.x; }"
2132 "%PrepareFunctionForOptimization(f);"
2133 "for (var i = 0; i < 10; ++i) f();"
2134 "%OptimizeFunctionOnNextCall(f);"
2135 "f();");
2136 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2137 CHECK(ctx2->Global()
2138 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2139 .FromJust());
2140 ctx2->Exit();
2141 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2142 ctx1p.Reset();
2143 isolate->ContextDisposedNotification();
2144 }
2145 CcTest::CollectAllAvailableGarbage();
2146 CHECK_EQ(1, NumberOfGlobalObjects());
2147 ctx2p.Reset();
2148 CcTest::CollectAllAvailableGarbage();
2149 CHECK_EQ(0, NumberOfGlobalObjects());
2150 }
2151
2152
2153 // Test that we don't embed functions from foreign contexts into
2154 // optimized code.
2155 TEST(LeakNativeContextViaFunction) {
2156 FLAG_allow_natives_syntax = true;
2157 v8::Isolate* isolate = CcTest::isolate();
2158 v8::HandleScope outer_scope(isolate);
2159 v8::Persistent<v8::Context> ctx1p;
2160 v8::Persistent<v8::Context> ctx2p;
2161 {
2162 v8::HandleScope scope(isolate);
2163 ctx1p.Reset(isolate, v8::Context::New(isolate));
2164 ctx2p.Reset(isolate, v8::Context::New(isolate));
2165 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2166 }
2167
2168 CcTest::CollectAllAvailableGarbage();
2169 CHECK_EQ(2, NumberOfGlobalObjects());
2170
2171 {
2172 v8::HandleScope inner_scope(isolate);
2173 CompileRun("var v = function() { return 42; }");
2174 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2175 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2176 v8::Local<v8::Value> v =
2177 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2178 ctx2->Enter();
2179 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2180 v8::Local<v8::Value> res = CompileRun(
2181 "function f(x) { return x(); }"
2182 "%PrepareFunctionForOptimization(f);"
2183 "for (var i = 0; i < 10; ++i) f(o);"
2184 "%OptimizeFunctionOnNextCall(f);"
2185 "f(o);");
2186 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2187 CHECK(ctx2->Global()
2188 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2189 .FromJust());
2190 ctx2->Exit();
2191 ctx1->Exit();
2192 ctx1p.Reset();
2193 isolate->ContextDisposedNotification();
2194 }
2195 CcTest::CollectAllAvailableGarbage();
2196 CHECK_EQ(1, NumberOfGlobalObjects());
2197 ctx2p.Reset();
2198 CcTest::CollectAllAvailableGarbage();
2199 CHECK_EQ(0, NumberOfGlobalObjects());
2200 }
2201
2202
2203 TEST(LeakNativeContextViaMapKeyed) {
2204 FLAG_allow_natives_syntax = true;
2205 v8::Isolate* isolate = CcTest::isolate();
2206 v8::HandleScope outer_scope(isolate);
2207 v8::Persistent<v8::Context> ctx1p;
2208 v8::Persistent<v8::Context> ctx2p;
2209 {
2210 v8::HandleScope scope(isolate);
2211 ctx1p.Reset(isolate, v8::Context::New(isolate));
2212 ctx2p.Reset(isolate, v8::Context::New(isolate));
2213 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2214 }
2215
2216 CcTest::CollectAllAvailableGarbage();
2217 CHECK_EQ(2, NumberOfGlobalObjects());
2218
2219 {
2220 v8::HandleScope inner_scope(isolate);
2221 CompileRun("var v = [42, 43]");
2222 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2223 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2224 v8::Local<v8::Value> v =
2225 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2226 ctx2->Enter();
2227 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2228 v8::Local<v8::Value> res = CompileRun(
2229 "function f() { return o[0]; }"
2230 "%PrepareFunctionForOptimization(f);"
2231 "for (var i = 0; i < 10; ++i) f();"
2232 "%OptimizeFunctionOnNextCall(f);"
2233 "f();");
2234 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2235 CHECK(ctx2->Global()
2236 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2237 .FromJust());
2238 ctx2->Exit();
2239 ctx1->Exit();
2240 ctx1p.Reset();
2241 isolate->ContextDisposedNotification();
2242 }
2243 CcTest::CollectAllAvailableGarbage();
2244 CHECK_EQ(1, NumberOfGlobalObjects());
2245 ctx2p.Reset();
2246 CcTest::CollectAllAvailableGarbage();
2247 CHECK_EQ(0, NumberOfGlobalObjects());
2248 }
2249
2250
2251 TEST(LeakNativeContextViaMapProto) {
2252 FLAG_allow_natives_syntax = true;
2253 v8::Isolate* isolate = CcTest::isolate();
2254 v8::HandleScope outer_scope(isolate);
2255 v8::Persistent<v8::Context> ctx1p;
2256 v8::Persistent<v8::Context> ctx2p;
2257 {
2258 v8::HandleScope scope(isolate);
2259 ctx1p.Reset(isolate, v8::Context::New(isolate));
2260 ctx2p.Reset(isolate, v8::Context::New(isolate));
2261 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2262 }
2263
2264 CcTest::CollectAllAvailableGarbage();
2265 CHECK_EQ(2, NumberOfGlobalObjects());
2266
2267 {
2268 v8::HandleScope inner_scope(isolate);
2269 CompileRun("var v = { y: 42}");
2270 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2271 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2272 v8::Local<v8::Value> v =
2273 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2274 ctx2->Enter();
2275 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2276 v8::Local<v8::Value> res = CompileRun(
2277 "function f() {"
2278 " var p = {x: 42};"
2279 " p.__proto__ = o;"
2280 " return p.x;"
2281 "}"
2282 "%PrepareFunctionForOptimization(f);"
2283 "for (var i = 0; i < 10; ++i) f();"
2284 "%OptimizeFunctionOnNextCall(f);"
2285 "f();");
2286 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2287 CHECK(ctx2->Global()
2288 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2289 .FromJust());
2290 ctx2->Exit();
2291 ctx1->Exit();
2292 ctx1p.Reset();
2293 isolate->ContextDisposedNotification();
2294 }
2295 CcTest::CollectAllAvailableGarbage();
2296 CHECK_EQ(1, NumberOfGlobalObjects());
2297 ctx2p.Reset();
2298 CcTest::CollectAllAvailableGarbage();
2299 CHECK_EQ(0, NumberOfGlobalObjects());
2300 }
2301
2302
2303 TEST(InstanceOfStubWriteBarrier) {
2304 if (!FLAG_incremental_marking) return;
2305 ManualGCScope manual_gc_scope;
2306 FLAG_allow_natives_syntax = true;
2307 #ifdef VERIFY_HEAP
2308 FLAG_verify_heap = true;
2309 #endif
2310
2311 CcTest::InitializeVM();
2312 if (!CcTest::i_isolate()->use_optimizer()) return;
2313 if (FLAG_force_marking_deque_overflows) return;
2314 v8::HandleScope outer_scope(CcTest::isolate());
2315 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2316
2317   // Store the native context in a global handle as well, to make it part of
2318   // the root set when incremental marking starts. This ensures that the
2319   // function is part of the transitive closure during incremental marking.
2320 v8::Global<v8::Context> global_ctx(CcTest::isolate(), ctx);
2321
2322 {
2323 v8::HandleScope scope(CcTest::isolate());
2324 CompileRun(
2325 "function foo () { }"
2326 "function mkbar () { return new (new Function(\"\")) (); }"
2327 "function f (x) { return (x instanceof foo); }"
2328 "function g () { f(mkbar()); }"
2329 "%PrepareFunctionForOptimization(f);"
2330 "f(new foo()); f(new foo());"
2331 "%OptimizeFunctionOnNextCall(f);"
2332 "f(new foo()); g();");
2333 }
2334
2335 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2336 marking->Stop();
2337 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2338 i::GarbageCollectionReason::kTesting);
2339
2340 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2341 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2342 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2343
2344 CHECK(f->HasAttachedOptimizedCode());
2345
2346 IncrementalMarking::MarkingState* marking_state = marking->marking_state();
2347
2348 const double kStepSizeInMs = 100;
2349 while (!marking_state->IsBlack(f->code()) && !marking->IsStopped()) {
2350 // Discard any pending GC requests otherwise we will get GC when we enter
2351 // code below.
2352 marking->Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2353 StepOrigin::kV8);
2354 }
2355
2356 CHECK(marking->IsMarking());
2357
2358 {
2359 v8::HandleScope scope(CcTest::isolate());
2360 v8::Local<v8::Object> global = CcTest::global();
2361 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
2362 global->Get(ctx, v8_str("g")).ToLocalChecked());
2363 g->Call(ctx, global, 0, nullptr).ToLocalChecked();
2364 }
2365
2366 CcTest::CollectGarbage(OLD_SPACE);
2367 }
2368
2369 HEAP_TEST(GCFlags) {
2370 if (!FLAG_incremental_marking) return;
2371 CcTest::InitializeVM();
2372 Heap* heap = CcTest::heap();
2373
2374 heap->set_current_gc_flags(Heap::kNoGCFlags);
2375 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2376
2377 // Check whether we appropriately reset flags after GC.
2378 CcTest::heap()->CollectAllGarbage(Heap::kReduceMemoryFootprintMask,
2379 GarbageCollectionReason::kTesting);
2380 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2381
2382 MarkCompactCollector* collector = heap->mark_compact_collector();
2383 if (collector->sweeping_in_progress()) {
2384 collector->EnsureSweepingCompleted();
2385 }
2386
2387 IncrementalMarking* marking = heap->incremental_marking();
2388 marking->Stop();
2389 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
2390 i::GarbageCollectionReason::kTesting);
2391 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2392
2393 CcTest::CollectGarbage(NEW_SPACE);
2394 // NewSpace scavenges should not overwrite the flags.
2395 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2396
2397 CcTest::CollectAllGarbage();
2398 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2399 }
2400
2401 HEAP_TEST(Regress845060) {
2402 if (FLAG_single_generation) return;
2403 // Regression test for crbug.com/845060, where a raw pointer to a string's
2404 // data was kept across an allocation. If the allocation causes GC and
2405 // moves the string, such raw pointers become invalid.
2406 FLAG_allow_natives_syntax = true;
2407 FLAG_stress_incremental_marking = false;
2408 FLAG_stress_compaction = false;
2409 CcTest::InitializeVM();
2410 LocalContext context;
2411 v8::HandleScope scope(CcTest::isolate());
2412 Heap* heap = CcTest::heap();
2413
2414 // Preparation: create a string in new space.
2415 Local<Value> str = CompileRun("var str = (new Array(10000)).join('x'); str");
2416 CHECK(Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
2417
2418 // Idle incremental marking sets the "kReduceMemoryFootprint" flag, which
2419 // causes from_space to be unmapped after scavenging.
2420 heap->StartIdleIncrementalMarking(GarbageCollectionReason::kTesting);
2421 CHECK(heap->ShouldReduceMemory());
2422
2423   // Run the test (which allocates results) until the original string has been
2424   // promoted to old space. Unmapping of from_space causes accesses through any
2425   // stale raw pointers to crash.
2426 CompileRun("while (%InYoungGeneration(str)) { str.split(''); }");
2427 CHECK(!Heap::InYoungGeneration(*v8::Utils::OpenHandle(*str)));
2428 }
2429
2430 TEST(IdleNotificationFinishMarking) {
2431 if (!FLAG_incremental_marking) return;
2432 ManualGCScope manual_gc_scope;
2433 FLAG_allow_natives_syntax = true;
2434 CcTest::InitializeVM();
2435 const int initial_gc_count = CcTest::heap()->gc_count();
2436 heap::SimulateFullSpace(CcTest::heap()->old_space());
2437 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2438 marking->Stop();
2439 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2440 i::GarbageCollectionReason::kTesting);
2441
2442 CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
2443
2444 const double kStepSizeInMs = 100;
2445 do {
2446 marking->Step(kStepSizeInMs, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2447 StepOrigin::kV8);
2448 } while (!CcTest::heap()
2449 ->mark_compact_collector()
2450 ->local_marking_worklists()
2451 ->IsEmpty());
2452
2453 marking->SetWeakClosureWasOverApproximatedForTesting(true);
2454
2455 // The next idle notification has to finish incremental marking.
2456 const double kLongIdleTime = 1000.0;
2457 CcTest::isolate()->IdleNotificationDeadline(
2458 (v8::base::TimeTicks::HighResolutionNow().ToInternalValue() /
2459 static_cast<double>(v8::base::Time::kMicrosecondsPerSecond)) +
2460 kLongIdleTime);
2461 CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count + 1);
2462 }
2463
2464
2465 // Test that HAllocateObject will always return an object in new-space.
2466 TEST(OptimizedAllocationAlwaysInNewSpace) {
2467 if (FLAG_single_generation) return;
2468 FLAG_allow_natives_syntax = true;
2469 FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
2470 CcTest::InitializeVM();
2471 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2472 if (FLAG_gc_global || FLAG_stress_compaction ||
2473 FLAG_stress_incremental_marking)
2474 return;
2475 v8::HandleScope scope(CcTest::isolate());
2476 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2477 heap::SimulateFullSpace(CcTest::heap()->new_space());
2478 AlwaysAllocateScopeForTesting always_allocate(CcTest::heap());
2479 v8::Local<v8::Value> res = CompileRun(
2480 "function c(x) {"
2481 " this.x = x;"
2482 " for (var i = 0; i < 32; i++) {"
2483 " this['x' + i] = x;"
2484 " }"
2485 "}"
2486 "function f(x) { return new c(x); };"
2487 "%PrepareFunctionForOptimization(f);"
2488 "f(1); f(2); f(3);"
2489 "%OptimizeFunctionOnNextCall(f);"
2490 "f(4);");
2491
2492 CHECK_EQ(4, res.As<v8::Object>()
2493 ->GetRealNamedProperty(ctx, v8_str("x"))
2494 .ToLocalChecked()
2495 ->Int32Value(ctx)
2496 .FromJust());
2497
2498 i::Handle<JSReceiver> o =
2499 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2500
2501 CHECK(Heap::InYoungGeneration(*o));
2502 }
2503
2504
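// The pretenuring tests below share one pattern: a loop allocates
// kPretenureCreationCount literals so that allocation-site feedback flags the
// site for pretenuring, an explicit gc() lets that feedback be processed, and
// the subsequently optimized code is then expected to allocate the literals
// directly in old space.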
2505 TEST(OptimizedPretenuringAllocationFolding) {
2506 FLAG_allow_natives_syntax = true;
2507 FLAG_expose_gc = true;
2508 CcTest::InitializeVM();
2509 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2510 if (FLAG_gc_global || FLAG_stress_compaction ||
2511 FLAG_stress_incremental_marking || FLAG_single_generation)
2512 return;
2513 v8::HandleScope scope(CcTest::isolate());
2514 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2515 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2516
2517 base::ScopedVector<char> source(1024);
2518 base::SNPrintF(source,
2519 "var number_elements = %d;"
2520 "var elements = new Array();"
2521 "function f() {"
2522 " for (var i = 0; i < number_elements; i++) {"
2523 " elements[i] = [[{}], [1.1]];"
2524 " }"
2525 " return elements[number_elements-1]"
2526 "};"
2527 "%%PrepareFunctionForOptimization(f);"
2528 "f(); gc();"
2529 "f(); f();"
2530 "%%OptimizeFunctionOnNextCall(f);"
2531 "f();",
2532 kPretenureCreationCount);
2533
2534 v8::Local<v8::Value> res = CompileRun(source.begin());
2535
2536 v8::Local<v8::Value> int_array =
2537 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2538 i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2539 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2540 v8::Local<v8::Value> double_array =
2541 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2542 i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2543 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2544
2545 i::Handle<JSReceiver> o =
2546 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res));
2547 CHECK(CcTest::heap()->InOldSpace(*o));
2548 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2549 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2550 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2551 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2552 }
2553
2554
2555 TEST(OptimizedPretenuringObjectArrayLiterals) {
2556 FLAG_allow_natives_syntax = true;
2557 FLAG_expose_gc = true;
2558 CcTest::InitializeVM();
2559 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2560 if (FLAG_gc_global || FLAG_stress_compaction ||
2561 FLAG_stress_incremental_marking || FLAG_single_generation) {
2562 return;
2563 }
2564 v8::HandleScope scope(CcTest::isolate());
2565
2566 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2567
2568 base::ScopedVector<char> source(1024);
2569 base::SNPrintF(source,
2570 "var number_elements = %d;"
2571 "var elements = new Array(number_elements);"
2572 "function f() {"
2573 " for (var i = 0; i < number_elements; i++) {"
2574 " elements[i] = [{}, {}, {}];"
2575 " }"
2576 " return elements[number_elements - 1];"
2577 "};"
2578 "%%PrepareFunctionForOptimization(f);"
2579 "f(); gc();"
2580 "f(); f();"
2581 "%%OptimizeFunctionOnNextCall(f);"
2582 "f();",
2583 kPretenureCreationCount);
2584
2585 v8::Local<v8::Value> res = CompileRun(source.begin());
2586
2587 i::Handle<JSObject> o = Handle<JSObject>::cast(
2588 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2589
2590 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2591 CHECK(CcTest::heap()->InOldSpace(*o));
2592 }
2593
2594 TEST(OptimizedPretenuringNestedInObjectProperties) {
2595 FLAG_allow_natives_syntax = true;
2596 FLAG_expose_gc = true;
2597 CcTest::InitializeVM();
2598 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2599 if (FLAG_gc_global || FLAG_stress_compaction ||
2600 FLAG_stress_incremental_marking || FLAG_single_generation) {
2601 return;
2602 }
2603 v8::HandleScope scope(CcTest::isolate());
2604
2605 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2606
2607 // Keep the nested literal alive while its root is freed
2608 base::ScopedVector<char> source(1024);
2609 base::SNPrintF(source,
2610 "let number_elements = %d;"
2611 "let elements = new Array(number_elements);"
2612 "function f() {"
2613 " for (let i = 0; i < number_elements; i++) {"
2614 " let l = {a: {c: 2.2, d: {e: 3.3}}, b: 1.1}; "
2615 " elements[i] = l.a;"
2616 " }"
2617 " return elements[number_elements-1];"
2618 "};"
2619 "%%PrepareFunctionForOptimization(f);"
2620 "f(); gc(); gc();"
2621 "f(); f();"
2622 "%%OptimizeFunctionOnNextCall(f);"
2623 "f();",
2624 kPretenureCreationCount);
2625
2626 v8::Local<v8::Value> res = CompileRun(source.begin());
2627
2628 i::Handle<JSObject> o = Handle<JSObject>::cast(
2629 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2630
2631 // Nested literal sites are only pretenured if the top level
2632 // literal is pretenured
2633 CHECK(Heap::InYoungGeneration(*o));
2634 }
2635
2636 TEST(OptimizedPretenuringMixedInObjectProperties) {
2637 FLAG_allow_natives_syntax = true;
2638 FLAG_expose_gc = true;
2639 CcTest::InitializeVM();
2640 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2641 if (FLAG_gc_global || FLAG_stress_compaction ||
2642 FLAG_stress_incremental_marking || FLAG_single_generation)
2643 return;
2644 v8::HandleScope scope(CcTest::isolate());
2645
2646 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2647
2648 base::ScopedVector<char> source(1024);
2649 base::SNPrintF(source,
2650 "var number_elements = %d;"
2651 "var elements = new Array(number_elements);"
2652 "function f() {"
2653 " for (var i = 0; i < number_elements; i++) {"
2654 " elements[i] = {a: {c: 2.2, d: {}}, b: 1.1};"
2655 " }"
2656 " return elements[number_elements - 1];"
2657 "};"
2658 "%%PrepareFunctionForOptimization(f);"
2659 "f(); gc();"
2660 "f(); f();"
2661 "%%OptimizeFunctionOnNextCall(f);"
2662 "f();",
2663 kPretenureCreationCount);
2664
2665 v8::Local<v8::Value> res = CompileRun(source.begin());
2666
2667 i::Handle<JSObject> o = Handle<JSObject>::cast(
2668 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2669
2670 CHECK(CcTest::heap()->InOldSpace(*o));
2671 FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
2672 FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
2673 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
2674 CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
2675
2676 JSObject inner_object = JSObject::cast(o->RawFastPropertyAt(idx1));
2677 CHECK(CcTest::heap()->InOldSpace(inner_object));
2678 CHECK(CcTest::heap()->InOldSpace(inner_object.RawFastPropertyAt(idx1)));
2679 CHECK(CcTest::heap()->InOldSpace(inner_object.RawFastPropertyAt(idx2)));
2680 }
2681
2682
2683 TEST(OptimizedPretenuringDoubleArrayProperties) {
2684 FLAG_allow_natives_syntax = true;
2685 FLAG_expose_gc = true;
2686 CcTest::InitializeVM();
2687 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2688 if (FLAG_gc_global || FLAG_stress_compaction ||
2689 FLAG_stress_incremental_marking || FLAG_single_generation)
2690 return;
2691 v8::HandleScope scope(CcTest::isolate());
2692
2693 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2694
2695 base::ScopedVector<char> source(1024);
2696 base::SNPrintF(source,
2697 "var number_elements = %d;"
2698 "var elements = new Array(number_elements);"
2699 "function f() {"
2700 " for (var i = 0; i < number_elements; i++) {"
2701 " elements[i] = {a: 1.1, b: 2.2};"
2702 " }"
2703 " return elements[i - 1];"
2704 "};"
2705 "%%PrepareFunctionForOptimization(f);"
2706 "f(); gc();"
2707 "f(); f();"
2708 "%%OptimizeFunctionOnNextCall(f);"
2709 "f();",
2710 kPretenureCreationCount);
2711
2712 v8::Local<v8::Value> res = CompileRun(source.begin());
2713
2714 i::Handle<JSObject> o = Handle<JSObject>::cast(
2715 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2716
2717 CHECK(CcTest::heap()->InOldSpace(*o));
2718 CHECK_EQ(o->property_array(),
2719 ReadOnlyRoots(CcTest::heap()).empty_property_array());
2720 }
2721
2722 TEST(OptimizedPretenuringDoubleArrayLiterals) {
2723 FLAG_allow_natives_syntax = true;
2724 FLAG_expose_gc = true;
2725 CcTest::InitializeVM();
2726 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2727 if (FLAG_gc_global || FLAG_stress_compaction ||
2728 FLAG_stress_incremental_marking || FLAG_single_generation)
2729 return;
2730 v8::HandleScope scope(CcTest::isolate());
2731
2732 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2733
2734 base::ScopedVector<char> source(1024);
2735 base::SNPrintF(source,
2736 "var number_elements = %d;"
2737 "var elements = new Array(number_elements);"
2738 "function f() {"
2739 " for (var i = 0; i < number_elements; i++) {"
2740 " elements[i] = [1.1, 2.2, 3.3];"
2741 " }"
2742 " return elements[number_elements - 1];"
2743 "};"
2744 "%%PrepareFunctionForOptimization(f);"
2745 "f(); gc();"
2746 "f(); f();"
2747 "%%OptimizeFunctionOnNextCall(f);"
2748 "f();",
2749 kPretenureCreationCount);
2750
2751 v8::Local<v8::Value> res = CompileRun(source.begin());
2752
2753 i::Handle<JSObject> o = Handle<JSObject>::cast(
2754 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2755
2756 CHECK(CcTest::heap()->InOldSpace(o->elements()));
2757 CHECK(CcTest::heap()->InOldSpace(*o));
2758 }
2759
2760 TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
2761 FLAG_allow_natives_syntax = true;
2762 FLAG_expose_gc = true;
2763 CcTest::InitializeVM();
2764 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2765 if (FLAG_gc_global || FLAG_stress_compaction ||
2766 FLAG_stress_incremental_marking || FLAG_single_generation)
2767 return;
2768 v8::HandleScope scope(CcTest::isolate());
2769 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2770 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2771
2772 base::ScopedVector<char> source(1024);
2773 base::SNPrintF(source,
2774 "var number_elements = %d;"
2775 "var elements = new Array(number_elements);"
2776 "function f() {"
2777 " for (var i = 0; i < number_elements; i++) {"
2778 " elements[i] = [[{}, {}, {}], [1.1, 2.2, 3.3]];"
2779 " }"
2780 " return elements[number_elements - 1];"
2781 "};"
2782 "%%PrepareFunctionForOptimization(f);"
2783 "f(); gc();"
2784 "f(); f();"
2785 "%%OptimizeFunctionOnNextCall(f);"
2786 "f();",
2787 kPretenureCreationCount);
2788
2789 v8::Local<v8::Value> res = CompileRun(source.begin());
2790
2791 v8::Local<v8::Value> int_array =
2792 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2793 i::Handle<JSObject> int_array_handle = i::Handle<JSObject>::cast(
2794 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array)));
2795 v8::Local<v8::Value> double_array =
2796 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2797 i::Handle<JSObject> double_array_handle = i::Handle<JSObject>::cast(
2798 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array)));
2799
2800 Handle<JSObject> o = Handle<JSObject>::cast(
2801 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2802 CHECK(CcTest::heap()->InOldSpace(*o));
2803 CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
2804 CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
2805 CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
2806 CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
2807 }
2808
2809
2810 TEST(OptimizedPretenuringNestedObjectLiterals) {
2811 FLAG_allow_natives_syntax = true;
2812 FLAG_expose_gc = true;
2813 CcTest::InitializeVM();
2814 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2815 if (FLAG_gc_global || FLAG_stress_compaction ||
2816 FLAG_stress_incremental_marking || FLAG_single_generation)
2817 return;
2818 v8::HandleScope scope(CcTest::isolate());
2819 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2820 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2821
2822 base::ScopedVector<char> source(1024);
2823 base::SNPrintF(source,
2824 "var number_elements = %d;"
2825 "var elements = new Array(number_elements);"
2826 "function f() {"
2827 " for (var i = 0; i < number_elements; i++) {"
2828 " elements[i] = [[{}, {}, {}],[{}, {}, {}]];"
2829 " }"
2830 " return elements[number_elements - 1];"
2831 "};"
2832 "%%PrepareFunctionForOptimization(f);"
2833 "f(); gc();"
2834 "f(); f();"
2835 "%%OptimizeFunctionOnNextCall(f);"
2836 "f();",
2837 kPretenureCreationCount);
2838
2839 v8::Local<v8::Value> res = CompileRun(source.begin());
2840
2841 v8::Local<v8::Value> int_array_1 =
2842 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2843 Handle<JSObject> int_array_handle_1 = Handle<JSObject>::cast(
2844 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_1)));
2845 v8::Local<v8::Value> int_array_2 =
2846 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2847 Handle<JSObject> int_array_handle_2 = Handle<JSObject>::cast(
2848 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(int_array_2)));
2849
2850 Handle<JSObject> o = Handle<JSObject>::cast(
2851 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2852 CHECK(CcTest::heap()->InOldSpace(*o));
2853 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
2854 CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
2855 CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
2856 CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
2857 }
2858
2859
2860 TEST(OptimizedPretenuringNestedDoubleLiterals) {
2861 FLAG_allow_natives_syntax = true;
2862 FLAG_expose_gc = true;
2863 CcTest::InitializeVM();
2864 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2865 if (FLAG_gc_global || FLAG_stress_compaction ||
2866 FLAG_stress_incremental_marking || FLAG_single_generation)
2867 return;
2868 v8::HandleScope scope(CcTest::isolate());
2869 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2870 GrowNewSpaceToMaximumCapacity(CcTest::heap());
2871
2872 base::ScopedVector<char> source(1024);
2873 base::SNPrintF(source,
2874 "var number_elements = %d;"
2875 "var elements = new Array(number_elements);"
2876 "function f() {"
2877 " for (var i = 0; i < number_elements; i++) {"
2878 " elements[i] = [[1.1, 1.2, 1.3],[2.1, 2.2, 2.3]];"
2879 " }"
2880 " return elements[number_elements - 1];"
2881 "};"
2882 "%%PrepareFunctionForOptimization(f);"
2883 "f(); gc();"
2884 "f(); f();"
2885 "%%OptimizeFunctionOnNextCall(f);"
2886 "f();",
2887 kPretenureCreationCount);
2888
2889 v8::Local<v8::Value> res = CompileRun(source.begin());
2890
2891 v8::Local<v8::Value> double_array_1 =
2892 v8::Object::Cast(*res)->Get(ctx, v8_str("0")).ToLocalChecked();
2893 i::Handle<JSObject> double_array_handle_1 = i::Handle<JSObject>::cast(
2894 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_1)));
2895 v8::Local<v8::Value> double_array_2 =
2896 v8::Object::Cast(*res)->Get(ctx, v8_str("1")).ToLocalChecked();
2897 i::Handle<JSObject> double_array_handle_2 = Handle<JSObject>::cast(
2898 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(double_array_2)));
2899
2900 i::Handle<JSObject> o = Handle<JSObject>::cast(
2901 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2902 CHECK(CcTest::heap()->InOldSpace(*o));
2903 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
2904 CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
2905 CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
2906 CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
2907 }
2908
2909
2910 // Test regular array literals allocation.
2911 TEST(OptimizedAllocationArrayLiterals) {
2912 FLAG_allow_natives_syntax = true;
2913 CcTest::InitializeVM();
2914 if (!CcTest::i_isolate()->use_optimizer() || FLAG_always_opt) return;
2915 if (FLAG_gc_global || FLAG_stress_compaction ||
2916 FLAG_stress_incremental_marking)
2917 return;
2918 v8::HandleScope scope(CcTest::isolate());
2919 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
2920 v8::Local<v8::Value> res = CompileRun(
2921 "function f() {"
2922 " var numbers = new Array(1, 2, 3);"
2923 " numbers[0] = 3.14;"
2924 " return numbers;"
2925 "};"
2926 "%PrepareFunctionForOptimization(f);"
2927 "f(); f(); f();"
2928 "%OptimizeFunctionOnNextCall(f);"
2929 "f();");
2930 CHECK_EQ(static_cast<int>(3.14), v8::Object::Cast(*res)
2931 ->Get(ctx, v8_str("0"))
2932 .ToLocalChecked()
2933 ->Int32Value(ctx)
2934 .FromJust());
2935
2936 i::Handle<JSObject> o = Handle<JSObject>::cast(
2937 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(res)));
2938
2939 CHECK(InCorrectGeneration(o->elements()));
2940 }
2941
2942 static int CountMapTransitions(i::Isolate* isolate, Map map) {
2943 DisallowGarbageCollection no_gc;
2944 return TransitionsAccessor(isolate, map, &no_gc).NumberOfTransitions();
2945 }
2946
2947
2948 // Test that map transitions are cleared and maps are collected with
2949 // incremental marking as well.
2950 TEST(Regress1465) {
2951 if (!FLAG_incremental_marking) return;
2952 FLAG_stress_compaction = false;
2953 FLAG_stress_incremental_marking = false;
2954 FLAG_allow_natives_syntax = true;
2955 FLAG_trace_incremental_marking = true;
2956 FLAG_retain_maps_for_n_gc = 0;
2957 CcTest::InitializeVM();
2958 v8::Isolate* isolate = CcTest::isolate();
2959 i::Isolate* i_isolate = CcTest::i_isolate();
2960 v8::HandleScope scope(isolate);
2961 v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
2962 static const int transitions_count = 256;
2963
2964 CompileRun("function F() {}");
2965 {
2966 AlwaysAllocateScopeForTesting always_allocate(CcTest::i_isolate()->heap());
2967 for (int i = 0; i < transitions_count; i++) {
2968 base::EmbeddedVector<char, 64> buffer;
2969 base::SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
2970 CompileRun(buffer.begin());
2971 }
2972 CompileRun("var root = new F;");
2973 }
2974
2975 i::Handle<JSReceiver> root =
2976 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2977 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
2978
2979 // Count number of live transitions before marking.
2980 int transitions_before = CountMapTransitions(i_isolate, root->map());
2981 CompileRun("%DebugPrint(root);");
2982 CHECK_EQ(transitions_count, transitions_before);
2983
2984 heap::SimulateIncrementalMarking(CcTest::heap());
2985 CcTest::CollectAllGarbage();
2986
2987 // Count number of live transitions after marking. Note that one transition
2988 // is left, because 'o' still holds an instance of one transition target.
2989 int transitions_after = CountMapTransitions(i_isolate, root->map());
2990 CompileRun("%DebugPrint(root);");
2991 CHECK_EQ(1, transitions_after);
2992 }
2993
2994 static i::Handle<JSObject> GetByName(const char* name) {
2995 return i::Handle<JSObject>::cast(
2996 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
2997 CcTest::global()
2998 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str(name))
2999 .ToLocalChecked())));
3000 }
3001
3002 #ifdef DEBUG
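// Adds |transitions_count| transitions to F's map by assigning a distinct
// property to a fresh instance each time.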
static void AddTransitions(int transitions_count) {
3004 AlwaysAllocateScopeForTesting always_allocate(CcTest::i_isolate()->heap());
3005 for (int i = 0; i < transitions_count; i++) {
3006 base::EmbeddedVector<char, 64> buffer;
3007 base::SNPrintF(buffer, "var o = new F; o.prop%d = %d;", i, i);
3008 CompileRun(buffer.begin());
3009 }
3010 }
3011
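// Adds a Smi-valued property to |object| while configuring the heap so that a
// GC is forced after |gc_count| allocations.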
static void AddPropertyTo(int gc_count, Handle<JSObject> object,
3013 const char* property_name) {
3014 Isolate* isolate = CcTest::i_isolate();
3015 Factory* factory = isolate->factory();
3016 Handle<String> prop_name = factory->InternalizeUtf8String(property_name);
3017 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
3018 FLAG_gc_interval = gc_count;
3019 FLAG_gc_global = true;
3020 FLAG_retain_maps_for_n_gc = 0;
3021 CcTest::heap()->set_allocation_timeout(gc_count);
3022 Object::SetProperty(isolate, object, prop_name, twenty_three).Check();
3023 }
3024
TEST(TransitionArrayShrinksDuringAllocToZero) {
3026 FLAG_stress_compaction = false;
3027 FLAG_stress_incremental_marking = false;
3028 FLAG_allow_natives_syntax = true;
3029 CcTest::InitializeVM();
3030 i::Isolate* i_isolate = CcTest::i_isolate();
3031 v8::HandleScope scope(CcTest::isolate());
3032 static const int transitions_count = 10;
3033 CompileRun("function F() { }");
3034 AddTransitions(transitions_count);
3035 CompileRun("var root = new F;");
3036 Handle<JSObject> root = GetByName("root");
3037
3038 // Count number of live transitions before marking.
3039 int transitions_before = CountMapTransitions(i_isolate, root->map());
3040 CHECK_EQ(transitions_count, transitions_before);
3041
3042 // Get rid of o
3043 CompileRun(
3044 "o = new F;"
3045 "root = new F");
3046 root = GetByName("root");
3047 AddPropertyTo(2, root, "funny");
3048 CcTest::CollectGarbage(NEW_SPACE);
3049
3050 // Count number of live transitions after marking. Note that one transition
3051 // is left, because 'o' still holds an instance of one transition target.
3052 int transitions_after =
3053 CountMapTransitions(i_isolate, Map::cast(root->map().GetBackPointer()));
3054 CHECK_EQ(1, transitions_after);
3055 }
3056
3057
TEST(TransitionArrayShrinksDuringAllocToOne) {
3059 FLAG_stress_compaction = false;
3060 FLAG_stress_incremental_marking = false;
3061 FLAG_allow_natives_syntax = true;
3062 CcTest::InitializeVM();
3063 i::Isolate* i_isolate = CcTest::i_isolate();
3064 v8::HandleScope scope(CcTest::isolate());
3065 static const int transitions_count = 10;
3066 CompileRun("function F() {}");
3067 AddTransitions(transitions_count);
3068 CompileRun("var root = new F;");
3069 Handle<JSObject> root = GetByName("root");
3070
3071 // Count number of live transitions before marking.
3072 int transitions_before = CountMapTransitions(i_isolate, root->map());
3073 CHECK_EQ(transitions_count, transitions_before);
3074
3075 root = GetByName("root");
3076 AddPropertyTo(2, root, "funny");
3077 CcTest::CollectGarbage(NEW_SPACE);
3078
  // Count number of live transitions after marking. Note that two transitions
  // are left: 'o' still holds an instance of one transition target, and 'root'
  // keeps the newly added 'funny' transition alive.
3081 int transitions_after =
3082 CountMapTransitions(i_isolate, Map::cast(root->map().GetBackPointer()));
3083 CHECK_EQ(2, transitions_after);
3084 }
3085
3086
TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3088 FLAG_stress_compaction = false;
3089 FLAG_stress_incremental_marking = false;
3090 FLAG_allow_natives_syntax = true;
3091 CcTest::InitializeVM();
3092 i::Isolate* i_isolate = CcTest::i_isolate();
3093 v8::HandleScope scope(CcTest::isolate());
3094 static const int transitions_count = 10;
3095 CompileRun("function F() {}");
3096 AddTransitions(transitions_count);
3097 CompileRun("var root = new F;");
3098 Handle<JSObject> root = GetByName("root");
3099
3100 // Count number of live transitions before marking.
3101 int transitions_before = CountMapTransitions(i_isolate, root->map());
3102 CHECK_EQ(transitions_count, transitions_before);
3103
3104 root = GetByName("root");
3105 AddPropertyTo(0, root, "prop9");
3106 CcTest::CollectGarbage(OLD_SPACE);
3107
3108 // Count number of live transitions after marking. Note that one transition
3109 // is left, because 'o' still holds an instance of one transition target.
3110 int transitions_after =
3111 CountMapTransitions(i_isolate, Map::cast(root->map().GetBackPointer()));
3112 CHECK_EQ(1, transitions_after);
3113 }
3114 #endif // DEBUG
3115
3116
TEST(ReleaseOverReservedPages) {
3118 if (FLAG_never_compact) return;
3119 FLAG_trace_gc = true;
3120 // The optimizer can allocate stuff, messing up the test.
3121 #ifndef V8_LITE_MODE
3122 FLAG_opt = false;
3123 FLAG_always_opt = false;
3124 #endif // V8_LITE_MODE
3125 // - Parallel compaction increases fragmentation, depending on how existing
3126 // memory is distributed. Since this is non-deterministic because of
3127 // concurrent sweeping, we disable it for this test.
3128 // - Concurrent sweeping adds non determinism, depending on when memory is
3129 // available for further reuse.
3130 // - Fast evacuation of pages may result in a different page count in old
3131 // space.
3132 ManualGCScope manual_gc_scope;
3133 FLAG_page_promotion = false;
3134 FLAG_parallel_compaction = false;
3135 CcTest::InitializeVM();
3136 Isolate* isolate = CcTest::i_isolate();
  // If there's a snapshot available, we don't know whether 20 small arrays
  // will fit on the initial pages.
3139 if (!isolate->snapshot_available()) return;
3140 Factory* factory = isolate->factory();
3141 Heap* heap = isolate->heap();
3142 v8::HandleScope scope(CcTest::isolate());
3143 // Ensure that the young generation is empty.
3144 CcTest::CollectGarbage(NEW_SPACE);
3145 CcTest::CollectGarbage(NEW_SPACE);
3146 static const int number_of_test_pages = 20;
3147
3148 // Prepare many pages with low live-bytes count.
3149 PagedSpace* old_space = heap->old_space();
3150 const int initial_page_count = old_space->CountTotalPages();
3151 const int overall_page_count = number_of_test_pages + initial_page_count;
3152 for (int i = 0; i < number_of_test_pages; i++) {
3153 AlwaysAllocateScopeForTesting always_allocate(heap);
3154 heap::SimulateFullSpace(old_space);
3155 factory->NewFixedArray(1, AllocationType::kOld);
3156 }
3157 CHECK_EQ(overall_page_count, old_space->CountTotalPages());
3158
  // Triggering one GC will cause a lot of garbage to be discovered, but it is
  // evenly spread across all allocated pages.
3161 CcTest::CollectAllGarbage();
3162 CHECK_GE(overall_page_count, old_space->CountTotalPages());
3163
3164 // Triggering subsequent GCs should cause at least half of the pages
3165 // to be released to the OS after at most two cycles.
3166 CcTest::CollectAllGarbage();
3167 CHECK_GE(overall_page_count, old_space->CountTotalPages());
3168 CcTest::CollectAllGarbage();
3169 CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
3170
3171 // Triggering a last-resort GC should cause all pages to be released to the
3172 // OS so that other processes can seize the memory. If we get a failure here
3173 // where there are 2 pages left instead of 1, then we should increase the
3174 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3175 // first page should be small in order to reduce memory used when the VM
3176 // boots, but if the 20 small arrays don't fit on the first page then that's
3177 // an indication that it is too small.
3178 CcTest::CollectAllAvailableGarbage();
3179 CHECK_GE(initial_page_count, old_space->CountTotalPages());
3180 }
3181
3182 static int forced_gc_counter = 0;
3183
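// Use-counter callback that counts how many forced GCs were reported.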
void MockUseCounterCallback(v8::Isolate* isolate,
3185 v8::Isolate::UseCounterFeature feature) {
3186 isolate->GetCurrentContext();
3187 if (feature == v8::Isolate::kForcedGC) {
3188 forced_gc_counter++;
3189 }
3190 }
3191
3192
TEST(CountForcedGC) {
3194 FLAG_expose_gc = true;
3195 CcTest::InitializeVM();
3196 Isolate* isolate = CcTest::i_isolate();
3197 v8::HandleScope scope(CcTest::isolate());
3198
3199 isolate->SetUseCounterCallback(MockUseCounterCallback);
3200
3201 forced_gc_counter = 0;
3202 const char* source = "gc();";
3203 CompileRun(source);
3204 CHECK_GT(forced_gc_counter, 0);
3205 }
3206
3207
3208 #ifdef OBJECT_PRINT
TEST(PrintSharedFunctionInfo) {
3210 CcTest::InitializeVM();
3211 v8::HandleScope scope(CcTest::isolate());
3212 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3213 const char* source =
3214 "f = function() { return 987654321; }\n"
3215 "g = function() { return 123456789; }\n";
3216 CompileRun(source);
3217 i::Handle<JSFunction> g = i::Handle<JSFunction>::cast(
3218 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3219 CcTest::global()->Get(ctx, v8_str("g")).ToLocalChecked())));
3220
3221 StdoutStream os;
3222 g->shared().Print(os);
3223 os << std::endl;
3224 }
3225 #endif // OBJECT_PRINT
3226
3227
TEST(IncrementalMarkingPreservesMonomorphicCallIC) {
3229 if (!FLAG_use_ic) return;
3230 if (!FLAG_incremental_marking) return;
3231 if (FLAG_always_opt) return;
3232 FLAG_allow_natives_syntax = true;
3233 CcTest::InitializeVM();
3234 v8::HandleScope scope(CcTest::isolate());
3235 v8::Local<v8::Value> fun1, fun2;
3236 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3237 {
3238 CompileRun("function fun() {};");
3239 fun1 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3240 }
3241
3242 {
3243 CompileRun("function fun() {};");
3244 fun2 = CcTest::global()->Get(ctx, v8_str("fun")).ToLocalChecked();
3245 }
3246
3247 // Prepare function f that contains type feedback for the two closures.
3248 CHECK(CcTest::global()->Set(ctx, v8_str("fun1"), fun1).FromJust());
3249 CHECK(CcTest::global()->Set(ctx, v8_str("fun2"), fun2).FromJust());
3250 CompileRun(
3251 "function f(a, b) { a(); b(); } %EnsureFeedbackVectorForFunction(f); "
3252 "f(fun1, fun2);");
3253
3254 Handle<JSFunction> f = Handle<JSFunction>::cast(
3255 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3256 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3257
3258 Handle<FeedbackVector> feedback_vector(f->feedback_vector(), f->GetIsolate());
3259 FeedbackVectorHelper feedback_helper(feedback_vector);
3260
3261 int expected_slots = 2;
3262 CHECK_EQ(expected_slots, feedback_helper.slot_count());
3263 int slot1 = 0;
3264 int slot2 = 1;
3265 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3266 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3267
3268 heap::SimulateIncrementalMarking(CcTest::heap());
3269 CcTest::CollectAllGarbage();
3270
3271 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeak());
3272 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeak());
3273 }
3274
3275
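// Checks that the IC in slot |slot_index| of f's feedback vector is in
// |desired_state|.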
static void CheckVectorIC(Handle<JSFunction> f, int slot_index,
3277 InlineCacheState desired_state) {
3278 Handle<FeedbackVector> vector =
3279 Handle<FeedbackVector>(f->feedback_vector(), f->GetIsolate());
3280 FeedbackVectorHelper helper(vector);
3281 FeedbackSlot slot = helper.slot(slot_index);
3282 FeedbackNexus nexus(vector, slot);
3283 CHECK(nexus.ic_state() == desired_state);
3284 }
3285
TEST(IncrementalMarkingPreservesMonomorphicConstructor) {
3287 if (!FLAG_incremental_marking) return;
3288 if (FLAG_always_opt) return;
3289 FLAG_allow_natives_syntax = true;
3290 CcTest::InitializeVM();
3291 v8::HandleScope scope(CcTest::isolate());
3292 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for an object
  // originating from the same native context.
3295 CompileRun(
3296 "function fun() { this.x = 1; };"
3297 "function f(o) { return new o(); }"
3298 "%EnsureFeedbackVectorForFunction(f);"
3299 "f(fun); f(fun);");
3300 Handle<JSFunction> f = Handle<JSFunction>::cast(
3301 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3302 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3303
3304 Handle<FeedbackVector> vector(f->feedback_vector(), f->GetIsolate());
3305 CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());
3306
3307 heap::SimulateIncrementalMarking(CcTest::heap());
3308 CcTest::CollectAllGarbage();
3309
3310 CHECK(vector->Get(FeedbackSlot(0))->IsWeakOrCleared());
3311 }
3312
TEST(IncrementalMarkingPreservesMonomorphicIC) {
3314 if (!FLAG_use_ic) return;
3315 if (!FLAG_incremental_marking) return;
3316 if (FLAG_always_opt) return;
3317 FLAG_allow_natives_syntax = true;
3318 CcTest::InitializeVM();
3319 v8::HandleScope scope(CcTest::isolate());
3320 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
  // Prepare function f that contains a monomorphic IC for an object
  // originating from the same native context.
3323 CompileRun(
3324 "function fun() { this.x = 1; }; var obj = new fun();"
3325 "%EnsureFeedbackVectorForFunction(f);"
3326 "function f(o) { return o.x; } f(obj); f(obj);");
3327 Handle<JSFunction> f = Handle<JSFunction>::cast(
3328 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3329 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3330
3331 CheckVectorIC(f, 0, MONOMORPHIC);
3332
3333 heap::SimulateIncrementalMarking(CcTest::heap());
3334 CcTest::CollectAllGarbage();
3335
3336 CheckVectorIC(f, 0, MONOMORPHIC);
3337 }
3338
TEST(IncrementalMarkingPreservesPolymorphicIC) {
3340 if (!FLAG_use_ic) return;
3341 if (!FLAG_incremental_marking) return;
3342 if (FLAG_always_opt) return;
3343 FLAG_allow_natives_syntax = true;
3344 CcTest::InitializeVM();
3345 v8::HandleScope scope(CcTest::isolate());
3346 v8::Local<v8::Value> obj1, obj2;
3347 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3348
3349 {
3350 LocalContext env;
3351 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3352 obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3353 }
3354
3355 {
3356 LocalContext env;
3357 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3358 obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3359 }
3360
3361 // Prepare function f that contains a polymorphic IC for objects
3362 // originating from two different native contexts.
3363 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3364 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3365 CompileRun(
3366 "function f(o) { return o.x; }; "
3367 "%EnsureFeedbackVectorForFunction(f);"
3368 "f(obj1); f(obj1); f(obj2);");
3369 Handle<JSFunction> f = Handle<JSFunction>::cast(
3370 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3371 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3372
3373 CheckVectorIC(f, 0, POLYMORPHIC);
3374
  // Simulate incremental marking and perform a full GC.
3376 heap::SimulateIncrementalMarking(CcTest::heap());
3377 CcTest::CollectAllGarbage();
3378
3379 CheckVectorIC(f, 0, POLYMORPHIC);
3380 }
3381
TEST(ContextDisposeDoesntClearPolymorphicIC) {
3383 if (!FLAG_use_ic) return;
3384 if (!FLAG_incremental_marking) return;
3385 if (FLAG_always_opt) return;
3386 FLAG_allow_natives_syntax = true;
3387 CcTest::InitializeVM();
3388 v8::HandleScope scope(CcTest::isolate());
3389 v8::Local<v8::Value> obj1, obj2;
3390 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3391
3392 {
3393 LocalContext env;
3394 CompileRun("function fun() { this.x = 1; }; var obj = new fun();");
3395 obj1 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3396 }
3397
3398 {
3399 LocalContext env;
3400 CompileRun("function fun() { this.x = 2; }; var obj = new fun();");
3401 obj2 = env->Global()->Get(env.local(), v8_str("obj")).ToLocalChecked();
3402 }
3403
3404 // Prepare function f that contains a polymorphic IC for objects
3405 // originating from two different native contexts.
3406 CHECK(CcTest::global()->Set(ctx, v8_str("obj1"), obj1).FromJust());
3407 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3408 CompileRun(
3409 "function f(o) { return o.x; }; "
3410 "%EnsureFeedbackVectorForFunction(f);"
3411 "f(obj1); f(obj1); f(obj2);");
3412 Handle<JSFunction> f = Handle<JSFunction>::cast(
3413 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3414 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3415
3416 CheckVectorIC(f, 0, POLYMORPHIC);
3417
3418 // Fire context dispose notification.
3419 CcTest::isolate()->ContextDisposedNotification();
3420 heap::SimulateIncrementalMarking(CcTest::heap());
3421 CcTest::CollectAllGarbage();
3422
3423 CheckVectorIC(f, 0, POLYMORPHIC);
3424 }
3425
3426
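// External one-byte string resource that remembers whether Dispose() was
// called, so tests can observe when the GC releases the string data.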
3427 class SourceResource : public v8::String::ExternalOneByteStringResource {
3428 public:
  explicit SourceResource(const char* data)
3430 : data_(data), length_(strlen(data)) { }
3431
  void Dispose() override {
3433 i::DeleteArray(data_);
3434 data_ = nullptr;
3435 }
3436
  const char* data() const override { return data_; }
3438
  size_t length() const override { return length_; }
3440
  bool IsDisposed() { return data_ == nullptr; }
3442
3443 private:
3444 const char* data_;
3445 size_t length_;
3446 };
3447
3448
void ReleaseStackTraceDataTest(v8::Isolate* isolate, const char* source,
3450 const char* accessor) {
  // Test that the data retained by the Error.stack accessor is released
  // after the first time the accessor is fired. We use an external string
  // to check whether the data is being released, since the external string
  // resource's callback is fired when the external string is GC'ed.
3455 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3456 v8::HandleScope scope(isolate);
3457 SourceResource* resource = new SourceResource(i::StrDup(source));
3458 {
3459 v8::HandleScope scope(isolate);
3460 v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
3461 v8::Local<v8::String> source_string =
3462 v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
3463 i_isolate->heap()->CollectAllAvailableGarbage(
3464 i::GarbageCollectionReason::kTesting);
3465 v8::Script::Compile(ctx, source_string)
3466 .ToLocalChecked()
3467 ->Run(ctx)
3468 .ToLocalChecked();
3469 CHECK(!resource->IsDisposed());
3470 }
3471 // i_isolate->heap()->CollectAllAvailableGarbage();
3472 CHECK(!resource->IsDisposed());
3473
3474 CompileRun(accessor);
3475 i_isolate->heap()->CollectAllAvailableGarbage(
3476 i::GarbageCollectionReason::kTesting);
3477
3478 // External source has been released.
3479 CHECK(resource->IsDisposed());
3480 delete resource;
3481 }
3482
3483
UNINITIALIZED_TEST(ReleaseStackTraceData) {
3485 #ifndef V8_LITE_MODE
3486 // ICs retain objects.
3487 FLAG_use_ic = false;
3488 #endif // V8_LITE_MODE
3489 FLAG_concurrent_recompilation = false;
3490 v8::Isolate::CreateParams create_params;
3491 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
3492 v8::Isolate* isolate = v8::Isolate::New(create_params);
3493 {
3494 v8::Isolate::Scope isolate_scope(isolate);
3495 v8::HandleScope handle_scope(isolate);
3496 v8::Context::New(isolate)->Enter();
3497 static const char* source1 =
3498 "var error = null; "
3499 /* Normal Error */
3500 "try { "
3501 " throw new Error(); "
3502 "} catch (e) { "
3503 " error = e; "
3504 "} ";
3505 static const char* source2 =
3506 "var error = null; "
3507 /* Stack overflow */
3508 "try { "
3509 " (function f() { f(); })(); "
3510 "} catch (e) { "
3511 " error = e; "
3512 "} ";
3513 static const char* source3 =
3514 "var error = null; "
3515 /* Normal Error */
3516 "try { "
3517 /* as prototype */
3518 " throw new Error(); "
3519 "} catch (e) { "
3520 " error = {}; "
3521 " error.__proto__ = e; "
3522 "} ";
3523 static const char* source4 =
3524 "var error = null; "
3525 /* Stack overflow */
3526 "try { "
3527 /* as prototype */
3528 " (function f() { f(); })(); "
3529 "} catch (e) { "
3530 " error = {}; "
3531 " error.__proto__ = e; "
3532 "} ";
3533 static const char* getter = "error.stack";
3534 static const char* setter = "error.stack = 0";
3535
3536 ReleaseStackTraceDataTest(isolate, source1, setter);
3537 ReleaseStackTraceDataTest(isolate, source2, setter);
3538 // We do not test source3 and source4 with setter, since the setter is
3539 // supposed to (untypically) write to the receiver, not the holder. This is
3540 // to emulate the behavior of a data property.
3541
3542 ReleaseStackTraceDataTest(isolate, source1, getter);
3543 ReleaseStackTraceDataTest(isolate, source2, getter);
3544 ReleaseStackTraceDataTest(isolate, source3, getter);
3545 ReleaseStackTraceDataTest(isolate, source4, getter);
3546 }
3547 isolate->Dispose();
3548 }
3549
3550 // TODO(mmarchini) also write tests for async/await and Promise.all
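// Runs |src|, expects it to throw, and passes the detailed stack trace
// attached to the exception to |test|.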
void DetailedErrorStackTraceTest(const char* src,
3552 std::function<void(Handle<FixedArray>)> test) {
3553 FLAG_detailed_error_stack_trace = true;
3554 CcTest::InitializeVM();
3555 v8::HandleScope scope(CcTest::isolate());
3556
3557 v8::TryCatch try_catch(CcTest::isolate());
3558 CompileRun(src);
3559
3560 CHECK(try_catch.HasCaught());
3561 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
3562
3563 Isolate* isolate = CcTest::i_isolate();
3564 Handle<Name> key = isolate->factory()->stack_trace_symbol();
3565
3566 Handle<FixedArray> stack_trace(Handle<FixedArray>::cast(
3567 Object::GetProperty(isolate, exception, key).ToHandleChecked()));
3568 test(stack_trace);
3569 }
3570
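// Returns the parameters recorded for the frame at |frame_index| of a
// detailed stack trace.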
FixedArray ParametersOf(Handle<FixedArray> stack_trace, int frame_index) {
3572 return StackFrameInfo::cast(stack_trace->get(frame_index)).parameters();
3573 }
3574
3575 // * Test interpreted function error
TEST(DetailedErrorStackTrace) {
3577 static const char* source =
3578 "function func1(arg1) { "
3579 " let err = new Error(); "
3580 " throw err; "
3581 "} "
3582 "function func2(arg1, arg2) { "
3583 " func1(42); "
3584 "} "
3585 "class Foo {}; "
3586 "function main(arg1, arg2) { "
3587 " func2(arg1, false); "
3588 "} "
3589 "var foo = new Foo(); "
3590 "main(foo); ";
3591
3592 DetailedErrorStackTraceTest(source, [](Handle<FixedArray> stack_trace) {
3593 FixedArray foo_parameters = ParametersOf(stack_trace, 0);
3594 CHECK_EQ(foo_parameters.length(), 1);
3595 CHECK(foo_parameters.get(0).IsSmi());
3596 CHECK_EQ(Smi::ToInt(foo_parameters.get(0)), 42);
3597
3598 FixedArray bar_parameters = ParametersOf(stack_trace, 1);
3599 CHECK_EQ(bar_parameters.length(), 2);
3600 CHECK(bar_parameters.get(0).IsJSObject());
3601 CHECK(bar_parameters.get(1).IsBoolean());
3602 Handle<Object> foo = Handle<Object>::cast(GetByName("foo"));
3603 CHECK_EQ(bar_parameters.get(0), *foo);
3604 CHECK(!bar_parameters.get(1).BooleanValue(CcTest::i_isolate()));
3605
3606 FixedArray main_parameters = ParametersOf(stack_trace, 2);
3607 CHECK_EQ(main_parameters.length(), 2);
3608 CHECK(main_parameters.get(0).IsJSObject());
3609 CHECK(main_parameters.get(1).IsUndefined());
3610 CHECK_EQ(main_parameters.get(0), *foo);
3611 });
3612 }
3613
3614 // * Test optimized function with inline frame error
TEST(DetailedErrorStackTraceInline) {
3616 FLAG_allow_natives_syntax = true;
3617 static const char* source =
3618 "function add(x) { "
3619 " if (x == 42) "
3620 " throw new Error(); "
3621 " return x + x; "
3622 "} "
3623 "add(0); "
3624 "add(1); "
3625 "function foo(x) { "
3626 " return add(x + 1) "
3627 "} "
3628 "%PrepareFunctionForOptimization(foo); "
3629 "foo(40); "
3630 "%OptimizeFunctionOnNextCall(foo); "
3631 "foo(41); ";
3632
3633 DetailedErrorStackTraceTest(source, [](Handle<FixedArray> stack_trace) {
3634 FixedArray parameters_add = ParametersOf(stack_trace, 0);
3635 CHECK_EQ(parameters_add.length(), 1);
3636 CHECK(parameters_add.get(0).IsSmi());
3637 CHECK_EQ(Smi::ToInt(parameters_add.get(0)), 42);
3638
3639 FixedArray parameters_foo = ParametersOf(stack_trace, 1);
3640 CHECK_EQ(parameters_foo.length(), 1);
3641 CHECK(parameters_foo.get(0).IsSmi());
3642 CHECK_EQ(Smi::ToInt(parameters_foo.get(0)), 41);
3643 });
3644 }
3645
3646 // * Test builtin exit error
TEST(DetailedErrorStackTraceBuiltinExit) {
3648 static const char* source =
3649 "function test(arg1) { "
3650 " (new Number()).toFixed(arg1); "
3651 "} "
3652 "test(9999); ";
3653
3654 DetailedErrorStackTraceTest(source, [](Handle<FixedArray> stack_trace) {
3655 FixedArray parameters = ParametersOf(stack_trace, 0);
3656
3657 CHECK_EQ(parameters.length(), 2);
3658 CHECK(parameters.get(1).IsSmi());
3659 CHECK_EQ(Smi::ToInt(parameters.get(1)), 9999);
3660 });
3661 }
3662
TEST(Regress169928) {
3664 FLAG_allow_natives_syntax = true;
3665 #ifndef V8_LITE_MODE
3666 FLAG_opt = false;
3667 #endif // V8_LITE_MODE
3668 CcTest::InitializeVM();
3669 Isolate* isolate = CcTest::i_isolate();
3670 LocalContext env;
3671 Factory* factory = isolate->factory();
3672 v8::HandleScope scope(CcTest::isolate());
3673
3674 // Some flags turn Scavenge collections into Mark-sweep collections
3675 // and hence are incompatible with this test case.
3676 if (FLAG_gc_global || FLAG_stress_compaction ||
3677 FLAG_stress_incremental_marking || FLAG_single_generation)
3678 return;
3679
3680 // Prepare the environment
3681 CompileRun(
3682 "function fastliteralcase(literal, value) {"
3683 " literal[0] = value;"
3684 " return literal;"
3685 "}"
3686 "function get_standard_literal() {"
3687 " var literal = [1, 2, 3];"
3688 " return literal;"
3689 "}"
3690 "obj = fastliteralcase(get_standard_literal(), 1);"
3691 "obj = fastliteralcase(get_standard_literal(), 1.5);"
3692 "obj = fastliteralcase(get_standard_literal(), 2);");
3693
  // Prepare the heap.
3695 v8::Local<v8::String> mote_code_string =
3696 v8_str("fastliteralcase(mote, 2.5);");
3697
3698 v8::Local<v8::String> array_name = v8_str("mote");
3699 CHECK(CcTest::global()
3700 ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
3701 .FromJust());
3702
3703 // First make sure we flip spaces
3704 CcTest::CollectGarbage(NEW_SPACE);
3705
3706 // Allocate the object.
3707 Handle<FixedArray> array_data =
3708 factory->NewFixedArray(2, AllocationType::kYoung);
3709 array_data->set(0, Smi::FromInt(1));
3710 array_data->set(1, Smi::FromInt(2));
3711
3712 heap::FillCurrentPageButNBytes(
3713 CcTest::heap()->new_space(),
3714 JSArray::kHeaderSize + AllocationMemento::kSize + kTaggedSize);
3715
3716 Handle<JSArray> array =
3717 factory->NewJSArrayWithElements(array_data, PACKED_SMI_ELEMENTS);
3718
3719 CHECK_EQ(Smi::FromInt(2), array->length());
3720 CHECK(array->HasSmiOrObjectElements());
3721
  // We need a filler the size of an AllocationMemento object, plus an extra
  // fill pointer value.
3724 HeapObject obj;
3725 AllocationResult allocation = CcTest::heap()->new_space()->AllocateRaw(
3726 AllocationMemento::kSize + kTaggedSize,
3727 AllocationAlignment::kWordAligned);
3728 CHECK(allocation.To(&obj));
3729 Address addr_obj = obj.address();
3730 CcTest::heap()->CreateFillerObjectAt(addr_obj,
3731 AllocationMemento::kSize + kTaggedSize,
3732 ClearRecordedSlots::kNo);
3733
3734 // Give the array a name, making sure not to allocate strings.
3735 v8::Local<v8::Object> array_obj = v8::Utils::ToLocal(array);
3736 CHECK(CcTest::global()->Set(env.local(), array_name, array_obj).FromJust());
3737
3738 // This should crash with a protection violation if we are running a build
3739 // with the bug.
3740 AlwaysAllocateScopeForTesting aa_scope(isolate->heap());
3741 v8::Script::Compile(env.local(), mote_code_string)
3742 .ToLocalChecked()
3743 ->Run(env.local())
3744 .ToLocalChecked();
3745 }
3746
TEST(LargeObjectSlotRecording) {
3748 if (!FLAG_incremental_marking) return;
3749 if (FLAG_never_compact) return;
3750 ManualGCScope manual_gc_scope;
3751 FLAG_manual_evacuation_candidates_selection = true;
3752 CcTest::InitializeVM();
3753 Isolate* isolate = CcTest::i_isolate();
3754 Heap* heap = isolate->heap();
3755 HandleScope scope(isolate);
3756
3757 // Create an object on an evacuation candidate.
3758 heap::SimulateFullSpace(heap->old_space());
3759 Handle<FixedArray> lit =
3760 isolate->factory()->NewFixedArray(4, AllocationType::kOld);
3761 Page* evac_page = Page::FromHeapObject(*lit);
3762 heap::ForceEvacuationCandidate(evac_page);
3763 FixedArray old_location = *lit;
3764
3765 // Allocate a large object.
3766 int size = std::max(1000000, kMaxRegularHeapObjectSize + KB);
3767 CHECK_LT(kMaxRegularHeapObjectSize, size);
3768 Handle<FixedArray> lo =
3769 isolate->factory()->NewFixedArray(size, AllocationType::kOld);
3770 CHECK(heap->lo_space()->Contains(*lo));
3771
  // Start incremental marking to activate the write barrier.
3773 heap::SimulateIncrementalMarking(heap, false);
3774
3775 // Create references from the large object to the object on the evacuation
3776 // candidate.
3777 const int kStep = size / 10;
3778 for (int i = 0; i < size; i += kStep) {
3779 lo->set(i, *lit);
3780 CHECK(lo->get(i) == old_location);
3781 }
3782
3783 heap::SimulateIncrementalMarking(heap, true);
3784
  // Move the evacuation candidate object.
3786 CcTest::CollectAllGarbage();
3787
3788 // Verify that the pointers in the large object got updated.
3789 for (int i = 0; i < size; i += kStep) {
3790 CHECK_EQ(lo->get(i), *lit);
3791 CHECK(lo->get(i) != old_location);
3792 }
3793 }
3794
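// Root visitor that ignores all roots; used only to drive handle iteration.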
3795 class DummyVisitor : public RootVisitor {
3796 public:
  void VisitRootPointers(Root root, const char* description,
3798 FullObjectSlot start, FullObjectSlot end) override {}
3799 };
3800
TEST(PersistentHandles) {
3802 CcTest::InitializeVM();
3803 Isolate* isolate = CcTest::i_isolate();
3804 Heap* heap = isolate->heap();
3805 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
3806 HandleScopeData* data = isolate->handle_scope_data();
3807 Handle<Object> init(ReadOnlyRoots(heap).empty_string(), isolate);
3808 while (data->next < data->limit) {
3809 Handle<Object> obj(ReadOnlyRoots(heap).empty_string(), isolate);
3810 }
3811 // An entire block of handles has been filled.
3812 // Next handle would require a new block.
3813 CHECK(data->next == data->limit);
3814
3815 PersistentHandlesScope persistent(isolate);
3816 DummyVisitor visitor;
3817 isolate->handle_scope_implementer()->Iterate(&visitor);
3818 persistent.Detach();
3819 }
3820
static void TestFillersFromPersistentHandles(bool promote) {
3822 // We assume that the fillers can only arise when left-trimming arrays.
3823 Isolate* isolate = CcTest::i_isolate();
3824 Heap* heap = isolate->heap();
3825 v8::HandleScope scope(reinterpret_cast<v8::Isolate*>(isolate));
3826
3827 const size_t n = 10;
3828 Handle<FixedArray> array = isolate->factory()->NewFixedArray(n);
3829
3830 if (promote) {
3831 // Age the array so it's ready for promotion on next GC.
3832 CcTest::CollectGarbage(NEW_SPACE);
3833 }
3834 CHECK(Heap::InYoungGeneration(*array));
3835
3836 PersistentHandlesScope persistent_scope(isolate);
3837
3838 // Trim the array three times to different sizes so all kinds of fillers are
3839 // created and tracked by the persistent handles.
3840 Handle<FixedArrayBase> filler_1 = Handle<FixedArrayBase>(*array, isolate);
3841 Handle<FixedArrayBase> filler_2 =
3842 Handle<FixedArrayBase>(heap->LeftTrimFixedArray(*filler_1, 1), isolate);
3843 Handle<FixedArrayBase> filler_3 =
3844 Handle<FixedArrayBase>(heap->LeftTrimFixedArray(*filler_2, 2), isolate);
3845 Handle<FixedArrayBase> tail =
3846 Handle<FixedArrayBase>(heap->LeftTrimFixedArray(*filler_3, 3), isolate);
3847
3848 std::unique_ptr<PersistentHandles> persistent_handles(
3849 persistent_scope.Detach());
3850
3851 // GC should retain the trimmed array but drop all of the three fillers.
3852 CcTest::CollectGarbage(NEW_SPACE);
3853 if (!FLAG_single_generation) {
3854 if (promote) {
3855 CHECK(heap->InOldSpace(*tail));
3856 } else {
3857 CHECK(Heap::InYoungGeneration(*tail));
3858 }
3859 }
3860 CHECK_EQ(n - 6, (*tail).length());
3861 CHECK(!filler_1->IsHeapObject());
3862 CHECK(!filler_2->IsHeapObject());
3863 CHECK(!filler_3->IsHeapObject());
3864 }
3865
TEST(DoNotEvacuateFillersFromPersistentHandles) {
3867 if (FLAG_single_generation || FLAG_move_object_start) return;
3868 TestFillersFromPersistentHandles(false /*promote*/);
3869 }
3870
TEST(DoNotPromoteFillersFromPersistentHandles) {
3872 if (FLAG_single_generation || FLAG_move_object_start) return;
3873 TestFillersFromPersistentHandles(true /*promote*/);
3874 }
3875
TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
3877 if (!FLAG_incremental_marking) return;
3878 ManualGCScope manual_gc_scope;
3879 CcTest::InitializeVM();
3880 v8::HandleScope scope(CcTest::isolate());
3881 CompileRun(
3882 "function f(n) {"
3883 " var a = new Array(n);"
3884 " for (var i = 0; i < n; i += 100) a[i] = i;"
3885 "};"
3886 "f(10 * 1024 * 1024);");
3887 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
3888 if (marking->IsStopped()) {
3889 CcTest::heap()->StartIncrementalMarking(
3890 i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
3891 }
3892 heap::SimulateIncrementalMarking(CcTest::heap());
3893 CHECK(marking->IsComplete() ||
3894 marking->IsReadyToOverApproximateWeakClosure());
3895 }
3896
3897
TEST(DisableInlineAllocation) {
3899 FLAG_allow_natives_syntax = true;
3900 CcTest::InitializeVM();
3901 v8::HandleScope scope(CcTest::isolate());
3902 CompileRun(
3903 "function test() {"
3904 " var x = [];"
3905 " for (var i = 0; i < 10; i++) {"
3906 " x[i] = [ {}, [1,2,3], [1,x,3] ];"
3907 " }"
3908 "}"
3909 "function run() {"
3910 " %PrepareFunctionForOptimization(test);"
3911 " %OptimizeFunctionOnNextCall(test);"
3912 " test();"
3913 " %DeoptimizeFunction(test);"
3914 "}");
3915
3916 // Warm-up with inline allocation enabled.
3917 CompileRun("test(); test(); run();");
3918
3919 // Run test with inline allocation disabled.
3920 CcTest::heap()->DisableInlineAllocation();
3921 CompileRun("run()");
3922
3923 // Run test with inline allocation re-enabled.
3924 CcTest::heap()->EnableInlineAllocation();
3925 CompileRun("run()");
3926 }
3927
3928
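// Counts the allocation sites linked on the heap's weak
// allocation_sites_list.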
static int AllocationSitesCount(Heap* heap) {
3930 int count = 0;
3931 for (Object site = heap->allocation_sites_list(); site.IsAllocationSite();) {
3932 AllocationSite cur = AllocationSite::cast(site);
3933 CHECK(cur.HasWeakNext());
3934 site = cur.weak_next();
3935 count++;
3936 }
3937 return count;
3938 }
3939
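// Counts the nested (slim) allocation sites hanging off each site on the weak
// allocation_sites_list.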
static int SlimAllocationSiteCount(Heap* heap) {
3941 int count = 0;
3942 for (Object weak_list = heap->allocation_sites_list();
3943 weak_list.IsAllocationSite();) {
3944 AllocationSite weak_cur = AllocationSite::cast(weak_list);
3945 for (Object site = weak_cur.nested_site(); site.IsAllocationSite();) {
3946 AllocationSite cur = AllocationSite::cast(site);
3947 CHECK(!cur.HasWeakNext());
3948 site = cur.nested_site();
3949 count++;
3950 }
3951 weak_list = weak_cur.weak_next();
3952 }
3953 return count;
3954 }
3955
TEST(EnsureAllocationSiteDependentCodesProcessed) {
3957 if (FLAG_always_opt || !FLAG_opt || !V8_ALLOCATION_SITE_TRACKING_BOOL) return;
3958 FLAG_allow_natives_syntax = true;
3959 CcTest::InitializeVM();
3960 Isolate* isolate = CcTest::i_isolate();
3961 v8::internal::Heap* heap = CcTest::heap();
3962 GlobalHandles* global_handles = isolate->global_handles();
3963
3964 if (!isolate->use_optimizer()) return;
3965
3966 // The allocation site at the head of the list is ours.
3967 Handle<AllocationSite> site;
3968 {
3969 LocalContext context;
3970 v8::HandleScope scope(context->GetIsolate());
3971
3972 int count = AllocationSitesCount(heap);
3973 CompileRun(
3974 "var bar = function() { return (new Array()); };"
3975 "%PrepareFunctionForOptimization(bar);"
3976 "var a = bar();"
3977 "bar();"
3978 "bar();");
3979
3980 // One allocation site should have been created.
3981 int new_count = AllocationSitesCount(heap);
3982 CHECK_EQ(new_count, (count + 1));
3983 site = Handle<AllocationSite>::cast(global_handles->Create(
3984 AllocationSite::cast(heap->allocation_sites_list())));
3985
3986 CompileRun("%OptimizeFunctionOnNextCall(bar); bar();");
3987
3988 Handle<JSFunction> bar_handle = Handle<JSFunction>::cast(
3989 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3990 CcTest::global()
3991 ->Get(context.local(), v8_str("bar"))
3992 .ToLocalChecked())));
3993
3994 int dependency_group_count = 0;
3995 DependentCode dependency = site->dependent_code();
3996 while (dependency != ReadOnlyRoots(heap).empty_weak_fixed_array()) {
3997 CHECK(dependency.group() ==
3998 DependentCode::kAllocationSiteTransitionChangedGroup ||
3999 dependency.group() ==
4000 DependentCode::kAllocationSiteTenuringChangedGroup);
4001 CHECK_EQ(1, dependency.count());
4002 CHECK(dependency.object_at(0)->IsWeak());
4003 Code function_bar = FromCodeT(
4004 CodeT::cast(dependency.object_at(0)->GetHeapObjectAssumeWeak()));
4005 CHECK_EQ(bar_handle->code(), function_bar);
4006 dependency = dependency.next_link();
4007 dependency_group_count++;
4008 }
4009 // Expect a dependent code object for transitioning and pretenuring.
4010 CHECK_EQ(2, dependency_group_count);
4011 }
4012
  // Now make sure that a GC gets rid of the function, even though we still
  // have the allocation site alive.
4015 for (int i = 0; i < 4; i++) {
4016 CcTest::CollectAllGarbage();
4017 }
4018
4019 // The site still exists because of our global handle, but the code is no
4020 // longer referred to by dependent_code().
4021 CHECK(site->dependent_code().object_at(0)->IsCleared());
4022 }
4023
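// Runs |source| and checks that it creates exactly the expected number of
// full and slim allocation sites.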
void CheckNumberOfAllocations(Heap* heap, const char* source,
4025 int expected_full_alloc,
4026 int expected_slim_alloc) {
4027 int prev_fat_alloc_count = AllocationSitesCount(heap);
4028 int prev_slim_alloc_count = SlimAllocationSiteCount(heap);
4029
4030 CompileRun(source);
4031
4032 int fat_alloc_sites = AllocationSitesCount(heap) - prev_fat_alloc_count;
4033 int slim_alloc_sites = SlimAllocationSiteCount(heap) - prev_slim_alloc_count;
4034
4035 CHECK_EQ(expected_full_alloc, fat_alloc_sites);
4036 CHECK_EQ(expected_slim_alloc, slim_alloc_sites);
4037 }
4038
TEST(AllocationSiteCreation) {
4040 FLAG_always_opt = false;
4041 CcTest::InitializeVM();
4042 Isolate* isolate = CcTest::i_isolate();
4043 Heap* heap = isolate->heap();
4044 HandleScope scope(isolate);
4045 i::FLAG_allow_natives_syntax = true;
4046
4047 // Array literals.
4048 CheckNumberOfAllocations(heap,
4049 "function f1() {"
4050 " return []; "
4051 "};"
4052 "%EnsureFeedbackVectorForFunction(f1); f1();",
4053 1, 0);
4054 CheckNumberOfAllocations(heap,
4055 "function f2() {"
4056 " return [1, 2];"
4057 "};"
4058 "%EnsureFeedbackVectorForFunction(f2); f2();",
4059 1, 0);
4060 CheckNumberOfAllocations(heap,
4061 "function f3() {"
4062 " return [[1], [2]];"
4063 "};"
4064 "%EnsureFeedbackVectorForFunction(f3); f3();",
4065 1, 2);
4066 CheckNumberOfAllocations(heap,
4067 "function f4() { "
4068 "return [0, [1, 1.1, 1.2, "
4069 "], 1.5, [2.1, 2.2], 3];"
4070 "};"
4071 "%EnsureFeedbackVectorForFunction(f4); f4();",
4072 1, 2);
4073
4074 // Object literals have lazy AllocationSites
4075 CheckNumberOfAllocations(heap,
4076 "function f5() {"
4077 " return {};"
4078 "};"
4079 "%EnsureFeedbackVectorForFunction(f5); f5();",
4080 0, 0);
4081
4082 // No AllocationSites are created for the empty object literal.
4083 for (int i = 0; i < 5; i++) {
4084 CheckNumberOfAllocations(heap, "f5(); ", 0, 0);
4085 }
4086
4087 CheckNumberOfAllocations(heap,
4088 "function f6() {"
4089 " return {a:1};"
4090 "};"
4091 "%EnsureFeedbackVectorForFunction(f6); f6();",
4092 0, 0);
4093
4094 CheckNumberOfAllocations(heap, "f6(); ", 1, 0);
4095
4096 CheckNumberOfAllocations(heap,
4097 "function f7() {"
4098 " return {a:1, b:2};"
4099 "};"
4100 "%EnsureFeedbackVectorForFunction(f7); f7(); ",
4101 0, 0);
4102 CheckNumberOfAllocations(heap, "f7(); ", 1, 0);
4103
  // No AllocationSites are created for object subliterals.
4105 CheckNumberOfAllocations(heap,
4106 "function f8() {"
4107 "return {a:{}, b:{ a:2, c:{ d:{f:{}}} } }; "
4108 "};"
4109 "%EnsureFeedbackVectorForFunction(f8); f8();",
4110 0, 0);
4111 CheckNumberOfAllocations(heap, "f8(); ", 1, 0);
4112
4113 // We currently eagerly create allocation sites if there are sub-arrays.
4114 // Allocation sites are created only for array subliterals
4115 CheckNumberOfAllocations(heap,
4116 "function f9() {"
4117 "return {a:[1, 2, 3], b:{ a:2, c:{ d:{f:[]} } }}; "
4118 "};"
4119 "%EnsureFeedbackVectorForFunction(f9); f9(); ",
4120 1, 2);
4121
4122 // No new AllocationSites created on the second invocation.
4123 CheckNumberOfAllocations(heap, "f9(); ", 0, 0);
4124 }
4125
TEST(CellsInOptimizedCodeAreWeak) {
4127 if (FLAG_always_opt || !FLAG_opt) return;
4128 FLAG_allow_natives_syntax = true;
4129 CcTest::InitializeVM();
4130 Isolate* isolate = CcTest::i_isolate();
4131 v8::internal::Heap* heap = CcTest::heap();
4132
4133 if (!isolate->use_optimizer()) return;
4134 HandleScope outer_scope(heap->isolate());
4135 Handle<Code> code;
4136 {
4137 LocalContext context;
4138 HandleScope scope(heap->isolate());
4139
4140 CompileRun(
4141 "bar = (function() {"
4142 " function bar() {"
4143 " return foo(1);"
4144 " };"
4145 " %PrepareFunctionForOptimization(bar);"
4146 " var foo = function(x) { with (x) { return 1 + x; } };"
4147 " %NeverOptimizeFunction(foo);"
4148 " bar(foo);"
4149 " bar(foo);"
4150 " bar(foo);"
4151 " %OptimizeFunctionOnNextCall(bar);"
4152 " bar(foo);"
4153 " return bar;})();");
4154
4155 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4156 *v8::Local<v8::Function>::Cast(CcTest::global()
4157 ->Get(context.local(), v8_str("bar"))
4158 .ToLocalChecked())));
4159 code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
4160 }
4161
  // Now make sure that a GC gets rid of the function.
4163 for (int i = 0; i < 4; i++) {
4164 CcTest::CollectAllGarbage();
4165 }
4166
4167 CHECK(code->marked_for_deoptimization());
4168 CHECK(code->embedded_objects_cleared());
4169 }
4170
4171
TEST(ObjectsInOptimizedCodeAreWeak) {
4173 if (FLAG_always_opt || !FLAG_opt) return;
4174 FLAG_allow_natives_syntax = true;
4175 CcTest::InitializeVM();
4176 Isolate* isolate = CcTest::i_isolate();
4177 v8::internal::Heap* heap = CcTest::heap();
4178
4179 if (!isolate->use_optimizer()) return;
4180 HandleScope outer_scope(heap->isolate());
4181 Handle<Code> code;
4182 {
4183 LocalContext context;
4184 HandleScope scope(heap->isolate());
4185
4186 CompileRun(
4187 "function bar() {"
4188 " return foo(1);"
4189 "};"
4190 "%PrepareFunctionForOptimization(bar);"
4191 "function foo(x) { with (x) { return 1 + x; } };"
4192 "%NeverOptimizeFunction(foo);"
4193 "bar();"
4194 "bar();"
4195 "bar();"
4196 "%OptimizeFunctionOnNextCall(bar);"
4197 "bar();");
4198
4199 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4200 *v8::Local<v8::Function>::Cast(CcTest::global()
4201 ->Get(context.local(), v8_str("bar"))
4202 .ToLocalChecked())));
4203 code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
4204 }
4205
  // Now make sure that a GC gets rid of the function.
4207 for (int i = 0; i < 4; i++) {
4208 CcTest::CollectAllGarbage();
4209 }
4210
4211 CHECK(code->marked_for_deoptimization());
4212 CHECK(code->embedded_objects_cleared());
4213 }
4214
TEST(NewSpaceObjectsInOptimizedCode) {
4216 if (FLAG_always_opt || !FLAG_opt || FLAG_single_generation) return;
4217 FLAG_allow_natives_syntax = true;
4218 CcTest::InitializeVM();
4219 Isolate* isolate = CcTest::i_isolate();
4220
4221 if (!isolate->use_optimizer()) return;
4222 HandleScope outer_scope(isolate);
4223 Handle<Code> code;
4224 {
4225 LocalContext context;
4226 HandleScope scope(isolate);
4227
4228 CompileRun(
4229 "var foo;"
4230 "var bar;"
4231 "(function() {"
4232 " function foo_func(x) { with (x) { return 1 + x; } };"
4233 " %NeverOptimizeFunction(foo_func);"
4234 " function bar_func() {"
4235 " return foo(1);"
4236 " };"
4237 " %PrepareFunctionForOptimization(bar_func);"
4238 " bar = bar_func;"
4239 " foo = foo_func;"
4240 " bar_func();"
4241 " bar_func();"
4242 " bar_func();"
4243 " %OptimizeFunctionOnNextCall(bar_func);"
4244 " bar_func();"
4245 "})();");
4246
4247 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4248 *v8::Local<v8::Function>::Cast(CcTest::global()
4249 ->Get(context.local(), v8_str("bar"))
4250 .ToLocalChecked())));
4251
4252 Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4253 *v8::Local<v8::Function>::Cast(CcTest::global()
4254 ->Get(context.local(), v8_str("foo"))
4255 .ToLocalChecked())));
4256
4257 CHECK(Heap::InYoungGeneration(*foo));
4258 CcTest::CollectGarbage(NEW_SPACE);
4259 CcTest::CollectGarbage(NEW_SPACE);
4260 CHECK(!Heap::InYoungGeneration(*foo));
4261 #ifdef VERIFY_HEAP
4262 CcTest::heap()->Verify();
4263 #endif
4264 CHECK(!bar->code().marked_for_deoptimization());
4265 code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
4266 }
4267
  // Now make sure that a GC gets rid of the function.
4269 for (int i = 0; i < 4; i++) {
4270 CcTest::CollectAllGarbage();
4271 }
4272
4273 CHECK(code->marked_for_deoptimization());
4274 CHECK(code->embedded_objects_cleared());
4275 }
4276
TEST(ObjectsInEagerlyDeoptimizedCodeAreWeak) {
4278 if (FLAG_always_opt || !FLAG_opt) return;
4279 FLAG_allow_natives_syntax = true;
4280 CcTest::InitializeVM();
4281 Isolate* isolate = CcTest::i_isolate();
4282 v8::internal::Heap* heap = CcTest::heap();
4283
4284 if (!isolate->use_optimizer()) return;
4285 HandleScope outer_scope(heap->isolate());
4286 Handle<Code> code;
4287 {
4288 LocalContext context;
4289 HandleScope scope(heap->isolate());
4290
4291 CompileRun(
4292 "function bar() {"
4293 " return foo(1);"
4294 "};"
4295 "function foo(x) { with (x) { return 1 + x; } };"
4296 "%NeverOptimizeFunction(foo);"
4297 "%PrepareFunctionForOptimization(bar);"
4298 "bar();"
4299 "bar();"
4300 "bar();"
4301 "%OptimizeFunctionOnNextCall(bar);"
4302 "bar();"
4303 "%DeoptimizeFunction(bar);");
4304
4305 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4306 *v8::Local<v8::Function>::Cast(CcTest::global()
4307 ->Get(context.local(), v8_str("bar"))
4308 .ToLocalChecked())));
4309 code = scope.CloseAndEscape(Handle<Code>(bar->code(), isolate));
4310 }
4311
4312 CHECK(code->marked_for_deoptimization());
4313
  // Now make sure that a GC gets rid of the function.
4315 for (int i = 0; i < 4; i++) {
4316 CcTest::CollectAllGarbage();
4317 }
4318
4319 CHECK(code->marked_for_deoptimization());
4320 CHECK(code->embedded_objects_cleared());
4321 }
4322
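// Defines a trivial function with the given |name|, warms it up, and
// optimizes it, returning the resulting JSFunction.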
static Handle<JSFunction> OptimizeDummyFunction(v8::Isolate* isolate,
4324 const char* name) {
4325 base::EmbeddedVector<char, 256> source;
4326 base::SNPrintF(source,
4327 "function %s() { return 0; }"
4328 "%%PrepareFunctionForOptimization(%s);"
4329 "%s(); %s();"
4330 "%%OptimizeFunctionOnNextCall(%s);"
4331 "%s();",
4332 name, name, name, name, name, name);
4333 CompileRun(source.begin());
4334 i::Handle<JSFunction> fun = Handle<JSFunction>::cast(
4335 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4336 CcTest::global()
4337 ->Get(isolate->GetCurrentContext(), v8_str(name))
4338 .ToLocalChecked())));
4339 return fun;
4340 }
4341
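// Counts how many Code objects follow |code| on its next_code_link chain.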
static int GetCodeChainLength(Code code) {
4343 int result = 0;
4344 while (code.next_code_link().IsCodeT()) {
4345 result++;
4346 code = FromCodeT(CodeT::cast(code.next_code_link()));
4347 }
4348 return result;
4349 }
4350
4351
TEST(NextCodeLinkIsWeak) {
4353 FLAG_always_opt = false;
4354 FLAG_allow_natives_syntax = true;
4355 CcTest::InitializeVM();
4356 Isolate* isolate = CcTest::i_isolate();
4357 v8::internal::Heap* heap = CcTest::heap();
4358
4359 if (!isolate->use_optimizer()) return;
4360 HandleScope outer_scope(heap->isolate());
4361 Handle<Code> code;
4362 CcTest::CollectAllAvailableGarbage();
4363 int code_chain_length_before, code_chain_length_after;
4364 {
4365 HandleScope scope(heap->isolate());
4366 Handle<JSFunction> mortal =
4367 OptimizeDummyFunction(CcTest::isolate(), "mortal");
4368 Handle<JSFunction> immortal =
4369 OptimizeDummyFunction(CcTest::isolate(), "immortal");
4370 CHECK_EQ(immortal->code().next_code_link(), ToCodeT(mortal->code()));
4371 code_chain_length_before = GetCodeChainLength(immortal->code());
4372 // Keep the immortal code and let the mortal code die.
4373 code = scope.CloseAndEscape(Handle<Code>(immortal->code(), isolate));
4374 CompileRun("mortal = null; immortal = null;");
4375 }
4376 CcTest::CollectAllAvailableGarbage();
4377 // Now mortal code should be dead.
4378 code_chain_length_after = GetCodeChainLength(*code);
4379 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
4380 }
4381
TEST(NextCodeLinkInCodeDataContainerIsCleared) {
4383 FLAG_always_opt = false;
4384 FLAG_allow_natives_syntax = true;
4385 CcTest::InitializeVM();
4386 Isolate* isolate = CcTest::i_isolate();
4387 v8::internal::Heap* heap = CcTest::heap();
4388
4389 if (!isolate->use_optimizer()) return;
4390 HandleScope outer_scope(heap->isolate());
4391 Handle<CodeDataContainer> code_data_container;
4392 {
4393 HandleScope scope(heap->isolate());
4394 Handle<JSFunction> mortal1 =
4395 OptimizeDummyFunction(CcTest::isolate(), "mortal1");
4396 Handle<JSFunction> mortal2 =
4397 OptimizeDummyFunction(CcTest::isolate(), "mortal2");
4398 CHECK_EQ(mortal2->code().next_code_link(), ToCodeT(mortal1->code()));
4399 code_data_container = scope.CloseAndEscape(Handle<CodeDataContainer>(
4400 mortal2->code().code_data_container(kAcquireLoad), isolate));
4401 CompileRun("mortal1 = null; mortal2 = null;");
4402 }
4403 CcTest::CollectAllAvailableGarbage();
4404 CHECK(code_data_container->next_code_link().IsUndefined(isolate));
4405 }
4406
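// Assembles a minimal TURBOFAN Code object (push and drop undefined) for use
// in next-code-link tests.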
static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
4408 i::byte buffer[i::Assembler::kDefaultBufferSize];
4409 MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
4410 ExternalAssemblerBuffer(buffer, sizeof(buffer)));
4411 CodeDesc desc;
4412 #if V8_TARGET_ARCH_ARM64
4413 UseScratchRegisterScope temps(&masm);
4414 Register tmp = temps.AcquireX();
4415 masm.Mov(tmp, Operand(isolate->factory()->undefined_value()));
4416 masm.Push(tmp, tmp);
4417 #else
4418 masm.Push(isolate->factory()->undefined_value());
4419 masm.Push(isolate->factory()->undefined_value());
4420 #endif
4421 masm.Drop(2);
4422 masm.GetCode(isolate, &desc);
4423 Handle<Code> code = Factory::CodeBuilder(isolate, desc, CodeKind::TURBOFAN)
4424 .set_self_reference(masm.CodeObject())
4425 .Build();
4426 CHECK(code->IsCode());
4427 return code;
4428 }
4429
4430
TEST(NextCodeLinkIsWeak2) {
4432 FLAG_allow_natives_syntax = true;
4433 CcTest::InitializeVM();
4434 Isolate* isolate = CcTest::i_isolate();
4435 v8::internal::Heap* heap = CcTest::heap();
4436
4437 if (!isolate->use_optimizer()) return;
4438 HandleScope outer_scope(heap->isolate());
4439 CcTest::CollectAllAvailableGarbage();
4440 Handle<NativeContext> context(
4441 NativeContext::cast(heap->native_contexts_list()), isolate);
4442 Handle<Code> new_head;
4443 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
4444 {
4445 HandleScope scope(heap->isolate());
4446 Handle<Code> immortal = DummyOptimizedCode(isolate);
4447 Handle<Code> mortal = DummyOptimizedCode(isolate);
4448 mortal->set_next_code_link(*old_head);
4449 immortal->set_next_code_link(ToCodeT(*mortal));
4450 context->SetOptimizedCodeListHead(ToCodeT(*immortal));
4451 new_head = scope.CloseAndEscape(immortal);
4452 }
4453 CcTest::CollectAllAvailableGarbage();
4454 // Now mortal code should be dead.
4455 CHECK_EQ(*old_head, new_head->next_code_link());
4456 }
4457
4458
4459 static bool weak_ic_cleared = false;
4460
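// Weak callback that records that the weakly held object was reclaimed and
// resets the handle.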
static void ClearWeakIC(
4462 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
4463 printf("clear weak is called\n");
4464 weak_ic_cleared = true;
4465 data.GetParameter()->Reset();
4466 }
4467
4468
TEST(WeakFunctionInConstructor) {
4470 if (FLAG_always_opt) return;
4471 FLAG_stress_compaction = false;
4472 FLAG_stress_incremental_marking = false;
4473 FLAG_allow_natives_syntax = true;
4474 CcTest::InitializeVM();
4475 v8::Isolate* isolate = CcTest::isolate();
4476 LocalContext env;
4477 v8::HandleScope scope(isolate);
4478 CompileRun(
4479 "function createObj(obj) {"
4480 " return new obj();"
4481 "}");
4482 i::Handle<JSFunction> createObj = Handle<JSFunction>::cast(
4483 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4484 CcTest::global()
4485 ->Get(env.local(), v8_str("createObj"))
4486 .ToLocalChecked())));
4487
4488 v8::Persistent<v8::Object> garbage;
4489 {
4490 v8::HandleScope scope(isolate);
4491 const char* source =
4492 " (function() {"
4493 " function hat() { this.x = 5; }"
4494 " %EnsureFeedbackVectorForFunction(hat);"
4495 " %EnsureFeedbackVectorForFunction(createObj);"
4496 " createObj(hat);"
4497 " createObj(hat);"
4498 " return hat;"
4499 " })();";
4500 garbage.Reset(isolate, CompileRun(env.local(), source)
4501 .ToLocalChecked()
4502 ->ToObject(env.local())
4503 .ToLocalChecked());
4504 }
4505 weak_ic_cleared = false;
4506 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
4507 CcTest::CollectAllGarbage();
4508 CHECK(weak_ic_cleared);
4509
  // We've determined the constructor in createObj has had its weak cell
4511 // cleared. Now, verify that one additional call with a new function
4512 // allows monomorphicity.
4513 Handle<FeedbackVector> feedback_vector =
4514 Handle<FeedbackVector>(createObj->feedback_vector(), CcTest::i_isolate());
4515 for (int i = 0; i < 20; i++) {
4516 MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
4517 CHECK(slot_value->IsWeakOrCleared());
4518 if (slot_value->IsCleared()) break;
4519 CcTest::CollectAllGarbage();
4520 }
4521
4522 MaybeObject slot_value = feedback_vector->Get(FeedbackSlot(0));
4523 CHECK(slot_value->IsCleared());
4524 CompileRun(
4525 "function coat() { this.x = 6; }"
4526 "createObj(coat);");
4527 slot_value = feedback_vector->Get(FeedbackSlot(0));
4528 CHECK(slot_value->IsWeak());
4529 }
4530
4531
4532 // Checks that the value returned by execution of the source is weak.
void CheckWeakness(const char* source) {
4534 FLAG_stress_compaction = false;
4535 FLAG_stress_incremental_marking = false;
4536 FLAG_allow_natives_syntax = true;
4537 CcTest::InitializeVM();
4538 v8::Isolate* isolate = CcTest::isolate();
4539 LocalContext env;
4540 v8::HandleScope scope(isolate);
4541 v8::Persistent<v8::Object> garbage;
4542 {
4543 v8::HandleScope scope(isolate);
4544 garbage.Reset(isolate, CompileRun(env.local(), source)
4545 .ToLocalChecked()
4546 ->ToObject(env.local())
4547 .ToLocalChecked());
4548 }
4549 weak_ic_cleared = false;
4550 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
4551 CcTest::CollectAllGarbage();
4552 CHECK(weak_ic_cleared);
4553 }
4554
4555
4556 // Each of the following "weak IC" tests creates an IC that embeds a map with
4557 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
4558 TEST(WeakMapInMonomorphicLoadIC) {
4559 CheckWeakness(
4560 "function loadIC(obj) {"
4561 " return obj.name;"
4562 "}"
4563 "%EnsureFeedbackVectorForFunction(loadIC);"
4564 " (function() {"
4565 " var proto = {'name' : 'weak'};"
4566 " var obj = Object.create(proto);"
4567 " loadIC(obj);"
4568 " loadIC(obj);"
4569 " loadIC(obj);"
4570 " return proto;"
4571 " })();");
4572 }
4573
4574
4575 TEST(WeakMapInPolymorphicLoadIC) {
4576 CheckWeakness(
4577 "function loadIC(obj) {"
4578 " return obj.name;"
4579 "}"
4580 "%EnsureFeedbackVectorForFunction(loadIC);"
4581 " (function() {"
4582 " var proto = {'name' : 'weak'};"
4583 " var obj = Object.create(proto);"
4584 " loadIC(obj);"
4585 " loadIC(obj);"
4586 " loadIC(obj);"
4587 " var poly = Object.create(proto);"
4588 " poly.x = true;"
4589 " loadIC(poly);"
4590 " return proto;"
4591 " })();");
4592 }
4593
4594
4595 TEST(WeakMapInMonomorphicKeyedLoadIC) {
4596 CheckWeakness(
4597 "function keyedLoadIC(obj, field) {"
4598 " return obj[field];"
4599 "}"
4600 "%EnsureFeedbackVectorForFunction(keyedLoadIC);"
4601 " (function() {"
4602 " var proto = {'name' : 'weak'};"
4603 " var obj = Object.create(proto);"
4604 " keyedLoadIC(obj, 'name');"
4605 " keyedLoadIC(obj, 'name');"
4606 " keyedLoadIC(obj, 'name');"
4607 " return proto;"
4608 " })();");
4609 }
4610
4611
4612 TEST(WeakMapInPolymorphicKeyedLoadIC) {
4613 CheckWeakness(
4614 "function keyedLoadIC(obj, field) {"
4615 " return obj[field];"
4616 "}"
4617 "%EnsureFeedbackVectorForFunction(keyedLoadIC);"
4618 " (function() {"
4619 " var proto = {'name' : 'weak'};"
4620 " var obj = Object.create(proto);"
4621 " keyedLoadIC(obj, 'name');"
4622 " keyedLoadIC(obj, 'name');"
4623 " keyedLoadIC(obj, 'name');"
4624 " var poly = Object.create(proto);"
4625 " poly.x = true;"
4626 " keyedLoadIC(poly, 'name');"
4627 " return proto;"
4628 " })();");
4629 }
4630
4631
4632 TEST(WeakMapInMonomorphicStoreIC) {
4633 CheckWeakness(
4634 "function storeIC(obj, value) {"
4635 " obj.name = value;"
4636 "}"
4637 "%EnsureFeedbackVectorForFunction(storeIC);"
4638 " (function() {"
4639 " var proto = {'name' : 'weak'};"
4640 " var obj = Object.create(proto);"
4641 " storeIC(obj, 'x');"
4642 " storeIC(obj, 'x');"
4643 " storeIC(obj, 'x');"
4644 " return proto;"
4645 " })();");
4646 }
4647
4648
4649 TEST(WeakMapInPolymorphicStoreIC) {
4650 CheckWeakness(
4651 "function storeIC(obj, value) {"
4652 " obj.name = value;"
4653 "}"
4654 "%EnsureFeedbackVectorForFunction(storeIC);"
4655 " (function() {"
4656 " var proto = {'name' : 'weak'};"
4657 " var obj = Object.create(proto);"
4658 " storeIC(obj, 'x');"
4659 " storeIC(obj, 'x');"
4660 " storeIC(obj, 'x');"
4661 " var poly = Object.create(proto);"
4662 " poly.x = true;"
4663 " storeIC(poly, 'x');"
4664 " return proto;"
4665 " })();");
4666 }
4667
4668
4669 TEST(WeakMapInMonomorphicKeyedStoreIC) {
4670 CheckWeakness(
4671 "function keyedStoreIC(obj, field, value) {"
4672 " obj[field] = value;"
4673 "}"
4674 "%EnsureFeedbackVectorForFunction(keyedStoreIC);"
4675 " (function() {"
4676 " var proto = {'name' : 'weak'};"
4677 " var obj = Object.create(proto);"
4678 " keyedStoreIC(obj, 'x');"
4679 " keyedStoreIC(obj, 'x');"
4680 " keyedStoreIC(obj, 'x');"
4681 " return proto;"
4682 " })();");
4683 }
4684
4685
4686 TEST(WeakMapInPolymorphicKeyedStoreIC) {
4687 CheckWeakness(
4688 "function keyedStoreIC(obj, field, value) {"
4689 " obj[field] = value;"
4690 "}"
4691 "%EnsureFeedbackVectorForFunction(keyedStoreIC);"
4692 " (function() {"
4693 " var proto = {'name' : 'weak'};"
4694 " var obj = Object.create(proto);"
4695 " keyedStoreIC(obj, 'x');"
4696 " keyedStoreIC(obj, 'x');"
4697 " keyedStoreIC(obj, 'x');"
4698 " var poly = Object.create(proto);"
4699 " poly.x = true;"
4700 " keyedStoreIC(poly, 'x');"
4701 " return proto;"
4702 " })();");
4703 }
4704
4705
4706 TEST(WeakMapInMonomorphicCompareNilIC) {
4707 FLAG_allow_natives_syntax = true;
4708 CheckWeakness(
4709 "function compareNilIC(obj) {"
4710 " return obj == null;"
4711 "}"
4712 "%EnsureFeedbackVectorForFunction(compareNilIC);"
4713 " (function() {"
4714 " var proto = {'name' : 'weak'};"
4715 " var obj = Object.create(proto);"
4716 " compareNilIC(obj);"
4717 " compareNilIC(obj);"
4718 " compareNilIC(obj);"
4719 " return proto;"
4720 " })();");
4721 }
4722
4723
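// Looks up |name| on the global object and returns it as a JSFunction handle.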
4724 Handle<JSFunction> GetFunctionByName(Isolate* isolate, const char* name) {
4725 Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
4726 Handle<Object> obj =
4727 Object::GetProperty(isolate, isolate->global_object(), str)
4728 .ToHandleChecked();
4729 return Handle<JSFunction>::cast(obj);
4730 }
4731
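// Checks that the feedback slot |slot_index| of |function| is in IC state |state|.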
4732 void CheckIC(Handle<JSFunction> function, int slot_index,
4733 InlineCacheState state) {
4734 FeedbackVector vector = function->feedback_vector();
4735 FeedbackSlot slot(slot_index);
4736 FeedbackNexus nexus(vector, slot);
4737 CHECK_EQ(nexus.ic_state(), state);
4738 }
4739
4740 TEST(MonomorphicStaysMonomorphicAfterGC) {
4741 if (!FLAG_use_ic) return;
4742 if (FLAG_always_opt) return;
4743 ManualGCScope manual_gc_scope;
4744 CcTest::InitializeVM();
4745 Isolate* isolate = CcTest::i_isolate();
4746 v8::HandleScope scope(CcTest::isolate());
4747 FLAG_allow_natives_syntax = true;
4748 CompileRun(
4749 "function loadIC(obj) {"
4750 " return obj.name;"
4751 "}"
4752 "%EnsureFeedbackVectorForFunction(loadIC);"
4753 "function testIC() {"
4754 " var proto = {'name' : 'weak'};"
4755 " var obj = Object.create(proto);"
4756 " loadIC(obj);"
4757 " loadIC(obj);"
4758 " loadIC(obj);"
4759 " return proto;"
4760 "};");
4761 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
4762 {
4763 v8::HandleScope scope(CcTest::isolate());
4764 CompileRun("(testIC())");
4765 }
4766 CcTest::CollectAllGarbage();
4767 CheckIC(loadIC, 0, MONOMORPHIC);
4768 {
4769 v8::HandleScope scope(CcTest::isolate());
4770 CompileRun("(testIC())");
4771 }
4772 CheckIC(loadIC, 0, MONOMORPHIC);
4773 }
4774
4775
4776 TEST(PolymorphicStaysPolymorphicAfterGC) {
4777 if (!FLAG_use_ic) return;
4778 if (FLAG_always_opt) return;
4779 ManualGCScope manual_gc_scope;
4780 CcTest::InitializeVM();
4781 Isolate* isolate = CcTest::i_isolate();
4782 v8::HandleScope scope(CcTest::isolate());
4783 FLAG_allow_natives_syntax = true;
4784 CompileRun(
4785 "function loadIC(obj) {"
4786 " return obj.name;"
4787 "}"
4788 "%EnsureFeedbackVectorForFunction(loadIC);"
4789 "function testIC() {"
4790 " var proto = {'name' : 'weak'};"
4791 " var obj = Object.create(proto);"
4792 " loadIC(obj);"
4793 " loadIC(obj);"
4794 " loadIC(obj);"
4795 " var poly = Object.create(proto);"
4796 " poly.x = true;"
4797 " loadIC(poly);"
4798 " return proto;"
4799 "};");
4800 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
4801 {
4802 v8::HandleScope scope(CcTest::isolate());
4803 CompileRun("(testIC())");
4804 }
4805 CcTest::CollectAllGarbage();
4806 CheckIC(loadIC, 0, POLYMORPHIC);
4807 {
4808 v8::HandleScope scope(CcTest::isolate());
4809 CompileRun("(testIC())");
4810 }
4811 CheckIC(loadIC, 0, POLYMORPHIC);
4812 }
4813
4814 #ifdef DEBUG
4815 TEST(AddInstructionChangesNewSpacePromotion) {
4816 FLAG_allow_natives_syntax = true;
4817 FLAG_expose_gc = true;
4818 FLAG_stress_compaction = true;
4819 FLAG_gc_interval = 1000;
4820 CcTest::InitializeVM();
4821 if (!FLAG_allocation_site_pretenuring) return;
4822 v8::HandleScope scope(CcTest::isolate());
4823 Isolate* isolate = CcTest::i_isolate();
4824 Heap* heap = isolate->heap();
4825 LocalContext env;
4826 CompileRun(
4827 "function add(a, b) {"
4828 " return a + b;"
4829 "}"
4830 "add(1, 2);"
4831 "add(\"a\", \"b\");"
4832 "var oldSpaceObject;"
4833 "gc();"
4834 "function crash(x) {"
4835 " var object = {a: null, b: null};"
4836 " var result = add(1.5, x | 0);"
4837 " object.a = result;"
4838 " oldSpaceObject = object;"
4839 " return object;"
4840 "}"
4841 "%PrepareFunctionForOptimization(crash);"
4842 "crash(1);"
4843 "crash(1);"
4844 "%OptimizeFunctionOnNextCall(crash);"
4845 "crash(1);");
4846
4847 v8::Local<v8::Object> global = CcTest::global();
4848 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
4849 global->Get(env.local(), v8_str("crash")).ToLocalChecked());
4850 v8::Local<v8::Value> args1[] = {v8_num(1)};
4851 heap->DisableInlineAllocation();
4852 heap->set_allocation_timeout(1);
4853 g->Call(env.local(), global, 1, args1).ToLocalChecked();
4854 CcTest::CollectAllGarbage();
4855 }
4856
4857
4858 void OnFatalErrorExpectOOM(const char* location, const char* message) {
4859 // Exit with 0 if the location matches our expectation.
4860 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
4861 }
4862
4863
4864 TEST(CEntryStubOOM) {
4865 FLAG_allow_natives_syntax = true;
4866 CcTest::InitializeVM();
4867 v8::HandleScope scope(CcTest::isolate());
4868 CcTest::isolate()->SetFatalErrorHandler(OnFatalErrorExpectOOM);
4869
4870 v8::Local<v8::Value> result = CompileRun(
4871 "%SetAllocationTimeout(1, 1);"
4872 "var a = [];"
4873 "a.__proto__ = [];"
4874 "a.unshift(1)");
4875
4876 CHECK(result->IsNumber());
4877 }
4878
4879 #endif // DEBUG
4880
4881
4882 static void InterruptCallback357137(v8::Isolate* isolate, void* data) { }
4883
4884
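// Requests the no-op interrupt callback above on the CcTest isolate; bound to
// the "interrupt" function in Regress357137.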
4885 static void RequestInterrupt(const v8::FunctionCallbackInfo<v8::Value>& args) {
4886 CcTest::isolate()->RequestInterrupt(&InterruptCallback357137, nullptr);
4887 }
4888
4889 HEAP_TEST(Regress538257) {
4890 ManualGCScope manual_gc_scope;
4891 FLAG_manual_evacuation_candidates_selection = true;
4892 v8::Isolate::CreateParams create_params;
4893 // Set heap limits.
4894 create_params.constraints.set_max_young_generation_size_in_bytes(3 * MB);
4895 #ifdef DEBUG
4896 create_params.constraints.set_max_old_generation_size_in_bytes(20 * MB);
4897 #else
4898 create_params.constraints.set_max_old_generation_size_in_bytes(6 * MB);
4899 #endif
4900 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
4901 v8::Isolate* isolate = v8::Isolate::New(create_params);
4902 isolate->Enter();
4903 {
4904 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
4905 Heap* heap = i_isolate->heap();
4906 HandleScope handle_scope(i_isolate);
4907 PagedSpace* old_space = heap->old_space();
4908 const int kMaxObjects = 10000;
4909 const int kFixedArrayLen = 512;
4910 Handle<FixedArray> objects[kMaxObjects];
4911 for (int i = 0; (i < kMaxObjects) &&
4912 heap->CanExpandOldGeneration(old_space->AreaSize());
4913 i++) {
4914 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen,
4915 AllocationType::kOld);
4916 heap::ForceEvacuationCandidate(Page::FromHeapObject(*objects[i]));
4917 }
4918 heap::SimulateFullSpace(old_space);
4919 CcTest::CollectAllGarbage();
4920 // If we get this far, we've successfully aborted compaction. Any further
4921 // allocations might trigger OOM.
4922 }
4923 isolate->Exit();
4924 isolate->Dispose();
4925 }
4926
4927
4928 TEST(Regress357137) {
4929 CcTest::InitializeVM();
4930 v8::Isolate* isolate = CcTest::isolate();
4931 v8::HandleScope hscope(isolate);
4932 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
4933 global->Set(isolate, "interrupt",
4934 v8::FunctionTemplate::New(isolate, RequestInterrupt));
4935 v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
4936 CHECK(!context.IsEmpty());
4937 v8::Context::Scope cscope(context);
4938
4939 v8::Local<v8::Value> result = CompileRun(
4940 "var locals = '';"
4941 "for (var i = 0; i < 512; i++) locals += 'var v' + i + '= 42;';"
4942 "eval('function f() {' + locals + 'return function() { return v0; }; }');"
4943 "interrupt();" // This triggers a fake stack overflow in f.
4944 "f()()");
4945 CHECK_EQ(42.0, result->ToNumber(context).ToLocalChecked()->Value());
4946 }
4947
4948
4949 TEST(Regress507979) {
4950 const int kFixedArrayLen = 10;
4951 CcTest::InitializeVM();
4952 Isolate* isolate = CcTest::i_isolate();
4953 HandleScope handle_scope(isolate);
4954
4955 Handle<FixedArray> o1 = isolate->factory()->NewFixedArray(kFixedArrayLen);
4956 Handle<FixedArray> o2 = isolate->factory()->NewFixedArray(kFixedArrayLen);
4957 CHECK(InCorrectGeneration(*o1));
4958 CHECK(InCorrectGeneration(*o2));
4959
4960 HeapObjectIterator it(isolate->heap(),
4961 i::HeapObjectIterator::kFilterUnreachable);
4962
4963 // Replace parts of an object placed before a live object with a filler. This
4964 // way the filler object shares the mark bits with the following live object.
4965 o1->Shrink(isolate, kFixedArrayLen - 1);
4966
4967 for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
4968 // Let's not optimize the loop away.
4969 CHECK_NE(obj.address(), kNullAddress);
4970 }
4971 }
4972
4973 TEST(Regress388880) {
4974 if (!FLAG_incremental_marking) return;
4975 FLAG_stress_incremental_marking = false;
4976 FLAG_expose_gc = true;
4977 FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
4978 CcTest::InitializeVM();
4979 v8::HandleScope scope(CcTest::isolate());
4980 Isolate* isolate = CcTest::i_isolate();
4981 Factory* factory = isolate->factory();
4982 Heap* heap = isolate->heap();
4983
4984 Handle<Map> map1 = Map::Create(isolate, 1);
4985 Handle<String> name = factory->NewStringFromStaticChars("foo");
4986 name = factory->InternalizeString(name);
4987 Handle<Map> map2 =
4988 Map::CopyWithField(isolate, map1, name, FieldType::Any(isolate), NONE,
4989 PropertyConstness::kMutable, Representation::Tagged(),
4990 OMIT_TRANSITION)
4991 .ToHandleChecked();
4992
4993 size_t desired_offset = Page::kPageSize - map1->instance_size();
4994
4995 // Allocate padding objects in the old space so that the object allocated
4996 // afterwards ends at the end of the page.
4997 heap::SimulateFullSpace(heap->old_space());
4998 size_t padding_size =
4999 desired_offset - MemoryChunkLayout::ObjectStartOffsetInDataPage();
5000 heap::CreatePadding(heap, static_cast<int>(padding_size),
5001 AllocationType::kOld);
5002
5003 Handle<JSObject> o = factory->NewJSObjectFromMap(map1, AllocationType::kOld);
5004 o->set_raw_properties_or_hash(*factory->empty_fixed_array());
5005
5006 // Ensure that the object was allocated where we need it.
5007 Page* page = Page::FromHeapObject(*o);
5008 CHECK_EQ(desired_offset, page->Offset(o->address()));
5009
5010 // Now we have an object right at the end of the page.
5011
5012 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5013 // that would cause a crash.
5014 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5015 marking->Stop();
5016 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
5017 i::GarbageCollectionReason::kTesting);
5018 CHECK(marking->IsMarking());
5019
5020 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5021 // when it calls heap->AdjustLiveBytes(...).
5022 JSObject::MigrateToMap(isolate, o, map2);
5023 }
5024
5025
5026 TEST(Regress3631) {
5027 if (!FLAG_incremental_marking) return;
5028 FLAG_expose_gc = true;
5029 CcTest::InitializeVM();
5030 v8::HandleScope scope(CcTest::isolate());
5031 Isolate* isolate = CcTest::i_isolate();
5032 Heap* heap = isolate->heap();
5033 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5034 v8::Local<v8::Value> result = CompileRun(
5035 "var weak_map = new WeakMap();"
5036 "var future_keys = [];"
5037 "for (var i = 0; i < 50; i++) {"
5038 " var key = {'k' : i + 0.1};"
5039 " weak_map.set(key, 1);"
5040 " future_keys.push({'x' : i + 0.2});"
5041 "}"
5042 "weak_map");
5043 if (marking->IsStopped()) {
5044 CcTest::heap()->StartIncrementalMarking(
5045 i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
5046 }
5047 // Incrementally mark the backing store.
5048 Handle<JSReceiver> obj =
5049 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5050 Handle<JSWeakCollection> weak_map(JSWeakCollection::cast(*obj), isolate);
5051 SimulateIncrementalMarking(heap);
5052 // Stash the backing store in a handle.
5053 Handle<Object> save(weak_map->table(), isolate);
5054 // The following line will update the backing store.
5055 CompileRun(
5056 "for (var i = 0; i < 50; i++) {"
5057 " weak_map.set(future_keys[i], i);"
5058 "}");
5059 CcTest::CollectGarbage(OLD_SPACE);
5060 }
5061
5062
5063 TEST(Regress442710) {
5064 CcTest::InitializeVM();
5065 Isolate* isolate = CcTest::i_isolate();
5066 Factory* factory = isolate->factory();
5067
5068 HandleScope sc(isolate);
5069 Handle<JSGlobalObject> global(CcTest::i_isolate()->context().global_object(),
5070 isolate);
5071 Handle<JSArray> array = factory->NewJSArray(2);
5072
5073 Handle<String> name = factory->InternalizeUtf8String("testArray");
5074 Object::SetProperty(isolate, global, name, array).Check();
5075 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5076 CcTest::CollectGarbage(OLD_SPACE);
5077 }
5078
5079
5080 HEAP_TEST(NumberStringCacheSize) {
5081 // Test that the number-string cache has not been resized in the snapshot.
5082 CcTest::InitializeVM();
5083 Isolate* isolate = CcTest::i_isolate();
5084 if (!isolate->snapshot_available()) return;
5085 Heap* heap = isolate->heap();
5086 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5087 heap->number_string_cache().length());
5088 }
5089
5090
5091 TEST(Regress3877) {
5092 CcTest::InitializeVM();
5093 Isolate* isolate = CcTest::i_isolate();
5094 Factory* factory = isolate->factory();
5095 HandleScope scope(isolate);
5096 CompileRun("function cls() { this.x = 10; }");
5097 Handle<WeakFixedArray> weak_prototype_holder = factory->NewWeakFixedArray(1);
5098 {
5099 HandleScope inner_scope(isolate);
5100 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5101 Handle<JSReceiver> proto =
5102 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5103 weak_prototype_holder->Set(0, HeapObjectReference::Weak(*proto));
5104 }
5105 CHECK(!weak_prototype_holder->Get(0)->IsCleared());
5106 CompileRun(
5107 "var a = { };"
5108 "a.x = new cls();"
5109 "cls.prototype = null;");
5110 for (int i = 0; i < 4; i++) {
5111 CcTest::CollectAllGarbage();
5112 }
5113 // The map of a.x keeps the prototype alive.
5114 CHECK(!weak_prototype_holder->Get(0)->IsCleared());
5115 // Change the map of a.x and make the previous map garbage collectable.
5116 CompileRun("a.x.__proto__ = {};");
5117 for (int i = 0; i < 4; i++) {
5118 CcTest::CollectAllGarbage();
5119 }
5120 CHECK(weak_prototype_holder->Get(0)->IsCleared());
5121 }
5122
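// Creates a map with a freshly allocated prototype, registers it as a retained
// map for |context|, and returns a WeakFixedArray weakly referencing the map.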
5123 Handle<WeakFixedArray> AddRetainedMap(Isolate* isolate,
5124 Handle<NativeContext> context) {
5125 HandleScope inner_scope(isolate);
5126 Handle<Map> map = Map::Create(isolate, 1);
5127 v8::Local<v8::Value> result =
5128 CompileRun("(function () { return {x : 10}; })();");
5129 Handle<JSReceiver> proto =
5130 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5131 Map::SetPrototype(isolate, map, proto);
5132 isolate->heap()->AddRetainedMap(context, map);
5133 Handle<WeakFixedArray> array = isolate->factory()->NewWeakFixedArray(1);
5134 array->Set(0, HeapObjectReference::Weak(*map));
5135 return inner_scope.CloseAndEscape(array);
5136 }
5137
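// Checks that a retained map stays weakly referenced for |n| GC cycles (with
// simulated incremental marking) and is cleared by the following cycle.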
5138 void CheckMapRetainingFor(int n) {
5139 FLAG_retain_maps_for_n_gc = n;
5140 Isolate* isolate = CcTest::i_isolate();
5141 Heap* heap = isolate->heap();
5142 v8::Local<v8::Context> ctx = v8::Context::New(CcTest::isolate());
5143 Handle<Context> context = Utils::OpenHandle(*ctx);
5144 CHECK(context->IsNativeContext());
5145 Handle<NativeContext> native_context = Handle<NativeContext>::cast(context);
5146 // This global is used to keep the object's constructor alive when starting
5147 // incremental marking. The native context keeps the constructor alive. The
5148 // constructor needs to be alive to retain the map.
5149 v8::Global<v8::Context> global_ctxt(CcTest::isolate(), ctx);
5150
5151 ctx->Enter();
5152 Handle<WeakFixedArray> array_with_map =
5153 AddRetainedMap(isolate, native_context);
5154 CHECK(array_with_map->Get(0)->IsWeak());
5155 for (int i = 0; i < n; i++) {
5156 heap::SimulateIncrementalMarking(heap);
5157 CcTest::CollectGarbage(OLD_SPACE);
5158 }
5159 CHECK(array_with_map->Get(0)->IsWeak());
5160 heap::SimulateIncrementalMarking(heap);
5161 CcTest::CollectGarbage(OLD_SPACE);
5162 CHECK(array_with_map->Get(0)->IsCleared());
5163
5164 ctx->Exit();
5165 }
5166
5167
5168 TEST(MapRetaining) {
5169 if (!FLAG_incremental_marking) return;
5170 ManualGCScope manual_gc_scope;
5171 CcTest::InitializeVM();
5172 v8::HandleScope scope(CcTest::isolate());
5173 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5174 CheckMapRetainingFor(0);
5175 CheckMapRetainingFor(1);
5176 CheckMapRetainingFor(7);
5177 }
5178
5179 TEST(RetainedMapsCleanup) {
5180 if (!FLAG_incremental_marking) return;
5181 ManualGCScope manual_gc_scope;
5182 CcTest::InitializeVM();
5183 v8::HandleScope scope(CcTest::isolate());
5184 Isolate* isolate = CcTest::i_isolate();
5185 Heap* heap = isolate->heap();
5186 v8::Local<v8::Context> ctx = v8::Context::New(CcTest::isolate());
5187 Handle<Context> context = Utils::OpenHandle(*ctx);
5188 CHECK(context->IsNativeContext());
5189 Handle<NativeContext> native_context = Handle<NativeContext>::cast(context);
5190
5191 ctx->Enter();
5192 Handle<WeakFixedArray> array_with_map =
5193 AddRetainedMap(isolate, native_context);
5194 CHECK(array_with_map->Get(0)->IsWeak());
5195 heap->NotifyContextDisposed(true);
5196 CcTest::CollectAllGarbage();
5197 ctx->Exit();
5198
5199 CHECK_EQ(ReadOnlyRoots(heap).empty_weak_array_list(),
5200 native_context->retained_maps());
5201 }
5202
5203 TEST(PreprocessStackTrace) {
5204 // Do not automatically trigger early GC.
5205 FLAG_gc_interval = -1;
5206 CcTest::InitializeVM();
5207 v8::HandleScope scope(CcTest::isolate());
5208 v8::TryCatch try_catch(CcTest::isolate());
5209 CompileRun("throw new Error();");
5210 CHECK(try_catch.HasCaught());
5211 Isolate* isolate = CcTest::i_isolate();
5212 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
5213 Handle<Name> key = isolate->factory()->stack_trace_symbol();
5214 Handle<Object> stack_trace =
5215 Object::GetProperty(isolate, exception, key).ToHandleChecked();
5216 Handle<Object> code =
5217 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5218 CHECK(code->IsCode());
5219
5220 CcTest::CollectAllAvailableGarbage();
5221
5222 Handle<Object> pos =
5223 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
5224 CHECK(pos->IsSmi());
5225
5226 Handle<FixedArray> frame_array = Handle<FixedArray>::cast(stack_trace);
5227 int array_length = frame_array->length();
5228 for (int i = 0; i < array_length; i++) {
5229 Handle<Object> element =
5230 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
5231 CHECK(!element->IsCode());
5232 }
5233 }
5234
5235
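// Allocates a FixedArray occupying exactly |bytes| bytes in the given |space|
// and checks that it landed in the expected generation.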
5236 void AllocateInSpace(Isolate* isolate, size_t bytes, AllocationSpace space) {
5237 CHECK_LE(FixedArray::kHeaderSize, bytes);
5238 CHECK(IsAligned(bytes, kTaggedSize));
5239 Factory* factory = isolate->factory();
5240 HandleScope scope(isolate);
5241 AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
5242 int elements =
5243 static_cast<int>((bytes - FixedArray::kHeaderSize) / kTaggedSize);
5244 Handle<FixedArray> array = factory->NewFixedArray(
5245 elements,
5246 space == NEW_SPACE ? AllocationType::kYoung : AllocationType::kOld);
5247 CHECK((space == NEW_SPACE) == Heap::InYoungGeneration(*array));
5248 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
5249 }
5250
5251
5252 TEST(NewSpaceAllocationCounter) {
5253 if (FLAG_single_generation) return;
5254 CcTest::InitializeVM();
5255 v8::HandleScope scope(CcTest::isolate());
5256 Isolate* isolate = CcTest::i_isolate();
5257 Heap* heap = isolate->heap();
5258 size_t counter1 = heap->NewSpaceAllocationCounter();
5259 CcTest::CollectGarbage(NEW_SPACE);
5260 CcTest::CollectGarbage(NEW_SPACE); // Ensure new space is empty.
5261 const size_t kSize = 1024;
5262 AllocateInSpace(isolate, kSize, NEW_SPACE);
5263 size_t counter2 = heap->NewSpaceAllocationCounter();
5264 CHECK_EQ(kSize, counter2 - counter1);
5265 CcTest::CollectGarbage(NEW_SPACE);
5266 size_t counter3 = heap->NewSpaceAllocationCounter();
5267 CHECK_EQ(0U, counter3 - counter2);
5268 // Test counter overflow.
5269 size_t max_counter = static_cast<size_t>(-1);
5270 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
5271 size_t start = heap->NewSpaceAllocationCounter();
5272 for (int i = 0; i < 20; i++) {
5273 AllocateInSpace(isolate, kSize, NEW_SPACE);
5274 size_t counter = heap->NewSpaceAllocationCounter();
5275 CHECK_EQ(kSize, counter - start);
5276 start = counter;
5277 }
5278 }
5279
5280
5281 TEST(OldSpaceAllocationCounter) {
5282 ManualGCScope manual_gc_scope;
5283 CcTest::InitializeVM();
5284 v8::HandleScope scope(CcTest::isolate());
5285 Isolate* isolate = CcTest::i_isolate();
5286 Heap* heap = isolate->heap();
5287 // Disable the LAB so that calculations with SizeOfObjects() and object size
5288 // are correct.
5289 heap->DisableInlineAllocation();
5290 size_t counter1 = heap->OldGenerationAllocationCounter();
5291 CcTest::CollectGarbage(NEW_SPACE);
5292 CcTest::CollectGarbage(NEW_SPACE);
5293 const size_t kSize = 1024;
5294 AllocateInSpace(isolate, kSize, OLD_SPACE);
5295 size_t counter2 = heap->OldGenerationAllocationCounter();
5296 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
5297 CHECK_LE(kSize, counter2 - counter1);
5298 CcTest::CollectGarbage(NEW_SPACE);
5299 size_t counter3 = heap->OldGenerationAllocationCounter();
5300 CHECK_EQ(0u, counter3 - counter2);
5301 AllocateInSpace(isolate, kSize, OLD_SPACE);
5302 CcTest::CollectGarbage(OLD_SPACE);
5303 size_t counter4 = heap->OldGenerationAllocationCounter();
5304 CHECK_LE(kSize, counter4 - counter3);
5305 // Test counter overflow.
5306 size_t max_counter = static_cast<size_t>(-1);
5307 heap->set_old_generation_allocation_counter_at_last_gc(max_counter -
5308 10 * kSize);
5309 size_t start = heap->OldGenerationAllocationCounter();
5310 for (int i = 0; i < 20; i++) {
5311 AllocateInSpace(isolate, kSize, OLD_SPACE);
5312 size_t counter = heap->OldGenerationAllocationCounter();
5313 CHECK_LE(kSize, counter - start);
5314 start = counter;
5315 }
5316 }
5317
5318
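// Native "check" callback: verifies that no pending message object is left on
// the isolate.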
5319 static void CheckLeak(const v8::FunctionCallbackInfo<v8::Value>& args) {
5320 Isolate* isolate = CcTest::i_isolate();
5321 Object message(
5322 *reinterpret_cast<Address*>(isolate->pending_message_address()));
5323 CHECK(message.IsTheHole(isolate));
5324 }
5325
5326
5327 TEST(MessageObjectLeak) {
5328 CcTest::InitializeVM();
5329 v8::Isolate* isolate = CcTest::isolate();
5330 v8::HandleScope scope(isolate);
5331 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5332 global->Set(isolate, "check", v8::FunctionTemplate::New(isolate, CheckLeak));
5333 v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
5334 v8::Context::Scope cscope(context);
5335
5336 const char* test =
5337 "try {"
5338 " throw 'message 1';"
5339 "} catch (e) {"
5340 "}"
5341 "check();"
5342 "L: try {"
5343 " throw 'message 2';"
5344 "} finally {"
5345 " break L;"
5346 "}"
5347 "check();";
5348 CompileRun(test);
5349
5350 const char* flag = "--turbo-filter=*";
5351 FlagList::SetFlagsFromString(flag, strlen(flag));
5352 FLAG_always_opt = true;
5353
5354 CompileRun(test);
5355 }
5356
5357
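// Native "check" callback: verifies that its two function arguments share the
// same SharedFunctionInfo.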
5358 static void CheckEqualSharedFunctionInfos(
5359 const v8::FunctionCallbackInfo<v8::Value>& args) {
5360 Handle<Object> obj1 = v8::Utils::OpenHandle(*args[0]);
5361 Handle<Object> obj2 = v8::Utils::OpenHandle(*args[1]);
5362 Handle<JSFunction> fun1 = Handle<JSFunction>::cast(obj1);
5363 Handle<JSFunction> fun2 = Handle<JSFunction>::cast(obj2);
5364 CHECK(fun1->shared() == fun2->shared());
5365 }
5366
5367
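// Native "remove" callback: discards the compiled code (including bytecode) of
// the function argument and runs a full GC.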
5368 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
5369 Isolate* isolate = CcTest::i_isolate();
5370 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
5371 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
5372 // Bytecode is code too.
5373 SharedFunctionInfo::DiscardCompiled(isolate, handle(fun->shared(), isolate));
5374 fun->set_code(*BUILTIN_CODE(isolate, CompileLazy));
5375 CcTest::CollectAllAvailableGarbage();
5376 }
5377
5378
5379 TEST(CanonicalSharedFunctionInfo) {
5380 CcTest::InitializeVM();
5381 v8::Isolate* isolate = CcTest::isolate();
5382 v8::HandleScope scope(isolate);
5383 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
5384 global->Set(
5385 isolate, "check",
5386 v8::FunctionTemplate::New(isolate, CheckEqualSharedFunctionInfos));
5387 global->Set(isolate, "remove",
5388 v8::FunctionTemplate::New(isolate, RemoveCodeAndGC));
5389 v8::Local<v8::Context> context = v8::Context::New(isolate, nullptr, global);
5390 v8::Context::Scope cscope(context);
5391 CompileRun(
5392 "function f() { return function g() {}; }"
5393 "var g1 = f();"
5394 "remove(f);"
5395 "var g2 = f();"
5396 "check(g1, g2);");
5397
5398 CompileRun(
5399 "function f() { return (function() { return function g() {}; })(); }"
5400 "var g1 = f();"
5401 "remove(f);"
5402 "var g2 = f();"
5403 "check(g1, g2);");
5404 }
5405
5406
5407 TEST(ScriptIterator) {
5408 CcTest::InitializeVM();
5409 v8::HandleScope scope(CcTest::isolate());
5410 Isolate* isolate = CcTest::i_isolate();
5411 Heap* heap = CcTest::heap();
5412 LocalContext context;
5413
5414 CcTest::CollectAllGarbage();
5415
5416 int script_count = 0;
5417 {
5418 HeapObjectIterator it(heap);
5419 for (HeapObject obj = it.Next(); !obj.is_null(); obj = it.Next()) {
5420 if (obj.IsScript()) script_count++;
5421 }
5422 }
5423
5424 {
5425 Script::Iterator iterator(isolate);
5426 for (Script script = iterator.Next(); !script.is_null();
5427 script = iterator.Next()) {
5428 script_count--;
5429 }
5430 }
5431
5432 CHECK_EQ(0, script_count);
5433 }
5434
5435 // This is the same as Factory::NewByteArray, except it doesn't retry on
5436 // allocation failure.
5437 AllocationResult HeapTester::AllocateByteArrayForTest(
5438 Heap* heap, int length, AllocationType allocation_type) {
5439 DCHECK(length >= 0 && length <= ByteArray::kMaxLength);
5440 int size = ByteArray::SizeFor(length);
5441 HeapObject result;
5442 {
5443 AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
5444 if (!allocation.To(&result)) return allocation;
5445 }
5446
5447 result.set_map_after_allocation(ReadOnlyRoots(heap).byte_array_map(),
5448 SKIP_WRITE_BARRIER);
5449 ByteArray::cast(result).set_length(length);
5450 ByteArray::cast(result).clear_padding();
5451 return result;
5452 }
5453
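// Ensures the code space has a linear allocation area of at least
// |size_in_bytes|.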
5454 bool HeapTester::CodeEnsureLinearAllocationArea(Heap* heap, int size_in_bytes) {
5455 bool result = heap->code_space()->EnsureLabMain(size_in_bytes,
5456 AllocationOrigin::kRuntime);
5457 heap->code_space()->UpdateInlineAllocationLimit(0);
5458 return result;
5459 }
5460
5461 HEAP_TEST(Regress587004) {
5462 if (FLAG_single_generation) return;
5463 ManualGCScope manual_gc_scope;
5464 #ifdef VERIFY_HEAP
5465 FLAG_verify_heap = false;
5466 #endif
5467 CcTest::InitializeVM();
5468 v8::HandleScope scope(CcTest::isolate());
5469 Heap* heap = CcTest::heap();
5470 Isolate* isolate = CcTest::i_isolate();
5471 Factory* factory = isolate->factory();
5472 const int N =
5473 (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kTaggedSize;
5474 Handle<FixedArray> array = factory->NewFixedArray(N, AllocationType::kOld);
5475 CHECK(heap->old_space()->Contains(*array));
5476 Handle<Object> number = factory->NewHeapNumber(1.0);
5477 CHECK(Heap::InYoungGeneration(*number));
5478 for (int i = 0; i < N; i++) {
5479 array->set(i, *number);
5480 }
5481 CcTest::CollectGarbage(OLD_SPACE);
5482 heap::SimulateFullSpace(heap->old_space());
5483 heap->RightTrimFixedArray(*array, N - 1);
5484 heap->mark_compact_collector()->EnsureSweepingCompleted();
5485 ByteArray byte_array;
5486 const int M = 256;
5487 // Don't allow old space expansion. The test works without this flag too,
5488 // but becomes very slow.
5489 heap->set_force_oom(true);
5490 while (
5491 AllocateByteArrayForTest(heap, M, AllocationType::kOld).To(&byte_array)) {
5492 for (int j = 0; j < M; j++) {
5493 byte_array.set(j, 0x31);
5494 }
5495 }
5496 // Re-enable old space expansion to avoid OOM crash.
5497 heap->set_force_oom(false);
5498 CcTest::CollectGarbage(NEW_SPACE);
5499 }
5500
5501 HEAP_TEST(Regress589413) {
5502 if (!FLAG_incremental_marking || FLAG_stress_concurrent_allocation) return;
5503 FLAG_stress_compaction = true;
5504 FLAG_manual_evacuation_candidates_selection = true;
5505 FLAG_parallel_compaction = false;
5506 ManualGCScope manual_gc_scope;
5507 CcTest::InitializeVM();
5508 v8::HandleScope scope(CcTest::isolate());
5509 Heap* heap = CcTest::heap();
5510 // Get the heap into a clean state.
5511 CcTest::CollectGarbage(OLD_SPACE);
5512 CcTest::CollectGarbage(OLD_SPACE);
5513 Isolate* isolate = CcTest::i_isolate();
5514 Factory* factory = isolate->factory();
5515 // Fill the new space with byte arrays with elements looking like pointers.
5516 const int M = 256;
5517 ByteArray byte_array;
5518 Page* young_page = nullptr;
5519 while (AllocateByteArrayForTest(heap, M, AllocationType::kYoung)
5520 .To(&byte_array)) {
5521 // Only allocate objects on one young page, as a rough estimate of
5522 // how much memory can be promoted into the old generation.
5523 // Otherwise we would crash when forcing promotion of all young
5524 // live objects.
5525 if (!young_page) young_page = Page::FromHeapObject(byte_array);
5526 if (Page::FromHeapObject(byte_array) != young_page) break;
5527
5528 for (int j = 0; j < M; j++) {
5529 byte_array.set(j, 0x31);
5530 }
5531 // Add the array to the root set.
5532 handle(byte_array, isolate);
5533 }
5534 auto reset_oom = [](void* heap, size_t limit, size_t) -> size_t {
5535 reinterpret_cast<Heap*>(heap)->set_force_oom(false);
5536 return limit;
5537 };
5538 heap->AddNearHeapLimitCallback(reset_oom, heap);
5539
5540 {
5541 // Ensure that incremental marking is not started unexpectedly.
5542 AlwaysAllocateScopeForTesting always_allocate(isolate->heap());
5543
5544 // Make sure the byte arrays will be promoted on the next GC.
5545 CcTest::CollectGarbage(NEW_SPACE);
5546 // This number is close to the large free list category threshold.
5547 const int N = 0x3EEE;
5548
5549 std::vector<FixedArray> arrays;
5550 std::set<Page*> pages;
5551 FixedArray array;
5552 // Fill all pages with fixed arrays.
5553 heap->set_force_oom(true);
5554 while (
5555 AllocateFixedArrayForTest(heap, N, AllocationType::kOld).To(&array)) {
5556 arrays.push_back(array);
5557 pages.insert(Page::FromHeapObject(array));
5558 // Add the array to the root set.
5559 handle(array, isolate);
5560 }
5561 heap->set_force_oom(false);
5562 size_t initial_pages = pages.size();
5563 // Expand and fill two pages with fixed arrays to ensure enough space for
5564 // both the young objects and the evacuation candidate pages.
5565 while (
5566 AllocateFixedArrayForTest(heap, N, AllocationType::kOld).To(&array)) {
5567 arrays.push_back(array);
5568 pages.insert(Page::FromHeapObject(array));
5569 // Add the array to the root set.
5570 handle(array, isolate);
5571 // Do not expand anymore.
5572 if (pages.size() - initial_pages == 2) {
5573 heap->set_force_oom(true);
5574 }
5575 }
5576 // Expand and mark the new page as evacuation candidate.
5577 heap->set_force_oom(false);
5578 {
5579 Handle<HeapObject> ec_obj =
5580 factory->NewFixedArray(5000, AllocationType::kOld);
5581 Page* ec_page = Page::FromHeapObject(*ec_obj);
5582 heap::ForceEvacuationCandidate(ec_page);
5583 // Make all arrays point to the evacuation candidate so that
5584 // slots are recorded for them.
5585 for (size_t j = 0; j < arrays.size(); j++) {
5586 array = arrays[j];
5587 for (int i = 0; i < N; i++) {
5588 array.set(i, *ec_obj);
5589 }
5590 }
5591 }
5592 CHECK(heap->incremental_marking()->IsStopped());
5593 heap::SimulateIncrementalMarking(heap);
5594 for (size_t j = 0; j < arrays.size(); j++) {
5595 heap->RightTrimFixedArray(arrays[j], N - 1);
5596 }
5597 }
5598
5599 // Force allocation from the free list.
5600 heap->set_force_oom(true);
5601 CcTest::CollectGarbage(OLD_SPACE);
5602 heap->RemoveNearHeapLimitCallback(reset_oom, 0);
5603 }
5604
5605 TEST(Regress598319) {
5606 if (!FLAG_incremental_marking) return;
5607 ManualGCScope manual_gc_scope;
5608 // This test ensures that no white objects can cross the progress bar of large
5609 // objects during incremental marking. It checks this by calling Shift() while
5610 // marking is in progress.
5611 CcTest::InitializeVM();
5612 v8::HandleScope scope(CcTest::isolate());
5613 Heap* heap = CcTest::heap();
5614 Isolate* isolate = heap->isolate();
5615
5616 // The size of the array should be larger than kProgressBarScanningChunk.
5617 const int kNumberOfObjects =
5618 std::max(FixedArray::kMaxRegularLength + 1, 128 * KB);
5619
5620 struct Arr {
5621 Arr(Isolate* isolate, int number_of_objects) {
5622 root = isolate->factory()->NewFixedArray(1, AllocationType::kOld);
5623 {
5624 // Temporary scope to avoid getting any other objects into the root set.
5625 v8::HandleScope scope(CcTest::isolate());
5626 Handle<FixedArray> tmp = isolate->factory()->NewFixedArray(
5627 number_of_objects, AllocationType::kOld);
5628 root->set(0, *tmp);
5629 for (int i = 0; i < get().length(); i++) {
5630 tmp = isolate->factory()->NewFixedArray(100, AllocationType::kOld);
5631 get().set(i, *tmp);
5632 }
5633 }
5634 global_root.Reset(CcTest::isolate(),
5635 Utils::ToLocal(Handle<Object>::cast(root)));
5636 }
5637
5638 FixedArray get() { return FixedArray::cast(root->get(0)); }
5639
5640 Handle<FixedArray> root;
5641
5642 // Store array in global as well to make it part of the root set when
5643 // starting incremental marking.
5644 v8::Global<Value> global_root;
5645 } arr(isolate, kNumberOfObjects);
5646
5647 CHECK_EQ(arr.get().length(), kNumberOfObjects);
5648 CHECK(heap->lo_space()->Contains(arr.get()));
5649 LargePage* page = LargePage::FromHeapObject(arr.get());
5650 CHECK_NOT_NULL(page);
5651
5652 // GC to cleanup state
5653 CcTest::CollectGarbage(OLD_SPACE);
5654 MarkCompactCollector* collector = heap->mark_compact_collector();
5655 if (collector->sweeping_in_progress()) {
5656 collector->EnsureSweepingCompleted();
5657 }
5658
5659 CHECK(heap->lo_space()->Contains(arr.get()));
5660 IncrementalMarking* marking = heap->incremental_marking();
5661 IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5662 CHECK(marking_state->IsWhite(arr.get()));
5663 for (int i = 0; i < arr.get().length(); i++) {
5664 HeapObject arr_value = HeapObject::cast(arr.get().get(i));
5665 CHECK(marking_state->IsWhite(arr_value));
5666 }
5667
5668 // Start incremental marking.
5669 CHECK(marking->IsMarking() || marking->IsStopped());
5670 if (marking->IsStopped()) {
5671 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5672 i::GarbageCollectionReason::kTesting);
5673 }
5674 CHECK(marking->IsMarking());
5675
5676 // Check that we have not marked the interesting array during root scanning.
5677 for (int i = 0; i < arr.get().length(); i++) {
5678 HeapObject arr_value = HeapObject::cast(arr.get().get(i));
5679 CHECK(marking_state->IsWhite(arr_value));
5680 }
5681
5682 // Now we search for a state where we are in incremental marking and have
5683 // only partially marked the large object.
5684 const double kSmallStepSizeInMs = 0.1;
5685 while (!marking->IsComplete()) {
5686 marking->Step(kSmallStepSizeInMs,
5687 i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5688 StepOrigin::kV8);
5689 ProgressBar& progress_bar = page->ProgressBar();
5690 if (progress_bar.IsEnabled() && progress_bar.Value() > 0) {
5691 CHECK_NE(progress_bar.Value(), arr.get().Size());
5692 {
5693 // Shift by 1, effectively moving one white object across the progress
5694 // bar, meaning that we will miss marking it.
5695 v8::HandleScope scope(CcTest::isolate());
5696 Handle<JSArray> js_array = isolate->factory()->NewJSArrayWithElements(
5697 Handle<FixedArray>(arr.get(), isolate));
5698 js_array->GetElementsAccessor()->Shift(js_array).Check();
5699 }
5700 break;
5701 }
5702 }
5703
5704 // Finish marking with bigger steps to speed up test.
5705 const double kLargeStepSizeInMs = 1000;
5706 while (!marking->IsComplete()) {
5707 marking->Step(kLargeStepSizeInMs,
5708 i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5709 StepOrigin::kV8);
5710 if (marking->IsReadyToOverApproximateWeakClosure()) {
5711 SafepointScope scope(heap);
5712 marking->FinalizeIncrementally();
5713 }
5714 }
5715 CHECK(marking->IsComplete());
5716
5717 // All objects need to be black after marking. If a white object crossed the
5718 // progress bar, we would fail here.
5719 for (int i = 0; i < arr.get().length(); i++) {
5720 HeapObject arr_value = HeapObject::cast(arr.get().get(i));
5721 CHECK(marking_state->IsBlack(arr_value));
5722 }
5723 }
5724
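// Allocates an old-space FixedArray of |length|, shrinks it to one element,
// and checks that the heap size is only updated after GC and sweeping.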
5725 Handle<FixedArray> ShrinkArrayAndCheckSize(Heap* heap, int length) {
5726 // Make sure there is no garbage and the compilation cache is empty.
5727 for (int i = 0; i < 5; i++) {
5728 CcTest::CollectAllGarbage();
5729 }
5730 heap->mark_compact_collector()->EnsureSweepingCompleted();
5731 // Disable the LAB so that calculations with SizeOfObjects() and object size
5732 // are correct.
5733 heap->DisableInlineAllocation();
5734 size_t size_before_allocation = heap->SizeOfObjects();
5735 Handle<FixedArray> array =
5736 heap->isolate()->factory()->NewFixedArray(length, AllocationType::kOld);
5737 size_t size_after_allocation = heap->SizeOfObjects();
5738 CHECK_EQ(size_after_allocation, size_before_allocation + array->Size());
5739 array->Shrink(heap->isolate(), 1);
5740 size_t size_after_shrinking = heap->SizeOfObjects();
5741 // Shrinking does not change the space size immediately.
5742 CHECK_EQ(size_after_allocation, size_after_shrinking);
5743 // GC and sweeping update the size to account for shrinking.
5744 CcTest::CollectAllGarbage();
5745 heap->mark_compact_collector()->EnsureSweepingCompleted();
5746 intptr_t size_after_gc = heap->SizeOfObjects();
5747 CHECK_EQ(size_after_gc, size_before_allocation + array->Size());
5748 return array;
5749 }
5750
5751 TEST(Regress609761) {
5752 ManualGCScope manual_gc_scope;
5753 CcTest::InitializeVM();
5754 v8::HandleScope scope(CcTest::isolate());
5755 Heap* heap = CcTest::heap();
5756 int length = kMaxRegularHeapObjectSize / kTaggedSize + 1;
5757 Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, length);
5758 CHECK(heap->lo_space()->Contains(*array));
5759 }
5760
5761 TEST(LiveBytes) {
5762 ManualGCScope manual_gc_scope;
5763 CcTest::InitializeVM();
5764 v8::HandleScope scope(CcTest::isolate());
5765 Heap* heap = CcTest::heap();
5766 Handle<FixedArray> array = ShrinkArrayAndCheckSize(heap, 2000);
5767 CHECK(heap->old_space()->Contains(*array));
5768 }
5769
5770 TEST(Regress615489) {
5771 if (!FLAG_incremental_marking) return;
5772 ManualGCScope manual_gc_scope;
5773 CcTest::InitializeVM();
5774 v8::HandleScope scope(CcTest::isolate());
5775 Heap* heap = CcTest::heap();
5776 Isolate* isolate = heap->isolate();
5777 CcTest::CollectAllGarbage();
5778
5779 i::MarkCompactCollector* collector = heap->mark_compact_collector();
5780 i::IncrementalMarking* marking = heap->incremental_marking();
5781 if (collector->sweeping_in_progress()) {
5782 collector->EnsureSweepingCompleted();
5783 }
5784 CHECK(marking->IsMarking() || marking->IsStopped());
5785 if (marking->IsStopped()) {
5786 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5787 i::GarbageCollectionReason::kTesting);
5788 }
5789 CHECK(marking->IsMarking());
5790 marking->StartBlackAllocationForTesting();
5791 {
5792 AlwaysAllocateScopeForTesting always_allocate(heap);
5793 v8::HandleScope inner(CcTest::isolate());
5794 isolate->factory()->NewFixedArray(500, AllocationType::kOld)->Size();
5795 }
5796 const double kStepSizeInMs = 100;
5797 while (!marking->IsComplete()) {
5798 marking->Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5799 StepOrigin::kV8);
5800 if (marking->IsReadyToOverApproximateWeakClosure()) {
5801 SafepointScope scope(heap);
5802 marking->FinalizeIncrementally();
5803 }
5804 }
5805 CHECK(marking->IsComplete());
5806 intptr_t size_before = heap->SizeOfObjects();
5807 CcTest::CollectAllGarbage();
5808 intptr_t size_after = heap->SizeOfObjects();
5809 // Live size does not increase after garbage collection.
5810 CHECK_LE(size_after, size_before);
5811 }
5812
5813 class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
5814 public:
5815 explicit StaticOneByteResource(const char* data) : data_(data) {}
5816
5817 ~StaticOneByteResource() override = default;
5818
5819 const char* data() const override { return data_; }
5820
5821 size_t length() const override { return strlen(data_); }
5822
5823 private:
5824 const char* data_;
5825 };
5826
5827 TEST(Regress631969) {
5828 if (!FLAG_incremental_marking) return;
5829 FLAG_manual_evacuation_candidates_selection = true;
5830 FLAG_parallel_compaction = false;
5831 ManualGCScope manual_gc_scope;
5832 CcTest::InitializeVM();
5833 v8::HandleScope scope(CcTest::isolate());
5834 Heap* heap = CcTest::heap();
5835 // Get the heap into a clean state.
5836 CcTest::CollectGarbage(OLD_SPACE);
5837 CcTest::CollectGarbage(OLD_SPACE);
5838 Isolate* isolate = CcTest::i_isolate();
5839 Factory* factory = isolate->factory();
5840 // Allocate two strings in a fresh page and mark the page as an evacuation
5841 // candidate.
5842 heap::SimulateFullSpace(heap->old_space());
5843 Handle<String> s1 =
5844 factory->NewStringFromStaticChars("123456789", AllocationType::kOld);
5845 Handle<String> s2 =
5846 factory->NewStringFromStaticChars("01234", AllocationType::kOld);
5847 heap::ForceEvacuationCandidate(Page::FromHeapObject(*s1));
5848
5849 heap::SimulateIncrementalMarking(heap, false);
5850
5851 // Allocate a cons string and promote it to a fresh page in the old space.
5852 heap::SimulateFullSpace(heap->old_space());
5853 Handle<String> s3 = factory->NewConsString(s1, s2).ToHandleChecked();
5854 CcTest::CollectGarbage(NEW_SPACE);
5855 CcTest::CollectGarbage(NEW_SPACE);
5856
5857 // Finish incremental marking.
5858 const double kStepSizeInMs = 100;
5859 IncrementalMarking* marking = heap->incremental_marking();
5860 while (!marking->IsComplete()) {
5861 marking->Step(kStepSizeInMs, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5862 StepOrigin::kV8);
5863 if (marking->IsReadyToOverApproximateWeakClosure()) {
5864 SafepointScope scope(heap);
5865 marking->FinalizeIncrementally();
5866 }
5867 }
5868
5869 {
5870 StaticOneByteResource external_string("12345678901234");
5871 s3->MakeExternal(&external_string);
5872 CcTest::CollectGarbage(OLD_SPACE);
5873 // This prevents the GC from trying to free stack-allocated resources.
5874 i::Handle<i::ExternalOneByteString>::cast(s3)->SetResource(isolate,
5875 nullptr);
5876 }
5877 }
5878
5879 TEST(LeftTrimFixedArrayInBlackArea) {
5880 if (!FLAG_incremental_marking) return;
5881 FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
5882 CcTest::InitializeVM();
5883 v8::HandleScope scope(CcTest::isolate());
5884 Heap* heap = CcTest::heap();
5885 Isolate* isolate = heap->isolate();
5886 CcTest::CollectAllGarbage();
5887
5888 i::MarkCompactCollector* collector = heap->mark_compact_collector();
5889 i::IncrementalMarking* marking = heap->incremental_marking();
5890 if (collector->sweeping_in_progress()) {
5891 collector->EnsureSweepingCompleted();
5892 }
5893 CHECK(marking->IsMarking() || marking->IsStopped());
5894 if (marking->IsStopped()) {
5895 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5896 i::GarbageCollectionReason::kTesting);
5897 }
5898 CHECK(marking->IsMarking());
5899 marking->StartBlackAllocationForTesting();
5900
5901 // Ensure that we allocate a new page, set up a bump pointer area, and
5902 // perform the allocation in a black area.
5903 heap::SimulateFullSpace(heap->old_space());
5904 isolate->factory()->NewFixedArray(4, AllocationType::kOld);
5905 Handle<FixedArray> array =
5906 isolate->factory()->NewFixedArray(50, AllocationType::kOld);
5907 CHECK(heap->old_space()->Contains(*array));
5908 IncrementalMarking::MarkingState* marking_state = marking->marking_state();
5909 CHECK(marking_state->IsBlack(*array));
5910
5911 // Now left trim the allocated black area. A filler has to be installed
5912 // for the trimmed area and all mark bits of the trimmed area have to be
5913 // cleared.
5914 FixedArrayBase trimmed = heap->LeftTrimFixedArray(*array, 10);
5915 CHECK(marking_state->IsBlack(trimmed));
5916
5917 heap::GcAndSweep(heap, OLD_SPACE);
5918 }
5919
5920 TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
5921 if (!FLAG_incremental_marking) return;
5922 FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
5923 CcTest::InitializeVM();
5924 v8::HandleScope scope(CcTest::isolate());
5925 Heap* heap = CcTest::heap();
5926 Isolate* isolate = heap->isolate();
5927 CcTest::CollectAllGarbage();
5928
5929 i::MarkCompactCollector* collector = heap->mark_compact_collector();
5930 i::IncrementalMarking* marking = heap->incremental_marking();
5931 if (collector->sweeping_in_progress()) {
5932 collector->EnsureSweepingCompleted();
5933 }
5934 CHECK(marking->IsMarking() || marking->IsStopped());
5935 if (marking->IsStopped()) {
5936 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5937 i::GarbageCollectionReason::kTesting);
5938 }
5939 CHECK(marking->IsMarking());
5940 marking->StartBlackAllocationForTesting();
5941
5942 // Ensure that we allocate a new page, set up a bump pointer area, and
5943 // perform the allocation in a black area.
5944 heap::SimulateFullSpace(heap->old_space());
5945 isolate->factory()->NewFixedArray(10, AllocationType::kOld);
5946
5947 // Allocate the fixed array that will be trimmed later.
5948 Handle<FixedArray> array =
5949 isolate->factory()->NewFixedArray(100, AllocationType::kOld);
5950 Address start_address = array->address();
5951 Address end_address = start_address + array->Size();
5952 Page* page = Page::FromAddress(start_address);
5953 IncrementalMarking::NonAtomicMarkingState* marking_state =
5954 marking->non_atomic_marking_state();
5955 CHECK(marking_state->IsBlack(*array));
5956 CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
5957 page->AddressToMarkbitIndex(start_address),
5958 page->AddressToMarkbitIndex(end_address)));
5959 CHECK(heap->old_space()->Contains(*array));
5960
5961 FixedArrayBase previous = *array;
5962 FixedArrayBase trimmed;
5963
5964 // First trim in one word steps.
5965 for (int i = 0; i < 10; i++) {
5966 trimmed = heap->LeftTrimFixedArray(previous, 1);
5967 HeapObject filler = HeapObject::FromAddress(previous.address());
5968 CHECK(filler.IsFreeSpaceOrFiller());
5969 CHECK(marking_state->IsBlack(trimmed));
5970 CHECK(marking_state->IsBlack(previous));
5971 previous = trimmed;
5972 }
5973
5974 // Then trim in two and three word steps.
5975 for (int i = 2; i <= 3; i++) {
5976 for (int j = 0; j < 10; j++) {
5977 trimmed = heap->LeftTrimFixedArray(previous, i);
5978 HeapObject filler = HeapObject::FromAddress(previous.address());
5979 CHECK(filler.IsFreeSpaceOrFiller());
5980 CHECK(marking_state->IsBlack(trimmed));
5981 CHECK(marking_state->IsBlack(previous));
5982 previous = trimmed;
5983 }
5984 }
5985
5986 heap::GcAndSweep(heap, OLD_SPACE);
5987 }
5988
5989 TEST(ContinuousRightTrimFixedArrayInBlackArea) {
5990 if (!FLAG_incremental_marking) return;
5991 FLAG_stress_concurrent_allocation = false; // For SimulateFullSpace.
5992 CcTest::InitializeVM();
5993 v8::HandleScope scope(CcTest::isolate());
5994 Heap* heap = CcTest::heap();
5995 Isolate* isolate = CcTest::i_isolate();
5996 CcTest::CollectAllGarbage();
5997
5998 i::MarkCompactCollector* collector = heap->mark_compact_collector();
5999 i::IncrementalMarking* marking = heap->incremental_marking();
6000 if (collector->sweeping_in_progress()) {
6001 collector->EnsureSweepingCompleted();
6002 }
6003 CHECK(marking->IsMarking() || marking->IsStopped());
6004 if (marking->IsStopped()) {
6005 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6006 i::GarbageCollectionReason::kTesting);
6007 }
6008 CHECK(marking->IsMarking());
6009 marking->StartBlackAllocationForTesting();
6010
6011 // Ensure that we allocate a new page, set up a bump pointer area, and
6012 // perform the allocation in a black area.
6013 heap::SimulateFullSpace(heap->old_space());
6014 isolate->factory()->NewFixedArray(10, AllocationType::kOld);
6015
6016 // Allocate the fixed array that will be trimmed later.
6017 Handle<FixedArray> array =
6018 CcTest::i_isolate()->factory()->NewFixedArray(100, AllocationType::kOld);
6019 Address start_address = array->address();
6020 Address end_address = start_address + array->Size();
6021 Page* page = Page::FromAddress(start_address);
6022 IncrementalMarking::NonAtomicMarkingState* marking_state =
6023 marking->non_atomic_marking_state();
6024 CHECK(marking_state->IsBlack(*array));
6025
6026 CHECK(marking_state->bitmap(page)->AllBitsSetInRange(
6027 page->AddressToMarkbitIndex(start_address),
6028 page->AddressToMarkbitIndex(end_address)));
6029 CHECK(heap->old_space()->Contains(*array));
6030
6031 // Trim it once by one word to make checking for white marking color uniform.
6032 Address previous = end_address - kTaggedSize;
6033 isolate->heap()->RightTrimFixedArray(*array, 1);
6034
6035 HeapObject filler = HeapObject::FromAddress(previous);
6036 CHECK(filler.IsFreeSpaceOrFiller());
6037 CHECK(marking_state->IsImpossible(filler));
6038
6039 // Trim 10 times by one, two, and three words.
6040 for (int i = 1; i <= 3; i++) {
6041 for (int j = 0; j < 10; j++) {
6042 previous -= kTaggedSize * i;
6043 isolate->heap()->RightTrimFixedArray(*array, i);
6044 HeapObject filler = HeapObject::FromAddress(previous);
6045 CHECK(filler.IsFreeSpaceOrFiller());
6046 CHECK(marking_state->IsWhite(filler));
6047 }
6048 }
6049
6050 heap::GcAndSweep(heap, OLD_SPACE);
6051 }
6052
6053 TEST(Regress618958) {
6054 if (!FLAG_incremental_marking) return;
6055 CcTest::InitializeVM();
6056 v8::HandleScope scope(CcTest::isolate());
6057 Heap* heap = CcTest::heap();
6058 bool isolate_is_locked = true;
6059 CcTest::isolate()->AdjustAmountOfExternalAllocatedMemory(100 * MB);
6060 int mark_sweep_count_before = heap->ms_count();
6061 heap->MemoryPressureNotification(MemoryPressureLevel::kCritical,
6062 isolate_is_locked);
6063 int mark_sweep_count_after = heap->ms_count();
6064 int mark_sweeps_performed = mark_sweep_count_after - mark_sweep_count_before;
6065 // The memory pressure handler either performed two GCs or performed one and
6066 // started incremental marking.
6067 CHECK(mark_sweeps_performed == 2 ||
6068 (mark_sweeps_performed == 1 &&
6069 !heap->incremental_marking()->IsStopped()));
6070 }
6071
6072 TEST(YoungGenerationLargeObjectAllocationScavenge) {
6073 if (FLAG_minor_mc) return;
6074 if (!FLAG_young_generation_large_objects) return;
6075 CcTest::InitializeVM();
6076 v8::HandleScope scope(CcTest::isolate());
6077 Heap* heap = CcTest::heap();
6078 Isolate* isolate = heap->isolate();
6079 if (!isolate->serializer_enabled()) return;
6080
6081 // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
6082 Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
6083 MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
6084 CHECK_EQ(NEW_LO_SPACE, chunk->owner_identity());
6085 CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
6086 CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
6087
6088 Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
6089 array_small->set(0, *number);
6090
6091 CcTest::CollectGarbage(NEW_SPACE);
6092
6093 // After the first young generation GC array_small will be in the old
6094 // generation large object space.
6095 chunk = MemoryChunk::FromHeapObject(*array_small);
6096 CHECK_EQ(LO_SPACE, chunk->owner_identity());
6097 CHECK(!chunk->InYoungGeneration());
6098
6099 CcTest::CollectAllAvailableGarbage();
6100 }
6101
6102 TEST(YoungGenerationLargeObjectAllocationMarkCompact) {
6103 if (FLAG_minor_mc) return;
6104 if (!FLAG_young_generation_large_objects) return;
6105 CcTest::InitializeVM();
6106 v8::HandleScope scope(CcTest::isolate());
6107 Heap* heap = CcTest::heap();
6108 Isolate* isolate = heap->isolate();
6109 if (!isolate->serializer_enabled()) return;
6110
6111 // TODO(hpayer): Update the test as soon as we have a tenure limit for LO.
6112 Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(200000);
6113 MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
6114 CHECK_EQ(NEW_LO_SPACE, chunk->owner_identity());
6115 CHECK(chunk->IsFlagSet(MemoryChunk::LARGE_PAGE));
6116 CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
6117
6118 Handle<Object> number = isolate->factory()->NewHeapNumber(123.456);
6119 array_small->set(0, *number);
6120
6121 CcTest::CollectGarbage(OLD_SPACE);
6122
6123 // After the first full GC array_small will be in the old generation
6124 // large object space.
6125 chunk = MemoryChunk::FromHeapObject(*array_small);
6126 CHECK_EQ(LO_SPACE, chunk->owner_identity());
6127 CHECK(!chunk->InYoungGeneration());
6128
6129 CcTest::CollectAllAvailableGarbage();
6130 }
6131
6132 TEST(YoungGenerationLargeObjectAllocationReleaseScavenger) {
6133 if (FLAG_minor_mc) return;
6134 if (!FLAG_young_generation_large_objects) return;
6135 CcTest::InitializeVM();
6136 v8::HandleScope scope(CcTest::isolate());
6137 Heap* heap = CcTest::heap();
6138 Isolate* isolate = heap->isolate();
6139 if (!isolate->serializer_enabled()) return;
6140
6141 {
6142 HandleScope scope(isolate);
6143 for (int i = 0; i < 10; i++) {
6144 Handle<FixedArray> array_small = isolate->factory()->NewFixedArray(20000);
6145 MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array_small);
6146 CHECK_EQ(NEW_LO_SPACE, chunk->owner_identity());
6147 CHECK(chunk->IsFlagSet(MemoryChunk::TO_PAGE));
6148 }
6149 }
6150
6151 CcTest::CollectGarbage(NEW_SPACE);
6152 CHECK(isolate->heap()->new_lo_space()->IsEmpty());
6153 CHECK_EQ(0, isolate->heap()->new_lo_space()->Size());
6154 CHECK_EQ(0, isolate->heap()->new_lo_space()->SizeOfObjects());
6155 CHECK(isolate->heap()->lo_space()->IsEmpty());
6156 CHECK_EQ(0, isolate->heap()->lo_space()->Size());
6157 CHECK_EQ(0, isolate->heap()->lo_space()->SizeOfObjects());
6158 }
6159
6160 TEST(UncommitUnusedLargeObjectMemory) {
6161 CcTest::InitializeVM();
6162 v8::HandleScope scope(CcTest::isolate());
6163 Heap* heap = CcTest::heap();
6164 Isolate* isolate = heap->isolate();
6165
6166 Handle<FixedArray> array =
6167 isolate->factory()->NewFixedArray(200000, AllocationType::kOld);
6168 MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
6169 CHECK_IMPLIES(!FLAG_enable_third_party_heap,
6170 chunk->owner_identity() == LO_SPACE);
6171
6172 intptr_t size_before = array->Size();
6173 size_t committed_memory_before = chunk->CommittedPhysicalMemory();
6174
6175 array->Shrink(isolate, 1);
6176 CHECK(array->Size() < size_before);
6177
6178 CcTest::CollectAllGarbage();
6179 CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
6180 size_t shrinked_size = RoundUp(
6181 (array->address() - chunk->address()) + array->Size(), CommitPageSize());
6182 CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory());
6183 }
6184
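// Helper for the RememberedSet_* tests below: iterates the remembered set of
// the given type on the page containing |obj| and returns the number of
// recorded slots, leaving all slots and buckets in place.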
6185 template <RememberedSetType direction>
6186 static size_t GetRememberedSetSize(HeapObject obj) {
6187 size_t count = 0;
6188 auto chunk = MemoryChunk::FromHeapObject(obj);
6189 RememberedSet<direction>::Iterate(
6190 chunk,
6191 [&count](MaybeObjectSlot slot) {
6192 count++;
6193 return KEEP_SLOT;
6194 },
6195 SlotSet::KEEP_EMPTY_BUCKETS);
6196 return count;
6197 }
6198
6199 TEST(RememberedSet_InsertOnWriteBarrier) {
6200 if (FLAG_single_generation) return;
6201 FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
6202 CcTest::InitializeVM();
6203 Isolate* isolate = CcTest::i_isolate();
6204 Factory* factory = isolate->factory();
6205 Heap* heap = isolate->heap();
6206 heap::SealCurrentObjects(heap);
6207 HandleScope scope(isolate);
6208
6209 // Allocate an object in old space.
6210 Handle<FixedArray> arr = factory->NewFixedArray(3, AllocationType::kOld);
6211
6212 // Add into 'arr' references to young objects.
6213 {
6214 HandleScope scope_inner(isolate);
6215 Handle<Object> number = factory->NewHeapNumber(42);
6216 arr->set(0, *number);
6217 arr->set(1, *number);
6218 arr->set(2, *number);
6219 Handle<Object> number_other = factory->NewHeapNumber(24);
6220 arr->set(2, *number_other);
6221 }
6222   // Remembered sets track *slots* in pages with cross-generational pointers, so
6223   // the three slots written above must each have been recorded exactly once.
6224 CHECK_EQ(3, GetRememberedSetSize<OLD_TO_NEW>(*arr));
6225 }
6226
6227 TEST(RememberedSet_InsertInLargePage) {
6228 if (FLAG_single_generation) return;
6229 FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
6230 CcTest::InitializeVM();
6231 Isolate* isolate = CcTest::i_isolate();
6232 Factory* factory = isolate->factory();
6233 Heap* heap = isolate->heap();
6234 heap::SealCurrentObjects(heap);
6235 HandleScope scope(isolate);
6236
6237 // Allocate an object in Large space.
6238 const int count = std::max(FixedArray::kMaxRegularLength + 1, 128 * KB);
6239 Handle<FixedArray> arr = factory->NewFixedArray(count, AllocationType::kOld);
6240 CHECK(heap->lo_space()->Contains(*arr));
6241 CHECK_EQ(0, GetRememberedSetSize<OLD_TO_NEW>(*arr));
6242
6243 // Create OLD_TO_NEW references from the large object so that the
6244 // corresponding slots end up in different SlotSets.
6245 {
6246 HandleScope short_lived(isolate);
6247 Handle<Object> number = factory->NewHeapNumber(42);
6248 arr->set(0, *number);
6249 arr->set(count - 1, *number);
6250 }
6251 CHECK_EQ(2, GetRememberedSetSize<OLD_TO_NEW>(*arr));
6252 }
6253
6254 TEST(RememberedSet_InsertOnPromotingObjectToOld) {
6255 if (FLAG_single_generation) return;
6256 FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
6257 CcTest::InitializeVM();
6258 Isolate* isolate = CcTest::i_isolate();
6259 Factory* factory = isolate->factory();
6260 Heap* heap = isolate->heap();
6261 heap::SealCurrentObjects(heap);
6262 HandleScope scope(isolate);
6263
6264 // Create a young object and age it one generation inside the new space.
6265 Handle<FixedArray> arr = factory->NewFixedArray(1);
6266 CcTest::CollectGarbage(i::NEW_SPACE);
6267 CHECK(Heap::InYoungGeneration(*arr));
6268
6269 // Add into 'arr' a reference to an object one generation younger.
6270 {
6271 HandleScope scope_inner(isolate);
6272 Handle<Object> number = factory->NewHeapNumber(42);
6273 arr->set(0, *number);
6274 }
6275
6276   // Promote 'arr' into old space; its element is still in new space, so the
6277   // old-to-new refs are inserted into the remembered set during GC.
6278 CcTest::CollectGarbage(i::NEW_SPACE);
6279
6280 CHECK(heap->InOldSpace(*arr));
6281 CHECK_EQ(1, GetRememberedSetSize<OLD_TO_NEW>(*arr));
6282 }
6283
6284 TEST(RememberedSet_RemoveStaleOnScavenge) {
6285 if (FLAG_single_generation) return;
6286 FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
6287 CcTest::InitializeVM();
6288 Isolate* isolate = CcTest::i_isolate();
6289 Factory* factory = isolate->factory();
6290 Heap* heap = isolate->heap();
6291 heap::SealCurrentObjects(heap);
6292 HandleScope scope(isolate);
6293
6294 // Allocate an object in old space and add into it references to young.
6295 Handle<FixedArray> arr = factory->NewFixedArray(3, AllocationType::kOld);
6296 {
6297 HandleScope scope_inner(isolate);
6298 Handle<Object> number = factory->NewHeapNumber(42);
6299 arr->set(0, *number); // will be trimmed away
6300 arr->set(1, *number); // will be replaced with #undefined
6301 arr->set(2, *number); // will be promoted into old
6302 }
6303 CHECK_EQ(3, GetRememberedSetSize<OLD_TO_NEW>(*arr));
6304
6305 // Run scavenger once so the young object becomes ready for promotion on the
6306 // next pass.
6307 CcTest::CollectGarbage(i::NEW_SPACE);
6308 arr->set(1, ReadOnlyRoots(CcTest::heap()).undefined_value());
6309 Handle<FixedArrayBase> tail =
6310 Handle<FixedArrayBase>(heap->LeftTrimFixedArray(*arr, 1), isolate);
6311
6312 // None of the actions above should have updated the remembered set.
6313 CHECK_EQ(3, GetRememberedSetSize<OLD_TO_NEW>(*tail));
6314
6315   // Run GC to promote the remaining young object and fix up the stale entries
6316   // in the remembered set.
6317 CcTest::CollectGarbage(i::NEW_SPACE);
6318 CHECK_EQ(0, GetRememberedSetSize<OLD_TO_NEW>(*tail));
6319 }
6320
6321 // The OLD_TO_OLD remembered set is created temporarily by GC and is cleared at
6322 // the end of the pass. There is no way to observe it so the test only checks
6323 // that compaction has happened and otherwise relies on code's self-validation.
6324 TEST(RememberedSet_OldToOld) {
6325 if (FLAG_stress_incremental_marking) return;
6326 FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
6327 CcTest::InitializeVM();
6328 Isolate* isolate = CcTest::i_isolate();
6329 Factory* factory = isolate->factory();
6330 Heap* heap = isolate->heap();
6331 heap::SealCurrentObjects(heap);
6332 HandleScope scope(isolate);
6333
6334 Handle<FixedArray> arr = factory->NewFixedArray(10, AllocationType::kOld);
6335 {
6336 HandleScope short_lived(isolate);
6337 factory->NewFixedArray(100, AllocationType::kOld);
6338 }
6339 Handle<Object> ref = factory->NewFixedArray(100, AllocationType::kOld);
6340 arr->set(0, *ref);
6341
6342 // To force compaction of the old space, fill it with garbage and start a new
6343 // page (so that the page with 'arr' becomes subject to compaction).
6344 {
6345 HandleScope short_lived(isolate);
6346 heap::SimulateFullSpace(heap->old_space());
6347 factory->NewFixedArray(100, AllocationType::kOld);
6348 }
6349
6350 FLAG_manual_evacuation_candidates_selection = true;
6351 heap::ForceEvacuationCandidate(Page::FromHeapObject(*arr));
6352 const auto prev_location = *arr;
6353
6354 // This GC pass will evacuate the page with 'arr'/'ref' so it will have to
6355 // create OLD_TO_OLD remembered set to track the reference.
6356 CcTest::CollectAllGarbage();
6357 CHECK_NE(prev_location, *arr);
6358 }
6359
6360 TEST(RememberedSetRemoveRange) {
6361 if (FLAG_single_generation) return;
6362 CcTest::InitializeVM();
6363 v8::HandleScope scope(CcTest::isolate());
6364 Heap* heap = CcTest::heap();
6365 Isolate* isolate = heap->isolate();
6366
6367 Handle<FixedArray> array = isolate->factory()->NewFixedArray(
6368 Page::kPageSize / kTaggedSize, AllocationType::kOld);
6369 MemoryChunk* chunk = MemoryChunk::FromHeapObject(*array);
6370 CHECK_IMPLIES(!FLAG_enable_third_party_heap,
6371 chunk->owner_identity() == LO_SPACE);
6372 Address start = array->address();
6373   // Maps each slot address to a boolean indicating whether the slot should be in the set.
6374 std::map<Address, bool> slots;
6375 slots[start + 0] = true;
6376 slots[start + kTaggedSize] = true;
6377 slots[start + Page::kPageSize - kTaggedSize] = true;
6378 slots[start + Page::kPageSize] = true;
6379 slots[start + Page::kPageSize + kTaggedSize] = true;
6380 slots[chunk->area_end() - kTaggedSize] = true;
6381
6382 for (auto x : slots) {
6383 RememberedSet<OLD_TO_NEW>::Insert<AccessMode::ATOMIC>(chunk, x.first);
6384 }
6385
6386 RememberedSet<OLD_TO_NEW>::Iterate(
6387 chunk,
6388 [&slots](MaybeObjectSlot slot) {
6389 CHECK(slots[slot.address()]);
6390 return KEEP_SLOT;
6391 },
6392 SlotSet::FREE_EMPTY_BUCKETS);
6393
6394 RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kTaggedSize,
6395 SlotSet::FREE_EMPTY_BUCKETS);
6396 slots[start] = false;
6397 RememberedSet<OLD_TO_NEW>::Iterate(
6398 chunk,
6399 [&slots](MaybeObjectSlot slot) {
6400 CHECK(slots[slot.address()]);
6401 return KEEP_SLOT;
6402 },
6403 SlotSet::FREE_EMPTY_BUCKETS);
6404
6405 RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kTaggedSize,
6406 start + Page::kPageSize,
6407 SlotSet::FREE_EMPTY_BUCKETS);
6408 slots[start + kTaggedSize] = false;
6409 slots[start + Page::kPageSize - kTaggedSize] = false;
6410 RememberedSet<OLD_TO_NEW>::Iterate(
6411 chunk,
6412 [&slots](MaybeObjectSlot slot) {
6413 CHECK(slots[slot.address()]);
6414 return KEEP_SLOT;
6415 },
6416 SlotSet::FREE_EMPTY_BUCKETS);
6417
6418 RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start,
6419 start + Page::kPageSize + kTaggedSize,
6420 SlotSet::FREE_EMPTY_BUCKETS);
6421 slots[start + Page::kPageSize] = false;
6422 RememberedSet<OLD_TO_NEW>::Iterate(
6423 chunk,
6424 [&slots](MaybeObjectSlot slot) {
6425 CHECK(slots[slot.address()]);
6426 return KEEP_SLOT;
6427 },
6428 SlotSet::FREE_EMPTY_BUCKETS);
6429
6430 RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, chunk->area_end() - kTaggedSize,
6431 chunk->area_end(),
6432 SlotSet::FREE_EMPTY_BUCKETS);
6433 slots[chunk->area_end() - kTaggedSize] = false;
6434 RememberedSet<OLD_TO_NEW>::Iterate(
6435 chunk,
6436 [&slots](MaybeObjectSlot slot) {
6437 CHECK(slots[slot.address()]);
6438 return KEEP_SLOT;
6439 },
6440 SlotSet::FREE_EMPTY_BUCKETS);
6441 }
6442
6443 HEAP_TEST(Regress670675) {
6444 if (!FLAG_incremental_marking) return;
6445 ManualGCScope manual_gc_scope;
6446 CcTest::InitializeVM();
6447 v8::HandleScope scope(CcTest::isolate());
6448 Heap* heap = CcTest::heap();
6449 Isolate* isolate = heap->isolate();
6450 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6451 CcTest::CollectAllGarbage();
6452
6453 if (collector->sweeping_in_progress()) {
6454 collector->EnsureSweepingCompleted();
6455 }
6456 i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
6457 if (marking->IsStopped()) {
6458 SafepointScope scope(heap);
6459 marking->Start(i::GarbageCollectionReason::kTesting);
6460 }
6461 size_t array_length = 128 * KB;
6462 size_t n = heap->OldGenerationSpaceAvailable() / array_length;
6463 for (size_t i = 0; i < n + 40; i++) {
6464 {
6465 HandleScope inner_scope(isolate);
6466 isolate->factory()->NewFixedArray(static_cast<int>(array_length),
6467 AllocationType::kOld);
6468 }
6469 if (marking->IsStopped()) break;
6470 double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
6471 marking->AdvanceWithDeadline(
6472 deadline, IncrementalMarking::GC_VIA_STACK_GUARD, StepOrigin::kV8);
6473 }
6474 DCHECK(marking->IsStopped());
6475 }
6476
6477 HEAP_TEST(RegressMissingWriteBarrierInAllocate) {
6478 if (!FLAG_incremental_marking) return;
6479 ManualGCScope manual_gc_scope;
6480 CcTest::InitializeVM();
6481 v8::HandleScope scope(CcTest::isolate());
6482 Heap* heap = CcTest::heap();
6483 Isolate* isolate = heap->isolate();
6484 CcTest::CollectAllGarbage();
6485 heap::SimulateIncrementalMarking(heap, false);
6486 Handle<Map> map;
6487 {
6488 AlwaysAllocateScopeForTesting always_allocate(heap);
6489 map = isolate->factory()->NewMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
6490 }
6491 heap->incremental_marking()->StartBlackAllocationForTesting();
6492 Handle<HeapObject> object;
6493 {
6494 AlwaysAllocateScopeForTesting always_allocate(heap);
6495 object = handle(isolate->factory()->NewForTest(map, AllocationType::kOld),
6496 isolate);
6497 }
6498 // The object is black. If Factory::New sets the map without write-barrier,
6499 // then the map is white and will be freed prematurely.
6500 heap::SimulateIncrementalMarking(heap, true);
6501 CcTest::CollectAllGarbage();
6502 MarkCompactCollector* collector = heap->mark_compact_collector();
6503 if (collector->sweeping_in_progress()) {
6504 collector->EnsureSweepingCompleted();
6505 }
6506 CHECK(object->map().IsMap());
6507 }
6508
6509 HEAP_TEST(MarkCompactEpochCounter) {
6510 if (!FLAG_incremental_marking) return;
6511 ManualGCScope manual_gc_scope;
6512 CcTest::InitializeVM();
6513 v8::HandleScope scope(CcTest::isolate());
6514 Heap* heap = CcTest::heap();
6515 unsigned epoch0 = heap->mark_compact_collector()->epoch();
6516 CcTest::CollectGarbage(OLD_SPACE);
6517 unsigned epoch1 = heap->mark_compact_collector()->epoch();
6518 CHECK_EQ(epoch0 + 1, epoch1);
6519 heap::SimulateIncrementalMarking(heap, true);
6520 CcTest::CollectGarbage(OLD_SPACE);
6521 unsigned epoch2 = heap->mark_compact_collector()->epoch();
6522 CHECK_EQ(epoch1 + 1, epoch2);
6523 CcTest::CollectGarbage(NEW_SPACE);
6524 unsigned epoch3 = heap->mark_compact_collector()->epoch();
6525 CHECK_EQ(epoch2, epoch3);
6526 }
6527
6528 UNINITIALIZED_TEST(ReinitializeStringHashSeed) {
6529 // Enable rehashing and create an isolate and context.
6530 i::FLAG_rehash_snapshot = true;
6531 for (int i = 1; i < 3; i++) {
6532 i::FLAG_hash_seed = 1337 * i;
6533 v8::Isolate::CreateParams create_params;
6534 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6535 v8::Isolate* isolate = v8::Isolate::New(create_params);
6536 {
6537 v8::Isolate::Scope isolate_scope(isolate);
6538 CHECK_EQ(static_cast<uint64_t>(1337 * i),
6539 HashSeed(reinterpret_cast<i::Isolate*>(isolate)));
6540 v8::HandleScope handle_scope(isolate);
6541 v8::Local<v8::Context> context = v8::Context::New(isolate);
6542 CHECK(!context.IsEmpty());
6543 v8::Context::Scope context_scope(context);
6544 }
6545 isolate->Dispose();
6546 }
6547 }
6548
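// Shared machinery for the OutOfMemory* tests below: OOMCallback verifies that
// the heap stayed within the configured limit (modulo new-space slack) and then
// exits the process, which is how these tests terminate successfully.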
6549 const int kHeapLimit = 100 * MB;
6550 Isolate* oom_isolate = nullptr;
6551
6552 void OOMCallback(const char* location, bool is_heap_oom) {
6553 Heap* heap = oom_isolate->heap();
6554 size_t kSlack = heap->new_space() ? heap->new_space()->Capacity() : 0;
6555 CHECK_LE(heap->OldGenerationCapacity(), kHeapLimit + kSlack);
6556 CHECK_LE(heap->memory_allocator()->Size(), heap->MaxReserved() + kSlack);
6557 base::OS::ExitProcess(0);
6558 }
6559
6560 UNINITIALIZED_TEST(OutOfMemory) {
6561 if (FLAG_stress_incremental_marking) return;
6562 #ifdef VERIFY_HEAP
6563 if (FLAG_verify_heap) return;
6564 #endif
6565 FLAG_max_old_space_size = kHeapLimit / MB;
6566 v8::Isolate::CreateParams create_params;
6567 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6568 v8::Isolate* isolate = v8::Isolate::New(create_params);
6569 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
6570 oom_isolate = i_isolate;
6571 isolate->SetOOMErrorHandler(OOMCallback);
6572 {
6573 Factory* factory = i_isolate->factory();
6574 HandleScope handle_scope(i_isolate);
6575 while (true) {
6576 factory->NewFixedArray(100);
6577 }
6578 }
6579 }
6580
6581 UNINITIALIZED_TEST(OutOfMemoryIneffectiveGC) {
6582 if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
6583 if (FLAG_stress_incremental_marking || FLAG_stress_concurrent_allocation)
6584 return;
6585 #ifdef VERIFY_HEAP
6586 if (FLAG_verify_heap) return;
6587 #endif
6588
6589 FLAG_max_old_space_size = kHeapLimit / MB;
6590 v8::Isolate::CreateParams create_params;
6591 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6592 v8::Isolate* isolate = v8::Isolate::New(create_params);
6593 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
6594 oom_isolate = i_isolate;
6595 isolate->SetOOMErrorHandler(OOMCallback);
6596 Factory* factory = i_isolate->factory();
6597 Heap* heap = i_isolate->heap();
6598 heap->CollectAllGarbage(Heap::kNoGCFlags, GarbageCollectionReason::kTesting);
6599 {
6600 HandleScope scope(i_isolate);
6601 while (heap->OldGenerationSizeOfObjects() <
6602 heap->MaxOldGenerationSize() * 0.9) {
6603 factory->NewFixedArray(100, AllocationType::kOld);
6604 }
6605 {
6606 int initial_ms_count = heap->ms_count();
6607 int ineffective_ms_start = initial_ms_count;
6608 while (heap->ms_count() < initial_ms_count + 10) {
6609 HandleScope inner_scope(i_isolate);
6610 factory->NewFixedArray(30000, AllocationType::kOld);
6611 if (heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3) {
6612 ineffective_ms_start = heap->ms_count() + 1;
6613 }
6614 }
6615 int consecutive_ineffective_ms = heap->ms_count() - ineffective_ms_start;
6616 CHECK_IMPLIES(
6617 consecutive_ineffective_ms >= 4,
6618 heap->tracer()->AverageMarkCompactMutatorUtilization() >= 0.3);
6619 }
6620 }
6621 isolate->Dispose();
6622 }
6623
6624 UNINITIALIZED_TEST(OutOfMemoryIneffectiveGCRunningJS) {
6625 if (!FLAG_detect_ineffective_gcs_near_heap_limit) return;
6626 if (FLAG_stress_incremental_marking) return;
6627
6628 FLAG_max_old_space_size = 5;
6629 v8::Isolate::CreateParams create_params;
6630 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6631 v8::Isolate* isolate = v8::Isolate::New(create_params);
6632 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
6633 oom_isolate = i_isolate;
6634
6635 isolate->SetOOMErrorHandler(OOMCallback);
6636
6637 v8::Isolate::Scope isolate_scope(isolate);
6638 v8::HandleScope handle_scope(isolate);
6639 v8::Context::New(isolate)->Enter();
6640
6641 // Test that source positions are not collected as part of a failing GC, which
6642 // will fail as allocation is disallowed. If the test works, this should call
6643 // OOMCallback and terminate without crashing.
6644 CompileRun(R"javascript(
6645 var array = [];
6646 for(var i = 20000; i < 40000; ++i) {
6647 array.push(new Array(i));
6648 }
6649 )javascript");
6650
6651 FATAL("Should not get here as OOMCallback should be called");
6652 }
6653
6654 HEAP_TEST(Regress779503) {
6655 // The following regression test ensures that the Scavenger does not allocate
6656   // over invalid slots. More specifically, the Scavenger should not sweep a page
6657 // that it currently processes because it might allocate over the currently
6658 // processed slot.
6659 if (FLAG_single_generation) return;
6660 FLAG_stress_concurrent_allocation = false; // For SealCurrentObjects.
6661 const int kArraySize = 2048;
6662 CcTest::InitializeVM();
6663 Isolate* isolate = CcTest::i_isolate();
6664 Heap* heap = CcTest::heap();
6665 heap::SealCurrentObjects(heap);
6666 {
6667 HandleScope handle_scope(isolate);
6668 // The byte array filled with kHeapObjectTag ensures that we cannot read
6669     // from the slot again and interpret it as a heap value. Doing so will crash.
6670 Handle<ByteArray> byte_array = isolate->factory()->NewByteArray(kArraySize);
6671 CHECK(Heap::InYoungGeneration(*byte_array));
6672 for (int i = 0; i < kArraySize; i++) {
6673 byte_array->set(i, kHeapObjectTag);
6674 }
6675
6676 {
6677 HandleScope handle_scope(isolate);
6678 // The FixedArray in old space serves as space for slots.
6679 Handle<FixedArray> fixed_array =
6680 isolate->factory()->NewFixedArray(kArraySize, AllocationType::kOld);
6681 CHECK(!Heap::InYoungGeneration(*fixed_array));
6682 for (int i = 0; i < kArraySize; i++) {
6683 fixed_array->set(i, *byte_array);
6684 }
6685 }
6686 // Delay sweeper tasks to allow the scavenger to sweep the page it is
6687 // currently scavenging.
6688 heap->delay_sweeper_tasks_for_testing_ = true;
6689 CcTest::CollectGarbage(OLD_SPACE);
6690 CHECK(FLAG_always_promote_young_mc ? !Heap::InYoungGeneration(*byte_array)
6691 : Heap::InYoungGeneration(*byte_array));
6692 }
6693 // Scavenging and sweeping the same page will crash as slots will be
6694   // overwritten.
6695 CcTest::CollectGarbage(NEW_SPACE);
6696 heap->delay_sweeper_tasks_for_testing_ = false;
6697 }
6698
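// Snapshot of heap statistics taken by NearHeapLimitCallback at the moment the
// near-heap-limit callback fires; the OutOfMemory* tests below assert on it.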
6699 struct OutOfMemoryState {
6700 Heap* heap;
6701 bool oom_triggered;
6702 size_t old_generation_capacity_at_oom;
6703 size_t memory_allocator_size_at_oom;
6704 size_t new_space_capacity_at_oom;
6705 size_t new_lo_space_size_at_oom;
6706 size_t current_heap_limit;
6707 size_t initial_heap_limit;
6708 };
6709
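// Near-heap-limit callback used by the tests below: records the current heap
// statistics into the OutOfMemoryState and raises the limit by 100 MB so that
// the test loop can keep allocating instead of actually running out of memory.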
6710 size_t NearHeapLimitCallback(void* raw_state, size_t current_heap_limit,
6711 size_t initial_heap_limit) {
6712 OutOfMemoryState* state = static_cast<OutOfMemoryState*>(raw_state);
6713 Heap* heap = state->heap;
6714 state->oom_triggered = true;
6715 state->old_generation_capacity_at_oom = heap->OldGenerationCapacity();
6716 state->memory_allocator_size_at_oom = heap->memory_allocator()->Size();
6717 state->new_space_capacity_at_oom =
6718 heap->new_space() ? heap->new_space()->Capacity() : 0;
6719 state->new_lo_space_size_at_oom =
6720 heap->new_lo_space() ? heap->new_lo_space()->Size() : 0;
6721 state->current_heap_limit = current_heap_limit;
6722 state->initial_heap_limit = initial_heap_limit;
6723 return initial_heap_limit + 100 * MB;
6724 }
6725
6726 size_t MemoryAllocatorSizeFromHeapCapacity(size_t capacity) {
6727 // Size to capacity factor.
6728 double factor =
6729 Page::kPageSize * 1.0 / MemoryChunkLayout::AllocatableMemoryInDataPage();
6730 // Some tables (e.g. deoptimization table) are allocated directly with the
6731 // memory allocator. Allow some slack to account for them.
6732 size_t slack = 5 * MB;
6733 return static_cast<size_t>(capacity * factor) + slack;
6734 }
6735
6736 UNINITIALIZED_TEST(OutOfMemorySmallObjects) {
6737 if (FLAG_stress_incremental_marking) return;
6738 #ifdef VERIFY_HEAP
6739 if (FLAG_verify_heap) return;
6740 #endif
6741 const size_t kOldGenerationLimit = 50 * MB;
6742 FLAG_max_old_space_size = kOldGenerationLimit / MB;
6743 v8::Isolate::CreateParams create_params;
6744 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6745 Isolate* isolate =
6746 reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6747 Heap* heap = isolate->heap();
6748 Factory* factory = isolate->factory();
6749 OutOfMemoryState state;
6750 state.heap = heap;
6751 state.oom_triggered = false;
6752 heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6753 {
6754 HandleScope handle_scope(isolate);
6755 while (!state.oom_triggered) {
6756 factory->NewFixedArray(100);
6757 }
6758 }
6759 CHECK_LE(state.old_generation_capacity_at_oom,
6760 kOldGenerationLimit + state.new_space_capacity_at_oom);
6761 CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
6762 state.new_space_capacity_at_oom);
6763 CHECK_LE(
6764 state.memory_allocator_size_at_oom,
6765 MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
6766 2 * state.new_space_capacity_at_oom));
6767 reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6768 }
6769
6770 UNINITIALIZED_TEST(OutOfMemoryLargeObjects) {
6771 if (FLAG_stress_incremental_marking) return;
6772 #ifdef VERIFY_HEAP
6773 if (FLAG_verify_heap) return;
6774 #endif
6775 const size_t kOldGenerationLimit = 50 * MB;
6776 FLAG_max_old_space_size = kOldGenerationLimit / MB;
6777 v8::Isolate::CreateParams create_params;
6778 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6779 Isolate* isolate =
6780 reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6781 Heap* heap = isolate->heap();
6782 Factory* factory = isolate->factory();
6783 OutOfMemoryState state;
6784 state.heap = heap;
6785 state.oom_triggered = false;
6786 heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6787 const int kFixedArrayLength = 1000000;
6788 {
6789 HandleScope handle_scope(isolate);
6790 while (!state.oom_triggered) {
6791 factory->NewFixedArray(kFixedArrayLength);
6792 }
6793 }
6794 CHECK_LE(state.old_generation_capacity_at_oom,
6795 kOldGenerationLimit + state.new_space_capacity_at_oom +
6796 state.new_lo_space_size_at_oom +
6797 FixedArray::SizeFor(kFixedArrayLength));
6798 CHECK_LE(kOldGenerationLimit, state.old_generation_capacity_at_oom +
6799 state.new_space_capacity_at_oom +
6800 state.new_lo_space_size_at_oom +
6801 FixedArray::SizeFor(kFixedArrayLength));
6802 CHECK_LE(
6803 state.memory_allocator_size_at_oom,
6804 MemoryAllocatorSizeFromHeapCapacity(state.old_generation_capacity_at_oom +
6805 2 * state.new_space_capacity_at_oom +
6806 state.new_lo_space_size_at_oom));
6807 reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6808 }
6809
6810 UNINITIALIZED_TEST(RestoreHeapLimit) {
6811 if (FLAG_stress_incremental_marking) return;
6812 #ifdef VERIFY_HEAP
6813 if (FLAG_verify_heap) return;
6814 #endif
6815 ManualGCScope manual_gc_scope;
6816 const size_t kOldGenerationLimit = 50 * MB;
6817 FLAG_max_old_space_size = kOldGenerationLimit / MB;
6818 v8::Isolate::CreateParams create_params;
6819 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
6820 Isolate* isolate =
6821 reinterpret_cast<Isolate*>(v8::Isolate::New(create_params));
6822 Heap* heap = isolate->heap();
6823 Factory* factory = isolate->factory();
6824 OutOfMemoryState state;
6825 state.heap = heap;
6826 state.oom_triggered = false;
6827 heap->AddNearHeapLimitCallback(NearHeapLimitCallback, &state);
6828 heap->AutomaticallyRestoreInitialHeapLimit(0.5);
6829 const int kFixedArrayLength = 1000000;
6830 {
6831 HandleScope handle_scope(isolate);
6832 while (!state.oom_triggered) {
6833 factory->NewFixedArray(kFixedArrayLength);
6834 }
6835 }
6836 heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
6837 state.oom_triggered = false;
6838 {
6839 HandleScope handle_scope(isolate);
6840 while (!state.oom_triggered) {
6841 factory->NewFixedArray(kFixedArrayLength);
6842 }
6843 }
6844 CHECK_EQ(state.current_heap_limit, state.initial_heap_limit);
6845 reinterpret_cast<v8::Isolate*>(isolate)->Dispose();
6846 }
6847
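// Test-only helper on HeapTester: uncommits the new space's from-space and
// waits for the unmapper to finish so the memory is actually released before
// the caller continues.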
6848 void HeapTester::UncommitFromSpace(Heap* heap) {
6849 heap->UncommitFromSpace();
6850 heap->memory_allocator()->unmapper()->EnsureUnmappingCompleted();
6851 }
6852
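// Small native object used by Regress8014 below: each instance is wrapped in a
// Managed<DeleteNative> reporting an estimated size of 1,000,000 bytes, and the
// test then checks that a critical memory pressure notification does not
// trigger an excessive number of GCs.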
6853 class DeleteNative {
6854 public:
6855   static void Deleter(void* arg) {
6856 delete reinterpret_cast<DeleteNative*>(arg);
6857 }
6858 };
6859
6860 TEST(Regress8014) {
6861 Isolate* isolate = CcTest::InitIsolateOnce();
6862 Heap* heap = isolate->heap();
6863 {
6864 HandleScope scope(isolate);
6865 for (int i = 0; i < 10000; i++) {
6866 auto handle = Managed<DeleteNative>::FromRawPtr(isolate, 1000000,
6867 new DeleteNative());
6868 USE(handle);
6869 }
6870 }
6871 int ms_count = heap->ms_count();
6872 heap->MemoryPressureNotification(MemoryPressureLevel::kCritical, true);
6873   // Several GCs can be triggered by the above call.
6874 // The bad case triggers 10000 GCs.
6875 CHECK_LE(heap->ms_count(), ms_count + 10);
6876 }
6877
6878 TEST(Regress8617) {
6879 if (!FLAG_incremental_marking) return;
6880 ManualGCScope manual_gc_scope;
6881 FLAG_manual_evacuation_candidates_selection = true;
6882 LocalContext env;
6883 Isolate* isolate = CcTest::i_isolate();
6884 Heap* heap = isolate->heap();
6885 HandleScope scope(isolate);
6886 heap::SimulateFullSpace(heap->old_space());
6887 // Step 1. Create a function and ensure that it is in the old space.
6888 Handle<Object> foo =
6889 v8::Utils::OpenHandle(*CompileRun("function foo() { return 42; };"
6890 "foo;"));
6891 if (heap->InYoungGeneration(*foo)) {
6892 CcTest::CollectGarbage(NEW_SPACE);
6893 CcTest::CollectGarbage(NEW_SPACE);
6894 }
6895 // Step 2. Create an object with a reference to foo in the descriptor array.
6896 CompileRun(
6897 "var obj = {};"
6898 "obj.method = foo;"
6899 "obj;");
6900 // Step 3. Make sure that foo moves during Mark-Compact.
6901 Page* ec_page = Page::FromAddress(foo->ptr());
6902 heap::ForceEvacuationCandidate(ec_page);
6903 // Step 4. Start incremental marking.
6904 heap::SimulateIncrementalMarking(heap, false);
6905 CHECK(ec_page->IsEvacuationCandidate());
6906 // Step 5. Install a new descriptor array on the map of the object.
6907 // This runs the marking barrier for the descriptor array.
6908 // In the bad case it sets the number of marked descriptors but does not
6909 // change the color of the descriptor array.
6910 CompileRun("obj.bar = 10;");
6911 // Step 6. Promote the descriptor array to old space. During promotion
6912 // the Scavenger will not record the slot of foo in the descriptor array.
6913 CcTest::CollectGarbage(NEW_SPACE);
6914 CcTest::CollectGarbage(NEW_SPACE);
6915 // Step 7. Complete the Mark-Compact.
6916 CcTest::CollectAllGarbage();
6917 // Step 8. Use the descriptor for foo, which contains a stale pointer.
6918 CompileRun("obj.method()");
6919 }
6920
6921 HEAP_TEST(MemoryReducerActivationForSmallHeaps) {
6922 if (FLAG_single_generation) return;
6923 ManualGCScope manual_gc_scope;
6924 LocalContext env;
6925 Isolate* isolate = CcTest::i_isolate();
6926 Heap* heap = isolate->heap();
6927 CHECK_EQ(heap->memory_reducer()->state_.action, MemoryReducer::Action::kDone);
6928 HandleScope scope(isolate);
6929 const size_t kActivationThreshold = 1 * MB;
6930 size_t initial_capacity = heap->OldGenerationCapacity();
6931 while (heap->OldGenerationCapacity() <
6932 initial_capacity + kActivationThreshold) {
6933 isolate->factory()->NewFixedArray(1 * KB, AllocationType::kOld);
6934 }
6935 CHECK_EQ(heap->memory_reducer()->state_.action, MemoryReducer::Action::kWait);
6936 }
6937
6938 TEST(AllocateExternalBackingStore) {
6939 ManualGCScope manual_gc_scope;
6940 LocalContext env;
6941 Isolate* isolate = CcTest::i_isolate();
6942 Heap* heap = isolate->heap();
6943 int initial_ms_count = heap->ms_count();
6944 void* result =
6945 heap->AllocateExternalBackingStore([](size_t) { return nullptr; }, 10);
6946 CHECK_NULL(result);
6947 // At least two GCs should happen.
6948 CHECK_LE(2, heap->ms_count() - initial_ms_count);
6949 }
6950
6951 TEST(CodeObjectRegistry) {
6952 // We turn off compaction to ensure that code is not moving.
6953 FLAG_never_compact = true;
6954
6955 Isolate* isolate = CcTest::i_isolate();
6956 Heap* heap = isolate->heap();
6957
6958 Handle<Code> code1;
6959 HandleScope outer_scope(heap->isolate());
6960 Address code2_address;
6961 {
6962 // Ensure that both code objects end up on the same page.
6963 CHECK(HeapTester::CodeEnsureLinearAllocationArea(
6964 heap, MemoryChunkLayout::MaxRegularCodeObjectSize()));
6965 code1 = DummyOptimizedCode(isolate);
6966 Handle<Code> code2 = DummyOptimizedCode(isolate);
6967 code2_address = code2->address();
6968
6969 CHECK_EQ(MemoryChunk::FromHeapObject(*code1),
6970 MemoryChunk::FromHeapObject(*code2));
6971 CHECK(MemoryChunk::FromHeapObject(*code1)->Contains(code1->address()));
6972 CHECK(MemoryChunk::FromHeapObject(*code2)->Contains(code2->address()));
6973 }
6974 CcTest::CollectAllAvailableGarbage();
6975 CHECK(MemoryChunk::FromHeapObject(*code1)->Contains(code1->address()));
6976 CHECK(MemoryChunk::FromAddress(code2_address)->Contains(code2_address));
6977 }
6978
6979 TEST(Regress9701) {
6980 ManualGCScope manual_gc_scope;
6981 if (!FLAG_incremental_marking) return;
6982 CcTest::InitializeVM();
6983 Heap* heap = CcTest::heap();
6984 // Start with an empty new space.
6985 CcTest::CollectGarbage(NEW_SPACE);
6986 CcTest::CollectGarbage(NEW_SPACE);
6987
6988 int mark_sweep_count_before = heap->ms_count();
6989 // Allocate many short living array buffers.
6990 for (int i = 0; i < 1000; i++) {
6991 HandleScope scope(heap->isolate());
6992 CcTest::i_isolate()->factory()->NewJSArrayBufferAndBackingStore(
6993 64 * KB, InitializedFlag::kZeroInitialized);
6994 }
6995 int mark_sweep_count_after = heap->ms_count();
6996 // We expect only scavenges, no full GCs.
6997 CHECK_EQ(mark_sweep_count_before, mark_sweep_count_after);
6998 }
6999
7000 #if defined(V8_TARGET_ARCH_64_BIT) && !defined(V8_OS_ANDROID)
7001 UNINITIALIZED_TEST(HugeHeapLimit) {
7002 uint64_t kMemoryGB = 16;
7003 v8::Isolate::CreateParams create_params;
7004 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
7005 create_params.constraints.ConfigureDefaults(kMemoryGB * GB, kMemoryGB * GB);
7006 v8::Isolate* isolate = v8::Isolate::New(create_params);
7007 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
7008 #ifdef V8_COMPRESS_POINTERS
7009 size_t kExpectedHeapLimit = Heap::AllocatorLimitOnMaxOldGenerationSize();
7010 #else
7011 size_t kExpectedHeapLimit = size_t{4} * GB;
7012 #endif
7013 CHECK_EQ(kExpectedHeapLimit, i_isolate->heap()->MaxOldGenerationSize());
7014 CHECK_LT(size_t{3} * GB, i_isolate->heap()->MaxOldGenerationSize());
7015 isolate->Dispose();
7016 }
7017 #endif
7018
7019 UNINITIALIZED_TEST(HeapLimit) {
7020 uint64_t kMemoryGB = 15;
7021 v8::Isolate::CreateParams create_params;
7022 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
7023 create_params.constraints.ConfigureDefaults(kMemoryGB * GB, kMemoryGB * GB);
7024 v8::Isolate* isolate = v8::Isolate::New(create_params);
7025 Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
7026 #if defined(V8_TARGET_ARCH_64_BIT) && !defined(V8_OS_ANDROID)
7027 size_t kExpectedHeapLimit = size_t{2} * GB;
7028 #else
7029 size_t kExpectedHeapLimit = size_t{1} * GB;
7030 #endif
7031 CHECK_EQ(kExpectedHeapLimit, i_isolate->heap()->MaxOldGenerationSize());
7032 isolate->Dispose();
7033 }
7034
7035 TEST(NoCodeRangeInJitlessMode) {
7036 if (!FLAG_jitless) return;
7037 CcTest::InitializeVM();
7038 CHECK(CcTest::i_isolate()->heap()->code_region().is_empty());
7039 }
7040
7041 TEST(Regress978156) {
7042 if (!FLAG_incremental_marking) return;
7043 if (FLAG_single_generation) return;
7044 ManualGCScope manual_gc_scope;
7045 CcTest::InitializeVM();
7046
7047 HandleScope handle_scope(CcTest::i_isolate());
7048 Heap* heap = CcTest::i_isolate()->heap();
7049
7050 // 1. Ensure that the new space is empty.
7051 CcTest::CollectGarbage(NEW_SPACE);
7052 CcTest::CollectGarbage(NEW_SPACE);
7053 // 2. Fill the first page of the new space with FixedArrays.
7054 std::vector<Handle<FixedArray>> arrays;
7055 i::heap::FillCurrentPage(heap->new_space(), &arrays);
7056 // 3. Trim the last array by one word thus creating a one-word filler.
7057 Handle<FixedArray> last = arrays.back();
7058 CHECK_GT(last->length(), 0);
7059 heap->RightTrimFixedArray(*last, 1);
7060 // 4. Get the last filler on the page.
7061 HeapObject filler = HeapObject::FromAddress(
7062 MemoryChunk::FromHeapObject(*last)->area_end() - kTaggedSize);
7063 HeapObject::FromAddress(last->address() + last->Size());
7064 CHECK(filler.IsFiller());
7065 // 5. Start incremental marking.
7066 i::IncrementalMarking* marking = heap->incremental_marking();
7067 if (marking->IsStopped()) {
7068 SafepointScope scope(heap);
7069 marking->Start(i::GarbageCollectionReason::kTesting);
7070 }
7071 IncrementalMarking::MarkingState* marking_state = marking->marking_state();
7072 // 6. Mark the filler black to access its two markbits. This triggers
7073 // an out-of-bounds access of the marking bitmap in a bad case.
7074 marking_state->WhiteToGrey(filler);
7075 marking_state->GreyToBlack(filler);
7076 }
7077
7078 TEST(GarbageCollectionWithLocalHeap) {
7079 ManualGCScope manual_gc_scope;
7080 CcTest::InitializeVM();
7081
7082 LocalHeap* local_heap = CcTest::i_isolate()->main_thread_local_heap();
7083
7084 CcTest::CollectGarbage(OLD_SPACE);
7085
7086 { ParkedScope parked_scope(local_heap); }
7087 CcTest::CollectGarbage(OLD_SPACE);
7088 }
7089
7090 TEST(Regress10698) {
7091 if (!FLAG_incremental_marking) return;
7092 CcTest::InitializeVM();
7093 Heap* heap = CcTest::i_isolate()->heap();
7094 Factory* factory = CcTest::i_isolate()->factory();
7095 HandleScope handle_scope(CcTest::i_isolate());
7096   // This is modeled after the manual allocation folding of heap numbers in the
7097   // JSON parser (see commit ba7b25e).
7098 // Step 1. Allocate a byte array in the old space.
7099 Handle<ByteArray> array =
7100 factory->NewByteArray(kTaggedSize, AllocationType::kOld);
7101 // Step 2. Start incremental marking.
7102 SimulateIncrementalMarking(heap, false);
7103 // Step 3. Allocate another byte array. It will be black.
7104 factory->NewByteArray(kTaggedSize, AllocationType::kOld);
7105 Address address = reinterpret_cast<Address>(array->GetDataStartAddress());
7106 HeapObject filler = HeapObject::FromAddress(address);
7107 // Step 4. Set the filler at the end of the first array.
7108 // It will have an impossible markbit pattern because the second markbit
7109 // will be taken from the second array.
7110 filler.set_map_after_allocation(*factory->one_pointer_filler_map());
7111 }
7112
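// Allocation tracker used by the CodeLargeObjectSpace tests: it checks that
// every observed allocation has the expected size and remembers the address of
// the most recent one.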
7113 class TestAllocationTracker : public HeapObjectAllocationTracker {
7114 public:
7115   explicit TestAllocationTracker(int expected_size)
7116 : expected_size_(expected_size) {}
7117
7118   void AllocationEvent(Address addr, int size) {
7119 CHECK(expected_size_ == size);
7120 address_ = addr;
7121 }
7122
7123   Address address() { return address_; }
7124
7125 private:
7126 int expected_size_;
7127 Address address_;
7128 };
7129
7130 HEAP_TEST(CodeLargeObjectSpace) {
7131 Heap* heap = CcTest::heap();
7132 int size_in_bytes =
7133 heap->MaxRegularHeapObjectSize(AllocationType::kCode) + kTaggedSize;
7134 TestAllocationTracker allocation_tracker{size_in_bytes};
7135 heap->AddHeapObjectAllocationTracker(&allocation_tracker);
7136
7137 HeapObject obj;
7138 {
7139 AllocationResult allocation = heap->AllocateRaw(
7140 size_in_bytes, AllocationType::kCode, AllocationOrigin::kRuntime);
7141 CHECK(allocation.To(&obj));
7142 CHECK_EQ(allocation.ToAddress(), allocation_tracker.address());
7143
7144 heap->CreateFillerObjectAt(obj.address(), size_in_bytes,
7145 ClearRecordedSlots::kNo);
7146 }
7147
7148 CHECK(Heap::IsLargeObject(obj));
7149 heap->RemoveHeapObjectAllocationTracker(&allocation_tracker);
7150 }
7151
7152 UNINITIALIZED_HEAP_TEST(CodeLargeObjectSpace64k) {
7153 // Simulate having a system with 64k OS pages.
7154 i::FLAG_v8_os_page_size = 64;
7155
7156 // Initialize the isolate manually to make sure --v8-os-page-size is taken
7157 // into account.
7158 v8::Isolate::CreateParams create_params;
7159 create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
7160 v8::Isolate* isolate = v8::Isolate::New(create_params);
7161
7162 Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
7163
7164 // Allocate a regular code object.
7165 {
7166 int size_in_bytes =
7167 heap->MaxRegularHeapObjectSize(AllocationType::kCode) - kTaggedSize;
7168 TestAllocationTracker allocation_tracker{size_in_bytes};
7169 heap->AddHeapObjectAllocationTracker(&allocation_tracker);
7170
7171 HeapObject obj;
7172 {
7173 AllocationResult allocation = heap->AllocateRaw(
7174 size_in_bytes, AllocationType::kCode, AllocationOrigin::kRuntime);
7175 CHECK(allocation.To(&obj));
7176 CHECK_EQ(allocation.ToAddress(), allocation_tracker.address());
7177
7178 heap->CreateFillerObjectAt(obj.address(), size_in_bytes,
7179 ClearRecordedSlots::kNo);
7180 }
7181
7182 CHECK(!Heap::IsLargeObject(obj));
7183 heap->RemoveHeapObjectAllocationTracker(&allocation_tracker);
7184 }
7185
7186 // Allocate a large code object.
7187 {
7188 int size_in_bytes =
7189 heap->MaxRegularHeapObjectSize(AllocationType::kCode) + kTaggedSize;
7190 TestAllocationTracker allocation_tracker{size_in_bytes};
7191 heap->AddHeapObjectAllocationTracker(&allocation_tracker);
7192
7193 HeapObject obj;
7194 {
7195 AllocationResult allocation = heap->AllocateRaw(
7196 size_in_bytes, AllocationType::kCode, AllocationOrigin::kRuntime);
7197 CHECK(allocation.To(&obj));
7198 CHECK_EQ(allocation.ToAddress(), allocation_tracker.address());
7199
7200 heap->CreateFillerObjectAt(obj.address(), size_in_bytes,
7201 ClearRecordedSlots::kNo);
7202 }
7203
7204 CHECK(Heap::IsLargeObject(obj));
7205 heap->RemoveHeapObjectAllocationTracker(&allocation_tracker);
7206 }
7207
7208 isolate->Dispose();
7209 }
7210
7211 TEST(IsPendingAllocationNewSpace) {
7212 CcTest::InitializeVM();
7213 Isolate* isolate = CcTest::i_isolate();
7214 Heap* heap = isolate->heap();
7215 Factory* factory = isolate->factory();
7216 HandleScope handle_scope(isolate);
7217 Handle<FixedArray> object = factory->NewFixedArray(5, AllocationType::kYoung);
7218 CHECK_IMPLIES(!FLAG_enable_third_party_heap,
7219 heap->IsPendingAllocation(*object));
7220 heap->PublishPendingAllocations();
7221 CHECK(!heap->IsPendingAllocation(*object));
7222 }
7223
7224 TEST(IsPendingAllocationNewLOSpace) {
7225 CcTest::InitializeVM();
7226 Isolate* isolate = CcTest::i_isolate();
7227 Heap* heap = isolate->heap();
7228 Factory* factory = isolate->factory();
7229 HandleScope handle_scope(isolate);
7230 Handle<FixedArray> object = factory->NewFixedArray(
7231 FixedArray::kMaxRegularLength + 1, AllocationType::kYoung);
7232 CHECK_IMPLIES(!FLAG_enable_third_party_heap,
7233 heap->IsPendingAllocation(*object));
7234 heap->PublishPendingAllocations();
7235 CHECK(!heap->IsPendingAllocation(*object));
7236 }
7237
7238 TEST(IsPendingAllocationOldSpace) {
7239 CcTest::InitializeVM();
7240 Isolate* isolate = CcTest::i_isolate();
7241 Heap* heap = isolate->heap();
7242 Factory* factory = isolate->factory();
7243 HandleScope handle_scope(isolate);
7244 Handle<FixedArray> object = factory->NewFixedArray(5, AllocationType::kOld);
7245 CHECK_IMPLIES(!FLAG_enable_third_party_heap,
7246 heap->IsPendingAllocation(*object));
7247 heap->PublishPendingAllocations();
7248 CHECK(!heap->IsPendingAllocation(*object));
7249 }
7250
7251 TEST(IsPendingAllocationLOSpace) {
7252 CcTest::InitializeVM();
7253 Isolate* isolate = CcTest::i_isolate();
7254 Heap* heap = isolate->heap();
7255 Factory* factory = isolate->factory();
7256 HandleScope handle_scope(isolate);
7257 Handle<FixedArray> object = factory->NewFixedArray(
7258 FixedArray::kMaxRegularLength + 1, AllocationType::kOld);
7259 CHECK_IMPLIES(!FLAG_enable_third_party_heap,
7260 heap->IsPendingAllocation(*object));
7261 heap->PublishPendingAllocations();
7262 CHECK(!heap->IsPendingAllocation(*object));
7263 }
7264
7265 TEST(Regress10900) {
7266 FLAG_always_compact = true;
7267 CcTest::InitializeVM();
7268 Isolate* isolate = CcTest::i_isolate();
7269 Heap* heap = isolate->heap();
7270 Factory* factory = isolate->factory();
7271 HandleScope handle_scope(isolate);
7272 i::byte buffer[i::Assembler::kDefaultBufferSize];
7273 MacroAssembler masm(isolate, v8::internal::CodeObjectRequired::kYes,
7274 ExternalAssemblerBuffer(buffer, sizeof(buffer)));
7275 #if V8_TARGET_ARCH_ARM64
7276 UseScratchRegisterScope temps(&masm);
7277 Register tmp = temps.AcquireX();
7278 masm.Mov(tmp, Operand(static_cast<int32_t>(
7279 ReadOnlyRoots(heap).undefined_value_handle()->ptr())));
7280 masm.Push(tmp, tmp);
7281 #else
7282 masm.Push(ReadOnlyRoots(heap).undefined_value_handle());
7283 #endif
7284 CodeDesc desc;
7285 masm.GetCode(isolate, &desc);
7286 Handle<Code> code =
7287 Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build();
7288 {
7289 // Generate multiple code pages.
7290 CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
7291 for (int i = 0; i < 100; i++) {
7292 factory->CopyCode(code);
7293 }
7294 }
7295 // Force garbage collection that compacts code pages and triggers
7296 // an assertion in Isolate::AddCodeMemoryRange before the bug fix.
7297 CcTest::CollectAllAvailableGarbage();
7298 }
7299
7300 namespace {
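// Populates the heap from JavaScript with many small arrays plus one large
// array, giving the GC-related tests below non-trivial garbage to process.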
7301 void GenerateGarbage() {
7302 const char* source =
7303 "let roots = [];"
7304 "for (let i = 0; i < 100; i++) roots.push(new Array(1000).fill(0));"
7305 "roots.push(new Array(1000000).fill(0));"
7306 "roots;";
7307 CompileRun(source);
7308 }
7309
7310 } // anonymous namespace
7311
7312 TEST(Regress11181) {
7313 FLAG_always_compact = true;
7314 CcTest::InitializeVM();
7315 TracingFlags::runtime_stats.store(
7316 v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE,
7317 std::memory_order_relaxed);
7318 v8::HandleScope scope(CcTest::isolate());
7319 GenerateGarbage();
7320 CcTest::CollectAllAvailableGarbage();
7321 }
7322
7323 TEST(LongTaskStatsFullAtomic) {
7324 CcTest::InitializeVM();
7325 v8::Isolate* isolate = CcTest::isolate();
7326 v8::HandleScope scope(CcTest::isolate());
7327 GenerateGarbage();
7328 v8::metrics::LongTaskStats::Reset(isolate);
7329 CHECK_EQ(0u, v8::metrics::LongTaskStats::Get(isolate)
7330 .gc_full_atomic_wall_clock_duration_us);
7331 for (int i = 0; i < 10; ++i) {
7332 CcTest::CollectAllAvailableGarbage();
7333 }
7334 CHECK_LT(0u, v8::metrics::LongTaskStats::Get(isolate)
7335 .gc_full_atomic_wall_clock_duration_us);
7336 v8::metrics::LongTaskStats::Reset(isolate);
7337 CHECK_EQ(0u, v8::metrics::LongTaskStats::Get(isolate)
7338 .gc_full_atomic_wall_clock_duration_us);
7339 }
7340
7341 TEST(LongTaskStatsFullIncremental) {
7342 if (!FLAG_incremental_marking) return;
7343 CcTest::InitializeVM();
7344 v8::Isolate* isolate = CcTest::isolate();
7345 v8::HandleScope scope(CcTest::isolate());
7346 GenerateGarbage();
7347 v8::metrics::LongTaskStats::Reset(isolate);
7348 CHECK_EQ(0u, v8::metrics::LongTaskStats::Get(isolate)
7349 .gc_full_incremental_wall_clock_duration_us);
7350 for (int i = 0; i < 10; ++i) {
7351 heap::SimulateIncrementalMarking(CcTest::heap());
7352 CcTest::CollectAllAvailableGarbage();
7353 }
7354 CHECK_LT(0u, v8::metrics::LongTaskStats::Get(isolate)
7355 .gc_full_incremental_wall_clock_duration_us);
7356 v8::metrics::LongTaskStats::Reset(isolate);
7357 CHECK_EQ(0u, v8::metrics::LongTaskStats::Get(isolate)
7358 .gc_full_incremental_wall_clock_duration_us);
7359 }
7360
7361 TEST(LongTaskStatsYoung) {
7362 if (FLAG_single_generation) return;
7363 CcTest::InitializeVM();
7364 v8::Isolate* isolate = CcTest::isolate();
7365 v8::HandleScope scope(CcTest::isolate());
7366 GenerateGarbage();
7367 v8::metrics::LongTaskStats::Reset(isolate);
7368 CHECK_EQ(
7369 0u,
7370 v8::metrics::LongTaskStats::Get(isolate).gc_young_wall_clock_duration_us);
7371 for (int i = 0; i < 10; ++i) {
7372 CcTest::CollectGarbage(NEW_SPACE);
7373 }
7374 CHECK_LT(
7375 0u,
7376 v8::metrics::LongTaskStats::Get(isolate).gc_young_wall_clock_duration_us);
7377 v8::metrics::LongTaskStats::Reset(isolate);
7378 CHECK_EQ(
7379 0u,
7380 v8::metrics::LongTaskStats::Get(isolate).gc_young_wall_clock_duration_us);
7381 }
7382
7383 } // namespace heap
7384 } // namespace internal
7385 } // namespace v8
7386
7387 #undef __
7388