1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/heap/factory.h"
6
7 #include <algorithm> // For copy
8 #include <memory> // For shared_ptr<>
9 #include <string>
10 #include <utility> // For move
11
12 #include "src/ast/ast-source-ranges.h"
13 #include "src/base/bits.h"
14 #include "src/builtins/accessors.h"
15 #include "src/builtins/constants-table-builder.h"
16 #include "src/codegen/compilation-cache.h"
17 #include "src/codegen/compiler.h"
18 #include "src/common/assert-scope.h"
19 #include "src/common/globals.h"
20 #include "src/diagnostics/basic-block-profiler.h"
21 #include "src/execution/isolate-inl.h"
22 #include "src/execution/protectors-inl.h"
23 #include "src/heap/basic-memory-chunk.h"
24 #include "src/heap/heap-inl.h"
25 #include "src/heap/incremental-marking.h"
26 #include "src/heap/mark-compact-inl.h"
27 #include "src/heap/memory-chunk.h"
28 #include "src/heap/read-only-heap.h"
29 #include "src/ic/handler-configuration-inl.h"
30 #include "src/init/bootstrapper.h"
31 #include "src/interpreter/interpreter.h"
32 #include "src/logging/counters.h"
33 #include "src/logging/log.h"
34 #include "src/numbers/conversions.h"
35 #include "src/numbers/hash-seed-inl.h"
36 #include "src/objects/allocation-site-inl.h"
37 #include "src/objects/allocation-site-scopes.h"
38 #include "src/objects/api-callbacks.h"
39 #include "src/objects/arguments-inl.h"
40 #include "src/objects/bigint.h"
41 #include "src/objects/cell-inl.h"
42 #include "src/objects/debug-objects-inl.h"
43 #include "src/objects/embedder-data-array-inl.h"
44 #include "src/objects/feedback-cell-inl.h"
45 #include "src/objects/fixed-array-inl.h"
46 #include "src/objects/foreign-inl.h"
47 #include "src/objects/instance-type-inl.h"
48 #include "src/objects/js-array-buffer-inl.h"
49 #include "src/objects/js-array-inl.h"
50 #include "src/objects/js-collection-inl.h"
51 #include "src/objects/js-generator-inl.h"
52 #include "src/objects/js-objects.h"
53 #include "src/objects/js-regexp-inl.h"
54 #include "src/objects/js-weak-refs-inl.h"
55 #include "src/objects/literal-objects-inl.h"
56 #include "src/objects/megadom-handler-inl.h"
57 #include "src/objects/microtask-inl.h"
58 #include "src/objects/module-inl.h"
59 #include "src/objects/promise-inl.h"
60 #include "src/objects/property-descriptor-object-inl.h"
61 #include "src/objects/scope-info.h"
62 #include "src/objects/stack-frame-info-inl.h"
63 #include "src/objects/string-set-inl.h"
64 #include "src/objects/struct-inl.h"
65 #include "src/objects/synthetic-module-inl.h"
66 #include "src/objects/template-objects-inl.h"
67 #include "src/objects/transitions-inl.h"
68 #include "src/roots/roots.h"
69 #include "src/strings/unicode-inl.h"
70 #if V8_ENABLE_WEBASSEMBLY
71 #include "src/wasm/wasm-value.h"
72 #endif
73
74 namespace v8 {
75 namespace internal {
76
// Prepares a CodeBuilder for constructing a Code object of the given |kind|
// from the already-assembled instruction stream described by |desc|.
// The position table defaults to the canonical empty byte array until a
// source-position / bytecode-offset table is explicitly attached.
Factory::CodeBuilder::CodeBuilder(Isolate* isolate, const CodeDesc& desc,
                                  CodeKind kind)
    : isolate_(isolate),
      code_desc_(desc),
      kind_(kind),
      position_table_(isolate_->factory()->empty_byte_array()) {}
83
// Builds the Code object: allocates (or, for on-heap baseline code, reuses)
// the Code heap object, initializes all of its fields, copies the generated
// instructions/reloc info in, and patches self-references. Returns an empty
// MaybeHandle if allocation fails and |retry_allocation_or_fail| is false;
// with |retry_allocation_or_fail| set, allocation failure is fatal (see
// AllocateCode).
MaybeHandle<Code> Factory::CodeBuilder::BuildInternal(
    bool retry_allocation_or_fail) {
  const auto factory = isolate_->factory();
  // Allocate objects needed for code initialization.
  Handle<ByteArray> reloc_info =
      factory->NewByteArray(code_desc_.reloc_size, AllocationType::kOld);
  Handle<CodeDataContainer> data_container;

  // Use a canonical off-heap trampoline CodeDataContainer if possible.
  // This is only valid when the flags match one of the two canonical
  // containers exactly (trivial, or promise-rejection-only).
  const int32_t promise_rejection_flag =
      Code::IsPromiseRejectionField::encode(true);
  if (read_only_data_container_ &&
      (kind_specific_flags_ == 0 ||
       kind_specific_flags_ == promise_rejection_flag)) {
    const ReadOnlyRoots roots(isolate_);
    const auto canonical_code_data_container = Handle<CodeDataContainer>::cast(
        kind_specific_flags_ == 0
            ? roots.trampoline_trivial_code_data_container_handle()
            : roots.trampoline_promise_rejection_code_data_container_handle());
    DCHECK_EQ(canonical_code_data_container->kind_specific_flags(kRelaxedLoad),
              kind_specific_flags_);
    data_container = canonical_code_data_container;
  } else {
    data_container = factory->NewCodeDataContainer(
        0, read_only_data_container_ ? AllocationType::kReadOnly
                                     : AllocationType::kOld);
    data_container->set_kind_specific_flags(kind_specific_flags_,
                                            kRelaxedStore);
  }

  // Basic block profiling data for builtins is stored in the JS heap rather
  // than in separately-allocated C++ objects. Allocate that data now if
  // appropriate.
  Handle<OnHeapBasicBlockProfilerData> on_heap_profiler_data;
  if (profiler_data_ && isolate_->IsGeneratingEmbeddedBuiltins()) {
    on_heap_profiler_data = profiler_data_->CopyToJSHeap(isolate_);

    // Add the on-heap data to a global list, which keeps it alive and allows
    // iteration.
    Handle<ArrayList> list(isolate_->heap()->basic_block_profiling_data(),
                           isolate_);
    Handle<ArrayList> new_list =
        ArrayList::Add(isolate_, list, on_heap_profiler_data);
    isolate_->heap()->SetBasicBlockProfilingData(new_list);
  }

  STATIC_ASSERT(Code::kOnHeapBodyIsContiguous);
  Heap* heap = isolate_->heap();
  CodePageCollectionMemoryModificationScope code_allocation(heap);

  Handle<Code> code;
  // On-heap baseline (sparkplug) compilation pre-allocates the Code object;
  // reuse it instead of allocating a fresh one.
  bool code_is_on_heap = code_desc_.origin && code_desc_.origin->IsOnHeap();
  if (code_is_on_heap) {
    DCHECK(FLAG_sparkplug_on_heap);
    DCHECK_EQ(kind_, CodeKind::BASELINE);
    code = code_desc_.origin->code().ToHandleChecked();
  } else {
    if (!AllocateCode(retry_allocation_or_fail).ToHandle(&code)) {
      return MaybeHandle<Code>();
    }
  }

  // Initialize the Code object's fields. No GC may happen from here until
  // the object is fully initialized (DisallowGarbageCollection), since the
  // code object is in an inconsistent state.
  {
    Code raw_code = *code;
    constexpr bool kIsNotOffHeapTrampoline = false;
    DisallowGarbageCollection no_gc;

    if (code_is_on_heap) {
      heap->NotifyCodeObjectChangeStart(raw_code, no_gc);
    }

    raw_code.set_raw_instruction_size(code_desc_.instruction_size());
    raw_code.set_raw_metadata_size(code_desc_.metadata_size());
    raw_code.initialize_flags(kind_, is_turbofanned_, stack_slots_,
                              kIsNotOffHeapTrampoline);
    raw_code.set_builtin_id(builtin_);
    // This might impact direct concurrent reads from TF if we are resetting
    // this field. We currently assume it's immutable thus a relaxed read (after
    // passing IsPendingAllocation).
    raw_code.set_inlined_bytecode_size(inlined_bytecode_size_);
    raw_code.set_code_data_container(*data_container, kReleaseStore);
    // Baseline code stores a bytecode offset table; all other kinds store
    // deoptimization data and a source position table.
    if (kind_ == CodeKind::BASELINE) {
      raw_code.set_bytecode_or_interpreter_data(*interpreter_data_);
      raw_code.set_bytecode_offset_table(*position_table_);
    } else {
      raw_code.set_deoptimization_data(*deoptimization_data_);
      raw_code.set_source_position_table(*position_table_);
    }
    raw_code.set_handler_table_offset(
        code_desc_.handler_table_offset_relative());
    raw_code.set_constant_pool_offset(
        code_desc_.constant_pool_offset_relative());
    raw_code.set_code_comments_offset(
        code_desc_.code_comments_offset_relative());
    raw_code.set_unwinding_info_offset(
        code_desc_.unwinding_info_offset_relative());

    // Allow self references to created code object by patching the handle to
    // point to the newly allocated Code object.
    Handle<Object> self_reference;
    if (self_reference_.ToHandle(&self_reference)) {
      DCHECK(self_reference->IsOddball());
      DCHECK_EQ(Oddball::cast(*self_reference).kind(),
                Oddball::kSelfReferenceMarker);
      DCHECK_NE(kind_, CodeKind::BASELINE);
      if (isolate_->IsGeneratingEmbeddedBuiltins()) {
        isolate_->builtins_constants_table_builder()->PatchSelfReference(
            self_reference, code);
      }
      self_reference.PatchValue(*code);
    }

    // Likewise, any references to the basic block counters marker need to be
    // updated to point to the newly-allocated counters array.
    if (!on_heap_profiler_data.is_null()) {
      isolate_->builtins_constants_table_builder()
          ->PatchBasicBlockCountersReference(
              handle(on_heap_profiler_data->counts(), isolate_));
    }

    if (code_is_on_heap) {
      FinalizeOnHeapCode(code, *reloc_info);
    } else {
      // Migrate generated code.
      // The generated code can contain embedded objects (typically from
      // handles) in a pointer-to-tagged-value format (i.e. with indirection
      // like a handle) that are dereferenced during the copy to point directly
      // to the actual heap objects. These pointers can include references to
      // the code object itself, through the self_reference parameter.
      raw_code.CopyFromNoFlush(*reloc_info, heap, code_desc_);
    }

    raw_code.clear_padding();

    if (code_is_on_heap) {
      raw_code.set_relocation_info(*reloc_info, kReleaseStore);
      // Now that object is properly initialized, the GC needs to revisit this
      // object if marking is on.
      heap->NotifyCodeObjectChangeEnd(raw_code, no_gc);
    } else {
      raw_code.set_relocation_info(*reloc_info);
    }

    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      data_container->SetCodeAndEntryPoint(isolate_, raw_code);
    }
#ifdef VERIFY_HEAP
    if (FLAG_verify_heap) raw_code.ObjectVerify(isolate_);
#endif

    // Flush the instruction cache before changing the permissions.
    // Note: we do this before setting permissions to ReadExecute because on
    // some older ARM kernels there is a bug which causes an access error on
    // cache flush instructions to trigger access error on non-writable memory.
    // See https://bugs.chromium.org/p/v8/issues/detail?id=8157
    raw_code.FlushICache();
  }

  // Optionally record a disassembly of the code for verbose profiling output.
  if (profiler_data_ && FLAG_turbo_profiling_verbose) {
#ifdef ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os, isolate_);
    if (!on_heap_profiler_data.is_null()) {
      Handle<String> disassembly =
          isolate_->factory()->NewStringFromAsciiChecked(os.str().c_str(),
                                                         AllocationType::kOld);
      on_heap_profiler_data->set_code(*disassembly);
    } else {
      profiler_data_->SetCode(os);
    }
#endif  // ENABLE_DISASSEMBLER
  }

  return code;
}
259
// Allocates the raw backing heap object for a Code object and installs its
// map; all other fields are left uninitialized (the caller initializes them
// under DisallowGarbageCollection). With |retry_allocation_or_fail| the
// allocation either succeeds or aborts the process; otherwise failure
// returns an empty MaybeHandle.
MaybeHandle<Code> Factory::CodeBuilder::AllocateCode(
    bool retry_allocation_or_fail) {
  Heap* heap = isolate_->heap();
  HeapObject result;
  // Non-executable code goes to read-only space, except when the external
  // code space is enabled, in which case all Code objects live in code space.
  AllocationType allocation_type = V8_EXTERNAL_CODE_SPACE_BOOL || is_executable_
                                       ? AllocationType::kCode
                                       : AllocationType::kReadOnly;
  const int object_size = Code::SizeFor(code_desc_.body_size());
  if (retry_allocation_or_fail) {
    result = heap->AllocateRawWith<Heap::kRetryOrFail>(
        object_size, allocation_type, AllocationOrigin::kRuntime);
  } else {
    result = heap->AllocateRawWith<Heap::kLightRetry>(
        object_size, allocation_type, AllocationOrigin::kRuntime);
    // Return an empty handle if we cannot allocate the code object.
    if (result.is_null()) return MaybeHandle<Code>();
  }

  // The code object has not been fully initialized yet. We rely on the
  // fact that no allocation will happen from this point on.
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*isolate_->factory()->code_map(),
                                  SKIP_WRITE_BARRIER);
  Handle<Code> code = handle(Code::cast(result), isolate_);
  if (is_executable_) {
    DCHECK(IsAligned(code->address(), kCodeAlignment));
    DCHECK_IMPLIES(
        !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
        heap->code_region().contains(code->address()));
  }
  return code;
}
292
// Finalizes an on-heap (baseline) Code object that was allocated with a
// worst-case buffer size: copies the relocation info into |reloc_info|,
// repairs embedded references if a GC moved things since allocation, and
// trims the object down to its actual size by installing a filler.
void Factory::CodeBuilder::FinalizeOnHeapCode(Handle<Code> code,
                                              ByteArray reloc_info) {
  Heap* heap = isolate_->heap();

  // We cannot trim the Code object in CODE_LO_SPACE.
  DCHECK(!heap->code_lo_space()->Contains(*code));

  code->CopyRelocInfoToByteArray(reloc_info, code_desc_);

  if (code_desc_.origin->OnHeapGCCount() != heap->gc_count()) {
    // If a GC happens between Code object allocation and now, we might have
    // invalid embedded object references.
    code_desc_.origin->FixOnHeapReferences();
  }

#ifdef VERIFY_HEAP
  code->VerifyRelocInfo(isolate_, reloc_info);
#endif

  // Trim the unused tail of the buffer by creating a filler object there.
  int old_object_size = Code::SizeFor(code_desc_.origin->buffer_size());
  int new_object_size =
      Code::SizeFor(code_desc_.instruction_size() + code_desc_.metadata_size());
  int size_to_trim = old_object_size - new_object_size;
  DCHECK_GE(size_to_trim, 0);
  heap->CreateFillerObjectAt(code->address() + new_object_size, size_to_trim,
                             ClearRecordedSlots::kNo);
}
320
// Allocates a Code object with an empty body and a metadata area of
// |buffer_size| bytes, initialized with placeholder/trivial values.
// NOTE(review): presumably used as the pre-allocated target for on-heap
// baseline compilation (compare the code_is_on_heap path in BuildInternal) —
// confirm against callers. Returns an empty MaybeHandle on allocation
// failure.
MaybeHandle<Code> Factory::NewEmptyCode(CodeKind kind, int buffer_size) {
  STATIC_ASSERT(Code::kOnHeapBodyIsContiguous);
  const int object_size = Code::SizeFor(buffer_size);
  Heap* heap = isolate()->heap();

  HeapObject result = heap->AllocateRawWith<Heap::kLightRetry>(
      object_size, AllocationType::kCode, AllocationOrigin::kRuntime);
  if (result.is_null()) return MaybeHandle<Code>();

  // No allocation may happen until the fields below are initialized.
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER);

  Code raw_code = Code::cast(result);
  constexpr bool kIsNotOffHeapTrampoline = false;
  raw_code.set_raw_instruction_size(0);
  raw_code.set_raw_metadata_size(buffer_size);
  raw_code.set_relocation_info_or_undefined(*undefined_value());
  raw_code.initialize_flags(kind, false, 0, kIsNotOffHeapTrampoline);
  raw_code.set_builtin_id(Builtin::kNoBuiltinId);
  auto code_data_container =
      Handle<CodeDataContainer>::cast(trampoline_trivial_code_data_container());
  raw_code.set_code_data_container(*code_data_container, kReleaseStore);
  raw_code.set_deoptimization_data(*DeoptimizationData::Empty(isolate()));
  raw_code.set_bytecode_offset_table(*empty_byte_array());
  raw_code.set_handler_table_offset(0);
  raw_code.set_constant_pool_offset(0);
  raw_code.set_code_comments_offset(0);
  raw_code.set_unwinding_info_offset(0);

  Handle<Code> code = handle(raw_code, isolate());
  DCHECK(IsAligned(code->address(), kCodeAlignment));
  DCHECK_IMPLIES(
      !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
      heap->code_region().contains(code->address()));

  DCHECK(heap->code_space()->Contains(raw_code));
  return code;
}
359
TryBuild()360 MaybeHandle<Code> Factory::CodeBuilder::TryBuild() {
361 return BuildInternal(false);
362 }
363
Build()364 Handle<Code> Factory::CodeBuilder::Build() {
365 return BuildInternal(true).ToHandleChecked();
366 }
367
AllocateRaw(int size,AllocationType allocation,AllocationAlignment alignment)368 HeapObject Factory::AllocateRaw(int size, AllocationType allocation,
369 AllocationAlignment alignment) {
370 return isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(
371 size, allocation, AllocationOrigin::kRuntime, alignment);
372 }
373
// Allocates an object of |map|'s instance size and, if |allocation_site| is
// given, an AllocationMemento placed contiguously right behind it. The
// memento is what allows the GC's pretenuring heuristics to find the site.
HeapObject Factory::AllocateRawWithAllocationSite(
    Handle<Map> map, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  DCHECK(map->instance_type() != MAP_TYPE);
  int size = map->instance_size();
  if (!allocation_site.is_null()) {
    DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
    // Reserve extra space for the trailing memento.
    size += AllocationMemento::kSize;
  }
  HeapObject result =
      isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
  // Young-generation objects are allocated white, so the barrier is not
  // needed when installing the map.
  WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
                                            ? SKIP_WRITE_BARRIER
                                            : UPDATE_WRITE_BARRIER;
  result.set_map_after_allocation(*map, write_barrier_mode);
  if (!allocation_site.is_null()) {
    // The memento lives immediately after the object proper.
    AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
        Object(result.ptr() + map->instance_size()));
    InitializeAllocationMemento(alloc_memento, *allocation_site);
  }
  return result;
}
396
// Initializes a freshly reserved AllocationMemento: installs its map, links
// it to |allocation_site|, and bumps the site's memento-creation counter
// used by the pretenuring heuristics.
void Factory::InitializeAllocationMemento(AllocationMemento memento,
                                          AllocationSite allocation_site) {
  DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
  memento.set_map_after_allocation(*allocation_memento_map(),
                                   SKIP_WRITE_BARRIER);
  memento.set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
  if (FLAG_allocation_site_pretenuring) {
    allocation_site.IncrementMementoCreateCount();
  }
}
407
New(Handle<Map> map,AllocationType allocation)408 HeapObject Factory::New(Handle<Map> map, AllocationType allocation) {
409 DCHECK(map->instance_type() != MAP_TYPE);
410 int size = map->instance_size();
411 HeapObject result =
412 isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
413 // New space objects are allocated white.
414 WriteBarrierMode write_barrier_mode = allocation == AllocationType::kYoung
415 ? SKIP_WRITE_BARRIER
416 : UPDATE_WRITE_BARRIER;
417 result.set_map_after_allocation(*map, write_barrier_mode);
418 return result;
419 }
420
NewFillerObject(int size,bool double_align,AllocationType allocation,AllocationOrigin origin)421 Handle<HeapObject> Factory::NewFillerObject(int size, bool double_align,
422 AllocationType allocation,
423 AllocationOrigin origin) {
424 AllocationAlignment alignment = double_align ? kDoubleAligned : kWordAligned;
425 Heap* heap = isolate()->heap();
426 HeapObject result = heap->AllocateRawWith<Heap::kRetryOrFail>(
427 size, allocation, origin, alignment);
428 heap->CreateFillerObjectAt(result.address(), size, ClearRecordedSlots::kNo);
429 return Handle<HeapObject>(result, isolate());
430 }
431
// Allocates a PrototypeInfo struct in old space with all fields reset to
// their default (unregistered / empty) values.
Handle<PrototypeInfo> Factory::NewPrototypeInfo() {
  auto result = NewStructInternal<PrototypeInfo>(PROTOTYPE_INFO_TYPE,
                                                 AllocationType::kOld);
  DisallowGarbageCollection no_gc;
  result.set_prototype_users(Smi::zero());
  result.set_registry_slot(PrototypeInfo::UNREGISTERED);
  result.set_bit_field(0);
  result.set_module_namespace(*undefined_value(), SKIP_WRITE_BARRIER);
  return handle(result, isolate());
}
442
NewEnumCache(Handle<FixedArray> keys,Handle<FixedArray> indices)443 Handle<EnumCache> Factory::NewEnumCache(Handle<FixedArray> keys,
444 Handle<FixedArray> indices) {
445 auto result =
446 NewStructInternal<EnumCache>(ENUM_CACHE_TYPE, AllocationType::kOld);
447 DisallowGarbageCollection no_gc;
448 result.set_keys(*keys);
449 result.set_indices(*indices);
450 return handle(result, isolate());
451 }
452
NewTuple2(Handle<Object> value1,Handle<Object> value2,AllocationType allocation)453 Handle<Tuple2> Factory::NewTuple2(Handle<Object> value1, Handle<Object> value2,
454 AllocationType allocation) {
455 auto result = NewStructInternal<Tuple2>(TUPLE2_TYPE, allocation);
456 DisallowGarbageCollection no_gc;
457 result.set_value1(*value1);
458 result.set_value2(*value2);
459 return handle(result, isolate());
460 }
461
NewOddball(Handle<Map> map,const char * to_string,Handle<Object> to_number,const char * type_of,byte kind)462 Handle<Oddball> Factory::NewOddball(Handle<Map> map, const char* to_string,
463 Handle<Object> to_number,
464 const char* type_of, byte kind) {
465 Handle<Oddball> oddball(Oddball::cast(New(map, AllocationType::kReadOnly)),
466 isolate());
467 Oddball::Initialize(isolate(), oddball, to_string, to_number, type_of, kind);
468 return oddball;
469 }
470
NewSelfReferenceMarker()471 Handle<Oddball> Factory::NewSelfReferenceMarker() {
472 return NewOddball(self_reference_marker_map(), "self_reference_marker",
473 handle(Smi::FromInt(-1), isolate()), "undefined",
474 Oddball::kSelfReferenceMarker);
475 }
476
NewBasicBlockCountersMarker()477 Handle<Oddball> Factory::NewBasicBlockCountersMarker() {
478 return NewOddball(basic_block_counters_marker_map(),
479 "basic_block_counters_marker",
480 handle(Smi::FromInt(-1), isolate()), "undefined",
481 Oddball::kBasicBlockCountersMarker);
482 }
483
// Allocates a PropertyArray of |length| slots in young space, filled with
// undefined. Returns the canonical empty array for length 0.
Handle<PropertyArray> Factory::NewPropertyArray(int length) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_property_array();
  HeapObject result = AllocateRawFixedArray(length, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*property_array_map(), SKIP_WRITE_BARRIER);
  PropertyArray array = PropertyArray::cast(result);
  array.initialize_length(length);
  // Fill all slots so the GC never sees uninitialized memory.
  MemsetTagged(array.data_start(), read_only_roots().undefined_value(), length);
  return handle(array, isolate());
}
495
// Attempts to allocate a FixedArray of |length| elements filled with
// undefined. Unlike NewFixedArray, allocation failure is not fatal: an
// empty MaybeHandle is returned instead.
MaybeHandle<FixedArray> Factory::TryNewFixedArray(
    int length, AllocationType allocation_type) {
  DCHECK_LE(0, length);
  if (length == 0) return empty_fixed_array();

  int size = FixedArray::SizeFor(length);
  Heap* heap = isolate()->heap();
  AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
  HeapObject result;
  if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
  // Large objects get a progress bar so incremental marking can process
  // them in chunks rather than in one long pause.
  if ((size > heap->MaxRegularHeapObjectSize(allocation_type)) &&
      FLAG_use_marking_progress_bar) {
    LargePage::FromHeapObject(result)->ProgressBar().Enable();
  }
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
  FixedArray array = FixedArray::cast(result);
  array.set_length(length);
  MemsetTagged(array.data_start(), *undefined_value(), length);
  return handle(array, isolate());
}
517
NewClosureFeedbackCellArray(int length)518 Handle<ClosureFeedbackCellArray> Factory::NewClosureFeedbackCellArray(
519 int length) {
520 if (length == 0) return empty_closure_feedback_cell_array();
521
522 Handle<ClosureFeedbackCellArray> feedback_cell_array =
523 Handle<ClosureFeedbackCellArray>::cast(NewFixedArrayWithMap(
524 read_only_roots().closure_feedback_cell_array_map_handle(), length,
525 AllocationType::kOld));
526
527 return feedback_cell_array;
528 }
529
// Allocates a FeedbackVector in old space for the given function, sized by
// the slot count of |shared|'s feedback metadata, with all counters reset
// and every feedback slot initialized to undefined.
Handle<FeedbackVector> Factory::NewFeedbackVector(
    Handle<SharedFunctionInfo> shared,
    Handle<ClosureFeedbackCellArray> closure_feedback_cell_array) {
  int length = shared->feedback_metadata().slot_count();
  DCHECK_LE(0, length);
  int size = FeedbackVector::SizeFor(length);

  FeedbackVector vector = FeedbackVector::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kOld, *feedback_vector_map()));
  DisallowGarbageCollection no_gc;
  vector.set_shared_function_info(*shared);
  // Start with no optimized code attached (cleared weak reference).
  vector.set_maybe_optimized_code(HeapObjectReference::ClearedValue(isolate()),
                                  kReleaseStore);
  vector.set_length(length);
  vector.set_invocation_count(0);
  vector.set_profiler_ticks(0);
  vector.InitializeOptimizationState();
  vector.set_closure_feedback_cell_array(*closure_feedback_cell_array);

  // TODO(leszeks): Initialize based on the feedback metadata.
  MemsetTagged(ObjectSlot(vector.slots_start()), *undefined_value(), length);
  return handle(vector, isolate());
}
553
// Allocates an EmbedderDataArray of |length| slots in young space. Every
// slot is initialized to undefined and, where the build requires it, gets
// an external pointer table entry allocated.
Handle<EmbedderDataArray> Factory::NewEmbedderDataArray(int length) {
  DCHECK_LE(0, length);
  int size = EmbedderDataArray::SizeFor(length);
  EmbedderDataArray array = EmbedderDataArray::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kYoung, *embedder_data_array_map()));
  DisallowGarbageCollection no_gc;
  array.set_length(length);

  if (length > 0) {
    ObjectSlot start(array.slots_start());
    ObjectSlot end(array.slots_end());
    size_t slot_count = end - start;
    MemsetTagged(start, *undefined_value(), slot_count);
    for (int i = 0; i < length; i++) {
      // TODO(v8:10391, saelo): Handle external pointers in EmbedderDataSlot
      EmbedderDataSlot(array, i).AllocateExternalPointerEntry(isolate());
    }
  }
  return handle(array, isolate());
}
574
NewFixedDoubleArrayWithHoles(int length)575 Handle<FixedArrayBase> Factory::NewFixedDoubleArrayWithHoles(int length) {
576 DCHECK_LE(0, length);
577 Handle<FixedArrayBase> array = NewFixedDoubleArray(length);
578 if (length > 0) {
579 Handle<FixedDoubleArray>::cast(array)->FillWithHoles(0, length);
580 }
581 return array;
582 }
583
584 template <typename T>
AllocateSmallOrderedHashTable(Handle<Map> map,int capacity,AllocationType allocation)585 Handle<T> Factory::AllocateSmallOrderedHashTable(Handle<Map> map, int capacity,
586 AllocationType allocation) {
587 // Capacity must be a power of two, since we depend on being able
588 // to divide and multiple by 2 (kLoadFactor) to derive capacity
589 // from number of buckets. If we decide to change kLoadFactor
590 // to something other than 2, capacity should be stored as another
591 // field of this object.
592 DCHECK_EQ(T::kLoadFactor, 2);
593 capacity =
594 base::bits::RoundUpToPowerOfTwo32(std::max({T::kMinCapacity, capacity}));
595 capacity = std::min({capacity, T::kMaxCapacity});
596
597 DCHECK_LT(0, capacity);
598 DCHECK_EQ(0, capacity % T::kLoadFactor);
599
600 int size = T::SizeFor(capacity);
601 HeapObject result = AllocateRawWithImmortalMap(size, allocation, *map);
602 Handle<T> table(T::cast(result), isolate());
603 table->Initialize(isolate(), capacity);
604 return table;
605 }
606
NewSmallOrderedHashSet(int capacity,AllocationType allocation)607 Handle<SmallOrderedHashSet> Factory::NewSmallOrderedHashSet(
608 int capacity, AllocationType allocation) {
609 return AllocateSmallOrderedHashTable<SmallOrderedHashSet>(
610 small_ordered_hash_set_map(), capacity, allocation);
611 }
612
NewSmallOrderedHashMap(int capacity,AllocationType allocation)613 Handle<SmallOrderedHashMap> Factory::NewSmallOrderedHashMap(
614 int capacity, AllocationType allocation) {
615 return AllocateSmallOrderedHashTable<SmallOrderedHashMap>(
616 small_ordered_hash_map_map(), capacity, allocation);
617 }
618
NewSmallOrderedNameDictionary(int capacity,AllocationType allocation)619 Handle<SmallOrderedNameDictionary> Factory::NewSmallOrderedNameDictionary(
620 int capacity, AllocationType allocation) {
621 Handle<SmallOrderedNameDictionary> dict =
622 AllocateSmallOrderedHashTable<SmallOrderedNameDictionary>(
623 small_ordered_name_dictionary_map(), capacity, allocation);
624 dict->SetHash(PropertyArray::kNoHashSentinel);
625 return dict;
626 }
627
NewOrderedHashSet()628 Handle<OrderedHashSet> Factory::NewOrderedHashSet() {
629 return OrderedHashSet::Allocate(isolate(), OrderedHashSet::kInitialCapacity,
630 AllocationType::kYoung)
631 .ToHandleChecked();
632 }
633
NewOrderedHashMap()634 Handle<OrderedHashMap> Factory::NewOrderedHashMap() {
635 return OrderedHashMap::Allocate(isolate(), OrderedHashMap::kInitialCapacity,
636 AllocationType::kYoung)
637 .ToHandleChecked();
638 }
639
NewOrderedNameDictionary(int capacity)640 Handle<OrderedNameDictionary> Factory::NewOrderedNameDictionary(int capacity) {
641 return OrderedNameDictionary::Allocate(isolate(), capacity,
642 AllocationType::kYoung)
643 .ToHandleChecked();
644 }
645
NewNameDictionary(int at_least_space_for)646 Handle<NameDictionary> Factory::NewNameDictionary(int at_least_space_for) {
647 return NameDictionary::New(isolate(), at_least_space_for);
648 }
649
// Allocates a PropertyDescriptorObject with no flags set and value/get/set
// all initialized to the hole (i.e. "absent" in descriptor terms).
Handle<PropertyDescriptorObject> Factory::NewPropertyDescriptorObject() {
  auto object = NewStructInternal<PropertyDescriptorObject>(
      PROPERTY_DESCRIPTOR_OBJECT_TYPE, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  object.set_flags(0);
  // The hole is a read-only immortal object, so no write barrier is needed.
  Oddball the_hole = read_only_roots().the_hole_value();
  object.set_value(the_hole, SKIP_WRITE_BARRIER);
  object.set_get(the_hole, SKIP_WRITE_BARRIER);
  object.set_set(the_hole, SKIP_WRITE_BARRIER);
  return handle(object, isolate());
}
661
// Creates the single read-only, zero-capacity SwissNameDictionary that
// serves as the canonical empty dictionary root.
Handle<SwissNameDictionary> Factory::CreateCanonicalEmptySwissNameDictionary() {
  // This function is only supposed to be used to create the canonical empty
  // version and should not be used afterwards.
  DCHECK_EQ(kNullAddress, ReadOnlyRoots(isolate()).at(
                              RootIndex::kEmptySwissPropertyDictionary));

  ReadOnlyRoots roots(isolate());

  // Even the empty dictionary needs a (read-only) meta table.
  Handle<ByteArray> empty_meta_table =
      NewByteArray(SwissNameDictionary::kMetaTableEnumerationDataStartIndex,
                   AllocationType::kReadOnly);

  Map map = roots.swiss_name_dictionary_map();
  int size = SwissNameDictionary::SizeFor(0);
  HeapObject obj =
      AllocateRawWithImmortalMap(size, AllocationType::kReadOnly, map);
  SwissNameDictionary result = SwissNameDictionary::cast(obj);
  result.Initialize(isolate(), *empty_meta_table, 0);
  return handle(result, isolate());
}
682
683 // Internalized strings are created in the old generation (data space).
// Internalizes a UTF-8 encoded string: decodes it into the narrowest
// representation that can hold it (ASCII as-is, otherwise one-byte or
// two-byte) and looks it up in / inserts it into the string table.
Handle<String> Factory::InternalizeUtf8String(
    const base::Vector<const char>& string) {
  base::Vector<const uint8_t> utf8_data =
      base::Vector<const uint8_t>::cast(string);
  Utf8Decoder decoder(utf8_data);
  // Pure ASCII needs no transcoding; internalize the raw bytes directly.
  if (decoder.is_ascii()) return InternalizeString(utf8_data);
  if (decoder.is_one_byte()) {
    std::unique_ptr<uint8_t[]> buffer(new uint8_t[decoder.utf16_length()]);
    decoder.Decode(buffer.get(), utf8_data);
    return InternalizeString(
        base::Vector<const uint8_t>(buffer.get(), decoder.utf16_length()));
  }
  // General case: decode to UTF-16 code units.
  std::unique_ptr<uint16_t[]> buffer(new uint16_t[decoder.utf16_length()]);
  decoder.Decode(buffer.get(), utf8_data);
  return InternalizeString(
      base::Vector<const base::uc16>(buffer.get(), decoder.utf16_length()));
}
701
// Internalizes the substring [from, from + length) of a sequential string.
// |convert_encoding| allows the key to convert a two-byte substring that
// fits in one-byte representation (and vice versa) during the lookup.
template <typename SeqString>
Handle<String> Factory::InternalizeString(Handle<SeqString> string, int from,
                                          int length, bool convert_encoding) {
  SeqSubStringKey<SeqString> key(isolate(), string, from, length,
                                 convert_encoding);
  return InternalizeStringWithKey(&key);
}
709
// Explicit instantiations for the two sequential string representations.
template Handle<String> Factory::InternalizeString(
    Handle<SeqOneByteString> string, int from, int length,
    bool convert_encoding);
template Handle<String> Factory::InternalizeString(
    Handle<SeqTwoByteString> string, int from, int length,
    bool convert_encoding);
716
// Creates a new one-byte string from latin-1 data. Empty and single-char
// inputs are served from canonical cached strings instead of allocating.
MaybeHandle<String> Factory::NewStringFromOneByte(
    const base::Vector<const uint8_t>& string, AllocationType allocation) {
  DCHECK_NE(allocation, AllocationType::kReadOnly);
  int length = string.length();
  if (length == 0) return empty_string();
  if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
  Handle<SeqOneByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                             NewRawOneByteString(string.length(), allocation),
                             String);

  DisallowGarbageCollection no_gc;
  // Copy the characters into the new object.
  CopyChars(SeqOneByteString::cast(*result).GetChars(no_gc), string.begin(),
            length);
  return result;
}
734
// Creates a new string from UTF-8 data, choosing a one-byte or two-byte
// sequential representation depending on the decoded content.
MaybeHandle<String> Factory::NewStringFromUtf8(
    const base::Vector<const char>& string, AllocationType allocation) {
  base::Vector<const uint8_t> utf8_data =
      base::Vector<const uint8_t>::cast(string);
  Utf8Decoder decoder(utf8_data);

  if (decoder.utf16_length() == 0) return empty_string();

  if (decoder.is_one_byte()) {
    // Allocate string.
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate(), result,
        NewRawOneByteString(decoder.utf16_length(), allocation), String);

    DisallowGarbageCollection no_gc;
    decoder.Decode(result->GetChars(no_gc), utf8_data);
    return result;
  }

  // Two-byte case.
  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(decoder.utf16_length(), allocation), String);

  DisallowGarbageCollection no_gc;
  decoder.Decode(result->GetChars(no_gc), utf8_data);
  return result;
}
765
// Decodes the UTF-8 byte range [begin, begin + length) of |str| into a new
// String. Fast paths: single characters go through the single-character
// cache, and pure-ASCII input becomes a substring of |str| directly. Note
// that |str| may move during allocation, so the raw data pointer is
// recomputed after each allocation.
MaybeHandle<String> Factory::NewStringFromUtf8SubString(
    Handle<SeqOneByteString> str, int begin, int length,
    AllocationType allocation) {
  base::Vector<const uint8_t> utf8_data;
  {
    // Scope the raw-pointer extraction; the pointer is only valid while no GC
    // can run.
    DisallowGarbageCollection no_gc;
    utf8_data =
        base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
  }
  Utf8Decoder decoder(utf8_data);

  if (length == 1) {
    uint16_t t;
    // Decode even in the case of length 1 since it can be a bad character.
    decoder.Decode(&t, utf8_data);
    return LookupSingleCharacterStringFromCode(t);
  }

  if (decoder.is_ascii()) {
    // If the string is ASCII, we can just make a substring.
    // TODO(v8): the allocation flag is ignored in this case.
    return NewSubString(str, begin, begin + length);
  }

  DCHECK_GT(decoder.utf16_length(), 0);

  if (decoder.is_one_byte()) {
    // Allocate string.
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(
        isolate(), result,
        NewRawOneByteString(decoder.utf16_length(), allocation), String);
    DisallowGarbageCollection no_gc;
    // Update pointer references, since the original string may have moved after
    // allocation.
    utf8_data =
        base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
    decoder.Decode(result->GetChars(no_gc), utf8_data);
    return result;
  }

  // Allocate string.
  Handle<SeqTwoByteString> result;
  ASSIGN_RETURN_ON_EXCEPTION(
      isolate(), result,
      NewRawTwoByteString(decoder.utf16_length(), allocation), String);

  DisallowGarbageCollection no_gc;
  // Update pointer references, since the original string may have moved after
  // allocation.
  utf8_data = base::Vector<const uint8_t>(str->GetChars(no_gc) + begin, length);
  decoder.Decode(result->GetChars(no_gc), utf8_data);
  return result;
}
820
// Creates a String from |length| UTF-16 code units. If every code unit fits
// in Latin-1, a one-byte string is allocated instead of a two-byte one.
// Read-only allocation is not supported here.
MaybeHandle<String> Factory::NewStringFromTwoByte(const base::uc16* string,
                                                  int length,
                                                  AllocationType allocation) {
  DCHECK_NE(allocation, AllocationType::kReadOnly);
  if (length == 0) return empty_string();
  if (String::IsOneByte(string, length)) {
    if (length == 1) return LookupSingleCharacterStringFromCode(string[0]);
    Handle<SeqOneByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                               NewRawOneByteString(length, allocation), String);
    // No GC while copying into the raw character pointer.
    DisallowGarbageCollection no_gc;
    CopyChars(result->GetChars(no_gc), string, length);
    return result;
  } else {
    Handle<SeqTwoByteString> result;
    ASSIGN_RETURN_ON_EXCEPTION(isolate(), result,
                               NewRawTwoByteString(length, allocation), String);
    DisallowGarbageCollection no_gc;
    CopyChars(result->GetChars(no_gc), string, length);
    return result;
  }
}
843
NewStringFromTwoByte(const base::Vector<const base::uc16> & string,AllocationType allocation)844 MaybeHandle<String> Factory::NewStringFromTwoByte(
845 const base::Vector<const base::uc16>& string, AllocationType allocation) {
846 return NewStringFromTwoByte(string.begin(), string.length(), allocation);
847 }
848
NewStringFromTwoByte(const ZoneVector<base::uc16> * string,AllocationType allocation)849 MaybeHandle<String> Factory::NewStringFromTwoByte(
850 const ZoneVector<base::uc16>* string, AllocationType allocation) {
851 return NewStringFromTwoByte(string->data(), static_cast<int>(string->size()),
852 allocation);
853 }
854
855 namespace {
856
WriteOneByteData(Handle<String> s,uint8_t * chars,int len)857 inline void WriteOneByteData(Handle<String> s, uint8_t* chars, int len) {
858 DCHECK(s->length() == len);
859 String::WriteToFlat(*s, chars, 0, len);
860 }
861
WriteTwoByteData(Handle<String> s,uint16_t * chars,int len)862 inline void WriteTwoByteData(Handle<String> s, uint16_t* chars, int len) {
863 DCHECK(s->length() == len);
864 String::WriteToFlat(*s, chars, 0, len);
865 }
866
867 } // namespace
868
// Allocates and fills a sequential internalized string. The compile-time
// flag |is_one_byte| selects the representation; |t| supplies the characters
// via WriteOneByteData / WriteTwoByteData. The string goes into read-only
// space while the heap still permits that, otherwise into old space.
template <bool is_one_byte, typename T>
Handle<String> Factory::AllocateInternalizedStringImpl(T t, int chars,
                                                       uint32_t hash_field) {
  DCHECK_LE(0, chars);
  DCHECK_GE(String::kMaxLength, chars);

  // Compute map and object size.
  int size;
  Map map;
  if (is_one_byte) {
    map = *one_byte_internalized_string_map();
    size = SeqOneByteString::SizeFor(chars);
  } else {
    map = *internalized_string_map();
    size = SeqTwoByteString::SizeFor(chars);
  }

  String result = String::cast(
      AllocateRawWithImmortalMap(size,
                                 isolate()->heap()->CanAllocateInReadOnlySpace()
                                     ? AllocationType::kReadOnly
                                     : AllocationType::kOld,
                                 map));
  // |result| is held as a raw object and raw char pointers are handed out
  // below, so no GC may run until the handle is created at the end.
  DisallowGarbageCollection no_gc;
  result.set_length(chars);
  result.set_raw_hash_field(hash_field);
  DCHECK_EQ(size, result.Size());

  if (is_one_byte) {
    WriteOneByteData(t, SeqOneByteString::cast(result).GetChars(no_gc), chars);
  } else {
    WriteTwoByteData(t, SeqTwoByteString::cast(result).GetChars(no_gc), chars);
  }
  return handle(result, isolate());
}
904
NewInternalizedStringImpl(Handle<String> string,int chars,uint32_t hash_field)905 Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string,
906 int chars,
907 uint32_t hash_field) {
908 if (string->IsOneByteRepresentation()) {
909 return AllocateInternalizedStringImpl<true>(string, chars, hash_field);
910 }
911 return AllocateInternalizedStringImpl<false>(string, chars, hash_field);
912 }
913
914 namespace {
915
GetInternalizedStringMap(Factory * f,Handle<String> string)916 MaybeHandle<Map> GetInternalizedStringMap(Factory* f, Handle<String> string) {
917 switch (string->map().instance_type()) {
918 case STRING_TYPE:
919 return f->internalized_string_map();
920 case ONE_BYTE_STRING_TYPE:
921 return f->one_byte_internalized_string_map();
922 case EXTERNAL_STRING_TYPE:
923 return f->external_internalized_string_map();
924 case EXTERNAL_ONE_BYTE_STRING_TYPE:
925 return f->external_one_byte_internalized_string_map();
926 default:
927 return MaybeHandle<Map>(); // No match found.
928 }
929 }
930
931 } // namespace
932
InternalizedStringMapForString(Handle<String> string)933 MaybeHandle<Map> Factory::InternalizedStringMapForString(
934 Handle<String> string) {
935 // Do not internalize young strings: This allows us to ignore both string
936 // table and stub cache on scavenges.
937 if (Heap::InYoungGeneration(*string)) return MaybeHandle<Map>();
938 return GetInternalizedStringMap(this, string);
939 }
940
// Creates a new external internalized string object that mirrors |string|'s
// length and hash. The resource pointer is deliberately set to nullptr here;
// presumably the caller transfers the actual resource afterwards —
// NOTE(review): confirm against callers.
template <class StringClass>
Handle<StringClass> Factory::InternalizeExternalString(Handle<String> string) {
  Handle<Map> map = GetInternalizedStringMap(this, string).ToHandleChecked();
  StringClass external_string =
      StringClass::cast(New(map, AllocationType::kOld));
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  external_string.AllocateExternalPointerEntries(isolate());
  StringClass cast_string = StringClass::cast(*string);
  external_string.set_length(cast_string.length());
  external_string.set_raw_hash_field(cast_string.raw_hash_field());
  external_string.SetResource(isolate(), nullptr);
  // External strings must be registered so the heap can finalize their
  // resources.
  isolate()->heap()->RegisterExternalString(external_string);
  return handle(external_string, isolate());
}
955
// Explicit instantiations for the two external string representations.
template Handle<ExternalOneByteString>
Factory::InternalizeExternalString<ExternalOneByteString>(Handle<String>);
template Handle<ExternalTwoByteString>
Factory::InternalizeExternalString<ExternalTwoByteString>(Handle<String>);
960
// Returns the canonical single-character string for |code|. Latin-1
// characters are served from (and lazily inserted into) the
// single-character string cache; characters above the Latin-1 range are
// internalized directly without caching.
Handle<String> Factory::LookupSingleCharacterStringFromCode(uint16_t code) {
  if (code <= unibrow::Latin1::kMaxChar) {
    {
      // Raw read from the cache; undefined marks an empty slot.
      DisallowGarbageCollection no_gc;
      Object value = single_character_string_cache()->get(code);
      if (value != *undefined_value()) {
        return handle(String::cast(value), isolate());
      }
    }
    uint8_t buffer[] = {static_cast<uint8_t>(code)};
    Handle<String> result =
        InternalizeString(base::Vector<const uint8_t>(buffer, 1));
    // Populate the cache so subsequent lookups hit the fast path above.
    single_character_string_cache()->set(code, *result);
    return result;
  }
  uint16_t buffer[] = {code};
  return InternalizeString(base::Vector<const uint16_t>(buffer, 1));
}
979
NewSurrogatePairString(uint16_t lead,uint16_t trail)980 Handle<String> Factory::NewSurrogatePairString(uint16_t lead, uint16_t trail) {
981 DCHECK_GE(lead, 0xD800);
982 DCHECK_LE(lead, 0xDBFF);
983 DCHECK_GE(trail, 0xDC00);
984 DCHECK_LE(trail, 0xDFFF);
985
986 Handle<SeqTwoByteString> str =
987 isolate()->factory()->NewRawTwoByteString(2).ToHandleChecked();
988 DisallowGarbageCollection no_gc;
989 base::uc16* dest = str->GetChars(no_gc);
990 dest[0] = lead;
991 dest[1] = trail;
992 return str;
993 }
994
// Creates a proper (non-identity) substring of |str| covering [begin, end).
// Short results are copied into fresh sequential strings (with cached fast
// paths for lengths 0, 1 and 2); longer results become SlicedStrings that
// share the parent's characters.
Handle<String> Factory::NewProperSubString(Handle<String> str, int begin,
                                           int end) {
#if VERIFY_HEAP
  if (FLAG_verify_heap) str->StringVerify(isolate());
#endif
  // "Proper": the substring must not be the whole string.
  DCHECK(begin > 0 || end < str->length());

  str = String::Flatten(isolate(), str);

  int length = end - begin;
  if (length <= 0) return empty_string();
  if (length == 1) {
    return LookupSingleCharacterStringFromCode(str->Get(begin));
  }
  if (length == 2) {
    // Optimization for 2-byte strings often used as keys in a decompression
    // dictionary. Check whether we already have the string in the string
    // table to prevent creation of many unnecessary strings.
    uint16_t c1 = str->Get(begin);
    uint16_t c2 = str->Get(begin + 1);
    return MakeOrFindTwoCharacterString(c1, c2);
  }

  // Below the slicing threshold (or with slices disabled) copying is cheaper
  // than keeping the parent alive through a slice.
  if (!FLAG_string_slices || length < SlicedString::kMinLength) {
    if (str->IsOneByteRepresentation()) {
      Handle<SeqOneByteString> result =
          NewRawOneByteString(length).ToHandleChecked();
      DisallowGarbageCollection no_gc;
      uint8_t* dest = result->GetChars(no_gc);
      String::WriteToFlat(*str, dest, begin, length);
      return result;
    } else {
      Handle<SeqTwoByteString> result =
          NewRawTwoByteString(length).ToHandleChecked();
      DisallowGarbageCollection no_gc;
      base::uc16* dest = result->GetChars(no_gc);
      String::WriteToFlat(*str, dest, begin, length);
      return result;
    }
  }

  int offset = begin;

  // Slice of a slice: point at the ultimate parent and fold in its offset so
  // slices never nest.
  if (str->IsSlicedString()) {
    Handle<SlicedString> slice = Handle<SlicedString>::cast(str);
    str = Handle<String>(slice->parent(), isolate());
    offset += slice->offset();
  }
  // Look through thin strings to their actual payload.
  if (str->IsThinString()) {
    Handle<ThinString> thin = Handle<ThinString>::cast(str);
    str = handle(thin->actual(), isolate());
  }

  DCHECK(str->IsSeqString() || str->IsExternalString());
  Handle<Map> map = str->IsOneByteRepresentation()
                        ? sliced_one_byte_string_map()
                        : sliced_string_map();
  SlicedString slice = SlicedString::cast(New(map, AllocationType::kYoung));
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  slice.set_raw_hash_field(String::kEmptyHashField);
  slice.set_length(length);
  slice.set_parent(*str);
  slice.set_offset(offset);
  return handle(slice, isolate());
}
1060
// Creates an ExternalOneByteString backed by |resource|. Throws an invalid
// string length error when the resource exceeds String::kMaxLength; returns
// the canonical empty string for a zero-length resource.
MaybeHandle<String> Factory::NewExternalStringFromOneByte(
    const ExternalOneByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  // Non-cacheable resources get the "uncached" map variant.
  Handle<Map> map = resource->IsCacheable()
                        ? external_one_byte_string_map()
                        : uncached_external_one_byte_string_map();
  ExternalOneByteString external_string =
      ExternalOneByteString::cast(New(map, AllocationType::kOld));
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  external_string.AllocateExternalPointerEntries(isolate());
  external_string.set_length(static_cast<int>(length));
  external_string.set_raw_hash_field(String::kEmptyHashField);
  external_string.SetResource(isolate(), resource);
  // Register so the heap can finalize the external resource.
  isolate()->heap()->RegisterExternalString(external_string);

  return Handle<String>(external_string, isolate());
}
1083
// Two-byte counterpart of NewExternalStringFromOneByte: creates an
// ExternalTwoByteString backed by |resource|, with the same length checks
// and registration.
MaybeHandle<String> Factory::NewExternalStringFromTwoByte(
    const ExternalTwoByteString::Resource* resource) {
  size_t length = resource->length();
  if (length > static_cast<size_t>(String::kMaxLength)) {
    THROW_NEW_ERROR(isolate(), NewInvalidStringLengthError(), String);
  }
  if (length == 0) return empty_string();

  // Non-cacheable resources get the "uncached" map variant.
  Handle<Map> map = resource->IsCacheable() ? external_string_map()
                                            : uncached_external_string_map();
  ExternalTwoByteString string =
      ExternalTwoByteString::cast(New(map, AllocationType::kOld));
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  string.AllocateExternalPointerEntries(isolate());
  string.set_length(static_cast<int>(length));
  string.set_raw_hash_field(String::kEmptyHashField);
  string.SetResource(isolate(), resource);
  // Register so the heap can finalize the external resource.
  isolate()->heap()->RegisterExternalString(string);
  return Handle<ExternalTwoByteString>(string, isolate());
}
1104
// Creates a JSStringIterator over |string|, positioned at index 0. The
// string is flattened first so iteration does not repeatedly traverse cons
// trees.
Handle<JSStringIterator> Factory::NewJSStringIterator(Handle<String> string) {
  Handle<Map> map(isolate()->native_context()->initial_string_iterator_map(),
                  isolate());
  Handle<String> flat_string = String::Flatten(isolate(), string);
  Handle<JSStringIterator> iterator =
      Handle<JSStringIterator>::cast(NewJSObjectFromMap(map));

  // Initialize via a raw reference; no allocation happens below.
  DisallowGarbageCollection no_gc;
  JSStringIterator raw = *iterator;
  raw.set_string(*flat_string);
  raw.set_index(0);
  return iterator;
}
1118
// Allocates and initializes a fresh Symbol as a raw object (callers wrap it
// in a handle). Symbols are never allocated in young space.
Symbol Factory::NewSymbolInternal(AllocationType allocation) {
  DCHECK(allocation != AllocationType::kYoung);
  // Statically ensure that it is safe to allocate symbols in paged spaces.
  STATIC_ASSERT(Symbol::kSize <= kMaxRegularHeapObjectSize);

  Symbol symbol = Symbol::cast(AllocateRawWithImmortalMap(
      Symbol::kSize, allocation, read_only_roots().symbol_map()));
  DisallowGarbageCollection no_gc;
  // Generate a random hash value.
  int hash = isolate()->GenerateIdentityHash(Name::kHashBitMask);
  symbol.set_raw_hash_field(Name::kIsNotIntegerIndexMask |
                            (hash << Name::kHashShift));
  // The undefined description is a read-only root, so the write barrier can
  // be skipped.
  symbol.set_description(read_only_roots().undefined_value(),
                         SKIP_WRITE_BARRIER);
  symbol.set_flags(0);
  DCHECK(!symbol.is_private());
  return symbol;
}
1137
NewSymbol(AllocationType allocation)1138 Handle<Symbol> Factory::NewSymbol(AllocationType allocation) {
1139 return handle(NewSymbolInternal(allocation), isolate());
1140 }
1141
// Creates a Symbol with the private bit set. Like all symbols, never
// allocated in young space.
Handle<Symbol> Factory::NewPrivateSymbol(AllocationType allocation) {
  DCHECK(allocation != AllocationType::kYoung);
  Symbol symbol = NewSymbolInternal(allocation);
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  symbol.set_is_private(true);
  return handle(symbol, isolate());
}
1149
// Creates a private-name Symbol (used for class private fields and similar)
// described by |name|.
Handle<Symbol> Factory::NewPrivateNameSymbol(Handle<String> name) {
  Symbol symbol = NewSymbolInternal();
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  symbol.set_is_private_name();
  symbol.set_description(*name);
  return handle(symbol, isolate());
}
1157
// Shared allocation path for all context kinds: allocates |size| bytes,
// installs |map|, sets the length, and fills the variadic slot area with
// undefined. Returns a raw Context; callers must create a handle before any
// further allocation.
Context Factory::NewContextInternal(Handle<Map> map, int size,
                                    int variadic_part_length,
                                    AllocationType allocation) {
  DCHECK_LE(Context::kTodoHeaderSize, size);
  DCHECK(IsAligned(size, kTaggedSize));
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
  DCHECK_LE(Context::SizeFor(variadic_part_length), size);

  HeapObject result =
      isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(size, allocation);
  // The map must be installed before the object is treated as a Context.
  result.set_map_after_allocation(*map);
  DisallowGarbageCollection no_gc;
  Context context = Context::cast(result);
  context.set_length(variadic_part_length);
  DCHECK_EQ(context.SizeFromMap(*map), size);
  if (size > Context::kTodoHeaderSize) {
    // Initialize every slot after the header to undefined so the GC never
    // sees uninitialized memory.
    ObjectSlot start = context.RawField(Context::kTodoHeaderSize);
    ObjectSlot end = context.RawField(size);
    size_t slot_count = end - start;
    MemsetTagged(start, *undefined_value(), slot_count);
  }
  return context;
}
1181
// Creates a new, mostly-empty NativeContext together with its own context
// map. The context and its map point at each other; the remaining fields
// are set to neutral initial values and filled in later by the bootstrapper.
Handle<NativeContext> Factory::NewNativeContext() {
  Handle<Map> map = NewMap(NATIVE_CONTEXT_TYPE, kVariableSizeSentinel);
  NativeContext context = NativeContext::cast(NewContextInternal(
      map, NativeContext::kSize, NativeContext::NATIVE_CONTEXT_SLOTS,
      AllocationType::kOld));
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  // Wire up the mutual context <-> map references.
  context.set_native_context_map(*map);
  map->set_native_context(context);
  // The ExternalPointerTable is a C++ object.
  context.AllocateExternalPointerEntries(isolate());
  context.set_scope_info(*native_scope_info());
  // A native context has no outer context.
  context.set_previous(Context());
  context.set_extension(*undefined_value());
  context.set_errors_thrown(Smi::zero());
  context.set_math_random_index(Smi::zero());
  context.set_serialized_objects(*empty_fixed_array());
  context.set_microtask_queue(isolate(), nullptr);
  context.set_osr_code_cache(*empty_weak_fixed_array());
  context.set_retained_maps(*empty_weak_array_list());
  return handle(context, isolate());
}
1203
// Creates a script context (old space) for a script scope, chained to the
// native context |outer|.
Handle<Context> Factory::NewScriptContext(Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), SCRIPT_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context =
      NewContextInternal(handle(outer->script_context_map(), isolate()),
                         Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kOld);
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  DCHECK(context.IsScriptContext());
  return handle(context, isolate());
}
1218
NewScriptContextTable()1219 Handle<ScriptContextTable> Factory::NewScriptContextTable() {
1220 Handle<ScriptContextTable> context_table = Handle<ScriptContextTable>::cast(
1221 NewFixedArrayWithMap(read_only_roots().script_context_table_map_handle(),
1222 ScriptContextTable::kMinLength));
1223 context_table->set_used(0, kReleaseStore);
1224 return context_table;
1225 }
1226
// Creates a module context (old space) whose extension slot holds the
// SourceTextModule it belongs to.
Handle<Context> Factory::NewModuleContext(Handle<SourceTextModule> module,
                                          Handle<NativeContext> outer,
                                          Handle<ScopeInfo> scope_info) {
  DCHECK_EQ(scope_info->scope_type(), MODULE_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context = NewContextInternal(
      isolate()->module_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kOld);
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  context.set_extension(*module);
  DCHECK(context.IsModuleContext());
  return handle(context, isolate());
}
1242
// Creates a function or eval context (young space), selected by the scope
// type of |scope_info|. Other scope types are not valid here.
Handle<Context> Factory::NewFunctionContext(Handle<Context> outer,
                                            Handle<ScopeInfo> scope_info) {
  Handle<Map> map;
  switch (scope_info->scope_type()) {
    case EVAL_SCOPE:
      map = isolate()->eval_context_map();
      break;
    case FUNCTION_SCOPE:
      map = isolate()->function_context_map();
      break;
    default:
      UNREACHABLE();
  }
  int variadic_part_length = scope_info->ContextLength();
  Context context =
      NewContextInternal(map, Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  context.set_scope_info(*scope_info);
  context.set_previous(*outer);
  return handle(context, isolate());
}
1265
// Creates a catch context (young space) with the thrown object stored in the
// slot right after the minimal context header.
Handle<Context> Factory::NewCatchContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info,
                                         Handle<Object> thrown_object) {
  DCHECK_EQ(scope_info->scope_type(), CATCH_SCOPE);
  STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
  // TODO(ishell): Take the details from CatchContext class.
  int variadic_part_length = Context::MIN_CONTEXT_SLOTS + 1;
  Context context = NewContextInternal(
      isolate()->catch_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below. The context
  // was just allocated in young space (DCHECK'd), so write barriers can be
  // skipped.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set(Context::THROWN_OBJECT_INDEX, *thrown_object, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}
1283
// Creates a debug-evaluate context (young space). |extension| may be null
// (stored as undefined) and |wrapped| is optional; when present it is stored
// in the wrapped-context slot.
Handle<Context> Factory::NewDebugEvaluateContext(Handle<Context> previous,
                                                 Handle<ScopeInfo> scope_info,
                                                 Handle<JSReceiver> extension,
                                                 Handle<Context> wrapped) {
  DCHECK(scope_info->IsDebugEvaluateScope());
  Handle<HeapObject> ext = extension.is_null()
                               ? Handle<HeapObject>::cast(undefined_value())
                               : Handle<HeapObject>::cast(extension);
  // TODO(ishell): Take the details from DebugEvaluateContextContext class.
  int variadic_part_length = Context::MIN_CONTEXT_EXTENDED_SLOTS + 1;
  Context context =
      NewContextInternal(isolate()->debug_evaluate_context_map(),
                         Context::SizeFor(variadic_part_length),
                         variadic_part_length, AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below. The context
  // was just allocated in young space (DCHECK'd), so write barriers can be
  // skipped.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set_extension(*ext, SKIP_WRITE_BARRIER);
  if (!wrapped.is_null()) {
    context.set(Context::WRAPPED_CONTEXT_INDEX, *wrapped, SKIP_WRITE_BARRIER);
  }
  return handle(context, isolate());
}
1308
// Creates a with-statement context (young space) whose extension slot holds
// the receiver object of the `with`.
Handle<Context> Factory::NewWithContext(Handle<Context> previous,
                                        Handle<ScopeInfo> scope_info,
                                        Handle<JSReceiver> extension) {
  DCHECK_EQ(scope_info->scope_type(), WITH_SCOPE);
  // TODO(ishell): Take the details from WithContext class.
  int variadic_part_length = Context::MIN_CONTEXT_EXTENDED_SLOTS;
  Context context = NewContextInternal(
      isolate()->with_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below. The context
  // was just allocated in young space (DCHECK'd), so write barriers can be
  // skipped.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  context.set_extension(*extension, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}
1325
// Creates a block context (young space) for a block or class scope.
Handle<Context> Factory::NewBlockContext(Handle<Context> previous,
                                         Handle<ScopeInfo> scope_info) {
  DCHECK_IMPLIES(scope_info->scope_type() != BLOCK_SCOPE,
                 scope_info->scope_type() == CLASS_SCOPE);
  int variadic_part_length = scope_info->ContextLength();
  Context context = NewContextInternal(
      isolate()->block_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below. The context
  // was just allocated in young space (DCHECK'd), so write barriers can be
  // skipped.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(*scope_info, SKIP_WRITE_BARRIER);
  context.set_previous(*previous, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}
1340
// Creates a context for use by builtins (young space), with an empty scope
// info and |native_context| as the previous context.
Handle<Context> Factory::NewBuiltinContext(Handle<NativeContext> native_context,
                                           int variadic_part_length) {
  DCHECK_LE(Context::MIN_CONTEXT_SLOTS, variadic_part_length);
  Context context = NewContextInternal(
      isolate()->function_context_map(), Context::SizeFor(variadic_part_length),
      variadic_part_length, AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below. The context
  // was just allocated in young space (DCHECK'd), so write barriers can be
  // skipped.
  DisallowGarbageCollection no_gc;
  DCHECK_IMPLIES(!FLAG_single_generation, Heap::InYoungGeneration(context));
  context.set_scope_info(read_only_roots().empty_scope_info(),
                         SKIP_WRITE_BARRIER);
  context.set_previous(*native_context, SKIP_WRITE_BARRIER);
  return handle(context, isolate());
}
1354
NewAliasedArgumentsEntry(int aliased_context_slot)1355 Handle<AliasedArgumentsEntry> Factory::NewAliasedArgumentsEntry(
1356 int aliased_context_slot) {
1357 auto entry = NewStructInternal<AliasedArgumentsEntry>(
1358 ALIASED_ARGUMENTS_ENTRY_TYPE, AllocationType::kYoung);
1359 entry.set_aliased_context_slot(aliased_context_slot);
1360 return handle(entry, isolate());
1361 }
1362
// Creates an AccessorInfo (old space) with neutral defaults: empty name,
// cleared flags, sloppy, no property attributes, and zeroed accessor slots.
Handle<AccessorInfo> Factory::NewAccessorInfo() {
  auto info =
      NewStructInternal<AccessorInfo>(ACCESSOR_INFO_TYPE, AllocationType::kOld);
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  // The empty string is a read-only root, so the write barrier can be
  // skipped.
  info.set_name(*empty_string(), SKIP_WRITE_BARRIER);
  info.set_flags(0);  // Must clear the flags, it was initialized as undefined.
  info.set_is_sloppy(true);
  info.set_initial_property_attributes(NONE);

  // Clear some other fields that should not be undefined.
  info.set_getter(Smi::zero(), SKIP_WRITE_BARRIER);
  info.set_setter(Smi::zero(), SKIP_WRITE_BARRIER);
  info.set_js_getter(Smi::zero(), SKIP_WRITE_BARRIER);
  return handle(info, isolate());
}
1378
AddToScriptList(Handle<Script> script)1379 void Factory::AddToScriptList(Handle<Script> script) {
1380 Handle<WeakArrayList> scripts = script_list();
1381 scripts = WeakArrayList::Append(isolate(), scripts,
1382 MaybeObjectHandle::Weak(script));
1383 isolate()->heap()->set_script_list(*scripts);
1384 }
1385
CloneScript(Handle<Script> script)1386 Handle<Script> Factory::CloneScript(Handle<Script> script) {
1387 Heap* heap = isolate()->heap();
1388 int script_id = isolate()->GetNextScriptId();
1389 Handle<Script> new_script_handle =
1390 Handle<Script>::cast(NewStruct(SCRIPT_TYPE, AllocationType::kOld));
1391 {
1392 DisallowGarbageCollection no_gc;
1393 Script new_script = *new_script_handle;
1394 const Script old_script = *script;
1395 new_script.set_source(old_script.source());
1396 new_script.set_name(old_script.name());
1397 new_script.set_id(script_id);
1398 new_script.set_line_offset(old_script.line_offset());
1399 new_script.set_column_offset(old_script.column_offset());
1400 new_script.set_context_data(old_script.context_data());
1401 new_script.set_type(old_script.type());
1402 new_script.set_line_ends(*undefined_value(), SKIP_WRITE_BARRIER);
1403 new_script.set_eval_from_shared_or_wrapped_arguments_or_sfi_table(
1404 script->eval_from_shared_or_wrapped_arguments_or_sfi_table());
1405 new_script.set_shared_function_infos(*empty_weak_fixed_array(),
1406 SKIP_WRITE_BARRIER);
1407 new_script.set_eval_from_position(old_script.eval_from_position());
1408 new_script.set_flags(old_script.flags());
1409 new_script.set_host_defined_options(old_script.host_defined_options());
1410 }
1411 Handle<WeakArrayList> scripts = script_list();
1412 scripts = WeakArrayList::AddToEnd(isolate(), scripts,
1413 MaybeObjectHandle::Weak(new_script_handle));
1414 heap->set_script_list(*scripts);
1415 LOG(isolate(), ScriptEvent(Logger::ScriptEventType::kCreate, script_id));
1416 return new_script_handle;
1417 }
1418
// Creates a CallableTask microtask (young space) pairing |callable| with the
// context it should run in.
Handle<CallableTask> Factory::NewCallableTask(Handle<JSReceiver> callable,
                                              Handle<Context> context) {
  DCHECK(callable->IsCallable());
  auto microtask = NewStructInternal<CallableTask>(CALLABLE_TASK_TYPE,
                                                   AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below. The task
  // was just allocated in young space, so write barriers are skipped.
  DisallowGarbageCollection no_gc;
  microtask.set_callable(*callable, SKIP_WRITE_BARRIER);
  microtask.set_context(*context, SKIP_WRITE_BARRIER);
  return handle(microtask, isolate());
}
1429
// Creates a CallbackTask microtask (young space) pairing a C++ callback with
// its opaque data, both wrapped as Foreign objects.
Handle<CallbackTask> Factory::NewCallbackTask(Handle<Foreign> callback,
                                              Handle<Foreign> data) {
  auto microtask = NewStructInternal<CallbackTask>(CALLBACK_TASK_TYPE,
                                                   AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below. The task
  // was just allocated in young space, so write barriers are skipped.
  DisallowGarbageCollection no_gc;
  microtask.set_callback(*callback, SKIP_WRITE_BARRIER);
  microtask.set_data(*data, SKIP_WRITE_BARRIER);
  return handle(microtask, isolate());
}
1439
// Creates the microtask that resolves |promise_to_resolve| by invoking
// |then| on |thenable| (spec: PromiseResolveThenableJob).
Handle<PromiseResolveThenableJobTask> Factory::NewPromiseResolveThenableJobTask(
    Handle<JSPromise> promise_to_resolve, Handle<JSReceiver> thenable,
    Handle<JSReceiver> then, Handle<Context> context) {
  DCHECK(then->IsCallable());
  auto microtask = NewStructInternal<PromiseResolveThenableJobTask>(
      PROMISE_RESOLVE_THENABLE_JOB_TASK_TYPE, AllocationType::kYoung);
  // Raw object in hand: no GC until the handle is created below. The task
  // was just allocated in young space, so write barriers are skipped.
  DisallowGarbageCollection no_gc;
  microtask.set_promise_to_resolve(*promise_to_resolve, SKIP_WRITE_BARRIER);
  microtask.set_thenable(*thenable, SKIP_WRITE_BARRIER);
  microtask.set_then(*then, SKIP_WRITE_BARRIER);
  microtask.set_context(*context, SKIP_WRITE_BARRIER);
  return handle(microtask, isolate());
}
1453
// Creates a Foreign wrapping the external address |addr| (young space).
Handle<Foreign> Factory::NewForeign(Address addr) {
  // Statically ensure that it is safe to allocate foreigns in paged spaces.
  STATIC_ASSERT(Foreign::kSize <= kMaxRegularHeapObjectSize);
  Map map = *foreign_map();
  Foreign foreign = Foreign::cast(AllocateRawWithImmortalMap(
      map.instance_size(), AllocationType::kYoung, map));
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  foreign.AllocateExternalPointerEntries(isolate());
  foreign.set_foreign_address(isolate(), addr);
  return handle(foreign, isolate());
}
1465
1466 #if V8_ENABLE_WEBASSEMBLY
// Creates a WasmTypeInfo for a wasm type. The supertypes list is the
// parent's supertypes plus the parent itself (or empty for a root type); the
// subtypes list starts empty.
Handle<WasmTypeInfo> Factory::NewWasmTypeInfo(
    Address type_address, Handle<Map> opt_parent, int instance_size_bytes,
    Handle<WasmInstanceObject> instance) {
  // We pretenure WasmTypeInfo objects because they are refererenced by Maps,
  // which are assumed to be long-lived. The supertypes list is constant
  // after initialization, so we pretenure that too.
  // The subtypes list, however, is expected to grow (and hence be replaced),
  // so we don't pretenure it.
  Handle<ArrayList> subtypes = ArrayList::New(isolate(), 0);
  Handle<FixedArray> supertypes;
  if (opt_parent.is_null()) {
    supertypes = NewFixedArray(0);
  } else {
    // Copy the parent's supertype chain and append the parent at the end.
    supertypes = CopyArrayAndGrow(
        handle(opt_parent->wasm_type_info().supertypes(), isolate()), 1,
        AllocationType::kOld);
    supertypes->set(supertypes->length() - 1, *opt_parent);
  }
  Map map = *wasm_type_info_map();
  WasmTypeInfo result = WasmTypeInfo::cast(AllocateRawWithImmortalMap(
      map.instance_size(), AllocationType::kOld, map));
  // Raw object in hand: no GC until the handle is created below.
  DisallowGarbageCollection no_gc;
  result.AllocateExternalPointerEntries(isolate());
  result.set_foreign_address(isolate(), type_address);
  result.set_supertypes(*supertypes, SKIP_WRITE_BARRIER);
  result.set_subtypes(*subtypes);
  result.set_instance_size(instance_size_bytes);
  result.set_instance(*instance);
  return handle(result, isolate());
}
1497
// Creates the function data for a WasmJSFunction wrapping the JS |callable|.
Handle<WasmJSFunctionData> Factory::NewWasmJSFunctionData(
    Address opt_call_target, Handle<JSReceiver> callable, int return_count,
    int parameter_count, Handle<PodArray<wasm::ValueType>> serialized_sig,
    Handle<Code> wrapper_code) {
  // The ref is a (null, callable) pair; there is no instance for a JS
  // function.
  Handle<Tuple2> pair = NewTuple2(null_value(), callable, AllocationType::kOld);
  Map map = *wasm_js_function_data_map();
  WasmJSFunctionData result =
      WasmJSFunctionData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  result.AllocateExternalPointerEntries(isolate());
  result.set_foreign_address(isolate(), opt_call_target);
  result.set_ref(*pair);
  result.set_wrapper_code(*wrapper_code);
  result.set_serialized_return_count(return_count);
  result.set_serialized_parameter_count(parameter_count);
  result.set_serialized_signature(*serialized_sig);
  // Default value, will be overwritten by the caller.
  result.set_wasm_to_js_wrapper_code(
      isolate()->heap()->builtin(Builtin::kAbort));
  return handle(result, isolate());
}
1520
// Creates the function data for a wasm function exported to JavaScript.
Handle<WasmExportedFunctionData> Factory::NewWasmExportedFunctionData(
    Handle<Code> export_wrapper, Handle<WasmInstanceObject> instance,
    Address call_target, Handle<Object> ref, int func_index,
    Address sig_address, int wrapper_budget) {
  // Wrap the raw signature pointer so it can live in a tagged field.
  Handle<Foreign> sig_foreign = NewForeign(sig_address);
  Map map = *wasm_exported_function_data_map();
  WasmExportedFunctionData result =
      WasmExportedFunctionData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  result.AllocateExternalPointerEntries(isolate());
  result.set_foreign_address(isolate(), call_target);
  result.set_ref(*ref);
  result.set_wrapper_code(*export_wrapper);
  result.set_instance(*instance);
  result.set_function_index(func_index);
  result.set_signature(*sig_foreign);
  result.set_wrapper_budget(wrapper_budget);
  // No C wrapper yet: initialize with the Illegal builtin as a placeholder.
  result.set_c_wrapper_code(ToCodeT(*BUILTIN_CODE(isolate(), Illegal)),
                            SKIP_WRITE_BARRIER);
  result.set_packed_args_size(0);
  return handle(result, isolate());
}
1544
// Creates the function data for a function defined through the wasm C API.
Handle<WasmCapiFunctionData> Factory::NewWasmCapiFunctionData(
    Address call_target, Handle<Foreign> embedder_data,
    Handle<Code> wrapper_code,
    Handle<PodArray<wasm::ValueType>> serialized_sig) {
  // C API functions have neither an instance nor a JS callable: the ref is a
  // (null, null) pair.
  Handle<Tuple2> pair =
      NewTuple2(null_value(), null_value(), AllocationType::kOld);
  Map map = *wasm_capi_function_data_map();
  WasmCapiFunctionData result =
      WasmCapiFunctionData::cast(AllocateRawWithImmortalMap(
          map.instance_size(), AllocationType::kOld, map));
  DisallowGarbageCollection no_gc;
  result.AllocateExternalPointerEntries(isolate());
  result.set_foreign_address(isolate(), call_target);
  result.set_ref(*pair);
  result.set_wrapper_code(*wrapper_code);
  result.set_embedder_data(*embedder_data);
  result.set_serialized_signature(*serialized_sig);
  return handle(result, isolate());
}
1564
// Allocates a WasmArray with map |map| and initializes it from |elements|.
Handle<WasmArray> Factory::NewWasmArray(
    const wasm::ArrayType* type, const std::vector<wasm::WasmValue>& elements,
    Handle<Map> map) {
  uint32_t length = static_cast<uint32_t>(elements.size());
  HeapObject raw =
      AllocateRaw(WasmArray::SizeFor(*map, length), AllocationType::kYoung);
  raw.set_map_after_allocation(*map);
  WasmArray result = WasmArray::cast(raw);
  result.set_raw_properties_or_hash(*empty_fixed_array(), kRelaxedStore);
  result.set_length(length);
  if (type->element_type().is_numeric()) {
    // Numeric elements are copied byte-wise in their packed representation.
    for (uint32_t i = 0; i < length; i++) {
      Address address = result.ElementAddress(i);
      elements[i]
          .Packed(type->element_type())
          .CopyTo(reinterpret_cast<byte*>(address));
    }
  } else {
    // Reference elements are stored as tagged fields.
    for (uint32_t i = 0; i < length; i++) {
      int offset = result.element_offset(i);
      TaggedField<Object>::store(result, offset, *elements[i].to_ref());
    }
  }
  return handle(result, isolate());
}
1590
// Allocates a WasmStruct with map |map| and initializes its fields from
// |args| (one WasmValue per field of |type|).
Handle<WasmStruct> Factory::NewWasmStruct(const wasm::StructType* type,
                                          wasm::WasmValue* args,
                                          Handle<Map> map) {
  DCHECK_EQ(WasmStruct::Size(type), map->wasm_type_info().instance_size());
  HeapObject raw = AllocateRaw(WasmStruct::Size(type), AllocationType::kYoung);
  raw.set_map_after_allocation(*map);
  WasmStruct result = WasmStruct::cast(raw);
  result.set_raw_properties_or_hash(*empty_fixed_array(), kRelaxedStore);
  for (uint32_t i = 0; i < type->field_count(); i++) {
    int offset = type->field_offset(i);
    if (type->field(i).is_numeric()) {
      // Numeric fields are copied byte-wise in their packed representation.
      Address address = result.RawFieldAddress(offset);
      args[i].Packed(type->field(i)).CopyTo(reinterpret_cast<byte*>(address));
    } else {
      // Reference fields are tagged; field offsets are relative to the
      // header, so account for it here.
      offset += WasmStruct::kHeaderSize;
      TaggedField<Object>::store(result, offset, *args[i].to_ref());
    }
  }
  return handle(result, isolate());
}
1611
// Creates the SharedFunctionInfo backing an exported wasm function.
Handle<SharedFunctionInfo>
Factory::NewSharedFunctionInfoForWasmExportedFunction(
    Handle<String> name, Handle<WasmExportedFunctionData> data) {
  return NewSharedFunctionInfo(name, data, Builtin::kNoBuiltinId);
}
1617
// Creates the SharedFunctionInfo backing a WasmJSFunction.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWasmJSFunction(
    Handle<String> name, Handle<WasmJSFunctionData> data) {
  return NewSharedFunctionInfo(name, data, Builtin::kNoBuiltinId);
}
1622
// Creates the (nameless) SharedFunctionInfo backing a wasm C API function.
Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForWasmCapiFunction(
    Handle<WasmCapiFunctionData> data) {
  return NewSharedFunctionInfo(MaybeHandle<String>(), data,
                               Builtin::kNoBuiltinId, kConciseMethod);
}
1628 #endif // V8_ENABLE_WEBASSEMBLY
1629
// Creates an old-space Cell holding |value|.
Handle<Cell> Factory::NewCell(Handle<Object> value) {
  STATIC_ASSERT(Cell::kSize <= kMaxRegularHeapObjectSize);
  Cell result = Cell::cast(AllocateRawWithImmortalMap(
      Cell::kSize, AllocationType::kOld, *cell_map()));
  DisallowGarbageCollection no_gc;
  result.set_value(*value);
  return handle(result, isolate());
}
1638
// Creates a FeedbackCell in the "no closures" state holding |value|.
Handle<FeedbackCell> Factory::NewNoClosuresCell(Handle<HeapObject> value) {
  FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
      FeedbackCell::kAlignedSize, AllocationType::kOld,
      *no_closures_cell_map()));
  DisallowGarbageCollection no_gc;
  result.set_value(*value);
  result.SetInitialInterruptBudget();
  result.clear_padding();
  return handle(result, isolate());
}
1649
// Creates a FeedbackCell in the "one closure" state holding |value|. Differs
// from NewNoClosuresCell/NewManyClosuresCell only in the map used.
Handle<FeedbackCell> Factory::NewOneClosureCell(Handle<HeapObject> value) {
  FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
      FeedbackCell::kAlignedSize, AllocationType::kOld,
      *one_closure_cell_map()));
  DisallowGarbageCollection no_gc;
  result.set_value(*value);
  result.SetInitialInterruptBudget();
  result.clear_padding();
  return handle(result, isolate());
}
1660
// Creates a FeedbackCell in the "many closures" state holding |value|.
Handle<FeedbackCell> Factory::NewManyClosuresCell(Handle<HeapObject> value) {
  FeedbackCell result = FeedbackCell::cast(AllocateRawWithImmortalMap(
      FeedbackCell::kAlignedSize, AllocationType::kOld,
      *many_closures_cell_map()));
  DisallowGarbageCollection no_gc;
  result.set_value(*value);
  result.SetInitialInterruptBudget();
  result.clear_padding();
  return handle(result, isolate());
}
1671
// Creates a PropertyCell for the unique |name| with the given |details| and
// initial |value|.
Handle<PropertyCell> Factory::NewPropertyCell(Handle<Name> name,
                                              PropertyDetails details,
                                              Handle<Object> value,
                                              AllocationType allocation) {
  DCHECK(name->IsUniqueName());
  STATIC_ASSERT(PropertyCell::kSize <= kMaxRegularHeapObjectSize);
  PropertyCell cell = PropertyCell::cast(AllocateRawWithImmortalMap(
      PropertyCell::kSize, allocation, *global_property_cell_map()));
  DisallowGarbageCollection no_gc;
  cell.set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()),
                          SKIP_WRITE_BARRIER);
  // Young-space cells can skip the barrier; old-space cells must keep it.
  WriteBarrierMode mode = allocation == AllocationType::kYoung
                              ? SKIP_WRITE_BARRIER
                              : UPDATE_WRITE_BARRIER;
  cell.set_name(*name, mode);
  cell.set_value(*value, mode);
  cell.set_property_details_raw(details.AsSmi(), SKIP_WRITE_BARRIER);
  return handle(cell, isolate());
}
1691
// Creates a protector cell, initialized to the "protector valid" sentinel.
Handle<PropertyCell> Factory::NewProtector() {
  return NewPropertyCell(
      empty_string(), PropertyDetails::Empty(PropertyCellType::kConstantType),
      handle(Smi::FromInt(Protectors::kProtectorValid), isolate()));
}
1697
// Creates a TransitionArray with room for |number_of_transitions| entries
// plus |slack| extra entries; prototype transitions start out empty.
Handle<TransitionArray> Factory::NewTransitionArray(int number_of_transitions,
                                                    int slack) {
  int capacity = TransitionArray::LengthFor(number_of_transitions + slack);
  Handle<TransitionArray> array = Handle<TransitionArray>::cast(
      NewWeakFixedArrayWithMap(read_only_roots().transition_array_map(),
                               capacity, AllocationType::kOld));
  // Transition arrays are AllocationType::kOld. When black allocation is on we
  // have to add the transition array to the list of
  // encountered_transition_arrays.
  Heap* heap = isolate()->heap();
  if (heap->incremental_marking()->black_allocation()) {
    heap->mark_compact_collector()->AddTransitionArray(*array);
  }
  array->WeakFixedArray::Set(TransitionArray::kPrototypeTransitionsIndex,
                             MaybeObject::FromObject(Smi::zero()));
  array->WeakFixedArray::Set(
      TransitionArray::kTransitionLengthIndex,
      MaybeObject::FromObject(Smi::FromInt(number_of_transitions)));
  return array;
}
1718
// Creates an AllocationSite; when |with_weak_next| is set, the site is also
// linked into the heap's global allocation-sites list.
Handle<AllocationSite> Factory::NewAllocationSite(bool with_weak_next) {
  Handle<Map> map = with_weak_next ? allocation_site_map()
                                   : allocation_site_without_weaknext_map();
  Handle<AllocationSite> site(
      AllocationSite::cast(New(map, AllocationType::kOld)), isolate());
  site->Initialize();

  if (with_weak_next) {
    // Link the site
    site->set_weak_next(isolate()->heap()->allocation_sites_list());
    isolate()->heap()->set_allocation_sites_list(*site);
  }
  return site;
}
1733
// Allocates a new Map in map space and initializes it via InitializeMap.
Handle<Map> Factory::NewMap(InstanceType type, int instance_size,
                            ElementsKind elements_kind, int inobject_properties,
                            AllocationType allocation_type) {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  // JS objects without fast transitionable elements kinds must use a
  // dictionary or terminal elements kind.
  DCHECK_IMPLIES(InstanceTypeChecker::IsJSObject(type) &&
                     !Map::CanHaveFastTransitionableElementsKind(type),
                 IsDictionaryElementsKind(elements_kind) ||
                     IsTerminalElementsKind(elements_kind));
  DCHECK(allocation_type == AllocationType::kMap ||
         allocation_type == AllocationType::kSharedMap);
  HeapObject result = isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(
      Map::kSize, allocation_type);
  DisallowGarbageCollection no_gc;
  result.set_map_after_allocation(*meta_map(), SKIP_WRITE_BARRIER);
  return handle(InitializeMap(Map::cast(result), type, instance_size,
                              elements_kind, inobject_properties),
                isolate());
}
1752
// Initializes all fields of a freshly allocated |map|. Must not trigger GC,
// since |map| is held as a raw object.
Map Factory::InitializeMap(Map map, InstanceType type, int instance_size,
                           ElementsKind elements_kind,
                           int inobject_properties) {
  DisallowGarbageCollection no_gc;
  map.set_instance_type(type);
  HeapObject raw_null_value = *null_value();
  map.set_prototype(raw_null_value, SKIP_WRITE_BARRIER);
  map.set_constructor_or_back_pointer(raw_null_value, SKIP_WRITE_BARRIER);
  map.set_instance_size(instance_size);
  if (map.IsJSObjectMap()) {
    DCHECK(!ReadOnlyHeap::Contains(map));
    // In-object properties live at the end of the instance.
    map.SetInObjectPropertiesStartInWords(instance_size / kTaggedSize -
                                          inobject_properties);
    DCHECK_EQ(map.GetInObjectProperties(), inobject_properties);
    map.set_prototype_validity_cell(*invalid_prototype_validity_cell());
  } else {
    // Non-JSObject maps have no in-object properties.
    DCHECK_EQ(inobject_properties, 0);
    map.set_inobject_properties_start_or_constructor_function_index(0);
    map.set_prototype_validity_cell(Smi::FromInt(Map::kPrototypeChainValid),
                                    SKIP_WRITE_BARRIER);
  }
  map.set_dependent_code(DependentCode::cast(*empty_weak_fixed_array()),
                         SKIP_WRITE_BARRIER);
  map.set_raw_transitions(MaybeObject::FromSmi(Smi::zero()),
                          SKIP_WRITE_BARRIER);
  map.SetInObjectUnusedPropertyFields(inobject_properties);
  map.SetInstanceDescriptors(isolate(), *empty_descriptor_array(), 0);
  // Must be called only after |instance_type| and |instance_size| are set.
  map.set_visitor_id(Map::GetVisitorId(map));
  map.set_bit_field(0);
  map.set_bit_field2(Map::Bits2::NewTargetIsBaseBit::encode(true));
  int bit_field3 =
      Map::Bits3::EnumLengthBits::encode(kInvalidEnumCacheSentinel) |
      Map::Bits3::OwnsDescriptorsBit::encode(true) |
      Map::Bits3::ConstructionCounterBits::encode(Map::kNoSlackTracking) |
      Map::Bits3::IsExtensibleBit::encode(true);
  map.set_bit_field3(bit_field3);
  DCHECK(!map.is_in_retained_map_list());
  map.clear_padding();
  map.set_elements_kind(elements_kind);
  isolate()->counters()->maps_created()->Increment();
  if (FLAG_log_maps) LOG(isolate(), MapCreate(map));
  return map;
}
1797
// Clones |source| without an allocation site (see
// CopyJSObjectWithAllocationSite for the heavy lifting).
Handle<JSObject> Factory::CopyJSObject(Handle<JSObject> source) {
  return CopyJSObjectWithAllocationSite(source, Handle<AllocationSite>());
}
1801
// Clones |source| byte-for-byte, then deep-copies its elements and property
// backing stores. When |site| is given, an AllocationMemento is appended
// directly after the clone.
Handle<JSObject> Factory::CopyJSObjectWithAllocationSite(
    Handle<JSObject> source, Handle<AllocationSite> site) {
  Handle<Map> map(source->map(), isolate());

  // We can only clone regexps, normal objects, api objects, errors or arrays.
  // Copying anything else will break invariants.
  InstanceType instance_type = map->instance_type();
  bool is_clonable_js_type =
      instance_type == JS_REG_EXP_TYPE || instance_type == JS_OBJECT_TYPE ||
      instance_type == JS_ERROR_TYPE || instance_type == JS_ARRAY_TYPE ||
      instance_type == JS_SPECIAL_API_OBJECT_TYPE ||
      InstanceTypeChecker::IsJSApiObject(instance_type);
  bool is_clonable_wasm_type = false;
#if V8_ENABLE_WEBASSEMBLY
  is_clonable_wasm_type = instance_type == WASM_GLOBAL_OBJECT_TYPE ||
                          instance_type == WASM_INSTANCE_OBJECT_TYPE ||
                          instance_type == WASM_MEMORY_OBJECT_TYPE ||
                          instance_type == WASM_MODULE_OBJECT_TYPE ||
                          instance_type == WASM_TABLE_OBJECT_TYPE;
#endif  // V8_ENABLE_WEBASSEMBLY
  CHECK(is_clonable_js_type || is_clonable_wasm_type);

  DCHECK(site.is_null() || AllocationSite::CanTrack(instance_type));

  int object_size = map->instance_size();
  int adjusted_object_size = object_size;
  if (!site.is_null()) {
    // Reserve extra space for the trailing AllocationMemento.
    DCHECK(V8_ALLOCATION_SITE_TRACKING_BOOL);
    adjusted_object_size += AllocationMemento::kSize;
  }
  HeapObject raw_clone = isolate()->heap()->AllocateRawWith<Heap::kRetryOrFail>(
      adjusted_object_size, AllocationType::kYoung);

  DCHECK(Heap::InYoungGeneration(raw_clone) || FLAG_single_generation);

  // Shallow-copy the whole object, including the map.
  Heap::CopyBlock(raw_clone.address(), source->address(), object_size);
  Handle<JSObject> clone(JSObject::cast(raw_clone), isolate());

  if (FLAG_enable_unconditional_write_barriers) {
    // By default, we shouldn't need to update the write barrier here, as the
    // clone will be allocated in new space.
    const ObjectSlot start(raw_clone.address());
    const ObjectSlot end(raw_clone.address() + object_size);
    isolate()->heap()->WriteBarrierForRange(raw_clone, start, end);
  }
  if (!site.is_null()) {
    // The memento lives in the reserved space right after the object.
    AllocationMemento alloc_memento = AllocationMemento::unchecked_cast(
        Object(raw_clone.ptr() + object_size));
    InitializeAllocationMemento(alloc_memento, *site);
  }

  SLOW_DCHECK(clone->GetElementsKind() == source->GetElementsKind());
  FixedArrayBase elements = source->elements();
  // Update elements if necessary.
  if (elements.length() > 0) {
    FixedArrayBase elem;
    if (elements.map() == *fixed_cow_array_map()) {
      // Copy-on-write arrays can be shared as-is.
      elem = elements;
    } else if (source->HasDoubleElements()) {
      elem = *CopyFixedDoubleArray(
          handle(FixedDoubleArray::cast(elements), isolate()));
    } else {
      elem = *CopyFixedArray(handle(FixedArray::cast(elements), isolate()));
    }
    clone->set_elements(elem);
  }

  // Update properties if necessary.
  if (source->HasFastProperties()) {
    PropertyArray properties = source->property_array();
    if (properties.length() > 0) {
      // TODO(gsathya): Do not copy hash code.
      Handle<PropertyArray> prop = CopyArrayWithMap(
          handle(properties, isolate()), handle(properties.map(), isolate()));
      clone->set_raw_properties_or_hash(*prop, kRelaxedStore);
    }
  } else {
    // Dictionary-mode properties: copy the dictionary backing store.
    Handle<Object> copied_properties;
    if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
      copied_properties = SwissNameDictionary::ShallowCopy(
          isolate(), handle(source->property_dictionary_swiss(), isolate()));
    } else {
      copied_properties =
          CopyFixedArray(handle(source->property_dictionary(), isolate()));
    }
    clone->set_raw_properties_or_hash(*copied_properties, kRelaxedStore);
  }
  return clone;
}
1891
namespace {
// Sets the length of a freshly allocated array-like object.
template <typename T>
void initialize_length(T array, int length) {
  array.set_length(length);
}

// PropertyArray uses a dedicated length initializer rather than a plain
// setter.
template <>
void initialize_length<PropertyArray>(PropertyArray array, int length) {
  array.initialize_length(length);
}

// Resets all embedder fields of |obj| to Smi zero.
inline void ZeroEmbedderFields(i::JSObject obj) {
  int count = obj.GetEmbedderFieldCount();
  for (int i = 0; i < count; i++) {
    obj.SetEmbedderField(i, Smi::zero());
  }
}

}  // namespace
1911
// Creates a young-space copy of |src| using |map| as the copy's map.
template <typename T>
Handle<T> Factory::CopyArrayWithMap(Handle<T> src, Handle<Map> map) {
  int len = src->length();
  HeapObject new_object = AllocateRawFixedArray(len, AllocationType::kYoung);
  DisallowGarbageCollection no_gc;
  new_object.set_map_after_allocation(*map, SKIP_WRITE_BARRIER);
  T result = T::cast(new_object);
  initialize_length(result, len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *src, 0, len, mode);
  return handle(result, isolate());
}
1925
// Creates a copy of |src| enlarged by |grow_by| slots; the new slots are
// filled with undefined.
template <typename T>
Handle<T> Factory::CopyArrayAndGrow(Handle<T> src, int grow_by,
                                    AllocationType allocation) {
  DCHECK_LT(0, grow_by);
  DCHECK_LE(grow_by, kMaxInt - src->length());
  int old_len = src->length();
  int new_len = old_len + grow_by;
  HeapObject new_object = AllocateRawFixedArray(new_len, allocation);
  DisallowGarbageCollection no_gc;
  new_object.set_map_after_allocation(src->map(), SKIP_WRITE_BARRIER);
  T result = T::cast(new_object);
  initialize_length(result, new_len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *src, 0, old_len, mode);
  // Initialize the freshly added tail with undefined.
  MemsetTagged(ObjectSlot(result.data_start() + old_len),
               read_only_roots().undefined_value(), grow_by);
  return handle(result, isolate());
}
1945
// Copies |array| into a new FixedArray carrying |map|.
Handle<FixedArray> Factory::CopyFixedArrayWithMap(Handle<FixedArray> array,
                                                  Handle<Map> map) {
  return CopyArrayWithMap(array, map);
}
1950
// Copies |array| into a young-space FixedArray with |grow_by| extra slots.
Handle<FixedArray> Factory::CopyFixedArrayAndGrow(Handle<FixedArray> array,
                                                  int grow_by) {
  return CopyArrayAndGrow(array, grow_by, AllocationType::kYoung);
}
1955
// Allocates a WeakArrayList of |capacity| with length 0; the payload slots
// are NOT initialized (callers must fill or Memset them).
Handle<WeakArrayList> Factory::NewUninitializedWeakArrayList(
    int capacity, AllocationType allocation) {
  DCHECK_LE(0, capacity);
  // The canonical empty list is shared.
  if (capacity == 0) return empty_weak_array_list();

  HeapObject heap_object = AllocateRawWeakArrayList(capacity, allocation);
  DisallowGarbageCollection no_gc;
  heap_object.set_map_after_allocation(*weak_array_list_map(),
                                       SKIP_WRITE_BARRIER);
  WeakArrayList result = WeakArrayList::cast(heap_object);
  result.set_length(0);
  result.set_capacity(capacity);
  return handle(result, isolate());
}
1970
NewWeakArrayList(int capacity,AllocationType allocation)1971 Handle<WeakArrayList> Factory::NewWeakArrayList(int capacity,
1972 AllocationType allocation) {
1973 Handle<WeakArrayList> result =
1974 NewUninitializedWeakArrayList(capacity, allocation);
1975 MemsetTagged(ObjectSlot(result->data_start()),
1976 read_only_roots().undefined_value(), capacity);
1977 return result;
1978 }
1979
// Copies |src| into an old-space WeakFixedArray with |grow_by| extra slots.
Handle<WeakFixedArray> Factory::CopyWeakFixedArrayAndGrow(
    Handle<WeakFixedArray> src, int grow_by) {
  DCHECK(!src->IsTransitionArray());  // Compacted by GC, this code doesn't work
  return CopyArrayAndGrow(src, grow_by, AllocationType::kOld);
}
1985
// Copies |src| into a WeakArrayList whose capacity is larger by |grow_by|;
// slots beyond the old length are filled with undefined.
Handle<WeakArrayList> Factory::CopyWeakArrayListAndGrow(
    Handle<WeakArrayList> src, int grow_by, AllocationType allocation) {
  int old_capacity = src->capacity();
  int new_capacity = old_capacity + grow_by;
  DCHECK_GE(new_capacity, old_capacity);
  Handle<WeakArrayList> result =
      NewUninitializedWeakArrayList(new_capacity, allocation);
  DisallowGarbageCollection no_gc;
  WeakArrayList raw = *result;
  int old_len = src->length();
  raw.set_length(old_len);
  // Copy the content.
  WriteBarrierMode mode = raw.GetWriteBarrierMode(no_gc);
  raw.CopyElements(isolate(), 0, *src, 0, old_len, mode);
  // Initialize the remaining (unused) capacity with undefined.
  MemsetTagged(ObjectSlot(raw.data_start() + old_len),
               read_only_roots().undefined_value(), new_capacity - old_len);
  return result;
}
2004
// Copies |src| into a new WeakArrayList of |new_capacity|, dropping cleared
// weak references in the process.
Handle<WeakArrayList> Factory::CompactWeakArrayList(Handle<WeakArrayList> src,
                                                    int new_capacity,
                                                    AllocationType allocation) {
  Handle<WeakArrayList> result =
      NewUninitializedWeakArrayList(new_capacity, allocation);

  // Copy the content.
  DisallowGarbageCollection no_gc;
  WeakArrayList raw_src = *src;
  WeakArrayList raw_result = *result;
  WriteBarrierMode mode = raw_result.GetWriteBarrierMode(no_gc);
  int copy_to = 0, length = raw_src.length();
  for (int i = 0; i < length; i++) {
    MaybeObject element = raw_src.Get(i);
    // Skip weak references whose targets have been collected.
    if (element->IsCleared()) continue;
    raw_result.Set(copy_to++, element, mode);
  }
  raw_result.set_length(copy_to);

  // Initialize the unused tail with undefined.
  MemsetTagged(ObjectSlot(raw_result.data_start() + copy_to),
               read_only_roots().undefined_value(), new_capacity - copy_to);
  return result;
}
2028
// Copies |array| into a young-space PropertyArray with |grow_by| extra slots.
Handle<PropertyArray> Factory::CopyPropertyArrayAndGrow(
    Handle<PropertyArray> array, int grow_by) {
  return CopyArrayAndGrow(array, grow_by, AllocationType::kYoung);
}
2033
// Copies the first |new_len| elements of |array| into a new FixedArray.
Handle<FixedArray> Factory::CopyFixedArrayUpTo(Handle<FixedArray> array,
                                               int new_len,
                                               AllocationType allocation) {
  DCHECK_LE(0, new_len);
  DCHECK_LE(new_len, array->length());
  // The canonical empty array is shared.
  if (new_len == 0) return empty_fixed_array();
  HeapObject heap_object = AllocateRawFixedArray(new_len, allocation);
  DisallowGarbageCollection no_gc;
  heap_object.set_map_after_allocation(*fixed_array_map(), SKIP_WRITE_BARRIER);
  FixedArray result = FixedArray::cast(heap_object);
  result.set_length(new_len);
  // Copy the content.
  WriteBarrierMode mode = result.GetWriteBarrierMode(no_gc);
  result.CopyElements(isolate(), 0, *array, 0, new_len, mode);
  return handle(result, isolate());
}
2050
CopyFixedArray(Handle<FixedArray> array)2051 Handle<FixedArray> Factory::CopyFixedArray(Handle<FixedArray> array) {
2052 if (array->length() == 0) return array;
2053 return CopyArrayWithMap(array, handle(array->map(), isolate()));
2054 }
2055
// Returns a copy of |array|; doubles are copied as raw bytes (no per-element
// access), starting at the length field so the map word is not touched.
Handle<FixedDoubleArray> Factory::CopyFixedDoubleArray(
    Handle<FixedDoubleArray> array) {
  int len = array->length();
  // The canonical empty array is shared.
  if (len == 0) return array;
  Handle<FixedDoubleArray> result =
      Handle<FixedDoubleArray>::cast(NewFixedDoubleArray(len));
  Heap::CopyBlock(
      result->address() + FixedDoubleArray::kLengthOffset,
      array->address() + FixedDoubleArray::kLengthOffset,
      FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
  return result;
}
2068
NewHeapNumberForCodeAssembler(double value)2069 Handle<HeapNumber> Factory::NewHeapNumberForCodeAssembler(double value) {
2070 return isolate()->heap()->CanAllocateInReadOnlySpace()
2071 ? NewHeapNumber<AllocationType::kReadOnly>(value)
2072 : NewHeapNumber<AllocationType::kOld>(value);
2073 }
2074
NewError(Handle<JSFunction> constructor,MessageTemplate template_index,Handle<Object> arg0,Handle<Object> arg1,Handle<Object> arg2)2075 Handle<JSObject> Factory::NewError(Handle<JSFunction> constructor,
2076 MessageTemplate template_index,
2077 Handle<Object> arg0, Handle<Object> arg1,
2078 Handle<Object> arg2) {
2079 HandleScope scope(isolate());
2080
2081 if (arg0.is_null()) arg0 = undefined_value();
2082 if (arg1.is_null()) arg1 = undefined_value();
2083 if (arg2.is_null()) arg2 = undefined_value();
2084
2085 return scope.CloseAndEscape(ErrorUtils::MakeGenericError(
2086 isolate(), constructor, template_index, arg0, arg1, arg2, SKIP_NONE));
2087 }
2088
NewError(Handle<JSFunction> constructor,Handle<String> message)2089 Handle<JSObject> Factory::NewError(Handle<JSFunction> constructor,
2090 Handle<String> message) {
2091 // Construct a new error object. If an exception is thrown, use the exception
2092 // as the result.
2093
2094 Handle<Object> no_caller;
2095 return ErrorUtils::Construct(isolate(), constructor, constructor, message,
2096 undefined_value(), SKIP_NONE, no_caller,
2097 ErrorUtils::StackTraceCollection::kDetailed)
2098 .ToHandleChecked();
2099 }
2100
// Produces the RangeError thrown for over-long strings; as a side effect it
// invalidates the "string length" protector.
Handle<Object> Factory::NewInvalidStringLengthError() {
  if (FLAG_correctness_fuzzer_suppressions) {
    FATAL("Aborting on invalid string length");
  }
  // Invalidate the "string length" protector.
  if (Protectors::IsStringLengthOverflowLookupChainIntact(isolate())) {
    Protectors::InvalidateStringLengthOverflowLookupChain(isolate());
  }
  return NewRangeError(MessageTemplate::kInvalidStringLength);
}
2111
// Defines Factory::New<NAME>() helpers that create the corresponding native
// error via the context's |name|_function constructor.
#define DEFINE_ERROR(NAME, name)                                              \
  Handle<JSObject> Factory::New##NAME(                                        \
      MessageTemplate template_index, Handle<Object> arg0,                    \
      Handle<Object> arg1, Handle<Object> arg2) {                             \
    return NewError(isolate()->name##_function(), template_index, arg0, arg1, \
                    arg2);                                                    \
  }
DEFINE_ERROR(Error, error)
DEFINE_ERROR(EvalError, eval_error)
DEFINE_ERROR(RangeError, range_error)
DEFINE_ERROR(ReferenceError, reference_error)
DEFINE_ERROR(SyntaxError, syntax_error)
DEFINE_ERROR(TypeError, type_error)
DEFINE_ERROR(WasmCompileError, wasm_compile_error)
DEFINE_ERROR(WasmLinkError, wasm_link_error)
DEFINE_ERROR(WasmRuntimeError, wasm_runtime_error)
DEFINE_ERROR(WasmExceptionError, wasm_exception_error)
#undef DEFINE_ERROR
2130
2131 Handle<JSObject> Factory::NewFunctionPrototype(Handle<JSFunction> function) {
2132 // Make sure to use globals from the function's context, since the function
2133 // can be from a different context.
2134 Handle<NativeContext> native_context(function->context().native_context(),
2135 isolate());
2136 Handle<Map> new_map;
2137 if (V8_UNLIKELY(IsAsyncGeneratorFunction(function->shared().kind()))) {
2138 new_map = handle(native_context->async_generator_object_prototype_map(),
2139 isolate());
2140 } else if (IsResumableFunction(function->shared().kind())) {
2141 // Generator and async function prototypes can share maps since they
2142 // don't have "constructor" properties.
2143 new_map =
2144 handle(native_context->generator_object_prototype_map(), isolate());
2145 } else {
2146 // Each function prototype gets a fresh map to avoid unwanted sharing of
2147 // maps between prototypes of different constructors.
2148 Handle<JSFunction> object_function(native_context->object_function(),
2149 isolate());
2150 DCHECK(object_function->has_initial_map());
2151 new_map = handle(object_function->initial_map(), isolate());
2152 }
2153
2154 DCHECK(!new_map->is_prototype_map());
2155 Handle<JSObject> prototype = NewJSObjectFromMap(new_map);
2156
2157 if (!IsResumableFunction(function->shared().kind())) {
2158 JSObject::AddProperty(isolate(), prototype, constructor_string(), function,
2159 DONT_ENUM);
2160 }
2161
2162 return prototype;
2163 }
2164
NewExternal(void * value)2165 Handle<JSObject> Factory::NewExternal(void* value) {
2166 Handle<Foreign> foreign = NewForeign(reinterpret_cast<Address>(value));
2167 Handle<JSObject> external = NewJSObjectFromMap(external_map());
2168 external->SetEmbedderField(0, *foreign);
2169 return external;
2170 }
2171
// Creates a CodeDataContainer with the given kind-specific |flags|.
Handle<CodeDataContainer> Factory::NewCodeDataContainer(
    int flags, AllocationType allocation) {
  CodeDataContainer data_container =
      CodeDataContainer::cast(New(code_data_container_map(), allocation));
  DisallowGarbageCollection no_gc;
  data_container.set_next_code_link(*undefined_value(), SKIP_WRITE_BARRIER);
  data_container.set_kind_specific_flags(flags, kRelaxedStore);
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
    // With an external code space, the container also carries the code
    // object and its entry point.
    data_container.AllocateExternalPointerEntries(isolate());
    data_container.set_raw_code(Smi::zero(), SKIP_WRITE_BARRIER);
    data_container.set_code_entry_point(isolate(), kNullAddress);
  }
  data_container.clear_padding();
  return handle(data_container, isolate());
}
2187
// Creates a trampoline Code object targeting the embedded (off-heap) builtin
// at {off_heap_entry}, copying metadata-related flags from the original
// builtin {code} so the trampoline behaves like the builtin it stands in for.
Handle<Code> Factory::NewOffHeapTrampolineFor(Handle<Code> code,
                                              Address off_heap_entry) {
  CHECK_NOT_NULL(isolate()->embedded_blob_code());
  CHECK_NE(0, isolate()->embedded_blob_code_size());
  CHECK(Builtins::IsIsolateIndependentBuiltin(*code));

  // Whether the trampoline contains an actual jump depends on whether the
  // builtin's code object is executable.
  bool generate_jump_to_instruction_stream =
      Builtins::CodeObjectIsExecutable(code->builtin_id());
  Handle<Code> result = Builtins::GenerateOffHeapTrampolineFor(
      isolate(), off_heap_entry,
      code->code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad),
      generate_jump_to_instruction_stream);

  // Trampolines may not contain any metadata since all metadata offsets,
  // stored on the Code object, refer to the off-heap metadata area.
  CHECK_EQ(result->raw_metadata_size(), 0);

  // The CodeDataContainer should not be modified beyond this point since it's
  // now possibly canonicalized.

  // The trampoline code object must inherit specific flags from the original
  // builtin (e.g. the safepoint-table offset). We set them manually here.
  {
    DisallowGarbageCollection no_gc;
    // Code pages are write-protected; this scope makes the result writable.
    CodePageMemoryModificationScope code_allocation(*result);
    Code raw_code = *code;
    Code raw_result = *result;

    const bool set_is_off_heap_trampoline = true;
    const int stack_slots =
        raw_code.has_safepoint_info() ? raw_code.stack_slots() : 0;
    raw_result.initialize_flags(raw_code.kind(), raw_code.is_turbofanned(),
                                stack_slots, set_is_off_heap_trampoline);
    raw_result.set_builtin_id(raw_code.builtin_id());
    raw_result.set_handler_table_offset(raw_code.handler_table_offset());
    raw_result.set_constant_pool_offset(raw_code.constant_pool_offset());
    raw_result.set_code_comments_offset(raw_code.code_comments_offset());
    raw_result.set_unwinding_info_offset(raw_code.unwinding_info_offset());

    // Replace the newly generated trampoline's RelocInfo ByteArray with the
    // canonical one stored in the roots to avoid duplicating it for every
    // single builtin.
    ByteArray canonical_reloc_info =
        generate_jump_to_instruction_stream
            ? read_only_roots().off_heap_trampoline_relocation_info()
            : read_only_roots().empty_byte_array();
#ifdef DEBUG
    // Verify that the contents are the same.
    ByteArray reloc_info = raw_result.relocation_info();
    DCHECK_EQ(reloc_info.length(), canonical_reloc_info.length());
    for (int i = 0; i < reloc_info.length(); ++i) {
      DCHECK_EQ(reloc_info.get(i), canonical_reloc_info.get(i));
    }
#endif
    raw_result.set_relocation_info(canonical_reloc_info);
    if (V8_EXTERNAL_CODE_SPACE_BOOL) {
      // Updating flags (in particular is_off_heap_trampoline one) might change
      // the value of the instruction start, so update it here.
      raw_result.code_data_container(kAcquireLoad)
          .UpdateCodeEntryPoint(isolate(), raw_result);
    }
  }

  return result;
}
2253
// Creates a byte-wise copy of {code} with its own, fresh CodeDataContainer
// (containers are not shared between Code objects), then fixes up
// address-dependent state of the copy.
Handle<Code> Factory::CopyCode(Handle<Code> code) {
  Handle<CodeDataContainer> data_container = NewCodeDataContainer(
      code->code_data_container(kAcquireLoad).kind_specific_flags(kRelaxedLoad),
      AllocationType::kOld);

  Heap* heap = isolate()->heap();
  Handle<Code> new_code;
  {
    int obj_size = code->Size();
    // Code pages are write-protected; this scope allows writing the copy.
    CodePageCollectionMemoryModificationScope code_allocation(heap);
    HeapObject result = heap->AllocateRawWith<Heap::kRetryOrFail>(
        obj_size, AllocationType::kCode, AllocationOrigin::kRuntime);

    // Copy code object.
    Address old_addr = code->address();
    Address new_addr = result.address();
    Heap::CopyBlock(new_addr, old_addr, obj_size);
    new_code = handle(Code::cast(result), isolate());

    // Set the {CodeDataContainer}, it cannot be shared.
    new_code->set_code_data_container(*data_container, kReleaseStore);

    // Fix up address-dependent contents of the copy, shifted by new - old.
    new_code->Relocate(new_addr - old_addr);
    // We have to iterate over the object and process its pointers when black
    // allocation is on.
    heap->incremental_marking()->ProcessBlackAllocatedObject(*new_code);
    // Record all references to embedded objects in the new code object.
#ifndef V8_DISABLE_WRITE_BARRIERS
    WriteBarrierForCode(*new_code);
#endif
  }
  if (V8_EXTERNAL_CODE_SPACE_BOOL) {
    data_container->SetCodeAndEntryPoint(isolate(), *new_code);
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) new_code->ObjectVerify(isolate());
#endif
  DCHECK(IsAligned(new_code->address(), kCodeAlignment));
  DCHECK_IMPLIES(
      !V8_ENABLE_THIRD_PARTY_HEAP_BOOL && !heap->code_region().is_empty(),
      heap->code_region().contains(new_code->address()));
  return new_code;
}
2298
// Creates an old-space copy of {source}, duplicating its header fields and
// bytecodes. Note that constant pool, handler table and source position
// table are shared with the source (the pointers are copied, not the
// contents).
Handle<BytecodeArray> Factory::CopyBytecodeArray(Handle<BytecodeArray> source) {
  int size = BytecodeArray::SizeFor(source->length());
  BytecodeArray copy = BytecodeArray::cast(AllocateRawWithImmortalMap(
      size, AllocationType::kOld, *bytecode_array_map()));
  // The copy is a raw object; no allocation may happen until it is fully
  // initialized below.
  DisallowGarbageCollection no_gc;
  BytecodeArray raw_source = *source;
  copy.set_length(raw_source.length());
  copy.set_frame_size(raw_source.frame_size());
  copy.set_parameter_count(raw_source.parameter_count());
  copy.set_incoming_new_target_or_generator_register(
      raw_source.incoming_new_target_or_generator_register());
  copy.set_constant_pool(raw_source.constant_pool());
  copy.set_handler_table(raw_source.handler_table());
  copy.set_source_position_table(raw_source.source_position_table(kAcquireLoad),
                                 kReleaseStore);
  copy.set_osr_loop_nesting_level(raw_source.osr_loop_nesting_level());
  copy.set_bytecode_age(raw_source.bytecode_age());
  raw_source.CopyBytecodesTo(copy);
  return handle(copy, isolate());
}
2319
NewJSObject(Handle<JSFunction> constructor,AllocationType allocation)2320 Handle<JSObject> Factory::NewJSObject(Handle<JSFunction> constructor,
2321 AllocationType allocation) {
2322 JSFunction::EnsureHasInitialMap(constructor);
2323 Handle<Map> map(constructor->initial_map(), isolate());
2324 return NewJSObjectFromMap(map, allocation);
2325 }
2326
NewJSObjectWithNullProto()2327 Handle<JSObject> Factory::NewJSObjectWithNullProto() {
2328 Handle<JSObject> result = NewJSObject(isolate()->object_function());
2329 Handle<Map> new_map = Map::Copy(
2330 isolate(), Handle<Map>(result->map(), isolate()), "ObjectWithNullProto");
2331 Map::SetPrototype(isolate(), new_map, null_value());
2332 JSObject::MigrateToMap(isolate(), result, new_map);
2333 return result;
2334 }
2335
// Creates a JSGlobalObject from {constructor}'s initial (dictionary) map.
// The result is a normalized object: its properties live in a
// GlobalDictionary of PropertyCells, and it gets a fresh dictionary-mode map
// of its own.
Handle<JSGlobalObject> Factory::NewJSGlobalObject(
    Handle<JSFunction> constructor) {
  DCHECK(constructor->has_initial_map());
  Handle<Map> map(constructor->initial_map(), isolate());
  DCHECK(map->is_dictionary_map());

  // Make sure no field properties are described in the initial map.
  // This guarantees us that normalizing the properties does not
  // require us to change property values to PropertyCells.
  DCHECK_EQ(map->NextFreePropertyIndex(), 0);

  // Make sure we don't have a ton of pre-allocated slots in the
  // global objects. They will be unused once we normalize the object.
  DCHECK_EQ(map->UnusedPropertyFields(), 0);
  DCHECK_EQ(map->GetInObjectProperties(), 0);

  // Initial size of the backing store to avoid resize of the storage during
  // bootstrapping. The size differs between the JS global object ad the
  // builtins object.
  int initial_size = 64;

  // Allocate a dictionary object for backing storage.
  int at_least_space_for = map->NumberOfOwnDescriptors() * 2 + initial_size;
  Handle<GlobalDictionary> dictionary =
      GlobalDictionary::New(isolate(), at_least_space_for);

  // The global object might be created from an object template with accessors.
  // Fill these accessors into the dictionary.
  Handle<DescriptorArray> descs(map->instance_descriptors(isolate()),
                                isolate());
  for (InternalIndex i : map->IterateOwnDescriptors()) {
    PropertyDetails details = descs->GetDetails(i);
    // Only accessors are expected.
    DCHECK_EQ(kAccessor, details.kind());
    PropertyDetails d(kAccessor, details.attributes(),
                      PropertyCellType::kMutable);
    Handle<Name> name(descs->GetKey(i), isolate());
    Handle<Object> value(descs->GetStrongValue(i), isolate());
    Handle<PropertyCell> cell = NewPropertyCell(name, d, value);
    // |dictionary| already contains enough space for all properties.
    USE(GlobalDictionary::Add(isolate(), dictionary, name, cell, d));
  }

  // Allocate the global object and initialize it with the backing store.
  Handle<JSGlobalObject> global(
      JSGlobalObject::cast(New(map, AllocationType::kOld)), isolate());
  InitializeJSObjectFromMap(*global, *dictionary, *map);

  // Create a new map for the global object.
  // The copied map has no descriptors; the properties now live in the
  // dictionary installed below.
  Handle<Map> new_map = Map::CopyDropDescriptors(isolate(), map);
  Map raw_map = *new_map;
  raw_map.set_may_have_interesting_symbols(true);
  raw_map.set_is_dictionary_map(true);
  LOG(isolate(), MapDetails(raw_map));

  // Set up the global object as a normalized object.
  global->set_global_dictionary(*dictionary, kReleaseStore);
  global->set_map(raw_map, kReleaseStore);

  // Make sure result is a global object with properties in dictionary.
  DCHECK(global->IsJSGlobalObject() && !global->HasFastProperties());
  return global;
}
2399
// Initializes the header of a freshly allocated JSObject: properties backing
// store, elements, and the in-object field area. {obj}, {properties} and
// {map} are raw objects, so this must not allocate.
void Factory::InitializeJSObjectFromMap(JSObject obj, Object properties,
                                        Map map) {
  DisallowGarbageCollection no_gc;
  obj.set_raw_properties_or_hash(properties, kRelaxedStore);
  obj.initialize_elements();
  // TODO(1240798): Initialize the object's body using valid initial values
  // according to the object's initial map. For example, if the map's
  // instance type is JS_ARRAY_TYPE, the length field should be initialized
  // to a number (e.g. Smi::zero()) and the elements initialized to a
  // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object
  // verification code has to cope with (temporarily) invalid objects. See
  // for example, JSArray::JSArrayVerify).
  InitializeJSObjectBody(obj, map, JSObject::kHeaderSize);
}
2414
// Fills the in-object field area of {obj} from {start_offset} up to the
// map's instance size. {obj} and {map} are raw objects; must not allocate.
void Factory::InitializeJSObjectBody(JSObject obj, Map map, int start_offset) {
  DisallowGarbageCollection no_gc;
  if (start_offset == map.instance_size()) return;
  DCHECK_LT(start_offset, map.instance_size());

  // We cannot always fill with one_pointer_filler_map because objects
  // created from API functions expect their embedder fields to be initialized
  // with undefined_value.
  // Pre-allocated fields need to be initialized with undefined_value as well
  // so that object accesses before the constructor completes (e.g. in the
  // debugger) will not cause a crash.

  // In case of Array subclassing the |map| could already be transitioned
  // to different elements kind from the initial map on which we track slack.
  bool in_progress = map.IsInobjectSlackTrackingInProgress();
  // InitializeBody decides per field whether to use the filler map word or
  // undefined (see the comments above).
  obj.InitializeBody(map, start_offset, in_progress,
                     ReadOnlyRoots(isolate()).one_pointer_filler_map_word(),
                     *undefined_value());
  if (in_progress) {
    // Advance in-object slack tracking on the root map of this map's
    // transition tree.
    map.FindRootMap(isolate()).InobjectSlackTrackingStep(isolate());
  }
}
2437
// Allocates a JSObject from {map}, optionally with an AllocationSite, and
// initializes its properties (empty), elements and in-object fields.
Handle<JSObject> Factory::NewJSObjectFromMap(
    Handle<Map> map, AllocationType allocation,
    Handle<AllocationSite> allocation_site) {
  // JSFunctions should be allocated using AllocateFunction to be
  // properly initialized.
  DCHECK(!InstanceTypeChecker::IsJSFunction((map->instance_type())));

  // Both types of global objects should be allocated using
  // AllocateGlobalObject to be properly initialized.
  DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);

  // {js_obj} is raw until the handle below; no allocation may happen in
  // between (InitializeJSObjectFromMap does not allocate).
  JSObject js_obj = JSObject::cast(
      AllocateRawWithAllocationSite(map, allocation, allocation_site));

  InitializeJSObjectFromMap(js_obj, *empty_fixed_array(), *map);

  DCHECK(js_obj.HasFastElements() || js_obj.HasTypedArrayElements() ||
         js_obj.HasFastStringWrapperElements() ||
         js_obj.HasFastArgumentsElements() || js_obj.HasDictionaryElements());
  return handle(js_obj, isolate());
}
2459
NewSlowJSObjectFromMap(Handle<Map> map,int capacity,AllocationType allocation,Handle<AllocationSite> allocation_site)2460 Handle<JSObject> Factory::NewSlowJSObjectFromMap(
2461 Handle<Map> map, int capacity, AllocationType allocation,
2462 Handle<AllocationSite> allocation_site) {
2463 DCHECK(map->is_dictionary_map());
2464 Handle<HeapObject> object_properties;
2465 if (V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL) {
2466 object_properties = NewSwissNameDictionary(capacity, allocation);
2467 } else {
2468 object_properties = NameDictionary::New(isolate(), capacity);
2469 }
2470 Handle<JSObject> js_object =
2471 NewJSObjectFromMap(map, allocation, allocation_site);
2472 js_object->set_raw_properties_or_hash(*object_properties, kRelaxedStore);
2473 return js_object;
2474 }
2475
// Creates a dictionary-mode object with the given prototype, property
// dictionary, and (optionally) dictionary-mode elements.
Handle<JSObject> Factory::NewSlowJSObjectWithPropertiesAndElements(
    Handle<HeapObject> prototype, Handle<HeapObject> properties,
    Handle<FixedArrayBase> elements) {
  DCHECK_IMPLIES(V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL,
                 properties->IsSwissNameDictionary());
  DCHECK_IMPLIES(!V8_ENABLE_SWISS_NAME_DICTIONARY_BOOL,
                 properties->IsNameDictionary());

  // Start from the canonical slow-mode map and retarget its prototype only
  // when it differs from the requested one.
  Handle<Map> object_map = isolate()->slow_object_with_object_prototype_map();
  if (object_map->prototype() != *prototype) {
    object_map = Map::TransitionToPrototype(isolate(), object_map, prototype);
  }
  DCHECK(object_map->is_dictionary_map());
  Handle<JSObject> object =
      NewJSObjectFromMap(object_map, AllocationType::kYoung);
  object->set_raw_properties_or_hash(*properties);
  if (*elements != read_only_roots().empty_fixed_array()) {
    // Non-empty elements must already be a NumberDictionary; transition the
    // map to DICTIONARY_ELEMENTS before installing them.
    DCHECK(elements->IsNumberDictionary());
    object_map =
        JSObject::GetElementsTransitionMap(object, DICTIONARY_ELEMENTS);
    JSObject::MigrateToMap(isolate(), object, object_map);
    object->set_elements(*elements);
  }
  return object;
}
2501
NewJSArray(ElementsKind elements_kind,int length,int capacity,ArrayStorageAllocationMode mode,AllocationType allocation)2502 Handle<JSArray> Factory::NewJSArray(ElementsKind elements_kind, int length,
2503 int capacity,
2504 ArrayStorageAllocationMode mode,
2505 AllocationType allocation) {
2506 DCHECK(capacity >= length);
2507 if (capacity == 0) {
2508 return NewJSArrayWithElements(empty_fixed_array(), elements_kind, length,
2509 allocation);
2510 }
2511
2512 HandleScope inner_scope(isolate());
2513 Handle<FixedArrayBase> elms =
2514 NewJSArrayStorage(elements_kind, capacity, mode);
2515 return inner_scope.CloseAndEscape(NewJSArrayWithUnverifiedElements(
2516 elms, elements_kind, length, allocation));
2517 }
2518
NewJSArrayWithElements(Handle<FixedArrayBase> elements,ElementsKind elements_kind,int length,AllocationType allocation)2519 Handle<JSArray> Factory::NewJSArrayWithElements(Handle<FixedArrayBase> elements,
2520 ElementsKind elements_kind,
2521 int length,
2522 AllocationType allocation) {
2523 Handle<JSArray> array = NewJSArrayWithUnverifiedElements(
2524 elements, elements_kind, length, allocation);
2525 JSObject::ValidateElements(*array);
2526 return array;
2527 }
2528
// Creates a JSArray wrapping {elements} without validating that the elements
// match {elements_kind}; see NewJSArrayWithElements for the validating
// variant.
Handle<JSArray> Factory::NewJSArrayWithUnverifiedElements(
    Handle<FixedArrayBase> elements, ElementsKind elements_kind, int length,
    AllocationType allocation) {
  DCHECK(length <= elements->length());
  NativeContext native_context = isolate()->raw_native_context();
  // Use the initial JSArray map for the requested elements kind, falling
  // back to the Array function's initial map if there is none.
  Map map = native_context.GetInitialJSArrayMap(elements_kind);
  if (map.is_null()) {
    JSFunction array_function = native_context.array_function();
    map = array_function.initial_map();
  }
  Handle<JSArray> array = Handle<JSArray>::cast(
      NewJSObjectFromMap(handle(map, isolate()), allocation));
  DisallowGarbageCollection no_gc;
  JSArray raw = *array;
  raw.set_elements(*elements);
  raw.set_length(Smi::FromInt(length));
  return array;
}
2547
// Installs fresh backing storage of the array's current elements kind on
// {array} and sets its length. A capacity of zero installs the canonical
// empty FixedArray instead of allocating.
void Factory::NewJSArrayStorage(Handle<JSArray> array, int length, int capacity,
                                ArrayStorageAllocationMode mode) {
  DCHECK(capacity >= length);

  if (capacity == 0) {
    JSArray raw = *array;
    DisallowGarbageCollection no_gc;
    raw.set_length(Smi::zero());
    raw.set_elements(*empty_fixed_array());
    return;
  }

  // Allocate the storage in an inner scope; only {array} (already a handle
  // owned by the caller) survives.
  HandleScope inner_scope(isolate());
  Handle<FixedArrayBase> elms =
      NewJSArrayStorage(array->GetElementsKind(), capacity, mode);
  DisallowGarbageCollection no_gc;
  JSArray raw = *array;
  raw.set_elements(*elms);
  raw.set_length(Smi::FromInt(length));
}
2568
NewJSArrayStorage(ElementsKind elements_kind,int capacity,ArrayStorageAllocationMode mode)2569 Handle<FixedArrayBase> Factory::NewJSArrayStorage(
2570 ElementsKind elements_kind, int capacity, ArrayStorageAllocationMode mode) {
2571 DCHECK_GT(capacity, 0);
2572 Handle<FixedArrayBase> elms;
2573 if (IsDoubleElementsKind(elements_kind)) {
2574 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
2575 elms = NewFixedDoubleArray(capacity);
2576 } else {
2577 DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
2578 elms = NewFixedDoubleArrayWithHoles(capacity);
2579 }
2580 } else {
2581 DCHECK(IsSmiOrObjectElementsKind(elements_kind));
2582 if (mode == DONT_INITIALIZE_ARRAY_ELEMENTS) {
2583 elms = NewFixedArray(capacity);
2584 } else {
2585 DCHECK(mode == INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE);
2586 elms = NewFixedArrayWithHoles(capacity);
2587 }
2588 }
2589 return elms;
2590 }
2591
NewJSWeakMap()2592 Handle<JSWeakMap> Factory::NewJSWeakMap() {
2593 NativeContext native_context = isolate()->raw_native_context();
2594 Handle<Map> map(native_context.js_weak_map_fun().initial_map(), isolate());
2595 Handle<JSWeakMap> weakmap(JSWeakMap::cast(*NewJSObjectFromMap(map)),
2596 isolate());
2597 {
2598 // Do not leak handles for the hash table, it would make entries strong.
2599 HandleScope scope(isolate());
2600 JSWeakCollection::Initialize(weakmap, isolate());
2601 }
2602 return weakmap;
2603 }
2604
// Creates a module namespace object and eagerly writes "Module" into its
// @@toStringTag in-object field.
Handle<JSModuleNamespace> Factory::NewJSModuleNamespace() {
  Handle<Map> map = isolate()->js_module_namespace_map();
  Handle<JSModuleNamespace> module_namespace(
      Handle<JSModuleNamespace>::cast(NewJSObjectFromMap(map)));
  FieldIndex index = FieldIndex::ForDescriptor(
      *map, InternalIndex(JSModuleNamespace::kToStringTagFieldIndex));
  // The "Module" string comes from the read-only roots, hence
  // SKIP_WRITE_BARRIER is safe.
  module_namespace->FastPropertyAtPut(index, read_only_roots().Module_string(),
                                      SKIP_WRITE_BARRIER);
  return module_namespace;
}
2615
NewJSGeneratorObject(Handle<JSFunction> function)2616 Handle<JSGeneratorObject> Factory::NewJSGeneratorObject(
2617 Handle<JSFunction> function) {
2618 DCHECK(IsResumableFunction(function->shared().kind()));
2619 JSFunction::EnsureHasInitialMap(function);
2620 Handle<Map> map(function->initial_map(), isolate());
2621
2622 DCHECK(map->instance_type() == JS_GENERATOR_OBJECT_TYPE ||
2623 map->instance_type() == JS_ASYNC_GENERATOR_OBJECT_TYPE);
2624
2625 return Handle<JSGeneratorObject>::cast(NewJSObjectFromMap(map));
2626 }
2627
// Creates a new, unlinked SourceTextModule for {sfi}. Export/import tables
// are pre-sized from the module descriptor in the function's scope info; all
// status fields start in their initial (unlinked / not-evaluated) state.
Handle<SourceTextModule> Factory::NewSourceTextModule(
    Handle<SharedFunctionInfo> sfi) {
  Handle<SourceTextModuleInfo> module_info(
      sfi->scope_info().ModuleDescriptorInfo(), isolate());
  Handle<ObjectHashTable> exports =
      ObjectHashTable::New(isolate(), module_info->RegularExportCount());
  Handle<FixedArray> regular_exports =
      NewFixedArray(module_info->RegularExportCount());
  Handle<FixedArray> regular_imports =
      NewFixedArray(module_info->regular_imports().length());
  int requested_modules_length = module_info->module_requests().length();
  Handle<FixedArray> requested_modules =
      requested_modules_length > 0 ? NewFixedArray(requested_modules_length)
                                   : empty_fixed_array();
  Handle<ArrayList> async_parent_modules = ArrayList::New(isolate(), 0);

  ReadOnlyRoots roots(isolate());
  // All dependent allocations are done above; from here on the raw module is
  // initialized with no chance of GC.
  SourceTextModule module = SourceTextModule::cast(
      New(source_text_module_map(), AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  module.set_code(*sfi);
  module.set_exports(*exports);
  module.set_regular_exports(*regular_exports);
  module.set_regular_imports(*regular_imports);
  module.set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue));
  module.set_module_namespace(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_requested_modules(*requested_modules);
  module.set_status(Module::kUnlinked);
  module.set_exception(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_top_level_capability(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_import_meta(roots.the_hole_value(), kReleaseStore,
                         SKIP_WRITE_BARRIER);
  // -1: DFS indices not yet assigned.
  module.set_dfs_index(-1);
  module.set_dfs_ancestor_index(-1);
  module.set_flags(0);
  module.set_async(IsAsyncModule(sfi->kind()));
  module.set_async_evaluating_ordinal(SourceTextModule::kNotAsyncEvaluated);
  module.set_cycle_root(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_async_parent_modules(*async_parent_modules);
  module.set_pending_async_dependencies(0);
  return handle(module, isolate());
}
2670
// Creates an unlinked SyntheticModule: an embedder-defined module whose
// evaluation is driven by the {evaluation_steps} callback rather than by
// source text.
Handle<SyntheticModule> Factory::NewSyntheticModule(
    Handle<String> module_name, Handle<FixedArray> export_names,
    v8::Module::SyntheticModuleEvaluationSteps evaluation_steps) {
  ReadOnlyRoots roots(isolate());

  Handle<ObjectHashTable> exports =
      ObjectHashTable::New(isolate(), static_cast<int>(export_names->length()));
  // The evaluation callback is stored as a Foreign wrapping its address.
  Handle<Foreign> evaluation_steps_foreign =
      NewForeign(reinterpret_cast<i::Address>(evaluation_steps));

  // All dependent allocations are done above; initialize the raw module
  // under a no-GC scope.
  SyntheticModule module =
      SyntheticModule::cast(New(synthetic_module_map(), AllocationType::kOld));
  DisallowGarbageCollection no_gc;
  module.set_hash(isolate()->GenerateIdentityHash(Smi::kMaxValue));
  module.set_module_namespace(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_status(Module::kUnlinked);
  module.set_exception(roots.the_hole_value(), SKIP_WRITE_BARRIER);
  module.set_top_level_capability(roots.undefined_value(), SKIP_WRITE_BARRIER);
  module.set_name(*module_name);
  module.set_export_names(*export_names);
  module.set_exports(*exports);
  module.set_evaluation_steps(*evaluation_steps_foreign);
  return handle(module, isolate());
}
2695
NewJSArrayBuffer(std::shared_ptr<BackingStore> backing_store,AllocationType allocation)2696 Handle<JSArrayBuffer> Factory::NewJSArrayBuffer(
2697 std::shared_ptr<BackingStore> backing_store, AllocationType allocation) {
2698 Handle<Map> map(isolate()->native_context()->array_buffer_fun().initial_map(),
2699 isolate());
2700 auto result =
2701 Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(map, allocation));
2702 result->Setup(SharedFlag::kNotShared, ResizableFlag::kNotResizable,
2703 std::move(backing_store));
2704 return result;
2705 }
2706
NewJSArrayBufferAndBackingStore(size_t byte_length,InitializedFlag initialized,AllocationType allocation)2707 MaybeHandle<JSArrayBuffer> Factory::NewJSArrayBufferAndBackingStore(
2708 size_t byte_length, InitializedFlag initialized,
2709 AllocationType allocation) {
2710 std::unique_ptr<BackingStore> backing_store = nullptr;
2711
2712 if (byte_length > 0) {
2713 backing_store = BackingStore::Allocate(isolate(), byte_length,
2714 SharedFlag::kNotShared, initialized);
2715 if (!backing_store) return MaybeHandle<JSArrayBuffer>();
2716 }
2717 Handle<Map> map(isolate()->native_context()->array_buffer_fun().initial_map(),
2718 isolate());
2719 auto array_buffer =
2720 Handle<JSArrayBuffer>::cast(NewJSObjectFromMap(map, allocation));
2721 array_buffer->Setup(SharedFlag::kNotShared, ResizableFlag::kNotResizable,
2722 std::move(backing_store));
2723 return array_buffer;
2724 }
2725
NewJSSharedArrayBuffer(std::shared_ptr<BackingStore> backing_store)2726 Handle<JSArrayBuffer> Factory::NewJSSharedArrayBuffer(
2727 std::shared_ptr<BackingStore> backing_store) {
2728 DCHECK_IMPLIES(backing_store->is_resizable(), FLAG_harmony_rab_gsab);
2729 Handle<Map> map(
2730 isolate()->native_context()->shared_array_buffer_fun().initial_map(),
2731 isolate());
2732 auto result = Handle<JSArrayBuffer>::cast(
2733 NewJSObjectFromMap(map, AllocationType::kYoung));
2734 ResizableFlag resizable = backing_store->is_resizable()
2735 ? ResizableFlag::kResizable
2736 : ResizableFlag::kNotResizable;
2737 result->Setup(SharedFlag::kShared, resizable, std::move(backing_store));
2738 return result;
2739 }
2740
NewJSIteratorResult(Handle<Object> value,bool done)2741 Handle<JSIteratorResult> Factory::NewJSIteratorResult(Handle<Object> value,
2742 bool done) {
2743 Handle<Map> map(isolate()->native_context()->iterator_result_map(),
2744 isolate());
2745 Handle<JSIteratorResult> js_iter_result = Handle<JSIteratorResult>::cast(
2746 NewJSObjectFromMap(map, AllocationType::kYoung));
2747 DisallowGarbageCollection no_gc;
2748 JSIteratorResult raw = *js_iter_result;
2749 raw.set_value(*value, SKIP_WRITE_BARRIER);
2750 raw.set_done(*ToBoolean(done), SKIP_WRITE_BARRIER);
2751 return js_iter_result;
2752 }
2753
NewJSAsyncFromSyncIterator(Handle<JSReceiver> sync_iterator,Handle<Object> next)2754 Handle<JSAsyncFromSyncIterator> Factory::NewJSAsyncFromSyncIterator(
2755 Handle<JSReceiver> sync_iterator, Handle<Object> next) {
2756 Handle<Map> map(isolate()->native_context()->async_from_sync_iterator_map(),
2757 isolate());
2758 Handle<JSAsyncFromSyncIterator> iterator =
2759 Handle<JSAsyncFromSyncIterator>::cast(
2760 NewJSObjectFromMap(map, AllocationType::kYoung));
2761 DisallowGarbageCollection no_gc;
2762 JSAsyncFromSyncIterator raw = *iterator;
2763 raw.set_sync_iterator(*sync_iterator, SKIP_WRITE_BARRIER);
2764 raw.set_next(*next, SKIP_WRITE_BARRIER);
2765 return iterator;
2766 }
2767
NewJSMap()2768 Handle<JSMap> Factory::NewJSMap() {
2769 Handle<Map> map(isolate()->native_context()->js_map_map(), isolate());
2770 Handle<JSMap> js_map = Handle<JSMap>::cast(NewJSObjectFromMap(map));
2771 JSMap::Initialize(js_map, isolate());
2772 return js_map;
2773 }
2774
NewJSSet()2775 Handle<JSSet> Factory::NewJSSet() {
2776 Handle<Map> map(isolate()->native_context()->js_set_map(), isolate());
2777 Handle<JSSet> js_set = Handle<JSSet>::cast(NewJSObjectFromMap(map));
2778 JSSet::Initialize(js_set, isolate());
2779 return js_set;
2780 }
2781
// Translates a typed-array ElementsKind (including RAB/GSAB kinds) into the
// corresponding ExternalArrayType and per-element size in bytes. Aborts for
// any other kind.
void Factory::TypeAndSizeForElementsKind(ElementsKind kind,
                                         ExternalArrayType* array_type,
                                         size_t* element_size) {
  switch (kind) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case TYPE##_ELEMENTS:                           \
    *array_type = kExternal##Type##Array;         \
    *element_size = sizeof(ctype);                \
    break;
    TYPED_ARRAYS(TYPED_ARRAY_CASE)
    RAB_GSAB_TYPED_ARRAYS_WITH_TYPED_ARRAY_TYPE(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE

    default:
      UNREACHABLE();
  }
}
2799
namespace {

// Translates an ExternalArrayType into its per-element size in bytes and the
// corresponding ElementsKind. Aborts on values outside the typed-array set.
void ForFixedTypedArray(ExternalArrayType array_type, size_t* element_size,
                        ElementsKind* element_kind) {
  switch (array_type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype) \
  case kExternal##Type##Array:                    \
    *element_size = sizeof(ctype);                \
    *element_kind = TYPE##_ELEMENTS;              \
    return;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
  }
  UNREACHABLE();
}

}  // namespace
2818
// Creates a JSArrayBufferView (the common part of typed arrays and
// DataViews) of the given map over {buffer}, after checking that the range
// [byte_offset, byte_offset + byte_length) lies within the buffer.
Handle<JSArrayBufferView> Factory::NewJSArrayBufferView(
    Handle<Map> map, Handle<FixedArrayBase> elements,
    Handle<JSArrayBuffer> buffer, size_t byte_offset, size_t byte_length) {
  CHECK_LE(byte_length, buffer->byte_length());
  CHECK_LE(byte_offset, buffer->byte_length());
  CHECK_LE(byte_offset + byte_length, buffer->byte_length());
  Handle<JSArrayBufferView> array_buffer_view = Handle<JSArrayBufferView>::cast(
      NewJSObjectFromMap(map, AllocationType::kYoung));
  DisallowGarbageCollection no_gc;
  JSArrayBufferView raw = *array_buffer_view;
  raw.set_elements(*elements, SKIP_WRITE_BARRIER);
  raw.set_buffer(*buffer, SKIP_WRITE_BARRIER);
  raw.set_byte_offset(byte_offset);
  raw.set_byte_length(byte_length);
  ZeroEmbedderFields(raw);
  DCHECK_EQ(raw.GetEmbedderFieldCount(),
            v8::ArrayBufferView::kEmbedderFieldCount);
  return array_buffer_view;
}
2838
// Creates a JSTypedArray of the given external type viewing {buffer} at
// {byte_offset} with {length} elements. The result is non-length-tracking;
// it is RAB-backed iff the buffer is resizable and not shared.
Handle<JSTypedArray> Factory::NewJSTypedArray(ExternalArrayType type,
                                              Handle<JSArrayBuffer> buffer,
                                              size_t byte_offset,
                                              size_t length) {
  size_t element_size;
  ElementsKind elements_kind;
  ForFixedTypedArray(type, &element_size, &elements_kind);
  size_t byte_length = length * element_size;

  CHECK_LE(length, JSTypedArray::kMaxLength);
  // Guards against overflow in the byte_length multiplication above.
  CHECK_EQ(length, byte_length / element_size);
  CHECK_EQ(0, byte_offset % ElementsKindToByteSize(elements_kind));

  // Pick the initial map of the matching TypedArray constructor.
  Handle<Map> map;
  switch (elements_kind) {
#define TYPED_ARRAY_FUN(Type, type, TYPE, ctype)                              \
  case TYPE##_ELEMENTS:                                                       \
    map =                                                                     \
        handle(isolate()->native_context()->type##_array_fun().initial_map(), \
               isolate());                                                    \
    break;

    TYPED_ARRAYS(TYPED_ARRAY_FUN)
#undef TYPED_ARRAY_FUN

    default:
      UNREACHABLE();
  }
  Handle<JSTypedArray> typed_array =
      Handle<JSTypedArray>::cast(NewJSArrayBufferView(
          map, empty_byte_array(), buffer, byte_offset, byte_length));
  JSTypedArray raw = *typed_array;
  DisallowGarbageCollection no_gc;
  raw.set_length(length);
  raw.SetOffHeapDataPtr(isolate(), buffer->backing_store(), byte_offset);
  raw.set_is_length_tracking(false);
  raw.set_is_backed_by_rab(!buffer->is_shared() && buffer->is_resizable());
  return typed_array;
}
2878
// Creates a JSDataView over {buffer} covering [byte_offset,
// byte_offset + byte_length).
NewJSDataView(Handle<JSArrayBuffer> buffer,size_t byte_offset,size_t byte_length)2879 Handle<JSDataView> Factory::NewJSDataView(Handle<JSArrayBuffer> buffer,
2880 size_t byte_offset,
2881 size_t byte_length) {
2882 Handle<Map> map(isolate()->native_context()->data_view_fun().initial_map(),
2883 isolate());
2884 Handle<JSDataView> obj = Handle<JSDataView>::cast(NewJSArrayBufferView(
2885 map, empty_fixed_array(), buffer, byte_offset, byte_length));
// Cache the absolute data pointer so accesses need no per-read offset math.
2886 obj->set_data_pointer(
2887 isolate(), static_cast<uint8_t*>(buffer->backing_store()) + byte_offset);
2888 return obj;
2889 }
2890
// Creates a JSBoundFunction for {target_function} with the given [[BoundThis]]
// and [[BoundArguments]]. Throws a RangeError (as a MaybeHandle) when too many
// bound arguments are supplied, and propagates failures from the prototype
// lookup on the target.
NewJSBoundFunction(Handle<JSReceiver> target_function,Handle<Object> bound_this,base::Vector<Handle<Object>> bound_args)2891 MaybeHandle<JSBoundFunction> Factory::NewJSBoundFunction(
2892 Handle<JSReceiver> target_function, Handle<Object> bound_this,
2893 base::Vector<Handle<Object>> bound_args) {
2894 DCHECK(target_function->IsCallable());
2895 STATIC_ASSERT(Code::kMaxArguments <= FixedArray::kMaxLength);
2896 if (bound_args.length() >= Code::kMaxArguments) {
2897 THROW_NEW_ERROR(isolate(),
2898 NewRangeError(MessageTemplate::kTooManyArguments),
2899 JSBoundFunction);
2900 }
2901
2902 // Determine the prototype of the {target_function}.
2903 Handle<HeapObject> prototype;
2904 ASSIGN_RETURN_ON_EXCEPTION(
2905 isolate(), prototype,
2906 JSReceiver::GetPrototype(isolate(), target_function), JSBoundFunction);
2907
// Allocate in the target's creation context so maps/allocations below come
// from the right native context.
2908 SaveAndSwitchContext save(
2909 isolate(), *target_function->GetCreationContext().ToHandleChecked());
2910
2911 // Create the [[BoundArguments]] for the result.
2912 Handle<FixedArray> bound_arguments;
2913 if (bound_args.length() == 0) {
2914 bound_arguments = empty_fixed_array();
2915 } else {
2916 bound_arguments = NewFixedArray(bound_args.length());
2917 for (int i = 0; i < bound_args.length(); ++i) {
2918 bound_arguments->set(i, *bound_args[i]);
2919 }
2920 }
2921
2922 // Setup the map for the JSBoundFunction instance.
2923 Handle<Map> map = target_function->IsConstructor()
2924 ? isolate()->bound_function_with_constructor_map()
2925 : isolate()->bound_function_without_constructor_map();
2926 if (map->prototype() != *prototype) {
2927 map = Map::TransitionToPrototype(isolate(), map, prototype);
2928 }
2929 DCHECK_EQ(target_function->IsConstructor(), map->is_constructor());
2930
2931 // Setup the JSBoundFunction instance.
2932 Handle<JSBoundFunction> result = Handle<JSBoundFunction>::cast(
2933 NewJSObjectFromMap(map, AllocationType::kYoung));
2934 DisallowGarbageCollection no_gc;
// Freshly allocated young object: write barriers can be skipped.
2935 JSBoundFunction raw = *result;
2936 raw.set_bound_target_function(*target_function, SKIP_WRITE_BARRIER);
2937 raw.set_bound_this(*bound_this, SKIP_WRITE_BARRIER);
2938 raw.set_bound_arguments(*bound_arguments, SKIP_WRITE_BARRIER);
2939 return result;
2940 }
2941
2942 // ES6 section 9.5.15 ProxyCreate (target, handler)
// Creates a JSProxy for {target}/{handler}, choosing the map by whether the
// target is callable and/or a constructor so the proxy mirrors those traits.
NewJSProxy(Handle<JSReceiver> target,Handle<JSReceiver> handler)2943 Handle<JSProxy> Factory::NewJSProxy(Handle<JSReceiver> target,
2944 Handle<JSReceiver> handler) {
2945 // Allocate the proxy object.
2946 Handle<Map> map;
2947 if (target->IsCallable()) {
2948 if (target->IsConstructor()) {
2949 map = Handle<Map>(isolate()->proxy_constructor_map());
2950 } else {
2951 map = Handle<Map>(isolate()->proxy_callable_map());
2952 }
2953 } else {
2954 map = Handle<Map>(isolate()->proxy_map());
2955 }
2956 DCHECK(map->prototype().IsNull(isolate()));
2957 JSProxy result = JSProxy::cast(New(map, AllocationType::kYoung));
2958 DisallowGarbageCollection no_gc;
// Young allocation, no GC until return: barriers can be skipped.
2959 result.initialize_properties(isolate());
2960 result.set_target(*target, SKIP_WRITE_BARRIER);
2961 result.set_handler(*handler, SKIP_WRITE_BARRIER);
2962 return handle(result, isolate());
2963 }
2964
NewUninitializedJSGlobalProxy(int size)2965 Handle<JSGlobalProxy> Factory::NewUninitializedJSGlobalProxy(int size) {
2966 // Create an empty shell of a JSGlobalProxy that needs to be reinitialized
2967 // via ReinitializeJSGlobalProxy later.
2968 Handle<Map> map = NewMap(JS_GLOBAL_PROXY_TYPE, size);
2969 // Maintain invariant expected from any JSGlobalProxy.
2970 {
2971 DisallowGarbageCollection no_gc;
2972 Map raw = *map;
2973 raw.set_is_access_check_needed(true);
2974 raw.set_may_have_interesting_symbols(true);
2975 LOG(isolate(), MapDetails(raw));
2976 }
2977 Handle<JSGlobalProxy> proxy = Handle<JSGlobalProxy>::cast(
2978 NewJSObjectFromMap(map, AllocationType::kOld));
2979 // Create identity hash early in case there is any JS collection containing
2980 // a global proxy key and needs to be rehashed after deserialization.
2981 proxy->GetOrCreateIdentityHash(isolate());
2982 return proxy;
2983 }
2984
// Re-initializes an existing JSGlobalProxy in place with {constructor}'s
// initial map, preserving the proxy's identity hash. The object is NOT
// reallocated, so the new map must match the old one in size and type.
ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object,Handle<JSFunction> constructor)2985 void Factory::ReinitializeJSGlobalProxy(Handle<JSGlobalProxy> object,
2986 Handle<JSFunction> constructor) {
2987 DCHECK(constructor->has_initial_map());
2988 Handle<Map> map(constructor->initial_map(), isolate());
2989 Handle<Map> old_map(object->map(), isolate());
2990
2991 // The proxy's hash should be retained across reinitialization.
2992 Handle<Object> raw_properties_or_hash(object->raw_properties_or_hash(),
2993 isolate());
2994
2995 if (old_map->is_prototype_map()) {
2996 map = Map::Copy(isolate(), map, "CopyAsPrototypeForJSGlobalProxy");
2997 map->set_is_prototype_map(true);
2998 }
2999 JSObject::NotifyMapChange(old_map, map, isolate());
3000 old_map->NotifyLeafMapLayoutChange(isolate());
3001
3002 // Check that the already allocated object has the same size and type as
3003 // objects allocated using the constructor.
3004 DCHECK(map->instance_size() == old_map->instance_size());
3005 DCHECK(map->instance_type() == old_map->instance_type());
3006
3007 // In order to keep heap in consistent state there must be no allocations
3008 // before object re-initialization is finished.
3009 DisallowGarbageCollection no_gc;
3010
3011 // Reset the map for the object.
3012 JSGlobalProxy raw = *object;
3013 raw.set_map(*map, kReleaseStore);
3014
3015 // Reinitialize the object from the constructor map.
3016 InitializeJSObjectFromMap(raw, *raw_properties_or_hash, *map);
3017 }
3018
// Creates a JSMessageObject describing a diagnostic message ({message} with
// {argument}) at a source location. When start_position < 0, the position is
// not yet known and {shared_info}/{bytecode_offset} are kept so it can be
// computed lazily later.
NewJSMessageObject(MessageTemplate message,Handle<Object> argument,int start_position,int end_position,Handle<SharedFunctionInfo> shared_info,int bytecode_offset,Handle<Script> script,Handle<Object> stack_frames)3019 Handle<JSMessageObject> Factory::NewJSMessageObject(
3020 MessageTemplate message, Handle<Object> argument, int start_position,
3021 int end_position, Handle<SharedFunctionInfo> shared_info,
3022 int bytecode_offset, Handle<Script> script, Handle<Object> stack_frames) {
3023 Handle<Map> map = message_object_map();
3024 JSMessageObject message_obj =
3025 JSMessageObject::cast(New(map, AllocationType::kYoung));
3026 DisallowGarbageCollection no_gc;
// Young allocation, no GC before return: barriers can be skipped.
3027 message_obj.set_raw_properties_or_hash(*empty_fixed_array(),
3028 SKIP_WRITE_BARRIER);
3029 message_obj.initialize_elements();
3030 message_obj.set_elements(*empty_fixed_array(), SKIP_WRITE_BARRIER);
3031 message_obj.set_type(message);
3032 message_obj.set_argument(*argument, SKIP_WRITE_BARRIER);
3033 message_obj.set_start_position(start_position);
3034 message_obj.set_end_position(end_position);
3035 message_obj.set_script(*script, SKIP_WRITE_BARRIER);
3036 if (start_position >= 0) {
3037 // If there's a start_position, then there's no need to store the
3038 // SharedFunctionInfo as it will never be necessary to regenerate the
3039 // position.
3040 message_obj.set_shared_info(*undefined_value(), SKIP_WRITE_BARRIER);
3041 message_obj.set_bytecode_offset(Smi::FromInt(0));
3042 } else {
3043 message_obj.set_bytecode_offset(Smi::FromInt(bytecode_offset));
3044 if (shared_info.is_null()) {
3045 message_obj.set_shared_info(*undefined_value(), SKIP_WRITE_BARRIER);
3046 DCHECK_EQ(bytecode_offset, -1);
3047 } else {
3048 message_obj.set_shared_info(*shared_info, SKIP_WRITE_BARRIER);
3049 DCHECK_GE(bytecode_offset, kFunctionEntryBytecodeOffset);
3050 }
3051 }
3052
3053 message_obj.set_stack_frames(*stack_frames, SKIP_WRITE_BARRIER);
3054 message_obj.set_error_level(v8::Isolate::kMessageError);
3055 return handle(message_obj, isolate());
3056 }
3057
NewSharedFunctionInfoForApiFunction(MaybeHandle<String> maybe_name,Handle<FunctionTemplateInfo> function_template_info,FunctionKind kind)3058 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForApiFunction(
3059 MaybeHandle<String> maybe_name,
3060 Handle<FunctionTemplateInfo> function_template_info, FunctionKind kind) {
3061 Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(
3062 maybe_name, function_template_info, Builtin::kNoBuiltinId, kind);
3063 return shared;
3064 }
3065
NewSharedFunctionInfoForBuiltin(MaybeHandle<String> maybe_name,Builtin builtin,FunctionKind kind)3066 Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfoForBuiltin(
3067 MaybeHandle<String> maybe_name, Builtin builtin, FunctionKind kind) {
3068 Handle<SharedFunctionInfo> shared =
3069 NewSharedFunctionInfo(maybe_name, MaybeHandle<Code>(), builtin, kind);
3070 return shared;
3071 }
3072
3073 namespace {
// Cache-slot index for a Smi key. The cache stores (key, value) pairs, hence
// the length is halved before masking; the mask requires a power-of-two pair
// count.
NumberToStringCacheHash(Handle<FixedArray> cache,Smi number)3074 V8_INLINE int NumberToStringCacheHash(Handle<FixedArray> cache, Smi number) {
3075 int mask = (cache->length() >> 1) - 1;
3076 return number.value() & mask;
3077 }
3078
// Cache-slot index for a double key: XOR-fold the 64 bits of the IEEE
// representation down to an int, then mask as above.
NumberToStringCacheHash(Handle<FixedArray> cache,double number)3079 V8_INLINE int NumberToStringCacheHash(Handle<FixedArray> cache, double number) {
3080 int mask = (cache->length() >> 1) - 1;
3081 int64_t bits = bit_cast<int64_t>(number);
3082 return (static_cast<int>(bits) ^ static_cast<int>(bits >> 32)) & mask;
3083 }
3084
CharToString(Factory * factory,const char * string,NumberCacheMode mode)3085 V8_INLINE Handle<String> CharToString(Factory* factory, const char* string,
3086 NumberCacheMode mode) {
3087 // We tenure the allocated string since it is referenced from the
3088 // number-string cache which lives in the old space.
3089 AllocationType type = mode == NumberCacheMode::kIgnore
3090 ? AllocationType::kYoung
3091 : AllocationType::kOld;
3092 return factory->NewStringFromAsciiChecked(string, type);
3093 }
3094
3095 } // namespace
3096
// Stores (number, js_string) into the number-string cache at {hash}. On a
// collision the cache is grown once to its maximum size (and the store is
// skipped for this entry); when already at full size, the entry is simply
// overwritten.
NumberToStringCacheSet(Handle<Object> number,int hash,Handle<String> js_string)3097 void Factory::NumberToStringCacheSet(Handle<Object> number, int hash,
3098 Handle<String> js_string) {
3099 if (!number_string_cache()->get(hash * 2).IsUndefined(isolate()) &&
3100 !FLAG_optimize_for_size) {
3101 int full_size = isolate()->heap()->MaxNumberToStringCacheSize();
3102 if (number_string_cache()->length() != full_size) {
// Grow to full size; the fresh cache starts empty, so no entry is written
// now — the next miss will populate it.
3103 Handle<FixedArray> new_cache =
3104 NewFixedArray(full_size, AllocationType::kOld);
3105 isolate()->heap()->set_number_string_cache(*new_cache);
3106 return;
3107 }
3108 }
3109 DisallowGarbageCollection no_gc;
3110 FixedArray cache = *number_string_cache();
// Pairs are stored at (2*hash, 2*hash + 1): key then string.
3111 cache.set(hash * 2, *number);
3112 cache.set(hash * 2 + 1, *js_string);
3113 }
3114
// Looks up {number} in the number-string cache slot {hash}. Returns the
// cached string on a hit (identity match, or HeapNumbers with equal values),
// otherwise undefined.
NumberToStringCacheGet(Object number,int hash)3115 Handle<Object> Factory::NumberToStringCacheGet(Object number, int hash) {
3116 DisallowGarbageCollection no_gc;
3117 FixedArray cache = *number_string_cache();
3118 Object key = cache.get(hash * 2);
3119 if (key == number || (key.IsHeapNumber() && number.IsHeapNumber() &&
3120 key.Number() == number.Number())) {
3121 return Handle<String>(String::cast(cache.get(hash * 2 + 1)), isolate());
3122 }
3123 return undefined_value();
3124 }
3125
// Converts a Number (Smi or HeapNumber) to its String representation,
// canonicalizing Smi-representable doubles through the Smi path so both
// forms share cache entries.
NumberToString(Handle<Object> number,NumberCacheMode mode)3126 Handle<String> Factory::NumberToString(Handle<Object> number,
3127 NumberCacheMode mode) {
3128 if (number->IsSmi()) return SmiToString(Smi::cast(*number), mode);
3129
3130 double double_value = Handle<HeapNumber>::cast(number)->value();
3131 // Try to canonicalize doubles.
3132 int smi_value;
3133 if (DoubleToSmiInteger(double_value, &smi_value)) {
3134 return SmiToString(Smi::FromInt(smi_value), mode);
3135 }
3136 return HeapNumberToString(Handle<HeapNumber>::cast(number), double_value,
3137 mode);
3138 }
3139
// Scratch-buffer size shared by the number-to-string helpers below.
3140 // Must be large enough to fit any double, int, or size_t.
3141 static const int kNumberToStringBufferSize = 32;
3142
// Converts a HeapNumber to a String. {value} is the number's double value
// (passed in to avoid re-reading it); {mode} controls whether the
// number-string cache is consulted and/or updated.
HeapNumberToString(Handle<HeapNumber> number,double value,NumberCacheMode mode)3143 Handle<String> Factory::HeapNumberToString(Handle<HeapNumber> number,
3144 double value, NumberCacheMode mode) {
3145 int hash = 0;
3146 if (mode != NumberCacheMode::kIgnore) {
3147 hash = NumberToStringCacheHash(number_string_cache(), value);
3148 }
3149 if (mode == NumberCacheMode::kBoth) {
3150 Handle<Object> cached = NumberToStringCacheGet(*number, hash);
3151 if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached);
3152 }
3153
// Cache miss (or cache ignored): format into a stack buffer and intern.
3154 char arr[kNumberToStringBufferSize];
3155 base::Vector<char> buffer(arr, arraysize(arr));
3156 const char* string = DoubleToCString(value, buffer);
3157 Handle<String> result = CharToString(this, string, mode);
3158 if (mode != NumberCacheMode::kIgnore) {
3159 NumberToStringCacheSet(number, hash, result);
3160 }
3161 return result;
3162 }
3163
// Converts a Smi to a String, using the number-string cache per {mode}.
// Additionally precomputes the string's array-index hash for non-negative
// values so callers (and later lookups) get it for free.
SmiToString(Smi number,NumberCacheMode mode)3164 inline Handle<String> Factory::SmiToString(Smi number, NumberCacheMode mode) {
3165 int hash = NumberToStringCacheHash(number_string_cache(), number);
3166 if (mode == NumberCacheMode::kBoth) {
3167 Handle<Object> cached = NumberToStringCacheGet(number, hash);
3168 if (!cached->IsUndefined(isolate())) return Handle<String>::cast(cached);
3169 }
3170
3171 char arr[kNumberToStringBufferSize];
3172 base::Vector<char> buffer(arr, arraysize(arr));
3173 const char* string = IntToCString(number.value(), buffer);
3174 Handle<String> result = CharToString(this, string, mode);
3175 if (mode != NumberCacheMode::kIgnore) {
3176 NumberToStringCacheSet(handle(number, isolate()), hash, result);
3177 }
3178
3179 // Compute the hash here (rather than letting the caller take care of it) so
3180 // that the "cache hit" case above doesn't have to bother with it.
3181 STATIC_ASSERT(Smi::kMaxValue <= std::numeric_limits<uint32_t>::max());
3182 {
3183 DisallowGarbageCollection no_gc;
3184 String raw = *result;
// Only set the hash if none exists yet; negative Smis are not array indices.
3185 if (raw.raw_hash_field() == String::kEmptyHashField &&
3186 number.value() >= 0) {
3187 uint32_t raw_hash_field = StringHasher::MakeArrayIndexHash(
3188 static_cast<uint32_t>(number.value()), raw.length());
3189 raw.set_raw_hash_field(raw_hash_field);
3190 }
3191 }
3192 return result;
3193 }
3194
// Converts a size_t to a String. Smi-range values go through SmiToString
// (cacheable); values up to kMaxSafeInteger go through the HeapNumber path;
// larger values are formatted manually (and cannot be cached). Also
// precomputes the array-index hash when the value qualifies.
SizeToString(size_t value,bool check_cache)3195 Handle<String> Factory::SizeToString(size_t value, bool check_cache) {
3196 Handle<String> result;
3197 NumberCacheMode cache_mode =
3198 check_cache ? NumberCacheMode::kBoth : NumberCacheMode::kIgnore;
3199 if (value <= Smi::kMaxValue) {
3200 int32_t int32v = static_cast<int32_t>(static_cast<uint32_t>(value));
3201 // SmiToString sets the hash when needed, we can return immediately.
3202 return SmiToString(Smi::FromInt(int32v), cache_mode);
3203 } else if (value <= kMaxSafeInteger) {
3204 // TODO(jkummerow): Refactor the cache to not require Objects as keys.
3205 double double_value = static_cast<double>(value);
3206 result = HeapNumberToString(NewHeapNumber(double_value), value, cache_mode);
3207 } else {
3208 char arr[kNumberToStringBufferSize];
3209 base::Vector<char> buffer(arr, arraysize(arr));
3210 // Build the string backwards from the least significant digit.
3211 int i = buffer.length();
3212 size_t value_copy = value;
3213 buffer[--i] = '\0';
3214 do {
3215 buffer[--i] = '0' + (value_copy % 10);
3216 value_copy /= 10;
3217 } while (value_copy > 0);
3218 char* string = buffer.begin() + i;
3219 // No way to cache this; we'd need an {Object} to use as key.
3220 result = NewStringFromAsciiChecked(string);
3221 }
3222 {
3223 DisallowGarbageCollection no_gc;
3224 String raw = *result;
// Set the array-index hash if it is still empty and the value fits.
3225 if (value <= JSArray::kMaxArrayIndex &&
3226 raw.raw_hash_field() == String::kEmptyHashField) {
3227 uint32_t raw_hash_field = StringHasher::MakeArrayIndexHash(
3228 static_cast<uint32_t>(value), raw.length());
3229 raw.set_raw_hash_field(raw_hash_field);
3230 }
3231 }
3232 return result;
3233 }
3234
// Creates a DebugInfo for {shared} (which must not already have one) and
// links it back onto the SharedFunctionInfo.
NewDebugInfo(Handle<SharedFunctionInfo> shared)3235 Handle<DebugInfo> Factory::NewDebugInfo(Handle<SharedFunctionInfo> shared) {
3236 DCHECK(!shared->HasDebugInfo());
3237
3238 auto debug_info =
3239 NewStructInternal<DebugInfo>(DEBUG_INFO_TYPE, AllocationType::kOld);
3240 DisallowGarbageCollection no_gc;
3241 SharedFunctionInfo raw_shared = *shared;
3242 debug_info.set_flags(DebugInfo::kNone, kRelaxedStore);
3243 debug_info.set_shared(raw_shared);
3244 debug_info.set_debugger_hints(0);
3245 DCHECK_EQ(DebugInfo::kNoDebuggingId, debug_info.debugging_id());
3246 debug_info.set_script(raw_shared.script_or_debug_info(kAcquireLoad));
// Bytecode-array fields start out undefined; they are filled in when debug
// bytecode is installed.
3247 HeapObject undefined = *undefined_value();
3248 debug_info.set_original_bytecode_array(undefined, kReleaseStore,
3249 SKIP_WRITE_BARRIER);
3250 debug_info.set_debug_bytecode_array(undefined, kReleaseStore,
3251 SKIP_WRITE_BARRIER);
3252 debug_info.set_break_points(*empty_fixed_array(), SKIP_WRITE_BARRIER);
3253
3254 // Link debug info to function.
3255 raw_shared.SetDebugInfo(debug_info);
3256
3257 return handle(debug_info, isolate());
3258 }
3259
NewBreakPointInfo(int source_position)3260 Handle<BreakPointInfo> Factory::NewBreakPointInfo(int source_position) {
3261 auto new_break_point_info = NewStructInternal<BreakPointInfo>(
3262 BREAK_POINT_INFO_TYPE, AllocationType::kOld);
3263 DisallowGarbageCollection no_gc;
3264 new_break_point_info.set_source_position(source_position);
3265 new_break_point_info.set_break_points(*undefined_value(), SKIP_WRITE_BARRIER);
3266 return handle(new_break_point_info, isolate());
3267 }
3268
NewBreakPoint(int id,Handle<String> condition)3269 Handle<BreakPoint> Factory::NewBreakPoint(int id, Handle<String> condition) {
3270 auto new_break_point =
3271 NewStructInternal<BreakPoint>(BREAK_POINT_TYPE, AllocationType::kOld);
3272 DisallowGarbageCollection no_gc;
3273 new_break_point.set_id(id);
3274 new_break_point.set_condition(*condition);
3275 return handle(new_break_point, isolate());
3276 }
3277
// Creates a StackFrameInfo struct capturing one frame of a stack trace.
NewStackFrameInfo(Handle<Object> receiver_or_instance,Handle<Object> function,Handle<HeapObject> code_object,int code_offset_or_source_position,int flags,Handle<FixedArray> parameters)3278 Handle<StackFrameInfo> Factory::NewStackFrameInfo(
3279 Handle<Object> receiver_or_instance, Handle<Object> function,
3280 Handle<HeapObject> code_object, int code_offset_or_source_position,
3281 int flags, Handle<FixedArray> parameters) {
3282 auto info = NewStructInternal<StackFrameInfo>(STACK_FRAME_INFO_TYPE,
3283 AllocationType::kYoung);
3284 DisallowGarbageCollection no_gc;
// Freshly allocated young object, no GC before return: skip barriers.
3285 info.set_receiver_or_instance(*receiver_or_instance, SKIP_WRITE_BARRIER);
3286 info.set_function(*function, SKIP_WRITE_BARRIER);
3287 info.set_code_object(*code_object, SKIP_WRITE_BARRIER);
3288 info.set_code_offset_or_source_position(code_offset_or_source_position);
3289 info.set_flags(flags);
3290 info.set_parameters(*parameters, SKIP_WRITE_BARRIER);
3291 return handle(info, isolate());
3292 }
3293
// Creates an arguments object for {callee} with {length} elements. Strict
// functions (and functions without simple parameters) get the strict map,
// which has no "callee" property.
NewArgumentsObject(Handle<JSFunction> callee,int length)3294 Handle<JSObject> Factory::NewArgumentsObject(Handle<JSFunction> callee,
3295 int length) {
3296 bool strict_mode_callee = is_strict(callee->shared().language_mode()) ||
3297 !callee->shared().has_simple_parameters();
3298 Handle<Map> map = strict_mode_callee ? isolate()->strict_arguments_map()
3299 : isolate()->sloppy_arguments_map();
3300 AllocationSiteUsageContext context(isolate(), Handle<AllocationSite>(),
3301 false);
3302 DCHECK(!isolate()->has_pending_exception());
3303 Handle<JSObject> result = NewJSObjectFromMap(map);
3304 Handle<Smi> value(Smi::FromInt(length), isolate());
// These property stores cannot fail on a fresh arguments object; Assert()
// enforces that.
3305 Object::SetProperty(isolate(), result, length_string(), value,
3306 StoreOrigin::kMaybeKeyed,
3307 Just(ShouldThrow::kThrowOnError))
3308 .Assert();
3309 if (!strict_mode_callee) {
3310 Object::SetProperty(isolate(), result, callee_string(), callee,
3311 StoreOrigin::kMaybeKeyed,
3312 Just(ShouldThrow::kThrowOnError))
3313 .Assert();
3314 }
3315 return result;
3316 }
3317
// Returns a map for an object literal with {number_of_properties} in-object
// properties, using the per-native-context map cache (weak entries). Too many
// properties fall back to the slow (dictionary-backed) object map.
ObjectLiteralMapFromCache(Handle<NativeContext> context,int number_of_properties)3318 Handle<Map> Factory::ObjectLiteralMapFromCache(Handle<NativeContext> context,
3319 int number_of_properties) {
3320 // Use initial slow object proto map for too many properties.
3321 if (number_of_properties >= JSObject::kMapCacheSize) {
3322 return handle(context->slow_object_with_object_prototype_map(), isolate());
3323 }
3324
3325 Handle<WeakFixedArray> cache(WeakFixedArray::cast(context->map_cache()),
3326 isolate());
3327
3328 // Check to see whether there is a matching element in the cache.
3329 MaybeObject result = cache->Get(number_of_properties);
3330 HeapObject heap_object;
3331 if (result->GetHeapObjectIfWeak(&heap_object)) {
3332 Map map = Map::cast(heap_object);
3333 DCHECK(!map.is_dictionary_map());
3334 return handle(map, isolate());
3335 }
3336
3337 // Create a new map and add it to the cache.
3338 Handle<Map> map = Map::Create(isolate(), number_of_properties);
3339 DCHECK(!map->is_dictionary_map());
// Store weakly so the cache does not keep unused literal maps alive.
3340 cache->Set(number_of_properties, HeapObjectReference::Weak(*map));
3341 return map;
3342 }
3343
NewMegaDomHandler(MaybeObjectHandle accessor,MaybeObjectHandle context)3344 Handle<MegaDomHandler> Factory::NewMegaDomHandler(MaybeObjectHandle accessor,
3345 MaybeObjectHandle context) {
3346 Handle<Map> map = read_only_roots().mega_dom_handler_map_handle();
3347 MegaDomHandler handler = MegaDomHandler::cast(New(map, AllocationType::kOld));
3348 DisallowGarbageCollection no_gc;
3349 handler.set_accessor(*accessor);
3350 handler.set_context(*context);
3351 return handle(handler, isolate());
3352 }
3353
NewLoadHandler(int data_count,AllocationType allocation)3354 Handle<LoadHandler> Factory::NewLoadHandler(int data_count,
3355 AllocationType allocation) {
3356 Handle<Map> map;
3357 switch (data_count) {
3358 case 1:
3359 map = load_handler1_map();
3360 break;
3361 case 2:
3362 map = load_handler2_map();
3363 break;
3364 case 3:
3365 map = load_handler3_map();
3366 break;
3367 default:
3368 UNREACHABLE();
3369 }
3370 return handle(LoadHandler::cast(New(map, allocation)), isolate());
3371 }
3372
NewStoreHandler(int data_count)3373 Handle<StoreHandler> Factory::NewStoreHandler(int data_count) {
3374 Handle<Map> map;
3375 switch (data_count) {
3376 case 0:
3377 map = store_handler0_map();
3378 break;
3379 case 1:
3380 map = store_handler1_map();
3381 break;
3382 case 2:
3383 map = store_handler2_map();
3384 break;
3385 case 3:
3386 map = store_handler3_map();
3387 break;
3388 default:
3389 UNREACHABLE();
3390 }
3391 return handle(StoreHandler::cast(New(map, AllocationType::kOld)), isolate());
3392 }
3393
// Installs the data FixedArray for an ATOM (plain-string) regexp on {regexp}.
SetRegExpAtomData(Handle<JSRegExp> regexp,Handle<String> source,JSRegExp::Flags flags,Handle<Object> data)3394 void Factory::SetRegExpAtomData(Handle<JSRegExp> regexp, Handle<String> source,
3395 JSRegExp::Flags flags, Handle<Object> data) {
3396 FixedArray store =
3397 *NewFixedArray(JSRegExp::kAtomDataSize, AllocationType::kYoung);
3398 DisallowGarbageCollection no_gc;
3399 store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::ATOM));
3400 store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
3401 store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
// For ATOM regexps, {data} is the literal pattern to search for.
3402 store.set(JSRegExp::kAtomPatternIndex, *data, SKIP_WRITE_BARRIER);
3403 regexp->set_data(store);
3404 }
3405
// Installs the data FixedArray for an IRREGEXP regexp on {regexp}. Code and
// bytecode slots start uninitialized and are filled in on (lazy) compilation.
SetRegExpIrregexpData(Handle<JSRegExp> regexp,Handle<String> source,JSRegExp::Flags flags,int capture_count,uint32_t backtrack_limit)3406 void Factory::SetRegExpIrregexpData(Handle<JSRegExp> regexp,
3407 Handle<String> source,
3408 JSRegExp::Flags flags, int capture_count,
3409 uint32_t backtrack_limit) {
3410 DCHECK(Smi::IsValid(backtrack_limit));
3411 FixedArray store =
3412 *NewFixedArray(JSRegExp::kIrregexpDataSize, AllocationType::kYoung);
3413 DisallowGarbageCollection no_gc;
3414 Smi uninitialized = Smi::FromInt(JSRegExp::kUninitializedValue);
// With --regexp-tier-up, start the tick countdown toward native compilation.
3415 Smi ticks_until_tier_up = FLAG_regexp_tier_up
3416 ? Smi::FromInt(FLAG_regexp_tier_up_ticks)
3417 : uninitialized;
3418 store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::IRREGEXP));
3419 store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
3420 store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
3421 store.set(JSRegExp::kIrregexpLatin1CodeIndex, uninitialized);
3422 store.set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized);
3423 store.set(JSRegExp::kIrregexpLatin1BytecodeIndex, uninitialized);
3424 store.set(JSRegExp::kIrregexpUC16BytecodeIndex, uninitialized);
3425 store.set(JSRegExp::kIrregexpMaxRegisterCountIndex, Smi::zero());
3426 store.set(JSRegExp::kIrregexpCaptureCountIndex, Smi::FromInt(capture_count));
3427 store.set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized);
3428 store.set(JSRegExp::kIrregexpTicksUntilTierUpIndex, ticks_until_tier_up);
3429 store.set(JSRegExp::kIrregexpBacktrackLimit, Smi::FromInt(backtrack_limit));
3430 regexp->set_data(store);
3431 }
3432
// Installs the data FixedArray for an EXPERIMENTAL-engine regexp on {regexp}.
// It reuses the Irregexp slot layout but leaves all engine state
// uninitialized.
SetRegExpExperimentalData(Handle<JSRegExp> regexp,Handle<String> source,JSRegExp::Flags flags,int capture_count)3433 void Factory::SetRegExpExperimentalData(Handle<JSRegExp> regexp,
3434 Handle<String> source,
3435 JSRegExp::Flags flags,
3436 int capture_count) {
3437 FixedArray store =
3438 *NewFixedArray(JSRegExp::kExperimentalDataSize, AllocationType::kYoung);
3439 DisallowGarbageCollection no_gc;
3440 Smi uninitialized = Smi::FromInt(JSRegExp::kUninitializedValue);
3441
3442 store.set(JSRegExp::kTagIndex, Smi::FromInt(JSRegExp::EXPERIMENTAL));
3443 store.set(JSRegExp::kSourceIndex, *source, SKIP_WRITE_BARRIER);
3444 store.set(JSRegExp::kFlagsIndex, Smi::FromInt(flags));
3445 store.set(JSRegExp::kIrregexpLatin1CodeIndex, uninitialized);
3446 store.set(JSRegExp::kIrregexpUC16CodeIndex, uninitialized);
3447 store.set(JSRegExp::kIrregexpLatin1BytecodeIndex, uninitialized);
3448 store.set(JSRegExp::kIrregexpUC16BytecodeIndex, uninitialized);
3449 store.set(JSRegExp::kIrregexpMaxRegisterCountIndex, uninitialized);
3450 store.set(JSRegExp::kIrregexpCaptureCountIndex, Smi::FromInt(capture_count));
3451 store.set(JSRegExp::kIrregexpCaptureNameMapIndex, uninitialized);
3452 store.set(JSRegExp::kIrregexpTicksUntilTierUpIndex, uninitialized);
3453 store.set(JSRegExp::kIrregexpBacktrackLimit, uninitialized);
3454 regexp->set_data(store);
3455 }
3456
NewRegExpMatchInfo()3457 Handle<RegExpMatchInfo> Factory::NewRegExpMatchInfo() {
3458 // Initially, the last match info consists of all fixed fields plus space for
3459 // the match itself (i.e., 2 capture indices).
3460 static const int kInitialSize = RegExpMatchInfo::kFirstCaptureIndex +
3461 RegExpMatchInfo::kInitialCaptureIndices;
3462
3463 Handle<FixedArray> elems =
3464 NewFixedArray(kInitialSize, AllocationType::kYoung);
3465 Handle<RegExpMatchInfo> result = Handle<RegExpMatchInfo>::cast(elems);
3466 {
3467 DisallowGarbageCollection no_gc;
3468 RegExpMatchInfo raw = *result;
// Initialize last-subject/last-input and zero the two initial capture slots.
3469 raw.SetNumberOfCaptureRegisters(RegExpMatchInfo::kInitialCaptureIndices);
3470 raw.SetLastSubject(*empty_string(), SKIP_WRITE_BARRIER);
3471 raw.SetLastInput(*undefined_value(), SKIP_WRITE_BARRIER);
3472 raw.SetCapture(0, 0);
3473 raw.SetCapture(1, 0);
3474 }
3475 return result;
3476 }
3477
GlobalConstantFor(Handle<Name> name)3478 Handle<Object> Factory::GlobalConstantFor(Handle<Name> name) {
3479 if (Name::Equals(isolate(), name, undefined_string())) {
3480 return undefined_value();
3481 }
3482 if (Name::Equals(isolate(), name, NaN_string())) return nan_value();
3483 if (Name::Equals(isolate(), name, Infinity_string())) return infinity_value();
3484 return Handle<Object>::null();
3485 }
3486
ToPrimitiveHintString(ToPrimitiveHint hint)3487 Handle<String> Factory::ToPrimitiveHintString(ToPrimitiveHint hint) {
3488 switch (hint) {
3489 case ToPrimitiveHint::kDefault:
3490 return default_string();
3491 case ToPrimitiveHint::kNumber:
3492 return number_string();
3493 case ToPrimitiveHint::kString:
3494 return string_string();
3495 }
3496 UNREACHABLE();
3497 }
3498
// Creates the map used for sloppy-mode JSFunctions in the given
// {function_mode}. Installs accessors for length/name/arguments/caller (and
// prototype when the mode has one); the "name" becomes an in-object data
// field instead of an accessor when the mode carries a name.
// NOTE: descriptor order must match the kLengthDescriptorIndex/
// kNameDescriptorIndex constants asserted below.
CreateSloppyFunctionMap(FunctionMode function_mode,MaybeHandle<JSFunction> maybe_empty_function)3499 Handle<Map> Factory::CreateSloppyFunctionMap(
3500 FunctionMode function_mode, MaybeHandle<JSFunction> maybe_empty_function) {
3501 bool has_prototype = IsFunctionModeWithPrototype(function_mode);
3502 int header_size = has_prototype ? JSFunction::kSizeWithPrototype
3503 : JSFunction::kSizeWithoutPrototype;
3504 int descriptors_count = has_prototype ? 5 : 4;
3505 int inobject_properties_count = 0;
3506 if (IsFunctionModeWithName(function_mode)) ++inobject_properties_count;
3507
3508 Handle<Map> map = NewMap(
3509 JS_FUNCTION_TYPE, header_size + inobject_properties_count * kTaggedSize,
3510 TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
3511 {
3512 DisallowGarbageCollection no_gc;
3513 Map raw_map = *map;
3514 raw_map.set_has_prototype_slot(has_prototype);
3515 raw_map.set_is_constructor(has_prototype);
3516 raw_map.set_is_callable(true);
3517 }
3518 Handle<JSFunction> empty_function;
3519 if (maybe_empty_function.ToHandle(&empty_function)) {
3520 Map::SetPrototype(isolate(), map, empty_function);
3521 }
3522
3523 //
3524 // Setup descriptors array.
3525 //
3526 Map::EnsureDescriptorSlack(isolate(), map, descriptors_count);
3527
3528 PropertyAttributes ro_attribs =
3529 static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
3530 PropertyAttributes rw_attribs =
3531 static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
3532 PropertyAttributes roc_attribs =
3533 static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);
3534
3535 int field_index = 0;
3536 STATIC_ASSERT(JSFunctionOrBoundFunction::kLengthDescriptorIndex == 0);
3537 { // Add length accessor.
3538 Descriptor d = Descriptor::AccessorConstant(
3539 length_string(), function_length_accessor(), roc_attribs);
3540 map->AppendDescriptor(isolate(), &d);
3541 }
3542
3543 STATIC_ASSERT(JSFunctionOrBoundFunction::kNameDescriptorIndex == 1);
3544 if (IsFunctionModeWithName(function_mode)) {
3545 // Add name field.
3546 Handle<Name> name = isolate()->factory()->name_string();
3547 Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
3548 roc_attribs, Representation::Tagged());
3549 map->AppendDescriptor(isolate(), &d);
3550
3551 } else {
3552 // Add name accessor.
3553 Descriptor d = Descriptor::AccessorConstant(
3554 name_string(), function_name_accessor(), roc_attribs);
3555 map->AppendDescriptor(isolate(), &d);
3556 }
3557 { // Add arguments accessor.
3558 Descriptor d = Descriptor::AccessorConstant(
3559 arguments_string(), function_arguments_accessor(), ro_attribs);
3560 map->AppendDescriptor(isolate(), &d);
3561 }
3562 { // Add caller accessor.
3563 Descriptor d = Descriptor::AccessorConstant(
3564 caller_string(), function_caller_accessor(), ro_attribs);
3565 map->AppendDescriptor(isolate(), &d);
3566 }
3567 if (IsFunctionModeWithPrototype(function_mode)) {
3568 // Add prototype accessor.
3569 PropertyAttributes attribs =
3570 IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs
3571 : ro_attribs;
3572 Descriptor d = Descriptor::AccessorConstant(
3573 prototype_string(), function_prototype_accessor(), attribs);
3574 map->AppendDescriptor(isolate(), &d);
3575 }
3576 DCHECK_EQ(inobject_properties_count, field_index);
3577 DCHECK_EQ(0,
3578 map->instance_descriptors(isolate()).number_of_slack_descriptors());
3579 LOG(isolate(), MapDetails(*map));
3580 return map;
3581 }
3582
// Creates the Map used for strict-mode JSFunction instances.
// Descriptor layout (order is load-bearing; see the STATIC_ASSERTs):
//   0: "length"    - accessor (read-only, configurable)
//   1: "name"      - in-object data field or accessor, depending on mode
//   2: "prototype" - accessor, only for modes that have a prototype slot
Handle<Map> Factory::CreateStrictFunctionMap(
    FunctionMode function_mode, Handle<JSFunction> empty_function) {
  bool has_prototype = IsFunctionModeWithPrototype(function_mode);
  // Functions without a prototype slot use a smaller object header.
  int header_size = has_prototype ? JSFunction::kSizeWithPrototype
                                  : JSFunction::kSizeWithoutPrototype;
  int inobject_properties_count = 0;
  // length and prototype accessors or just length accessor.
  int descriptors_count = IsFunctionModeWithPrototype(function_mode) ? 2 : 1;
  if (IsFunctionModeWithName(function_mode)) {
    ++inobject_properties_count;  // name property.
  } else {
    ++descriptors_count;  // name accessor.
  }
  descriptors_count += inobject_properties_count;

  // Instance size includes room for the in-object "name" field, if any.
  Handle<Map> map = NewMap(
      JS_FUNCTION_TYPE, header_size + inobject_properties_count * kTaggedSize,
      TERMINAL_FAST_ELEMENTS_KIND, inobject_properties_count);
  {
    // Bit-field updates on the raw Map must not be interrupted by GC.
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_has_prototype_slot(has_prototype);
    raw_map.set_is_constructor(has_prototype);
    raw_map.set_is_callable(true);
  }
  Map::SetPrototype(isolate(), map, empty_function);

  //
  // Setup descriptors array.
  //
  // Reserve exactly descriptors_count slots; the DCHECK at the end verifies
  // that every reserved slot is used.
  Map::EnsureDescriptorSlack(isolate(), map, descriptors_count);

  PropertyAttributes rw_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE);
  PropertyAttributes ro_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
  PropertyAttributes roc_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);

  int field_index = 0;
  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
  {  // Add length accessor.
    Descriptor d = Descriptor::AccessorConstant(
        length_string(), function_length_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  STATIC_ASSERT(JSFunction::kNameDescriptorIndex == 1);
  if (IsFunctionModeWithName(function_mode)) {
    // Add name field.
    Handle<Name> name = isolate()->factory()->name_string();
    Descriptor d = Descriptor::DataField(isolate(), name, field_index++,
                                         roc_attribs, Representation::Tagged());
    map->AppendDescriptor(isolate(), &d);

  } else {
    // Add name accessor.
    Descriptor d = Descriptor::AccessorConstant(
        name_string(), function_name_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  if (IsFunctionModeWithPrototype(function_mode)) {
    // Add prototype accessor.
    // "prototype" is writable unless the function mode pins it read-only.
    PropertyAttributes attribs =
        IsFunctionModeWithWritablePrototype(function_mode) ? rw_attribs
                                                           : ro_attribs;
    Descriptor d = Descriptor::AccessorConstant(
        prototype_string(), function_prototype_accessor(), attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  // All in-object fields and all reserved descriptor slots must be consumed.
  DCHECK_EQ(inobject_properties_count, field_index);
  DCHECK_EQ(0,
            map->instance_descriptors(isolate()).number_of_slack_descriptors());
  LOG(isolate(), MapDetails(*map));
  return map;
}
3660
// Creates the Map used for class constructor functions. Class constructors
// always carry a prototype slot, act as constructors, and serve as
// prototype maps themselves.
Handle<Map> Factory::CreateClassFunctionMap(Handle<JSFunction> empty_function) {
  Handle<Map> map =
      NewMap(JS_CLASS_CONSTRUCTOR_TYPE, JSFunction::kSizeWithPrototype);
  {
    // Bit-field updates on the raw Map must not be interrupted by GC.
    DisallowGarbageCollection no_gc;
    Map raw_map = *map;
    raw_map.set_has_prototype_slot(true);
    raw_map.set_is_constructor(true);
    raw_map.set_is_prototype_map(true);
    raw_map.set_is_callable(true);
  }
  Map::SetPrototype(isolate(), map, empty_function);

  //
  // Setup descriptors array.
  //
  // Exactly two descriptors: "length" and "prototype".
  Map::EnsureDescriptorSlack(isolate(), map, 2);

  PropertyAttributes ro_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | DONT_DELETE | READ_ONLY);
  PropertyAttributes roc_attribs =
      static_cast<PropertyAttributes>(DONT_ENUM | READ_ONLY);

  STATIC_ASSERT(JSFunction::kLengthDescriptorIndex == 0);
  {  // Add length accessor.
    Descriptor d = Descriptor::AccessorConstant(
        length_string(), function_length_accessor(), roc_attribs);
    map->AppendDescriptor(isolate(), &d);
  }

  {
    // Add prototype accessor.
    // Unlike plain functions, the class "prototype" is never writable.
    Descriptor d = Descriptor::AccessorConstant(
        prototype_string(), function_prototype_accessor(), ro_attribs);
    map->AppendDescriptor(isolate(), &d);
  }
  LOG(isolate(), MapDetails(*map));
  return map;
}
3700
// Allocates a fresh JSPromise without notifying the promise hooks; callers
// that need the kInit hook to run use NewJSPromise() instead.
Handle<JSPromise> Factory::NewJSPromiseWithoutHook() {
  Handle<JSPromise> promise =
      Handle<JSPromise>::cast(NewJSObject(isolate()->promise_function()));
  DisallowGarbageCollection no_gc;
  JSPromise raw = *promise;
  // Smi store into a just-allocated object: safe to skip the write barrier.
  raw.set_reactions_or_result(Smi::zero(), SKIP_WRITE_BARRIER);
  raw.set_flags(0);
  ZeroEmbedderFields(*promise);
  DCHECK_EQ(raw.GetEmbedderFieldCount(), v8::Promise::kEmbedderFieldCount);
  return promise;
}
3712
NewJSPromise()3713 Handle<JSPromise> Factory::NewJSPromise() {
3714 Handle<JSPromise> promise = NewJSPromiseWithoutHook();
3715 isolate()->RunAllPromiseHooks(PromiseHookType::kInit, promise,
3716 undefined_value());
3717 return promise;
3718 }
3719
NewCallHandlerInfo(bool has_no_side_effect)3720 Handle<CallHandlerInfo> Factory::NewCallHandlerInfo(bool has_no_side_effect) {
3721 Handle<Map> map = has_no_side_effect
3722 ? side_effect_free_call_handler_info_map()
3723 : side_effect_call_handler_info_map();
3724 CallHandlerInfo info = CallHandlerInfo::cast(New(map, AllocationType::kOld));
3725 DisallowGarbageCollection no_gc;
3726 Object undefined_value = read_only_roots().undefined_value();
3727 info.set_callback(undefined_value, SKIP_WRITE_BARRIER);
3728 info.set_js_callback(undefined_value, SKIP_WRITE_BARRIER);
3729 info.set_data(undefined_value, SKIP_WRITE_BARRIER);
3730 return handle(info, isolate());
3731 }
3732
CanAllocateInReadOnlySpace()3733 bool Factory::CanAllocateInReadOnlySpace() {
3734 return isolate()->heap()->CanAllocateInReadOnlySpace();
3735 }
3736
EmptyStringRootIsInitialized()3737 bool Factory::EmptyStringRootIsInitialized() {
3738 return isolate()->roots_table()[RootIndex::kempty_string] != kNullAddress;
3739 }
3740
NewFunctionForTesting(Handle<String> name)3741 Handle<JSFunction> Factory::NewFunctionForTesting(Handle<String> name) {
3742 Handle<SharedFunctionInfo> info =
3743 NewSharedFunctionInfoForBuiltin(name, Builtin::kIllegal);
3744 info->set_language_mode(LanguageMode::kSloppy);
3745 return JSFunctionBuilder{isolate(), info, isolate()->native_context()}
3746 .Build();
3747 }
3748
// Records the mandatory ingredients of a JSFunction. The map and feedback
// cell are optional and are defaulted later by PrepareMap() /
// PrepareFeedbackCell() during Build().
Factory::JSFunctionBuilder::JSFunctionBuilder(Isolate* isolate,
                                              Handle<SharedFunctionInfo> sfi,
                                              Handle<Context> context)
    : isolate_(isolate), sfi_(sfi), context_(context) {}
3753
Build()3754 Handle<JSFunction> Factory::JSFunctionBuilder::Build() {
3755 PrepareMap();
3756 PrepareFeedbackCell();
3757
3758 Handle<Code> code = handle(sfi_->GetCode(), isolate_);
3759 Handle<JSFunction> result = BuildRaw(code);
3760
3761 if (code->kind() == CodeKind::BASELINE) {
3762 IsCompiledScope is_compiled_scope(sfi_->is_compiled_scope(isolate_));
3763 JSFunction::EnsureFeedbackVector(result, &is_compiled_scope);
3764 }
3765
3766 Compiler::PostInstantiation(result);
3767 return result;
3768 }
3769
// Allocates and initializes the JSFunction object itself. PrepareMap() and
// PrepareFeedbackCell() must already have run, so both maybe_ fields are
// guaranteed to hold values here.
Handle<JSFunction> Factory::JSFunctionBuilder::BuildRaw(Handle<Code> code) {
  Isolate* isolate = isolate_;
  Factory* factory = isolate_->factory();

  Handle<Map> map = maybe_map_.ToHandleChecked();
  Handle<FeedbackCell> feedback_cell = maybe_feedback_cell_.ToHandleChecked();

  DCHECK(InstanceTypeChecker::IsJSFunction(map->instance_type()));

  // Allocation.
  JSFunction function = JSFunction::cast(factory->New(map, allocation_type_));
  DisallowGarbageCollection no_gc;

  // Stores into a freshly allocated young-generation object need no write
  // barrier; old-space allocations still require one.
  WriteBarrierMode mode = allocation_type_ == AllocationType::kYoung
                              ? SKIP_WRITE_BARRIER
                              : UPDATE_WRITE_BARRIER;
  // Header initialization.
  function.initialize_properties(isolate);
  function.initialize_elements();
  function.set_shared(*sfi_, mode);
  function.set_context(*context_, mode);
  function.set_raw_feedback_cell(*feedback_cell, mode);
  function.set_code(*code, kReleaseStore, mode);
  if (function.has_prototype_slot()) {
    // the_hole marks the prototype slot as not yet initialized; it is a
    // read-only root, so the write barrier can be skipped.
    function.set_prototype_or_initial_map(
        ReadOnlyRoots(isolate).the_hole_value(), kReleaseStore,
        SKIP_WRITE_BARRIER);
  }

  // Potentially body initialization.
  factory->InitializeJSObjectBody(
      function, *map, JSFunction::GetHeaderSize(map->has_prototype_slot()));

  return handle(function, isolate_);
}
3805
PrepareMap()3806 void Factory::JSFunctionBuilder::PrepareMap() {
3807 if (maybe_map_.is_null()) {
3808 // No specific map requested, use the default.
3809 maybe_map_ = handle(
3810 Map::cast(context_->native_context().get(sfi_->function_map_index())),
3811 isolate_);
3812 }
3813 }
3814
PrepareFeedbackCell()3815 void Factory::JSFunctionBuilder::PrepareFeedbackCell() {
3816 Handle<FeedbackCell> feedback_cell;
3817 if (maybe_feedback_cell_.ToHandle(&feedback_cell)) {
3818 // Track the newly-created closure.
3819 feedback_cell->IncrementClosureCount(isolate_);
3820 } else {
3821 // Fall back to the many_closures_cell.
3822 maybe_feedback_cell_ = isolate_->factory()->many_closures_cell();
3823 }
3824 }
3825
3826 } // namespace internal
3827 } // namespace v8
3828