1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/builtins/builtins.h"
6
7 #include "src/api/api-inl.h"
8 #include "src/builtins/builtins-descriptors.h"
9 #include "src/codegen/assembler-inl.h"
10 #include "src/codegen/callable.h"
11 #include "src/codegen/macro-assembler-inl.h"
12 #include "src/codegen/macro-assembler.h"
13 #include "src/diagnostics/code-tracer.h"
14 #include "src/execution/isolate.h"
15 #include "src/interpreter/bytecodes.h"
16 #include "src/logging/code-events.h" // For CodeCreateEvent.
17 #include "src/logging/log.h" // For Logger.
18 #include "src/objects/fixed-array.h"
19 #include "src/objects/objects-inl.h"
20 #include "src/objects/visitors.h"
21 #include "src/snapshot/embedded/embedded-data.h"
22 #include "src/utils/ostreams.h"
23
24 namespace v8 {
25 namespace internal {
26
// Forward declarations for C++ builtins.
// Each CPP builtin is implemented by a C++ function with this signature; the
// address of that function (via FUNCTION_ADDR) is recorded in the
// builtin_metadata table below.
#define FORWARD_DECLARE(Name) \
  Address Builtin_##Name(int argc, Address* args, Isolate* isolate);
BUILTIN_LIST_C(FORWARD_DECLARE)
#undef FORWARD_DECLARE
32
namespace {

// TODO(jgruber): Pack in CallDescriptors::Key.
// Static per-builtin metadata: the builtin's name, its kind, and a
// kind-specific payload (C++ entry address, JS stack parameter count, or the
// bytecode/operand-scale pair a bytecode handler implements).
struct BuiltinMetadata {
  const char* name;
  Builtins::Kind kind;

  // For BCH builtins: which interpreter bytecode (at which operand scale)
  // this handler implements. Bit-packed so the union below stays within one
  // Address-sized slot (see STATIC_ASSERTs).
  struct BytecodeAndScale {
    interpreter::Bytecode bytecode : 8;
    interpreter::OperandScale scale : 8;
  };

  STATIC_ASSERT(sizeof(interpreter::Bytecode) == 1);
  STATIC_ASSERT(sizeof(interpreter::OperandScale) == 1);
  STATIC_ASSERT(sizeof(BytecodeAndScale) <= sizeof(Address));

  // The `data` field has kind-specific contents.
  union KindSpecificData {
    // TODO(jgruber): Union constructors are needed since C++11 does not support
    // designated initializers (e.g.: {.parameter_count = count}). Update once
    // we're at C++20 :)
    // The constructors are marked constexpr to avoid the need for a static
    // initializer for builtins.cc (see check-static-initializers.sh).
    constexpr KindSpecificData() : cpp_entry(kNullAddress) {}
    constexpr KindSpecificData(Address cpp_entry) : cpp_entry(cpp_entry) {}
    constexpr KindSpecificData(int parameter_count,
                               int /* To disambiguate from above */)
        : parameter_count(static_cast<int16_t>(parameter_count)) {}
    constexpr KindSpecificData(interpreter::Bytecode bytecode,
                               interpreter::OperandScale scale)
        : bytecode_and_scale{bytecode, scale} {}
    Address cpp_entry;  // For CPP builtins.
    int16_t parameter_count;  // For TFJ builtins.
    BytecodeAndScale bytecode_and_scale;  // For BCH builtins.
  } data;
};

// Expand BUILTIN_LIST into one BuiltinMetadata entry per builtin, indexed by
// builtin id. Each DECL_* macro picks the KindSpecificData constructor
// matching its builtin kind; kinds without extra data pass {}.
#define DECL_CPP(Name, ...) \
  {#Name, Builtins::CPP, {FUNCTION_ADDR(Builtin_##Name)}},
#define DECL_TFJ(Name, Count, ...) {#Name, Builtins::TFJ, {Count, 0}},
#define DECL_TFC(Name, ...) {#Name, Builtins::TFC, {}},
#define DECL_TFS(Name, ...) {#Name, Builtins::TFS, {}},
#define DECL_TFH(Name, ...) {#Name, Builtins::TFH, {}},
#define DECL_BCH(Name, OperandScale, Bytecode) \
  {#Name, Builtins::BCH, {Bytecode, OperandScale}},
#define DECL_ASM(Name, ...) {#Name, Builtins::ASM, {}},
const BuiltinMetadata builtin_metadata[] = {BUILTIN_LIST(
    DECL_CPP, DECL_TFJ, DECL_TFC, DECL_TFS, DECL_TFH, DECL_BCH, DECL_ASM)};
#undef DECL_CPP
#undef DECL_TFJ
#undef DECL_TFC
#undef DECL_TFS
#undef DECL_TFH
#undef DECL_BCH
#undef DECL_ASM

}  // namespace
90
GetContinuationBailoutId(Name name)91 BailoutId Builtins::GetContinuationBailoutId(Name name) {
92 DCHECK(Builtins::KindOf(name) == TFJ || Builtins::KindOf(name) == TFC ||
93 Builtins::KindOf(name) == TFS);
94 return BailoutId(BailoutId::kFirstBuiltinContinuationId + name);
95 }
96
GetBuiltinFromBailoutId(BailoutId id)97 Builtins::Name Builtins::GetBuiltinFromBailoutId(BailoutId id) {
98 int builtin_index = id.ToInt() - BailoutId::kFirstBuiltinContinuationId;
99 DCHECK(Builtins::KindOf(builtin_index) == TFJ ||
100 Builtins::KindOf(builtin_index) == TFC ||
101 Builtins::KindOf(builtin_index) == TFS);
102 return static_cast<Name>(builtin_index);
103 }
104
TearDown()105 void Builtins::TearDown() { initialized_ = false; }
106
Lookup(Address pc)107 const char* Builtins::Lookup(Address pc) {
108 // Off-heap pc's can be looked up through binary search.
109 Code maybe_builtin = InstructionStream::TryLookupCode(isolate_, pc);
110 if (!maybe_builtin.is_null()) return name(maybe_builtin.builtin_index());
111
112 // May be called during initialization (disassembler).
113 if (initialized_) {
114 for (int i = 0; i < builtin_count; i++) {
115 if (isolate_->heap()->builtin(i).contains(pc)) return name(i);
116 }
117 }
118 return nullptr;
119 }
120
NonPrimitiveToPrimitive(ToPrimitiveHint hint)121 Handle<Code> Builtins::NonPrimitiveToPrimitive(ToPrimitiveHint hint) {
122 switch (hint) {
123 case ToPrimitiveHint::kDefault:
124 return builtin_handle(kNonPrimitiveToPrimitive_Default);
125 case ToPrimitiveHint::kNumber:
126 return builtin_handle(kNonPrimitiveToPrimitive_Number);
127 case ToPrimitiveHint::kString:
128 return builtin_handle(kNonPrimitiveToPrimitive_String);
129 }
130 UNREACHABLE();
131 }
132
OrdinaryToPrimitive(OrdinaryToPrimitiveHint hint)133 Handle<Code> Builtins::OrdinaryToPrimitive(OrdinaryToPrimitiveHint hint) {
134 switch (hint) {
135 case OrdinaryToPrimitiveHint::kNumber:
136 return builtin_handle(kOrdinaryToPrimitive_Number);
137 case OrdinaryToPrimitiveHint::kString:
138 return builtin_handle(kOrdinaryToPrimitive_String);
139 }
140 UNREACHABLE();
141 }
142
set_builtin(int index,Code builtin)143 void Builtins::set_builtin(int index, Code builtin) {
144 isolate_->heap()->set_builtin(index, builtin);
145 }
146
builtin(int index)147 Code Builtins::builtin(int index) { return isolate_->heap()->builtin(index); }
148
builtin_handle(int index)149 Handle<Code> Builtins::builtin_handle(int index) {
150 DCHECK(IsBuiltinId(index));
151 return Handle<Code>(
152 reinterpret_cast<Address*>(isolate_->heap()->builtin_address(index)));
153 }
154
155 // static
GetStackParameterCount(Name name)156 int Builtins::GetStackParameterCount(Name name) {
157 DCHECK(Builtins::KindOf(name) == TFJ);
158 return builtin_metadata[name].data.parameter_count;
159 }
160
// static
// Returns the call interface descriptor for the given builtin. TFC/TFS/TFH
// and ASM builtins have a statically registered descriptor; TFJ and CPP
// builtins share the generic JS trampoline descriptor. Bytecode handlers
// (BCH) are not supported here.
CallInterfaceDescriptor Builtins::CallInterfaceDescriptorFor(Name name) {
  CallDescriptors::Key key;
  switch (name) {
    // This macro is deliberately crafted so as to emit very little code,
    // in order to keep binary size of this function under control.
#define CASE_OTHER(Name, ...) \
  case k##Name: { \
    key = Builtin_##Name##_InterfaceDescriptor::key(); \
    break; \
  }
    // Expand cases only for the TFC, TFS, TFH and ASM positions of
    // BUILTIN_LIST; CPP, TFJ and BCH fall through to `default` below.
    BUILTIN_LIST(IGNORE_BUILTIN, IGNORE_BUILTIN, CASE_OTHER, CASE_OTHER,
                 CASE_OTHER, IGNORE_BUILTIN, CASE_OTHER)
#undef CASE_OTHER
    default:
      Builtins::Kind kind = Builtins::KindOf(name);
      DCHECK_NE(BCH, kind);
      if (kind == TFJ || kind == CPP) {
        return JSTrampolineDescriptor{};
      }
      UNREACHABLE();
  }
  return CallInterfaceDescriptor{key};
}
185
186 // static
CallableFor(Isolate * isolate,Name name)187 Callable Builtins::CallableFor(Isolate* isolate, Name name) {
188 Handle<Code> code = isolate->builtins()->builtin_handle(name);
189 return Callable{code, CallInterfaceDescriptorFor(name)};
190 }
191
192 // static
HasJSLinkage(int builtin_index)193 bool Builtins::HasJSLinkage(int builtin_index) {
194 Name name = static_cast<Name>(builtin_index);
195 DCHECK_NE(BCH, Builtins::KindOf(name));
196 return CallInterfaceDescriptorFor(name) == JSTrampolineDescriptor{};
197 }
198
199 // static
name(int index)200 const char* Builtins::name(int index) {
201 DCHECK(IsBuiltinId(index));
202 return builtin_metadata[index].name;
203 }
204
// Disassembles every builtin whose name passes FLAG_print_builtin_code_filter
// to the isolate's code tracer. Does nothing unless the build includes the
// disassembler.
void Builtins::PrintBuiltinCode() {
  DCHECK(FLAG_print_builtin_code);
#ifdef ENABLE_DISASSEMBLER
  for (int i = 0; i < builtin_count; i++) {
    const char* builtin_name = name(i);
    Handle<Code> code = builtin_handle(i);
    if (PassesFilter(CStrVector(builtin_name),
                     CStrVector(FLAG_print_builtin_code_filter))) {
      CodeTracer::Scope trace_scope(isolate_->GetCodeTracer());
      OFStream os(trace_scope.file());
      code->Disassemble(builtin_name, os, isolate_);
      os << "\n";
    }
  }
#endif
}
221
PrintBuiltinSize()222 void Builtins::PrintBuiltinSize() {
223 DCHECK(FLAG_print_builtin_size);
224 for (int i = 0; i < builtin_count; i++) {
225 const char* builtin_name = name(i);
226 const char* kind = KindNameOf(i);
227 Code code = builtin(i);
228 PrintF(stdout, "%s Builtin, %s, %d\n", kind, builtin_name,
229 code.InstructionSize());
230 }
231 }
232
233 // static
CppEntryOf(int index)234 Address Builtins::CppEntryOf(int index) {
235 DCHECK(Builtins::IsCpp(index));
236 return builtin_metadata[index].data.cpp_entry;
237 }
238
239 // static
IsBuiltin(const Code code)240 bool Builtins::IsBuiltin(const Code code) {
241 return Builtins::IsBuiltinId(code.builtin_index());
242 }
243
IsBuiltinHandle(Handle<HeapObject> maybe_code,int * index) const244 bool Builtins::IsBuiltinHandle(Handle<HeapObject> maybe_code,
245 int* index) const {
246 Heap* heap = isolate_->heap();
247 Address handle_location = maybe_code.address();
248 Address start = heap->builtin_address(0);
249 Address end = heap->builtin_address(Builtins::builtin_count);
250 if (handle_location >= end) return false;
251 if (handle_location < start) return false;
252 *index = static_cast<int>(handle_location - start) >> kSystemPointerSizeLog2;
253 DCHECK(Builtins::IsBuiltinId(*index));
254 return true;
255 }
256
257 // static
IsIsolateIndependentBuiltin(const Code code)258 bool Builtins::IsIsolateIndependentBuiltin(const Code code) {
259 const int builtin_index = code.builtin_index();
260 return Builtins::IsBuiltinId(builtin_index) &&
261 Builtins::IsIsolateIndependent(builtin_index);
262 }
263
264 // static
InitializeBuiltinEntryTable(Isolate * isolate)265 void Builtins::InitializeBuiltinEntryTable(Isolate* isolate) {
266 EmbeddedData d = EmbeddedData::FromBlob();
267 Address* builtin_entry_table = isolate->builtin_entry_table();
268 for (int i = 0; i < builtin_count; i++) {
269 // TODO(jgruber,chromium:1020986): Remove the CHECK once the linked issue is
270 // resolved.
271 CHECK(Builtins::IsBuiltinId(isolate->heap()->builtin(i).builtin_index()));
272 DCHECK(isolate->heap()->builtin(i).is_off_heap_trampoline());
273 builtin_entry_table[i] = d.InstructionStartOfBuiltin(i);
274 }
275 }
276
// static
// Emits code-creation events for every builtin so that attached loggers and
// profilers learn about the builtin code objects.
void Builtins::EmitCodeCreateEvents(Isolate* isolate) {
  if (!isolate->logger()->is_listening_to_code_events() &&
      !isolate->is_profiling()) {
    return;  // No need to iterate the entire table in this case.
  }

  Address* builtins = isolate->builtins_table();
  int i = 0;
  HandleScope scope(isolate);
  // Builtins before the first bytecode handler are logged under their plain
  // builtin name with BUILTIN_TAG.
  for (; i < kFirstBytecodeHandler; i++) {
    Handle<AbstractCode> code(AbstractCode::cast(Object(builtins[i])), isolate);
    PROFILE(isolate, CodeCreateEvent(CodeEventListener::BUILTIN_TAG, code,
                                     Builtins::name(i)));
  }

  // Bytecode handlers occupy the tail of the table; log them under the name
  // of the bytecode (and operand scale) they implement.
  STATIC_ASSERT(kLastBytecodeHandlerPlusOne == builtin_count);
  for (; i < builtin_count; i++) {
    Handle<AbstractCode> code(AbstractCode::cast(Object(builtins[i])), isolate);
    interpreter::Bytecode bytecode =
        builtin_metadata[i].data.bytecode_and_scale.bytecode;
    interpreter::OperandScale scale =
        builtin_metadata[i].data.bytecode_and_scale.scale;
    PROFILE(isolate,
            CodeCreateEvent(
                CodeEventListener::BYTECODE_HANDLER_TAG, code,
                interpreter::Bytecodes::ToString(bytecode, scale).c_str()));
  }
}
306
namespace {
// A trampoline either jumps into the embedded instruction stream (kJump) or
// traps (kAbort).
enum TrampolineType { kAbort, kJump };

// Assembles a tiny trampoline stub into a stack-allocated buffer; used by the
// off-heap trampoline factory functions below.
class OffHeapTrampolineGenerator {
 public:
  explicit OffHeapTrampolineGenerator(Isolate* isolate)
      : isolate_(isolate),
        masm_(isolate, CodeObjectRequired::kYes,
              ExternalAssemblerBuffer(buffer_, kBufferSize)) {}

  // Assembles a stub of the given |type| and returns its code descriptor.
  // For kJump, |off_heap_entry| is the embedded-blob target; it is ignored
  // for kAbort.
  CodeDesc Generate(Address off_heap_entry, TrampolineType type) {
    // Generate replacement code that simply tail-calls the off-heap code.
    DCHECK(!masm_.has_frame());
    {
      FrameScope scope(&masm_, StackFrame::NONE);
      if (type == TrampolineType::kJump) {
        masm_.CodeEntry();
        masm_.JumpToInstructionStream(off_heap_entry);
      } else {
        masm_.Trap();
      }
    }

    CodeDesc desc;
    masm_.GetCode(isolate_, &desc);
    return desc;
  }

  // The assembler's self-reference placeholder, for CodeBuilder.
  Handle<HeapObject> CodeObject() { return masm_.CodeObject(); }

 private:
  Isolate* isolate_;
  // Enough to fit the single jmp.
  static constexpr int kBufferSize = 256;
  byte buffer_[kBufferSize];
  MacroAssembler masm_;
};

// Out-of-class definition; required for odr-use prior to C++17 inline
// variables.
constexpr int OffHeapTrampolineGenerator::kBufferSize;

}  // namespace
348
349 // static
GenerateOffHeapTrampolineFor(Isolate * isolate,Address off_heap_entry,int32_t kind_specfic_flags,bool generate_jump_to_instruction_stream)350 Handle<Code> Builtins::GenerateOffHeapTrampolineFor(
351 Isolate* isolate, Address off_heap_entry, int32_t kind_specfic_flags,
352 bool generate_jump_to_instruction_stream) {
353 DCHECK_NOT_NULL(isolate->embedded_blob());
354 DCHECK_NE(0, isolate->embedded_blob_size());
355
356 OffHeapTrampolineGenerator generator(isolate);
357
358 CodeDesc desc =
359 generator.Generate(off_heap_entry, generate_jump_to_instruction_stream
360 ? TrampolineType::kJump
361 : TrampolineType::kAbort);
362
363 return Factory::CodeBuilder(isolate, desc, Code::BUILTIN)
364 .set_read_only_data_container(kind_specfic_flags)
365 .set_self_reference(generator.CodeObject())
366 .set_is_executable(generate_jump_to_instruction_stream)
367 .Build();
368 }
369
370 // static
GenerateOffHeapTrampolineRelocInfo(Isolate * isolate)371 Handle<ByteArray> Builtins::GenerateOffHeapTrampolineRelocInfo(
372 Isolate* isolate) {
373 OffHeapTrampolineGenerator generator(isolate);
374 // Generate a jump to a dummy address as we're not actually interested in the
375 // generated instruction stream.
376 CodeDesc desc = generator.Generate(kNullAddress, TrampolineType::kJump);
377
378 Handle<ByteArray> reloc_info = isolate->factory()->NewByteArray(
379 desc.reloc_size, AllocationType::kReadOnly);
380 Code::CopyRelocInfoToByteArray(*reloc_info, desc);
381
382 return reloc_info;
383 }
384
385 // static
KindOf(int index)386 Builtins::Kind Builtins::KindOf(int index) {
387 DCHECK(IsBuiltinId(index));
388 return builtin_metadata[index].kind;
389 }
390
391 // static
KindNameOf(int index)392 const char* Builtins::KindNameOf(int index) {
393 Kind kind = Builtins::KindOf(index);
394 // clang-format off
395 switch (kind) {
396 case CPP: return "CPP";
397 case TFJ: return "TFJ";
398 case TFC: return "TFC";
399 case TFS: return "TFS";
400 case TFH: return "TFH";
401 case BCH: return "BCH";
402 case ASM: return "ASM";
403 }
404 // clang-format on
405 UNREACHABLE();
406 }
407
408 // static
IsCpp(int index)409 bool Builtins::IsCpp(int index) { return Builtins::KindOf(index) == CPP; }
410
411 // static
AllowDynamicFunction(Isolate * isolate,Handle<JSFunction> target,Handle<JSObject> target_global_proxy)412 bool Builtins::AllowDynamicFunction(Isolate* isolate, Handle<JSFunction> target,
413 Handle<JSObject> target_global_proxy) {
414 if (FLAG_allow_unsafe_function_constructor) return true;
415 HandleScopeImplementer* impl = isolate->handle_scope_implementer();
416 Handle<Context> responsible_context = impl->LastEnteredOrMicrotaskContext();
417 // TODO(jochen): Remove this.
418 if (responsible_context.is_null()) {
419 return true;
420 }
421 if (*responsible_context == target->context()) return true;
422 return isolate->MayAccess(responsible_context, target_global_proxy);
423 }
424
// static
// Returns true if this builtin requires its own executable on-heap Code
// object (see the rationale in the comments below); builtins returning false
// can have their Code objects placed in read-only space.
bool Builtins::CodeObjectIsExecutable(int builtin_index) {
  // If the runtime/optimized code always knows when executing a given builtin
  // that it is a builtin, then that builtin does not need an executable Code
  // object. Such Code objects can go in read_only_space (and can even be
  // smaller with no branch instruction), thus saving memory.

  // Builtins with JS linkage will always have executable Code objects since
  // they can be called directly from jitted code with no way of determining
  // that they are builtins at generation time. E.g.
  //   f = Array.of;
  //   f(1, 2, 3);
  // TODO(delphick): This is probably too loose but for now Wasm can call any JS
  // linkage builtin via its Code object. Once Wasm is fixed this can either be
  // tighted or removed completely.
  if (Builtins::KindOf(builtin_index) != BCH && HasJSLinkage(builtin_index)) {
    return true;
  }

  // There are some other non-TF builtins that also have JS linkage like
  // InterpreterEntryTrampoline which are explicitly allow-listed below.
  // TODO(delphick): Some of these builtins do not fit with the above, but
  // currently cause problems if they're not executable. This list should be
  // pared down as much as possible.
  switch (builtin_index) {
    case Builtins::kInterpreterEntryTrampoline:
    case Builtins::kCompileLazy:
    case Builtins::kCompileLazyDeoptimizedCode:
    case Builtins::kCallFunction_ReceiverIsNullOrUndefined:
    case Builtins::kCallFunction_ReceiverIsNotNullOrUndefined:
    case Builtins::kCallFunction_ReceiverIsAny:
    case Builtins::kCallBoundFunction:
    case Builtins::kCall_ReceiverIsNullOrUndefined:
    case Builtins::kCall_ReceiverIsNotNullOrUndefined:
    case Builtins::kCall_ReceiverIsAny:
    case Builtins::kArgumentsAdaptorTrampoline:
    case Builtins::kHandleApiCall:
    case Builtins::kInstantiateAsmJs:

    // TODO(delphick): Remove this when calls to it have the trampoline inlined
    // or are converted to use kCallBuiltinPointer.
    case Builtins::kCEntry_Return1_DontSaveFPRegs_ArgvOnStack_NoBuiltinExit:
      return true;
    default:
#if V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
      // TODO(Loongson): Move non-JS linkage builtins code objects into RO_SPACE
      // caused MIPS platform to crash, and we need some time to handle it. Now
      // disable this change temporarily on MIPS platform.
      return true;
#else
      return false;
#endif  // V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64
  }
}
479
// Maps a Torque function-pointer type id to one concrete builtin of that
// signature, per TORQUE_FUNCTION_POINTER_TYPE_TO_BUILTIN_MAP. Unknown ids
// are fatal.
Builtins::Name ExampleBuiltinForTorqueFunctionPointerType(
    size_t function_pointer_type_id) {
  switch (function_pointer_type_id) {
#define FUNCTION_POINTER_ID_CASE(id, name) \
  case id: \
    return Builtins::k##name;
    TORQUE_FUNCTION_POINTER_TYPE_TO_BUILTIN_MAP(FUNCTION_POINTER_ID_CASE)
#undef FUNCTION_POINTER_ID_CASE
    default:
      UNREACHABLE();
  }
}
492
493 } // namespace internal
494 } // namespace v8
495