1 // Copyright 2017 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_
6 #define V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_
7 
8 #include "src/base/macros.h"
9 #include "src/base/platform/mutex.h"
10 #include "src/common/globals.h"
11 #include "src/handles/handles-inl.h"
12 #include "src/heap/heap-write-barrier-inl.h"
13 #include "src/objects/debug-objects-inl.h"
14 #include "src/objects/feedback-vector-inl.h"
15 #include "src/objects/scope-info-inl.h"
16 #include "src/objects/script-inl.h"
17 #include "src/objects/shared-function-info.h"
18 #include "src/objects/templates-inl.h"
19 
20 #if V8_ENABLE_WEBASSEMBLY
21 #include "src/wasm/wasm-module.h"
22 #include "src/wasm/wasm-objects.h"
23 #endif  // V8_ENABLE_WEBASSEMBLY
24 
25 // Has to be the last include (doesn't have include guards):
26 #include "src/objects/object-macros.h"
27 
28 namespace v8 {
29 namespace internal {
30 
31 #include "torque-generated/src/objects/shared-function-info-tq-inl.inc"
32 
TQ_OBJECT_CONSTRUCTORS_IMPL(PreparseData)

// Byte offset at which the inner (tagged child pointer) section starts; it
// follows the raw byte-data section (see clear_padding for the gap between
// the two).
int PreparseData::inner_start_offset() const {
  return InnerOffset(data_length());
}

// Slot of the first child PreparseData in the inner section.
ObjectSlot PreparseData::inner_data_start() const {
  return RawField(inner_start_offset());
}
42 
clear_padding()43 void PreparseData::clear_padding() {
44   int data_end_offset = kDataStartOffset + data_length();
45   int padding_size = inner_start_offset() - data_end_offset;
46   DCHECK_LE(0, padding_size);
47   if (padding_size == 0) return;
48   memset(reinterpret_cast<void*>(address() + data_end_offset), 0, padding_size);
49 }
50 
// Reads the byte at |index| from the raw scope-data section.
byte PreparseData::get(int index) const {
  DCHECK_LE(0, index);
  DCHECK_LT(index, data_length());
  int offset = kDataStartOffset + index * kByteSize;
  return ReadField<byte>(offset);
}

// Writes |value| at |index| in the raw scope-data section.
void PreparseData::set(int index, byte value) {
  DCHECK_LE(0, index);
  DCHECK_LT(index, data_length());
  int offset = kDataStartOffset + index * kByteSize;
  WriteField<byte>(offset, value);
}

// Bulk-copies |length| bytes from |buffer| into the raw scope-data section
// starting at |index|. Raw bytes carry no pointers, so no write barrier is
// required.
void PreparseData::copy_in(int index, const byte* buffer, int length) {
  // Also guards against integer overflow of index + length.
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->data_length());
  Address dst_addr = field_address(kDataStartOffset + index * kByteSize);
  memcpy(reinterpret_cast<void*>(dst_addr), buffer, length);
}
71 
// Typed accessor for the |index|-th child PreparseData.
PreparseData PreparseData::get_child(int index) const {
  return PreparseData::cast(get_child_raw(index));
}

// Uncast child accessor; uses a relaxed atomic load of the tagged slot.
Object PreparseData::get_child_raw(int index) const {
  DCHECK_LE(0, index);
  DCHECK_LT(index, this->children_length());
  int offset = inner_start_offset() + index * kTaggedSize;
  return RELAXED_READ_FIELD(*this, offset);
}

// Stores a child pointer with a relaxed atomic store plus a conditional
// write barrier (skipped/selected per |mode|).
void PreparseData::set_child(int index, PreparseData value,
                             WriteBarrierMode mode) {
  DCHECK_LE(0, index);
  DCHECK_LT(index, this->children_length());
  int offset = inner_start_offset() + index * kTaggedSize;
  RELAXED_WRITE_FIELD(*this, offset, value);
  CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
}
91 
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledData)
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithoutPreparseData)
TQ_OBJECT_CONSTRUCTORS_IMPL(UncompiledDataWithPreparseData)

TQ_OBJECT_CONSTRUCTORS_IMPL(InterpreterData)

// The trampoline is stored in CodeT form; the typed getter/setter below
// convert to/from Code on every access.
ACCESSORS(InterpreterData, raw_interpreter_trampoline, CodeT,
          kInterpreterTrampolineOffset)

DEF_GETTER(InterpreterData, interpreter_trampoline, Code) {
  return FromCodeT(raw_interpreter_trampoline(cage_base));
}

void InterpreterData::set_interpreter_trampoline(Code code,
                                                 WriteBarrierMode mode) {
  set_raw_interpreter_trampoline(ToCodeT(code), mode);
}
109 
TQ_OBJECT_CONSTRUCTORS_IMPL(SharedFunctionInfo)
NEVER_READ_ONLY_SPACE_IMPL(SharedFunctionInfo)
DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)

// These overloaded fields use release/acquire semantics so readers on other
// threads always observe a fully initialized value.
RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, function_data, Object,
                          kFunctionDataOffset)
RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, name_or_scope_info, Object,
                          kNameOrScopeInfoOffset)
RELEASE_ACQUIRE_ACCESSORS(SharedFunctionInfo, script_or_debug_info, HeapObject,
                          kScriptOrDebugInfoOffset)

RENAME_TORQUE_ACCESSORS(SharedFunctionInfo,
                        raw_outer_scope_info_or_feedback_metadata,
                        outer_scope_info_or_feedback_metadata, HeapObject)
// Acquire-load variant of the raw accessor renamed above.
DEF_ACQUIRE_GETTER(SharedFunctionInfo,
                   raw_outer_scope_info_or_feedback_metadata, HeapObject) {
  HeapObject value =
      TaggedField<HeapObject, kOuterScopeInfoOrFeedbackMetadataOffset>::
          Acquire_Load(cage_base, *this);
  return value;
}
131 
internal_formal_parameter_count_with_receiver()132 uint16_t SharedFunctionInfo::internal_formal_parameter_count_with_receiver()
133     const {
134   const uint16_t param_count = TorqueGeneratedClass::formal_parameter_count();
135   if (param_count == kDontAdaptArgumentsSentinel) return param_count;
136   return param_count + (kJSArgcIncludesReceiver ? 0 : 1);
137 }
138 
internal_formal_parameter_count_without_receiver()139 uint16_t SharedFunctionInfo::internal_formal_parameter_count_without_receiver()
140     const {
141   const uint16_t param_count = TorqueGeneratedClass::formal_parameter_count();
142   if (param_count == kDontAdaptArgumentsSentinel) return param_count;
143   return param_count - kJSArgcReceiverSlots;
144 }
145 
// Stores the internal formal parameter count; |value| must fit in 16 bits
// and already account for the receiver slot(s).
void SharedFunctionInfo::set_internal_formal_parameter_count(int value) {
  DCHECK_EQ(value, static_cast<uint16_t>(value));
  DCHECK_GE(value, kJSArgcReceiverSlots);
  TorqueGeneratedClass::set_formal_parameter_count(value);
}

RENAME_UINT16_TORQUE_ACCESSORS(SharedFunctionInfo, raw_function_token_offset,
                               function_token_offset)

// The flags word is accessed with relaxed atomics; see relaxed_flags below
// and ShouldFlushCode (callable from the concurrent marker).
RELAXED_INT32_ACCESSORS(SharedFunctionInfo, flags, kFlagsOffset)
int32_t SharedFunctionInfo::relaxed_flags() const {
  return flags(kRelaxedLoad);
}
void SharedFunctionInfo::set_relaxed_flags(int32_t flags) {
  return set_flags(flags, kRelaxedStore);
}

UINT8_ACCESSORS(SharedFunctionInfo, flags2, kFlags2Offset)
164 
165 bool SharedFunctionInfo::HasSharedName() const {
166   Object value = name_or_scope_info(kAcquireLoad);
167   if (value.IsScopeInfo()) {
168     return ScopeInfo::cast(value).HasSharedFunctionName();
169   }
170   return value != kNoSharedNameSentinel;
171 }
172 
Name()173 String SharedFunctionInfo::Name() const {
174   if (!HasSharedName()) return GetReadOnlyRoots().empty_string();
175   Object value = name_or_scope_info(kAcquireLoad);
176   if (value.IsScopeInfo()) {
177     if (ScopeInfo::cast(value).HasFunctionName()) {
178       return String::cast(ScopeInfo::cast(value).FunctionName());
179     }
180     return GetReadOnlyRoots().empty_string();
181   }
182   return String::cast(value);
183 }
184 
// Sets the function's name, storing it on the ScopeInfo when one is present,
// otherwise directly in the name_or_scope_info slot.
void SharedFunctionInfo::SetName(String name) {
  Object maybe_scope_info = name_or_scope_info(kAcquireLoad);
  if (maybe_scope_info.IsScopeInfo()) {
    ScopeInfo::cast(maybe_scope_info).SetFunctionName(name);
  } else {
    DCHECK(maybe_scope_info.IsString() ||
           maybe_scope_info == kNoSharedNameSentinel);
    set_name_or_scope_info(name, kReleaseStore);
  }
  // Having a shared name feeds into the function map selection.
  UpdateFunctionMapIndex();
}

// True for script-scope functions whose Script was compiled by the host
// (Script::COMPILATION_TYPE_HOST).
bool SharedFunctionInfo::is_script() const {
  return scope_info(kAcquireLoad).is_script_scope() &&
         Script::cast(script()).compilation_type() ==
             Script::COMPILATION_TYPE_HOST;
}

// A script-level function needs a script context iff it declares context
// locals.
bool SharedFunctionInfo::needs_script_context() const {
  return is_script() && scope_info(kAcquireLoad).ContextLocalCount() > 0;
}
206 
// Returns the bytecode array when present, otherwise the code object
// wrapped as AbstractCode.
template <typename IsolateT>
AbstractCode SharedFunctionInfo::abstract_code(IsolateT* isolate) {
  // TODO(v8:11429): Decide if this return bytecode or baseline code, when the
  // latter is present.
  if (HasBytecodeArray()) {
    return AbstractCode::cast(GetBytecodeArray(isolate));
  } else {
    return AbstractCode::cast(GetCode());
  }
}
217 
function_token_position()218 int SharedFunctionInfo::function_token_position() const {
219   int offset = raw_function_token_offset();
220   if (offset == kFunctionTokenOutOfRange) {
221     return kNoSourcePosition;
222   } else {
223     return StartPosition() - offset;
224   }
225 }
226 
227 template <typename IsolateT>
AreSourcePositionsAvailable(IsolateT * isolate)228 bool SharedFunctionInfo::AreSourcePositionsAvailable(IsolateT* isolate) const {
229   if (FLAG_enable_lazy_source_positions) {
230     return !HasBytecodeArray() ||
231            GetBytecodeArray(isolate).HasSourcePositionTable();
232   }
233   return true;
234 }
235 
// Determines whether the optimizing compiler may inline this function,
// returning the first disqualifying reason found (kIsInlineable if none).
template <typename IsolateT>
SharedFunctionInfo::Inlineability SharedFunctionInfo::GetInlineability(
    IsolateT* isolate, bool is_turboprop) const {
  if (!script().IsScript()) return kHasNoScript;

  if (GetIsolate()->is_precise_binary_code_coverage() &&
      !has_reported_binary_coverage()) {
    // We may miss invocations if this function is inlined.
    return kNeedsBinaryCoverage;
  }

  // Built-in functions are handled by the JSCallReducer.
  if (HasBuiltinId()) return kIsBuiltin;

  if (!IsUserJavaScript()) return kIsNotUserCode;

  // If there is no bytecode array, it is either not compiled or it is compiled
  // with WebAssembly for the asm.js pipeline. In either case we don't want to
  // inline.
  if (!HasBytecodeArray()) return kHasNoBytecode;

  // Turboprop uses a tighter size limit, scaled down from the regular one.
  int max_inlined_size = FLAG_max_inlined_bytecode_size;
  if (is_turboprop) {
    max_inlined_size = max_inlined_size / FLAG_turboprop_inline_scaling_factor;
  }
  if (GetBytecodeArray(isolate).length() > max_inlined_size) {
    return kExceedsBytecodeLimit;
  }

  if (HasBreakInfo()) return kMayContainBreakPoints;

  if (optimization_disabled()) return kHasOptimizationDisabled;

  return kIsInlineable;
}
271 
// Boolean accessors backed by bit fields in flags2 (plain uint8 access) and
// flags (relaxed atomic access via relaxed_flags/set_relaxed_flags).
BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags2, class_scope_has_private_brand,
                    SharedFunctionInfo::ClassScopeHasPrivateBrandBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, flags2,
                    has_static_private_methods_or_accessors,
                    SharedFunctionInfo::HasStaticPrivateMethodsOrAccessorsBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, syntax_kind,
                    SharedFunctionInfo::FunctionSyntaxKindBits)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, allows_lazy_compilation,
                    SharedFunctionInfo::AllowLazyCompilationBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, has_duplicate_parameters,
                    SharedFunctionInfo::HasDuplicateParametersBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, native,
                    SharedFunctionInfo::IsNativeBit)
#if V8_ENABLE_WEBASSEMBLY
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, is_asm_wasm_broken,
                    SharedFunctionInfo::IsAsmWasmBrokenBit)
#endif  // V8_ENABLE_WEBASSEMBLY
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    requires_instance_members_initializer,
                    SharedFunctionInfo::RequiresInstanceMembersInitializerBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    name_should_print_as_anonymous,
                    SharedFunctionInfo::NameShouldPrintAsAnonymousBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    has_reported_binary_coverage,
                    SharedFunctionInfo::HasReportedBinaryCoverageBit)

BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, is_toplevel,
                    SharedFunctionInfo::IsTopLevelBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags, properties_are_final,
                    SharedFunctionInfo::PropertiesAreFinalBit)
BIT_FIELD_ACCESSORS(SharedFunctionInfo, relaxed_flags,
                    private_name_lookup_skips_outer_class,
                    SharedFunctionInfo::PrivateNameLookupSkipsOuterClassBit)
311 
// Optimization is disabled iff a bailout reason has been recorded.
bool SharedFunctionInfo::optimization_disabled() const {
  return disable_optimization_reason() != BailoutReason::kNoReason;
}

BailoutReason SharedFunctionInfo::disable_optimization_reason() const {
  return DisabledOptimizationReasonBits::decode(flags(kRelaxedLoad));
}

// Language mode is encoded as a single strict/sloppy bit in flags.
LanguageMode SharedFunctionInfo::language_mode() const {
  STATIC_ASSERT(LanguageModeSize == 2);
  return construct_language_mode(IsStrictBit::decode(flags(kRelaxedLoad)));
}

void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  STATIC_ASSERT(LanguageModeSize == 2);
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
  int hints = flags(kRelaxedLoad);
  hints = IsStrictBit::update(hints, is_strict(language_mode));
  set_flags(hints, kRelaxedStore);
  // Language mode feeds into the function map selection.
  UpdateFunctionMapIndex();
}
335 
FunctionKind SharedFunctionInfo::kind() const {
  STATIC_ASSERT(FunctionKindBits::kSize == kFunctionKindBitSize);
  return FunctionKindBits::decode(flags(kRelaxedLoad));
}

// Updates the function kind plus the derived IsClassConstructor bit, then
// refreshes the function map index (which depends on the kind).
void SharedFunctionInfo::set_kind(FunctionKind kind) {
  int hints = flags(kRelaxedLoad);
  hints = FunctionKindBits::update(hints, kind);
  hints = IsClassConstructorBit::update(hints, IsClassConstructor(kind));
  set_flags(hints, kRelaxedStore);
  UpdateFunctionMapIndex();
}

bool SharedFunctionInfo::is_wrapped() const {
  return syntax_kind() == FunctionSyntaxKind::kWrapped;
}

bool SharedFunctionInfo::construct_as_builtin() const {
  return ConstructAsBuiltinBit::decode(flags(kRelaxedLoad));
}
356 
CalculateConstructAsBuiltin()357 void SharedFunctionInfo::CalculateConstructAsBuiltin() {
358   bool uses_builtins_construct_stub = false;
359   if (HasBuiltinId()) {
360     Builtin id = builtin_id();
361     if (id != Builtin::kCompileLazy && id != Builtin::kEmptyFunction) {
362       uses_builtins_construct_stub = true;
363     }
364   } else if (IsApiFunction()) {
365     uses_builtins_construct_stub = true;
366   }
367 
368   int f = flags(kRelaxedLoad);
369   f = ConstructAsBuiltinBit::update(f, uses_builtins_construct_stub);
370   set_flags(f, kRelaxedStore);
371 }
372 
// Context index of the function map to use for JSFunctions created from this
// SFI; stored relative to FIRST_FUNCTION_MAP_INDEX in the flags bit field.
int SharedFunctionInfo::function_map_index() const {
  // Note: Must be kept in sync with the FastNewClosure builtin.
  int index = Context::FIRST_FUNCTION_MAP_INDEX +
              FunctionMapIndexBits::decode(flags(kRelaxedLoad));
  DCHECK_LE(index, Context::LAST_FUNCTION_MAP_INDEX);
  return index;
}

void SharedFunctionInfo::set_function_map_index(int index) {
  // The whole valid range must be representable in the bit field.
  STATIC_ASSERT(Context::LAST_FUNCTION_MAP_INDEX <=
                Context::FIRST_FUNCTION_MAP_INDEX + FunctionMapIndexBits::kMax);
  DCHECK_LE(Context::FIRST_FUNCTION_MAP_INDEX, index);
  DCHECK_LE(index, Context::LAST_FUNCTION_MAP_INDEX);
  index -= Context::FIRST_FUNCTION_MAP_INDEX;
  set_flags(FunctionMapIndexBits::update(flags(kRelaxedLoad), index),
            kRelaxedStore);
}

// Zeroes the trailing alignment padding of the object.
void SharedFunctionInfo::clear_padding() {
  memset(reinterpret_cast<void*>(this->address() + kSize), 0,
         kAlignedSize - kSize);
}

// Recomputes the function map index from language mode, kind and shared-name
// presence; must be called whenever any of those change.
void SharedFunctionInfo::UpdateFunctionMapIndex() {
  int map_index =
      Context::FunctionMapIndex(language_mode(), kind(), HasSharedName());
  set_function_map_index(map_index);
}
401 
// Marks this function as not needing arguments adaption by storing the
// sentinel in the formal parameter count field.
void SharedFunctionInfo::DontAdaptArguments() {
#if V8_ENABLE_WEBASSEMBLY
  // TODO(leszeks): Revise this DCHECK now that the code field is gone.
  DCHECK(!HasWasmExportedFunctionData());
#endif  // V8_ENABLE_WEBASSEMBLY
  TorqueGeneratedClass::set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}

bool SharedFunctionInfo::IsDontAdaptArguments() const {
  return TorqueGeneratedClass::formal_parameter_count() ==
         kDontAdaptArgumentsSentinel;
}

// Interpreted is synonymous with having a bytecode array.
bool SharedFunctionInfo::IsInterpreted() const { return HasBytecodeArray(); }
416 
scope_info(AcquireLoadTag tag)417 ScopeInfo SharedFunctionInfo::scope_info(AcquireLoadTag tag) const {
418   Object maybe_scope_info = name_or_scope_info(tag);
419   if (maybe_scope_info.IsScopeInfo()) {
420     return ScopeInfo::cast(maybe_scope_info);
421   }
422   return GetReadOnlyRoots().empty_scope_info();
423 }
424 
// Convenience overload defaulting to an acquire load.
ScopeInfo SharedFunctionInfo::scope_info() const {
  return scope_info(kAcquireLoad);
}
428 
// Installs |scope_info| into the overloaded name_or_scope_info slot, first
// migrating any previously stored (or ScopeInfo-held) name onto it.
void SharedFunctionInfo::SetScopeInfo(ScopeInfo scope_info,
                                      WriteBarrierMode mode) {
  // Move the existing name onto the ScopeInfo.
  Object name = name_or_scope_info(kAcquireLoad);
  if (name.IsScopeInfo()) {
    name = ScopeInfo::cast(name).FunctionName();
  }
  DCHECK(name.IsString() || name == kNoSharedNameSentinel);
  // Only set the function name for function scopes.
  scope_info.SetFunctionName(name);
  if (HasInferredName() && inferred_name().length() != 0) {
    scope_info.SetInferredFunctionName(inferred_name());
  }
  set_name_or_scope_info(scope_info, kReleaseStore, mode);
}

// Raw store of the scope info, without name migration and without
// release semantics (plain write plus conditional write barrier).
void SharedFunctionInfo::set_raw_scope_info(ScopeInfo scope_info,
                                            WriteBarrierMode mode) {
  WRITE_FIELD(*this, kNameOrScopeInfoOffset, scope_info);
  CONDITIONAL_WRITE_BARRIER(*this, kNameOrScopeInfoOffset, scope_info, mode);
}
450 
// The outer scope info shares a slot with the feedback metadata: before
// compilation it holds the outer ScopeInfo (or the hole), afterwards the
// FeedbackMetadata.
HeapObject SharedFunctionInfo::outer_scope_info() const {
  DCHECK(!is_compiled());
  DCHECK(!HasFeedbackMetadata());
  return raw_outer_scope_info_or_feedback_metadata();
}

// Checks for a non-empty outer ScopeInfo, reading it from the slot before
// compilation and from this function's own ScopeInfo afterwards.
bool SharedFunctionInfo::HasOuterScopeInfo() const {
  ScopeInfo outer_info;
  if (!is_compiled()) {
    if (!outer_scope_info().IsScopeInfo()) return false;
    outer_info = ScopeInfo::cast(outer_scope_info());
  } else {
    ScopeInfo info = scope_info(kAcquireLoad);
    if (!info.HasOuterScopeInfo()) return false;
    outer_info = info.OuterScopeInfo();
  }
  return !outer_info.IsEmpty();
}

ScopeInfo SharedFunctionInfo::GetOuterScopeInfo() const {
  DCHECK(HasOuterScopeInfo());
  if (!is_compiled()) return ScopeInfo::cast(outer_scope_info());
  return scope_info(kAcquireLoad).OuterScopeInfo();
}

// Only valid before compilation, while the slot still holds the hole.
void SharedFunctionInfo::set_outer_scope_info(HeapObject value,
                                              WriteBarrierMode mode) {
  DCHECK(!is_compiled());
  DCHECK(raw_outer_scope_info_or_feedback_metadata().IsTheHole());
  DCHECK(value.IsScopeInfo() || value.IsTheHole());
  set_raw_outer_scope_info_or_feedback_metadata(value, mode);
}
483 
// The feedback metadata occupies the slot shared with the outer scope info
// (see outer_scope_info above).
bool SharedFunctionInfo::HasFeedbackMetadata() const {
  return raw_outer_scope_info_or_feedback_metadata().IsFeedbackMetadata();
}

bool SharedFunctionInfo::HasFeedbackMetadata(AcquireLoadTag tag) const {
  return raw_outer_scope_info_or_feedback_metadata(tag).IsFeedbackMetadata();
}

FeedbackMetadata SharedFunctionInfo::feedback_metadata() const {
  DCHECK(HasFeedbackMetadata());
  return FeedbackMetadata::cast(raw_outer_scope_info_or_feedback_metadata());
}

// Release/acquire accessors; the getter check requires metadata to be
// present, the setter check enforces a single transition from
// non-metadata to a FeedbackMetadata value.
RELEASE_ACQUIRE_ACCESSORS_CHECKED2(SharedFunctionInfo, feedback_metadata,
                                   FeedbackMetadata,
                                   kOuterScopeInfoOrFeedbackMetadataOffset,
                                   HasFeedbackMetadata(kAcquireLoad),
                                   !HasFeedbackMetadata(kAcquireLoad) &&
                                       value.IsFeedbackMetadata())
503 
// A function counts as compiled unless its function_data is still the
// CompileLazy builtin sentinel or an UncompiledData object.
bool SharedFunctionInfo::is_compiled() const {
  Object data = function_data(kAcquireLoad);
  return data != Smi::FromEnum(Builtin::kCompileLazy) &&
         !data.IsUncompiledData();
}

template <typename IsolateT>
IsCompiledScope SharedFunctionInfo::is_compiled_scope(IsolateT* isolate) const {
  return IsCompiledScope(*this, isolate);
}
514 
// Snapshots the compiled state and retains a handle to the baseline code or
// bytecode (if any), keeping it alive for the lifetime of the scope.
IsCompiledScope::IsCompiledScope(const SharedFunctionInfo shared,
                                 Isolate* isolate)
    : is_compiled_(shared.is_compiled()) {
  if (shared.HasBaselineCode()) {
    retain_code_ = handle(shared.baseline_code(kAcquireLoad), isolate);
  } else if (shared.HasBytecodeArray()) {
    retain_code_ = handle(shared.GetBytecodeArray(isolate), isolate);
  } else {
    retain_code_ = MaybeHandle<HeapObject>();
  }

  DCHECK_IMPLIES(!retain_code_.is_null(), is_compiled());
}

// LocalIsolate variant; uses persistent handles since regular handles are
// not available off the main thread.
IsCompiledScope::IsCompiledScope(const SharedFunctionInfo shared,
                                 LocalIsolate* isolate)
    : is_compiled_(shared.is_compiled()) {
  if (shared.HasBaselineCode()) {
    retain_code_ = isolate->heap()->NewPersistentHandle(
        shared.baseline_code(kAcquireLoad));
  } else if (shared.HasBytecodeArray()) {
    retain_code_ =
        isolate->heap()->NewPersistentHandle(shared.GetBytecodeArray(isolate));
  } else {
    retain_code_ = MaybeHandle<HeapObject>();
  }

  DCHECK_IMPLIES(!retain_code_.is_null(), is_compiled());
}
544 
bool SharedFunctionInfo::has_simple_parameters() {
  return scope_info(kAcquireLoad).HasSimpleParameters();
}

// True when lazy source positions are enabled and this function's bytecode
// has no source position table collected yet.
bool SharedFunctionInfo::CanCollectSourcePosition(Isolate* isolate) {
  return FLAG_enable_lazy_source_positions && HasBytecodeArray() &&
         !GetBytecodeArray(isolate).HasSourcePositionTable();
}

// API functions store a FunctionTemplateInfo in function_data.
bool SharedFunctionInfo::IsApiFunction() const {
  return function_data(kAcquireLoad).IsFunctionTemplateInfo();
}

FunctionTemplateInfo SharedFunctionInfo::get_api_func_data() const {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data(kAcquireLoad));
}

// Bytecode may be stored directly, behind InterpreterData, or behind
// (baseline) code, stored as CodeT.
bool SharedFunctionInfo::HasBytecodeArray() const {
  Object data = function_data(kAcquireLoad);
  return data.IsBytecodeArray() || data.IsInterpreterData() || data.IsCodeT();
}
567 
568 template <typename IsolateT>
GetBytecodeArray(IsolateT * isolate)569 BytecodeArray SharedFunctionInfo::GetBytecodeArray(IsolateT* isolate) const {
570   SharedMutexGuardIfOffThread<IsolateT, base::kShared> mutex_guard(
571       GetIsolate()->shared_function_info_access(), isolate);
572 
573   DCHECK(HasBytecodeArray());
574   if (HasDebugInfo() && GetDebugInfo().HasInstrumentedBytecodeArray()) {
575     return GetDebugInfo().OriginalBytecodeArray();
576   }
577 
578   return GetActiveBytecodeArray();
579 }
580 
// Returns the bytecode array currently installed for execution. When the
// function data holds (baseline) code, the bytecode is reached through its
// bytecode_or_interpreter_data field; otherwise it is stored directly or
// behind InterpreterData.
BytecodeArray SharedFunctionInfo::GetActiveBytecodeArray() const {
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    Code baseline_code = FromCodeT(CodeT::cast(data));
    data = baseline_code.bytecode_or_interpreter_data();
  }
  if (data.IsBytecodeArray()) {
    return BytecodeArray::cast(data);
  } else {
    DCHECK(data.IsInterpreterData());
    return InterpreterData::cast(data).bytecode_array();
  }
}

// Swaps in a replacement bytecode array (e.g. an instrumented one).
void SharedFunctionInfo::SetActiveBytecodeArray(BytecodeArray bytecode) {
  // We don't allow setting the active bytecode array on baseline-optimized
  // functions. They should have been flushed earlier.
  DCHECK(!HasBaselineCode());

  Object data = function_data(kAcquireLoad);
  if (data.IsBytecodeArray()) {
    set_function_data(bytecode, kReleaseStore);
  } else {
    DCHECK(data.IsInterpreterData());
    interpreter_data().set_bytecode_array(bytecode);
  }
}

// Installs bytecode on a function that has not been compiled yet.
void SharedFunctionInfo::set_bytecode_array(BytecodeArray bytecode) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
         HasUncompiledData());
  set_function_data(bytecode, kReleaseStore);
}
614 
// Decides whether this function's baseline code / bytecode may be flushed
// under the given flushing mode. Safe to call from the concurrent marker.
bool SharedFunctionInfo::ShouldFlushCode(
    base::EnumSet<CodeFlushMode> code_flush_mode) {
  if (IsFlushingDisabled(code_flush_mode)) return false;

  // TODO(rmcilroy): Enable bytecode flushing for resumable functions.
  if (IsResumableFunction(kind()) || !allows_lazy_compilation()) {
    return false;
  }

  // Get a snapshot of the function data field, and if it is a bytecode array,
  // check if it is old. Note, this is done this way since this function can be
  // called by the concurrent marker.
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    Code baseline_code = FromCodeT(CodeT::cast(data));
    DCHECK_EQ(baseline_code.kind(), CodeKind::BASELINE);
    // If baseline code flushing isn't enabled and we have baseline data on SFI
    // we cannot flush baseline / bytecode.
    if (!IsBaselineCodeFlushingEnabled(code_flush_mode)) return false;
    data = baseline_code.bytecode_or_interpreter_data();
  } else if (!IsByteCodeFlushingEnabled(code_flush_mode)) {
    // If bytecode flushing isn't enabled and there is no baseline code there is
    // nothing to flush.
    return false;
  }
  if (!data.IsBytecodeArray()) return false;

  // Stress mode flushes eagerly, regardless of bytecode age.
  if (IsStressFlushingEnabled(code_flush_mode)) return true;

  BytecodeArray bytecode = BytecodeArray::cast(data);

  return bytecode.IsOld();
}
648 
Code SharedFunctionInfo::InterpreterTrampoline() const {
  DCHECK(HasInterpreterData());
  return interpreter_data().interpreter_trampoline();
}

// InterpreterData may be reached directly via function_data, or indirectly
// through the baseline code's bytecode_or_interpreter_data field.
bool SharedFunctionInfo::HasInterpreterData() const {
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    Code baseline_code = FromCodeT(CodeT::cast(data));
    DCHECK_EQ(baseline_code.kind(), CodeKind::BASELINE);
    data = baseline_code.bytecode_or_interpreter_data();
  }
  return data.IsInterpreterData();
}

InterpreterData SharedFunctionInfo::interpreter_data() const {
  DCHECK(HasInterpreterData());
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    Code baseline_code = FromCodeT(CodeT::cast(data));
    DCHECK_EQ(baseline_code.kind(), CodeKind::BASELINE);
    data = baseline_code.bytecode_or_interpreter_data();
  }
  return InterpreterData::cast(data);
}

// Only used when interpreted frames get a native stack frame
// (--interpreted-frames-native-stack).
void SharedFunctionInfo::set_interpreter_data(
    InterpreterData interpreter_data) {
  DCHECK(FLAG_interpreted_frames_native_stack);
  DCHECK(!HasBaselineCode());
  set_function_data(interpreter_data, kReleaseStore);
}
681 
// Baseline code is stored as a CodeT value directly in function_data.
bool SharedFunctionInfo::HasBaselineCode() const {
  Object data = function_data(kAcquireLoad);
  if (data.IsCodeT()) {
    DCHECK_EQ(FromCodeT(CodeT::cast(data)).kind(), CodeKind::BASELINE);
    return true;
  }
  return false;
}

Code SharedFunctionInfo::baseline_code(AcquireLoadTag) const {
  DCHECK(HasBaselineCode());
  return FromCodeT(CodeT::cast(function_data(kAcquireLoad)));
}

void SharedFunctionInfo::set_baseline_code(Code baseline_code,
                                           ReleaseStoreTag) {
  DCHECK_EQ(baseline_code.kind(), CodeKind::BASELINE);
  set_function_data(ToCodeT(baseline_code), kReleaseStore);
}

// Drops the baseline code, restoring the underlying bytecode/interpreter
// data as the function data.
void SharedFunctionInfo::FlushBaselineCode() {
  DCHECK(HasBaselineCode());
  set_function_data(baseline_code(kAcquireLoad).bytecode_or_interpreter_data(),
                    kReleaseStore);
}
707 
#if V8_ENABLE_WEBASSEMBLY
// The various wasm-related function data kinds are all distinguished by the
// type of the object stored in function_data.
bool SharedFunctionInfo::HasAsmWasmData() const {
  return function_data(kAcquireLoad).IsAsmWasmData();
}

bool SharedFunctionInfo::HasWasmExportedFunctionData() const {
  return function_data(kAcquireLoad).IsWasmExportedFunctionData();
}

bool SharedFunctionInfo::HasWasmJSFunctionData() const {
  return function_data(kAcquireLoad).IsWasmJSFunctionData();
}

bool SharedFunctionInfo::HasWasmCapiFunctionData() const {
  return function_data(kAcquireLoad).IsWasmCapiFunctionData();
}

AsmWasmData SharedFunctionInfo::asm_wasm_data() const {
  DCHECK(HasAsmWasmData());
  return AsmWasmData::cast(function_data(kAcquireLoad));
}

void SharedFunctionInfo::set_asm_wasm_data(AsmWasmData data) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
         HasUncompiledData() || HasAsmWasmData());
  set_function_data(data, kReleaseStore);
}

// Returns the WasmModule of an exported wasm function, reached via the
// exported-function data's instance; nullptr for non-wasm functions.
const wasm::WasmModule* SharedFunctionInfo::wasm_module() const {
  if (!HasWasmExportedFunctionData()) return nullptr;
  const WasmExportedFunctionData& function_data = wasm_exported_function_data();
  const WasmInstanceObject& wasm_instance = function_data.instance();
  const WasmModuleObject& wasm_module_object = wasm_instance.module_object();
  return wasm_module_object.module();
}

// Signature of an exported wasm function, looked up by its function index in
// the module; nullptr for non-wasm functions.
const wasm::FunctionSig* SharedFunctionInfo::wasm_function_signature() const {
  const wasm::WasmModule* module = wasm_module();
  if (!module) return nullptr;
  const WasmExportedFunctionData& function_data = wasm_exported_function_data();
  DCHECK_LT(function_data.function_index(), module->functions.size());
  return module->functions[function_data.function_index()].sig;
}
#endif  // V8_ENABLE_WEBASSEMBLY
752 
HasBuiltinId()753 bool SharedFunctionInfo::HasBuiltinId() const {
754   return function_data(kAcquireLoad).IsSmi();
755 }
756 
builtin_id()757 Builtin SharedFunctionInfo::builtin_id() const {
758   DCHECK(HasBuiltinId());
759   int id = Smi::ToInt(function_data(kAcquireLoad));
760   DCHECK(Builtins::IsBuiltinId(id));
761   return Builtins::FromInt(id);
762 }
763 
set_builtin_id(Builtin builtin)764 void SharedFunctionInfo::set_builtin_id(Builtin builtin) {
765   DCHECK(Builtins::IsBuiltinId(builtin));
766   set_function_data(Smi::FromInt(static_cast<int>(builtin)), kReleaseStore,
767                     SKIP_WRITE_BARRIER);
768 }
769 
HasUncompiledData()770 bool SharedFunctionInfo::HasUncompiledData() const {
771   return function_data(kAcquireLoad).IsUncompiledData();
772 }
773 
uncompiled_data()774 UncompiledData SharedFunctionInfo::uncompiled_data() const {
775   DCHECK(HasUncompiledData());
776   return UncompiledData::cast(function_data(kAcquireLoad));
777 }
778 
// Installs UncompiledData as this function's data. Only legal when the
// function currently points at the lazy-compile builtin or already holds
// uncompiled data.
void SharedFunctionInfo::set_uncompiled_data(UncompiledData uncompiled_data) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy) ||
         HasUncompiledData());
  DCHECK(uncompiled_data.IsUncompiledData());
  set_function_data(uncompiled_data, kReleaseStore);
}
785 
HasUncompiledDataWithPreparseData()786 bool SharedFunctionInfo::HasUncompiledDataWithPreparseData() const {
787   return function_data(kAcquireLoad).IsUncompiledDataWithPreparseData();
788 }
789 
790 UncompiledDataWithPreparseData
uncompiled_data_with_preparse_data()791 SharedFunctionInfo::uncompiled_data_with_preparse_data() const {
792   DCHECK(HasUncompiledDataWithPreparseData());
793   return UncompiledDataWithPreparseData::cast(function_data(kAcquireLoad));
794 }
795 
// Installs UncompiledDataWithPreparseData as this function's data. Only
// legal when the function currently points at the lazy-compile builtin.
void SharedFunctionInfo::set_uncompiled_data_with_preparse_data(
    UncompiledDataWithPreparseData uncompiled_data_with_preparse_data) {
  DCHECK(function_data(kAcquireLoad) == Smi::FromEnum(Builtin::kCompileLazy));
  DCHECK(uncompiled_data_with_preparse_data.IsUncompiledDataWithPreparseData());
  set_function_data(uncompiled_data_with_preparse_data, kReleaseStore);
}
802 
HasUncompiledDataWithoutPreparseData()803 bool SharedFunctionInfo::HasUncompiledDataWithoutPreparseData() const {
804   return function_data(kAcquireLoad).IsUncompiledDataWithoutPreparseData();
805 }
806 
// Drops the pre-parsed scope data attached to this function's uncompiled
// data by shrinking the object in place: the map is swapped to the
// "without preparse data" map and the now-unused tail becomes a filler.
void SharedFunctionInfo::ClearPreparseData() {
  DCHECK(HasUncompiledDataWithPreparseData());
  UncompiledDataWithPreparseData data = uncompiled_data_with_preparse_data();

  // Trim off the pre-parsed scope data from the uncompiled data by swapping the
  // map, leaving only an uncompiled data without pre-parsed scope.
  DisallowGarbageCollection no_gc;
  Heap* heap = GetHeapFromWritableObject(data);

  // Swap the map. The heap must be notified before the object's layout
  // changes.
  heap->NotifyObjectLayoutChange(data, no_gc);
  // The replacement layout must be strictly smaller and exactly the shared
  // UncompiledData header, so the shrink is always valid.
  STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize <
                UncompiledDataWithPreparseData::kSize);
  STATIC_ASSERT(UncompiledDataWithoutPreparseData::kSize ==
                UncompiledData::kHeaderSize);
  data.set_map(GetReadOnlyRoots().uncompiled_data_without_preparse_data_map(),
               kReleaseStore);

  // Fill the remaining space with filler, so the heap stays iterable and any
  // recorded slots in the trimmed tail are cleared.
  heap->CreateFillerObjectAt(
      data.address() + UncompiledDataWithoutPreparseData::kSize,
      UncompiledDataWithPreparseData::kSize -
          UncompiledDataWithoutPreparseData::kSize,
      ClearRecordedSlots::kYes);

  // Ensure that the clear was successful.
  DCHECK(HasUncompiledDataWithoutPreparseData());
}
835 
// Re-initializes this UncompiledData after the function's bytecode has been
// flushed: stores the inferred name and source positions.
// |gc_notify_updated_slot| is invoked with the freshly written
// inferred-name slot — presumably so an in-progress GC can record the
// updated reference; confirm against callers before relying on this.
void UncompiledData::InitAfterBytecodeFlush(
    String inferred_name, int start_position, int end_position,
    std::function<void(HeapObject object, ObjectSlot slot, HeapObject target)>
        gc_notify_updated_slot) {
  set_inferred_name(inferred_name);
  // Notify after the write so the callback sees the updated slot.
  gc_notify_updated_slot(*this, RawField(UncompiledData::kInferredNameOffset),
                         inferred_name);
  set_start_position(start_position);
  set_end_position(end_position);
}
846 
script()847 HeapObject SharedFunctionInfo::script() const {
848   HeapObject maybe_script = script_or_debug_info(kAcquireLoad);
849   if (maybe_script.IsDebugInfo()) {
850     return DebugInfo::cast(maybe_script).script();
851   }
852   return maybe_script;
853 }
854 
set_script(HeapObject script)855 void SharedFunctionInfo::set_script(HeapObject script) {
856   HeapObject maybe_debug_info = script_or_debug_info(kAcquireLoad);
857   if (maybe_debug_info.IsDebugInfo()) {
858     DebugInfo::cast(maybe_debug_info).set_script(script);
859   } else {
860     set_script_or_debug_info(script, kReleaseStore);
861   }
862 }
863 
is_repl_mode()864 bool SharedFunctionInfo::is_repl_mode() const {
865   return script().IsScript() && Script::cast(script()).is_repl_mode();
866 }
867 
HasDebugInfo()868 bool SharedFunctionInfo::HasDebugInfo() const {
869   return script_or_debug_info(kAcquireLoad).IsDebugInfo();
870 }
871 
GetDebugInfo()872 DebugInfo SharedFunctionInfo::GetDebugInfo() const {
873   auto debug_info = script_or_debug_info(kAcquireLoad);
874   DCHECK(debug_info.IsDebugInfo());
875   return DebugInfo::cast(debug_info);
876 }
877 
// Installs a DebugInfo into the script_or_debug_info slot. The DebugInfo
// must wrap the script currently stored there, so script() is unchanged
// by this operation.
void SharedFunctionInfo::SetDebugInfo(DebugInfo debug_info) {
  DCHECK(!HasDebugInfo());
  DCHECK_EQ(debug_info.script(), script_or_debug_info(kAcquireLoad));
  set_script_or_debug_info(debug_info, kReleaseStore);
}
883 
HasInferredName()884 bool SharedFunctionInfo::HasInferredName() {
885   Object scope_info = name_or_scope_info(kAcquireLoad);
886   if (scope_info.IsScopeInfo()) {
887     return ScopeInfo::cast(scope_info).HasInferredFunctionName();
888   }
889   return HasUncompiledData();
890 }
891 
inferred_name()892 String SharedFunctionInfo::inferred_name() {
893   Object maybe_scope_info = name_or_scope_info(kAcquireLoad);
894   if (maybe_scope_info.IsScopeInfo()) {
895     ScopeInfo scope_info = ScopeInfo::cast(maybe_scope_info);
896     if (scope_info.HasInferredFunctionName()) {
897       Object name = scope_info.InferredFunctionName();
898       if (name.IsString()) return String::cast(name);
899     }
900   } else if (HasUncompiledData()) {
901     return uncompiled_data().inferred_name();
902   }
903   return GetReadOnlyRoots().empty_string();
904 }
905 
IsUserJavaScript()906 bool SharedFunctionInfo::IsUserJavaScript() const {
907   Object script_obj = script();
908   if (script_obj.IsUndefined()) return false;
909   Script script = Script::cast(script_obj);
910   return script.IsUserJavaScript();
911 }
912 
// Whether the debugger can attach to this function: user JavaScript only,
// excluding functions backed by asm.js-translated wasm data.
bool SharedFunctionInfo::IsSubjectToDebugging() const {
#if V8_ENABLE_WEBASSEMBLY
  if (HasAsmWasmData()) return false;
#endif  // V8_ENABLE_WEBASSEMBLY
  return IsUserJavaScript();
}
919 
// Whether the compiled artifacts may be discarded. True for asm/wasm data,
// or when the function holds bytecode, baseline code, or uncompiled data
// that still carries preparse data.
bool SharedFunctionInfo::CanDiscardCompiled() const {
#if V8_ENABLE_WEBASSEMBLY
  if (HasAsmWasmData()) return true;
#endif  // V8_ENABLE_WEBASSEMBLY
  return HasBytecodeArray() || HasUncompiledDataWithPreparseData() ||
         HasBaselineCode();
}
927 
is_class_constructor()928 bool SharedFunctionInfo::is_class_constructor() const {
929   return IsClassConstructorBit::decode(flags(kRelaxedLoad));
930 }
931 
set_are_properties_final(bool value)932 void SharedFunctionInfo::set_are_properties_final(bool value) {
933   if (is_class_constructor()) {
934     set_properties_are_final(value);
935   }
936 }
937 
are_properties_final()938 bool SharedFunctionInfo::are_properties_final() const {
939   bool bit = properties_are_final();
940   return bit && is_class_constructor();
941 }
942 
943 }  // namespace internal
944 }  // namespace v8
945 
946 #include "src/base/platform/wrappers.h"
947 #include "src/objects/object-macros-undef.h"
948 
949 #endif  // V8_OBJECTS_SHARED_FUNCTION_INFO_INL_H_
950