1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_COMPILER_JS_HEAP_BROKER_H_
6 #define V8_COMPILER_JS_HEAP_BROKER_H_
7
8 #include "src/base/compiler-specific.h"
9 #include "src/base/optional.h"
10 #include "src/base/platform/mutex.h"
11 #include "src/common/globals.h"
12 #include "src/compiler/access-info.h"
13 #include "src/compiler/feedback-source.h"
14 #include "src/compiler/globals.h"
15 #include "src/compiler/heap-refs.h"
16 #include "src/compiler/processed-feedback.h"
17 #include "src/compiler/refs-map.h"
18 #include "src/execution/local-isolate.h"
19 #include "src/handles/handles.h"
20 #include "src/handles/persistent-handles.h"
21 #include "src/heap/local-heap.h"
22 #include "src/heap/parked-scope.h"
23 #include "src/interpreter/bytecode-array-iterator.h"
24 #include "src/objects/code-kind.h"
25 #include "src/objects/feedback-vector.h"
26 #include "src/objects/function-kind.h"
27 #include "src/objects/objects.h"
28 #include "src/utils/address-map.h"
29 #include "src/utils/identity-map.h"
30 #include "src/utils/ostreams.h"
31 #include "src/zone/zone-containers.h"
32
33 namespace v8 {
34 namespace internal {
35 namespace compiler {
36
37 class ObjectRef;
38
39 std::ostream& operator<<(std::ostream& os, const ObjectRef& ref);
40
// Tracing macros. All of them are no-ops unless broker tracing is enabled.
//
// TRACE_BROKER: verbose trace output, additionally gated on
// --trace-heap-broker-verbose.
#define TRACE_BROKER(broker, x)                                      \
  do {                                                               \
    if (broker->tracing_enabled() && FLAG_trace_heap_broker_verbose) \
      StdoutStream{} << broker->Trace() << x << '\n';                \
  } while (false)

// TRACE_BROKER_MEMORY: memory-usage trace output, additionally gated on
// --trace-heap-broker-memory.
#define TRACE_BROKER_MEMORY(broker, x)                              \
  do {                                                              \
    if (broker->tracing_enabled() && FLAG_trace_heap_broker_memory) \
      StdoutStream{} << broker->Trace() << x << std::endl;          \
  } while (false)

// TRACE_BROKER_MISSING: reports missing data (with the source location of the
// report); gated only on broker tracing being enabled.
#define TRACE_BROKER_MISSING(broker, x)                                        \
  do {                                                                         \
    if (broker->tracing_enabled())                                             \
      StdoutStream{} << broker->Trace() << "Missing " << x << " (" << __FILE__ \
                     << ":" << __LINE__ << ")" << std::endl;                   \
  } while (false)
59
60 struct PropertyAccessTarget {
61 MapRef map;
62 NameRef name;
63 AccessMode mode;
64
65 struct Hash {
operatorPropertyAccessTarget::Hash66 size_t operator()(const PropertyAccessTarget& pair) const {
67 return base::hash_combine(
68 base::hash_combine(pair.map.object().address(),
69 pair.name.object().address()),
70 static_cast<int>(pair.mode));
71 }
72 };
73 struct Equal {
operatorPropertyAccessTarget::Equal74 bool operator()(const PropertyAccessTarget& lhs,
75 const PropertyAccessTarget& rhs) const {
76 return lhs.map.equals(rhs.map) && lhs.name.equals(rhs.name) &&
77 lhs.mode == rhs.mode;
78 }
79 };
80 };
81
// Flags controlling the behavior of JSHeapBroker::GetOrCreateData and
// JSHeapBroker::TryGetOrCreateData when constructing ObjectData.
enum GetOrCreateDataFlag {
  // If set, a failure to create the data object results in a crash.
  kCrashOnError = 1 << 0,
  // If set, data construction assumes that the given object is protected by
  // a memory fence (e.g. acquire-release) and thus fields required for
  // construction (like Object::map) are safe to read. The protection can
  // extend to some other situations as well.
  kAssumeMemoryFence = 1 << 1,
};
using GetOrCreateDataFlags = base::Flags<GetOrCreateDataFlag>;
DEFINE_OPERATORS_FOR_FLAGS(GetOrCreateDataFlags)
93
// The JSHeapBroker mediates the compiler's access to the JS heap. It hands
// out ObjectData (wrapped by the Ref classes), caches processed feedback and
// property access infos, and manages the persistent/canonical handles used
// for background-thread compilation.
class V8_EXPORT_PRIVATE JSHeapBroker {
 public:
  JSHeapBroker(Isolate* isolate, Zone* broker_zone, bool tracing_enabled,
               bool is_concurrent_inlining, CodeKind code_kind);

  // For use only in tests, sets default values for some arguments. Avoids
  // churn when new flags are added.
  JSHeapBroker(Isolate* isolate, Zone* broker_zone)
      : JSHeapBroker(isolate, broker_zone, FLAG_trace_heap_broker, false,
                     CodeKind::TURBOFAN) {}

  ~JSHeapBroker();

  // The compilation target's native context. We need the setter because at
  // broker construction time we don't yet have the canonical handle.
  NativeContextRef target_native_context() const {
    return target_native_context_.value();
  }
  void SetTargetNativeContextRef(Handle<NativeContext> native_context);

  void InitializeAndStartSerializing();

  // Simple accessors for broker-wide configuration.
  Isolate* isolate() const { return isolate_; }
  Zone* zone() const { return zone_; }
  bool tracing_enabled() const { return tracing_enabled_; }
  bool is_concurrent_inlining() const { return is_concurrent_inlining_; }
  bool is_turboprop() const { return code_kind_ == CodeKind::TURBOPROP; }

  // Configuration for FeedbackNexus reads: main-thread or background-thread
  // access, depending on where we currently run.
  NexusConfig feedback_nexus_config() const {
    return IsMainThread() ? NexusConfig::FromMainThread(isolate())
                          : NexusConfig::FromBackgroundThread(
                                isolate(), local_isolate()->heap());
  }

  // Broker lifecycle modes. mode_ starts out kDisabled; see StopSerializing
  // and Retire for the later transitions.
  enum BrokerMode { kDisabled, kSerializing, kSerialized, kRetired };
  BrokerMode mode() const { return mode_; }

  void StopSerializing();
  void Retire();
  bool SerializingAllowed() const;

  // Remember the local isolate and initialize its local heap with the
  // persistent and canonical handles provided by {info}.
  void AttachLocalIsolate(OptimizedCompilationInfo* info,
                          LocalIsolate* local_isolate);
  // Forget about the local isolate and pass the persistent and canonical
  // handles provided back to {info}. {info} is responsible for disposing of
  // them.
  void DetachLocalIsolate(OptimizedCompilationInfo* info);

  bool StackHasOverflowed() const;

#ifdef DEBUG
  void PrintRefsAnalysis() const;
#endif  // DEBUG

  // Returns the handle from root index table for read only heap objects.
  Handle<Object> GetRootHandle(Object object);

  // Never returns nullptr.
  ObjectData* GetOrCreateData(Handle<Object> object,
                              GetOrCreateDataFlags flags = {});
  ObjectData* GetOrCreateData(Object object, GetOrCreateDataFlags flags = {});

  // Gets data only if we have it. However, thin wrappers will be created for
  // smis, read-only objects and never-serialized objects.
  ObjectData* TryGetOrCreateData(Handle<Object> object,
                                 GetOrCreateDataFlags flags = {});
  ObjectData* TryGetOrCreateData(Object object,
                                 GetOrCreateDataFlags flags = {});

  // Check if {object} is any native context's %ArrayPrototype% or
  // %ObjectPrototype%.
  bool IsArrayOrObjectPrototype(const JSObjectRef& object) const;
  bool IsArrayOrObjectPrototype(Handle<JSObject> object) const;

  // Feedback cache: store and query processed feedback by slot.
  bool HasFeedback(FeedbackSource const& source) const;
  void SetFeedback(FeedbackSource const& source,
                   ProcessedFeedback const* feedback);
  FeedbackSlotKind GetFeedbackSlotKind(FeedbackSource const& source) const;

  ElementAccessFeedback const& ProcessFeedbackMapsForElementAccess(
      ZoneVector<MapRef>& maps, KeyedAccessMode const& keyed_mode,
      FeedbackSlotKind slot_kind);

  // Binary, comparison and for-in hints can be fully expressed via
  // an enum. Insufficient feedback is signaled by <Hint enum>::kNone.
  BinaryOperationHint GetFeedbackForBinaryOperation(
      FeedbackSource const& source);
  CompareOperationHint GetFeedbackForCompareOperation(
      FeedbackSource const& source);
  ForInHint GetFeedbackForForIn(FeedbackSource const& source);

  // Feedback accessors per feedback kind. These return ProcessedFeedback
  // references owned by the broker.
  ProcessedFeedback const& GetFeedbackForCall(FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForGlobalAccess(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForInstanceOf(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForArrayOrObjectLiteral(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForRegExpLiteral(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForTemplateObject(
      FeedbackSource const& source);
  ProcessedFeedback const& GetFeedbackForPropertyAccess(
      FeedbackSource const& source, AccessMode mode,
      base::Optional<NameRef> static_name);

  ProcessedFeedback const& ProcessFeedbackForBinaryOperation(
      FeedbackSource const& source);
  ProcessedFeedback const& ProcessFeedbackForCompareOperation(
      FeedbackSource const& source);
  ProcessedFeedback const& ProcessFeedbackForForIn(
      FeedbackSource const& source);

  bool FeedbackIsInsufficient(FeedbackSource const& source) const;

  base::Optional<NameRef> GetNameFeedback(FeedbackNexus const& nexus);

  PropertyAccessInfo GetPropertyAccessInfo(
      MapRef map, NameRef name, AccessMode access_mode,
      CompilationDependencies* dependencies);

  MinimorphicLoadPropertyAccessInfo GetPropertyAccessInfo(
      MinimorphicLoadPropertyAccessFeedback const& feedback,
      FeedbackSource const& source);

  StringRef GetTypedArrayStringTag(ElementsKind kind);

  // True when there is no local isolate, or when the local isolate is the
  // main-thread one.
  bool IsMainThread() const {
    return local_isolate() == nullptr || local_isolate()->is_main_thread();
  }

  LocalIsolate* local_isolate() const { return local_isolate_; }

  // TODO(jgruber): Consider always having local_isolate_ set to a real value.
  // This seems not entirely trivial since we currently reset local_isolate_ to
  // nullptr at some point in the JSHeapBroker lifecycle.
  LocalIsolate* local_isolate_or_isolate() const {
    return local_isolate() != nullptr ? local_isolate()
                                      : isolate()->AsLocalIsolate();
  }

  // Return the corresponding canonical persistent handle for {object}. Create
  // one if it does not exist.
  // If we have the canonical map, we can create the canonical & persistent
  // handle through it. This commonly happens during the Execute phase.
  // If we don't, that means we are calling this method from serialization. If
  // that happens, we should be inside a canonical and a persistent handle
  // scope. Then, we would just use the regular handle creation.
  template <typename T>
  Handle<T> CanonicalPersistentHandle(T object) {
    if (canonical_handles_) {
      Address address = object.ptr();
      if (Internals::HasHeapObjectTag(address)) {
        // Read-only roots are not in the canonical handles map; they are
        // looked up via the root index table instead.
        RootIndex root_index;
        if (root_index_map_.Lookup(address, &root_index)) {
          return Handle<T>(isolate_->root_handle(root_index).location());
        }
      }

      Object obj(address);
      auto find_result = canonical_handles_->FindOrInsert(obj);
      if (!find_result.already_exists) {
        // Allocate new PersistentHandle if one wasn't created before.
        DCHECK_NOT_NULL(local_isolate());
        *find_result.entry =
            local_isolate()->heap()->NewPersistentHandle(obj).location();
      }
      return Handle<T>(*find_result.entry);
    } else {
      // No canonical handles map: fall back to regular handle creation (we
      // must then be inside a canonical and persistent handle scope).
      return Handle<T>(object, isolate());
    }
  }

  // Handle<T> overload of the above; null handles pass through unchanged.
  template <typename T>
  Handle<T> CanonicalPersistentHandle(Handle<T> object) {
    if (object.is_null()) return object;  // Can't deref a null handle.
    return CanonicalPersistentHandle<T>(*object);
  }

  // Find the corresponding handle in the CanonicalHandlesMap. The entry must be
  // found.
  template <typename T>
  Handle<T> FindCanonicalPersistentHandleForTesting(Object object) {
    Address** entry = canonical_handles_->Find(object);
    return Handle<T>(*entry);
  }

  // Set the persistent handles and copy the canonical handles over to the
  // JSHeapBroker.
  void SetPersistentAndCopyCanonicalHandlesForTesting(
      std::unique_ptr<PersistentHandles> persistent_handles,
      std::unique_ptr<CanonicalHandlesMap> canonical_handles);

  // Tracing support: Trace() yields the current line prefix, the two methods
  // below adjust the indentation level (see TraceScope).
  std::string Trace() const;
  void IncrementTracingIndentation();
  void DecrementTracingIndentation();

  // Locks {mutex} through the duration of this scope iff it is the first
  // occurrence. This is done to have a recursive shared lock on {mutex}.
  class V8_NODISCARD RecursiveSharedMutexGuardIfNeeded {
   protected:
    RecursiveSharedMutexGuardIfNeeded(LocalIsolate* local_isolate,
                                      base::SharedMutex* mutex,
                                      int* mutex_depth_address)
        : mutex_depth_address_(mutex_depth_address),
          initial_mutex_depth_(*mutex_depth_address_),
          shared_mutex_guard_(local_isolate, mutex, initial_mutex_depth_ == 0) {
      (*mutex_depth_address_)++;
    }

    ~RecursiveSharedMutexGuardIfNeeded() {
      DCHECK_GE((*mutex_depth_address_), 1);
      (*mutex_depth_address_)--;
      DCHECK_EQ(initial_mutex_depth_, (*mutex_depth_address_));
    }

   private:
    int* const mutex_depth_address_;
    const int initial_mutex_depth_;
    // Only locks when initial_mutex_depth_ == 0, i.e. for the outermost guard.
    ParkedSharedMutexGuardIf<base::kShared> shared_mutex_guard_;
  };

  // Recursive shared lock on the isolate's map-updater mutex.
  class MapUpdaterGuardIfNeeded final
      : public RecursiveSharedMutexGuardIfNeeded {
   public:
    explicit MapUpdaterGuardIfNeeded(JSHeapBroker* broker)
        : RecursiveSharedMutexGuardIfNeeded(
              broker->local_isolate_or_isolate(),
              broker->isolate()->map_updater_access(),
              &broker->map_updater_mutex_depth_) {}
  };

  // Recursive shared lock on the isolate's boilerplate-migration mutex.
  class BoilerplateMigrationGuardIfNeeded final
      : public RecursiveSharedMutexGuardIfNeeded {
   public:
    explicit BoilerplateMigrationGuardIfNeeded(JSHeapBroker* broker)
        : RecursiveSharedMutexGuardIfNeeded(
              broker->local_isolate_or_isolate(),
              broker->isolate()->boilerplate_migration_access(),
              &broker->boilerplate_migration_mutex_depth_) {}
  };

  // If this returns false, the object is guaranteed to be fully initialized and
  // thus safe to read from a memory safety perspective. The converse does not
  // necessarily hold.
  bool ObjectMayBeUninitialized(Handle<Object> object) const;
  bool ObjectMayBeUninitialized(Object object) const;
  bool ObjectMayBeUninitialized(HeapObject object) const;

  // Dependencies must be set exactly once, and read only after being set.
  void set_dependencies(CompilationDependencies* dependencies) {
    DCHECK_NOT_NULL(dependencies);
    DCHECK_NULL(dependencies_);
    dependencies_ = dependencies;
  }
  CompilationDependencies* dependencies() const {
    DCHECK_NOT_NULL(dependencies_);
    return dependencies_;
  }

 private:
  friend class HeapObjectRef;
  friend class ObjectRef;
  friend class ObjectData;
  friend class PropertyCellData;

  ProcessedFeedback const& GetFeedback(FeedbackSource const& source) const;
  const ProcessedFeedback& NewInsufficientFeedback(FeedbackSlotKind kind) const;

  // Bottleneck FeedbackNexus access here, for storage in the broker
  // or on-the-fly usage elsewhere in the compiler.
  ProcessedFeedback const& ReadFeedbackForArrayOrObjectLiteral(
      FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForBinaryOperation(
      FeedbackSource const& source) const;
  ProcessedFeedback const& ReadFeedbackForCall(FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForCompareOperation(
      FeedbackSource const& source) const;
  ProcessedFeedback const& ReadFeedbackForForIn(
      FeedbackSource const& source) const;
  ProcessedFeedback const& ReadFeedbackForGlobalAccess(
      FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForInstanceOf(
      FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForPropertyAccess(
      FeedbackSource const& source, AccessMode mode,
      base::Optional<NameRef> static_name);
  ProcessedFeedback const& ReadFeedbackForRegExpLiteral(
      FeedbackSource const& source);
  ProcessedFeedback const& ReadFeedbackForTemplateObject(
      FeedbackSource const& source);

  void CollectArrayAndObjectPrototypes();

  // Persistent handles: set/detach exactly once each (see DCHECKs).
  void set_persistent_handles(
      std::unique_ptr<PersistentHandles> persistent_handles) {
    DCHECK_NULL(ph_);
    ph_ = std::move(persistent_handles);
    DCHECK_NOT_NULL(ph_);
  }
  std::unique_ptr<PersistentHandles> DetachPersistentHandles() {
    DCHECK_NOT_NULL(ph_);
    return std::move(ph_);
  }

  // Canonical handles: likewise set/detach exactly once each.
  void set_canonical_handles(
      std::unique_ptr<CanonicalHandlesMap> canonical_handles) {
    DCHECK_NULL(canonical_handles_);
    canonical_handles_ = std::move(canonical_handles);
    DCHECK_NOT_NULL(canonical_handles_);
  }

  std::unique_ptr<CanonicalHandlesMap> DetachCanonicalHandles() {
    DCHECK_NOT_NULL(canonical_handles_);
    return std::move(canonical_handles_);
  }

  // Copy the canonical handles over to the JSHeapBroker.
  void CopyCanonicalHandlesForTesting(
      std::unique_ptr<CanonicalHandlesMap> canonical_handles);

  Isolate* const isolate_;
  Zone* const zone_;
  base::Optional<NativeContextRef> target_native_context_;
  // Object -> ObjectData mapping (see refs-map.h).
  RefsMap* refs_;
  // Used to recognize read-only roots by address (see GetRootHandle and
  // CanonicalPersistentHandle).
  RootIndexMap root_index_map_;
  // Set of every native context's %ArrayPrototype% / %ObjectPrototype%;
  // presumably filled by CollectArrayAndObjectPrototypes.
  ZoneUnorderedSet<Handle<JSObject>, Handle<JSObject>::hash,
                   Handle<JSObject>::equal_to>
      array_and_object_prototypes_;
  BrokerMode mode_ = kDisabled;
  bool const tracing_enabled_;
  bool const is_concurrent_inlining_;
  CodeKind const code_kind_;
  std::unique_ptr<PersistentHandles> ph_;
  LocalIsolate* local_isolate_ = nullptr;
  std::unique_ptr<CanonicalHandlesMap> canonical_handles_;
  unsigned trace_indentation_ = 0;
  // Cache of processed feedback, keyed by feedback slot.
  ZoneUnorderedMap<FeedbackSource, ProcessedFeedback const*,
                   FeedbackSource::Hash, FeedbackSource::Equal>
      feedback_;
  // Cache of property access infos, keyed by (map, name, access mode).
  ZoneUnorderedMap<PropertyAccessTarget, PropertyAccessInfo,
                   PropertyAccessTarget::Hash, PropertyAccessTarget::Equal>
      property_access_infos_;
  ZoneUnorderedMap<FeedbackSource, MinimorphicLoadPropertyAccessInfo,
                   FeedbackSource::Hash, FeedbackSource::Equal>
      minimorphic_property_access_infos_;

  CompilationDependencies* dependencies_ = nullptr;

  // The MapUpdater mutex is used in recursive patterns; for example,
  // ComputePropertyAccessInfo may call itself recursively. Thus we need to
  // emulate a recursive mutex, which we do by checking if this heap broker
  // instance already holds the mutex when a lock is requested. This field
  // holds the locking depth, i.e. how many times the mutex has been
  // recursively locked. Only the outermost locker actually locks underneath.
  int map_updater_mutex_depth_ = 0;
  // Likewise for boilerplate migrations.
  int boilerplate_migration_mutex_depth_ = 0;

  static constexpr uint32_t kMinimalRefsBucketCount = 8;
  STATIC_ASSERT(base::bits::IsPowerOfTwo(kMinimalRefsBucketCount));
  static constexpr uint32_t kInitialRefsBucketCount = 1024;
  STATIC_ASSERT(base::bits::IsPowerOfTwo(kInitialRefsBucketCount));
};
458
459 class V8_NODISCARD TraceScope {
460 public:
TraceScope(JSHeapBroker * broker,const char * label)461 TraceScope(JSHeapBroker* broker, const char* label)
462 : TraceScope(broker, static_cast<void*>(broker), label) {}
463
TraceScope(JSHeapBroker * broker,ObjectData * data,const char * label)464 TraceScope(JSHeapBroker* broker, ObjectData* data, const char* label)
465 : TraceScope(broker, static_cast<void*>(data), label) {}
466
TraceScope(JSHeapBroker * broker,void * subject,const char * label)467 TraceScope(JSHeapBroker* broker, void* subject, const char* label)
468 : broker_(broker) {
469 TRACE_BROKER(broker_, "Running " << label << " on " << subject);
470 broker_->IncrementTracingIndentation();
471 }
472
~TraceScope()473 ~TraceScope() { broker_->DecrementTracingIndentation(); }
474
475 private:
476 JSHeapBroker* const broker_;
477 };
478
479 // Scope that unparks the LocalHeap, if:
480 // a) We have a JSHeapBroker,
481 // b) Said JSHeapBroker has a LocalIsolate and thus a LocalHeap,
482 // c) Said LocalHeap has been parked and
483 // d) The given condition evaluates to true.
484 // Used, for example, when printing the graph with --trace-turbo with a
485 // previously parked LocalHeap.
486 class V8_NODISCARD UnparkedScopeIfNeeded {
487 public:
488 explicit UnparkedScopeIfNeeded(JSHeapBroker* broker,
489 bool extra_condition = true) {
490 if (broker != nullptr && extra_condition) {
491 LocalIsolate* local_isolate = broker->local_isolate();
492 if (local_isolate != nullptr && local_isolate->heap()->IsParked()) {
493 unparked_scope.emplace(local_isolate->heap());
494 }
495 }
496 }
497
498 private:
499 base::Optional<UnparkedScope> unparked_scope;
500 };
501
502 template <class T,
503 typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
TryMakeRef(JSHeapBroker * broker,ObjectData * data)504 base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
505 JSHeapBroker* broker, ObjectData* data) {
506 if (data == nullptr) return {};
507 return {typename ref_traits<T>::ref_type(broker, data)};
508 }
509
510 // Usage:
511 //
512 // base::Optional<FooRef> ref = TryMakeRef(broker, o);
513 // if (!ref.has_value()) return {}; // bailout
514 //
515 // or
516 //
517 // FooRef ref = MakeRef(broker, o);
518 template <class T,
519 typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
520 base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
521 JSHeapBroker* broker, T object, GetOrCreateDataFlags flags = {}) {
522 ObjectData* data = broker->TryGetOrCreateData(object, flags);
523 if (data == nullptr) {
524 TRACE_BROKER_MISSING(broker, "ObjectData for " << Brief(object));
525 }
526 return TryMakeRef<T>(broker, data);
527 }
528
529 template <class T,
530 typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
531 base::Optional<typename ref_traits<T>::ref_type> TryMakeRef(
532 JSHeapBroker* broker, Handle<T> object, GetOrCreateDataFlags flags = {}) {
533 ObjectData* data = broker->TryGetOrCreateData(object, flags);
534 if (data == nullptr) {
535 DCHECK_EQ(flags & kCrashOnError, 0);
536 TRACE_BROKER_MISSING(broker, "ObjectData for " << Brief(*object));
537 }
538 return TryMakeRef<T>(broker, data);
539 }
540
541 template <class T,
542 typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
MakeRef(JSHeapBroker * broker,T object)543 typename ref_traits<T>::ref_type MakeRef(JSHeapBroker* broker, T object) {
544 return TryMakeRef(broker, object, kCrashOnError).value();
545 }
546
547 template <class T,
548 typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
MakeRef(JSHeapBroker * broker,Handle<T> object)549 typename ref_traits<T>::ref_type MakeRef(JSHeapBroker* broker,
550 Handle<T> object) {
551 return TryMakeRef(broker, object, kCrashOnError).value();
552 }
553
554 template <class T,
555 typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
MakeRefAssumeMemoryFence(JSHeapBroker * broker,T object)556 typename ref_traits<T>::ref_type MakeRefAssumeMemoryFence(JSHeapBroker* broker,
557 T object) {
558 return TryMakeRef(broker, object, kAssumeMemoryFence | kCrashOnError).value();
559 }
560
561 template <class T,
562 typename = std::enable_if_t<std::is_convertible<T*, Object*>::value>>
MakeRefAssumeMemoryFence(JSHeapBroker * broker,Handle<T> object)563 typename ref_traits<T>::ref_type MakeRefAssumeMemoryFence(JSHeapBroker* broker,
564 Handle<T> object) {
565 return TryMakeRef(broker, object, kAssumeMemoryFence | kCrashOnError).value();
566 }
567
568 } // namespace compiler
569 } // namespace internal
570 } // namespace v8
571
572 #endif // V8_COMPILER_JS_HEAP_BROKER_H_
573