1 // Copyright 2018 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_OBJECTS_JS_ARRAY_BUFFER_INL_H_
6 #define V8_OBJECTS_JS_ARRAY_BUFFER_INL_H_
7 
8 #include "src/common/external-pointer.h"
9 #include "src/objects/js-array-buffer.h"
10 
11 #include "src/common/external-pointer-inl.h"
12 #include "src/heap/heap-write-barrier-inl.h"
13 #include "src/objects/js-objects-inl.h"
14 #include "src/objects/objects-inl.h"
15 
16 // Has to be the last include (doesn't have include guards):
17 #include "src/objects/object-macros.h"
18 
19 namespace v8 {
20 namespace internal {
21 
22 #include "torque-generated/src/objects/js-array-buffer-tq-inl.inc"
23 
// Torque-generated constructor/cast boilerplate for the array-buffer class
// hierarchy (definitions come from the -tq-inl.inc include above).
TQ_OBJECT_CONSTRUCTORS_IMPL(JSArrayBuffer)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSArrayBufferView)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSTypedArray)
TQ_OBJECT_CONSTRUCTORS_IMPL(JSDataView)
28 
// Plain and release/acquire accessors for JSTypedArray::base_pointer. The
// field holds a tagged Object; it is Smi::zero() for off-heap typed arrays
// (see is_on_heap()) and otherwise feeds into DataPtr().
ACCESSORS(JSTypedArray, base_pointer, Object, kBasePointerOffset)
RELEASE_ACQUIRE_ACCESSORS(JSTypedArray, base_pointer, Object,
                          kBasePointerOffset)
32 
33 size_t JSArrayBuffer::byte_length() const {
34   return ReadField<size_t>(kByteLengthOffset);
35 }
36 
set_byte_length(size_t value)37 void JSArrayBuffer::set_byte_length(size_t value) {
38   WriteField<size_t>(kByteLengthOffset, value);
39 }
40 
// Returns the raw backing-store pointer stored in the object.
DEF_GETTER(JSArrayBuffer, backing_store, void*) {
  const Address raw_value = ReadField<Address>(kBackingStoreOffset);
  return reinterpret_cast<void*>(raw_value);
}
44 
set_backing_store(void * value)45 void JSArrayBuffer::set_backing_store(void* value) {
46   DCHECK(IsValidBackingStorePointer(value));
47   WriteField<Address>(kBackingStoreOffset, reinterpret_cast<Address>(value));
48 }
49 
GetBackingStoreRefForDeserialization()50 uint32_t JSArrayBuffer::GetBackingStoreRefForDeserialization() const {
51   return static_cast<uint32_t>(ReadField<Address>(kBackingStoreOffset));
52 }
53 
SetBackingStoreRefForSerialization(uint32_t ref)54 void JSArrayBuffer::SetBackingStoreRefForSerialization(uint32_t ref) {
55   WriteField<Address>(kBackingStoreOffset, static_cast<Address>(ref));
56 }
57 
// Reads the ArrayBufferExtension pointer. Returns nullptr while the field is
// uninitialized, cleared, or while a concurrent update is observed mid-flight.
ArrayBufferExtension* JSArrayBuffer::extension() const {
#if V8_COMPRESS_POINTERS
    // With pointer compression the extension-field might not be
    // pointer-aligned. However on ARM64 this field needs to be aligned to
    // perform atomic operations on it. Therefore we split the pointer into two
    // 32-bit words that we update atomically. We don't have an ABA problem here
    // since there can never be an Attach() after Detach() (transitions only
    // from NULL --> some ptr --> NULL).

    // Synchronize with publishing release store of non-null extension
    uint32_t lo = base::AsAtomic32::Acquire_Load(extension_lo());
    // The lo word carries a tag bit while no extension has been published
    // (see the nullptr branch of set_extension()).
    if (lo & kUninitializedTagMask) return nullptr;

    // Synchronize with release store of null extension
    uint32_t hi = base::AsAtomic32::Acquire_Load(extension_hi());
    // Re-read lo to detect a store that raced between the two half-loads;
    // a torn read is reported as "no extension".
    uint32_t verify_lo = base::AsAtomic32::Relaxed_Load(extension_lo());
    if (lo != verify_lo) return nullptr;

    // Reassemble the 64-bit pointer from its two 32-bit halves.
    uintptr_t address = static_cast<uintptr_t>(lo);
    address |= static_cast<uintptr_t>(hi) << 32;
    return reinterpret_cast<ArrayBufferExtension*>(address);
#else
    return base::AsAtomicPointer::Acquire_Load(extension_location());
#endif
}
83 
// Publishes (or clears) the ArrayBufferExtension pointer. See extension() for
// why the field is split into two 32-bit halves under pointer compression.
void JSArrayBuffer::set_extension(ArrayBufferExtension* extension) {
#if V8_COMPRESS_POINTERS
    if (extension != nullptr) {
      uintptr_t address = reinterpret_cast<uintptr_t>(extension);
      // Store hi first (relaxed); the subsequent release store of lo
      // publishes both halves to readers that acquire-load lo.
      base::AsAtomic32::Relaxed_Store(extension_hi(),
                                      static_cast<uint32_t>(address >> 32));
      base::AsAtomic32::Release_Store(extension_lo(),
                                      static_cast<uint32_t>(address));
    } else {
      // Clearing: tag lo as uninitialized first, then release-store hi so
      // readers that acquire-load hi observe the cleared state.
      base::AsAtomic32::Relaxed_Store(extension_lo(),
                                      0 | kUninitializedTagMask);
      base::AsAtomic32::Release_Store(extension_hi(), 0);
    }
#else
    base::AsAtomicPointer::Release_Store(extension_location(), extension);
#endif
    // Inform the GC marker about the (possibly null) extension reference.
    WriteBarrier::Marking(*this, extension);
}
102 
extension_location()103 ArrayBufferExtension** JSArrayBuffer::extension_location() const {
104   Address location = field_address(kExtensionOffset);
105   return reinterpret_cast<ArrayBufferExtension**>(location);
106 }
107 
#if V8_COMPRESS_POINTERS
// Low 32-bit half of the extension field (see extension() for the protocol).
uint32_t* JSArrayBuffer::extension_lo() const {
  return reinterpret_cast<uint32_t*>(field_address(kExtensionOffset));
}

// High 32-bit half, one 32-bit word past the low half.
uint32_t* JSArrayBuffer::extension_hi() const {
  const Address slot = field_address(kExtensionOffset) + sizeof(uint32_t);
  return reinterpret_cast<uint32_t*>(slot);
}
#endif
119 
allocation_length()120 size_t JSArrayBuffer::allocation_length() const {
121   if (backing_store() == nullptr) {
122     return 0;
123   }
124   return byte_length();
125 }
126 
allocation_base()127 void* JSArrayBuffer::allocation_base() const {
128   if (backing_store() == nullptr) {
129     return nullptr;
130   }
131   return backing_store();
132 }
133 
// Zeroes the optional trailing padding so the object has a deterministic
// memory image (FIELD_SIZE of the padding is 0 or 4 bytes, checked below).
void JSArrayBuffer::clear_padding() {
  if (FIELD_SIZE(kOptionalPaddingOffset) != 0) {
    DCHECK_EQ(4, FIELD_SIZE(kOptionalPaddingOffset));
    memset(reinterpret_cast<void*>(address() + kOptionalPaddingOffset), 0,
           FIELD_SIZE(kOptionalPaddingOffset));
  }
}
141 
// Relaxed-atomic store of the whole bit_field word; individual flags are
// manipulated through the BIT_FIELD_ACCESSORS below.
void JSArrayBuffer::set_bit_field(uint32_t bits) {
  RELAXED_WRITE_UINT32_FIELD(*this, kBitFieldOffset, bits);
}
145 
// Relaxed-atomic load of the whole bit_field word.
uint32_t JSArrayBuffer::bit_field() const {
  return RELAXED_READ_UINT32_FIELD(*this, kBitFieldOffset);
}
149 
// |bit_field| fields. Each macro expands to a boolean getter/setter pair for
// one bit of bit_field() (read/written with the relaxed accessors above).
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_external,
                    JSArrayBuffer::IsExternalBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_detachable,
                    JSArrayBuffer::IsDetachableBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, was_detached,
                    JSArrayBuffer::WasDetachedBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_asmjs_memory,
                    JSArrayBuffer::IsAsmJsMemoryBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_shared,
                    JSArrayBuffer::IsSharedBit)
BIT_FIELD_ACCESSORS(JSArrayBuffer, bit_field, is_resizable,
                    JSArrayBuffer::IsResizableBit)
163 
164 size_t JSArrayBufferView::byte_offset() const {
165   return ReadField<size_t>(kByteOffsetOffset);
166 }
167 
set_byte_offset(size_t value)168 void JSArrayBufferView::set_byte_offset(size_t value) {
169   WriteField<size_t>(kByteOffsetOffset, value);
170 }
171 
byte_length()172 size_t JSArrayBufferView::byte_length() const {
173   return ReadField<size_t>(kByteLengthOffset);
174 }
175 
set_byte_length(size_t value)176 void JSArrayBufferView::set_byte_length(size_t value) {
177   WriteField<size_t>(kByteLengthOffset, value);
178 }
179 
WasDetached()180 bool JSArrayBufferView::WasDetached() const {
181   return JSArrayBuffer::cast(buffer()).was_detached();
182 }
183 
// |bit_field| flags of JSTypedArray: length-tracking mode and whether the
// array is backed by a resizable ArrayBuffer (RAB).
BIT_FIELD_ACCESSORS(JSTypedArray, bit_field, is_length_tracking,
                    JSTypedArray::IsLengthTrackingBit)
BIT_FIELD_ACCESSORS(JSTypedArray, bit_field, is_backed_by_rab,
                    JSTypedArray::IsBackedByRabBit)
188 
189 bool JSTypedArray::IsVariableLength() const {
190   return is_length_tracking() || is_backed_by_rab();
191 }
192 
// Computes this typed array's element count, handling detached buffers,
// length-tracking arrays and arrays backed by resizable buffers. Instead of
// throwing, sets |out_of_bounds| (which must be false on entry) when the view
// no longer fits inside its buffer, and returns 0.
size_t JSTypedArray::GetLengthOrOutOfBounds(bool& out_of_bounds) const {
  DCHECK(!out_of_bounds);
  // A detached array reports length 0 but is not flagged out-of-bounds.
  if (WasDetached()) return 0;
  if (is_length_tracking()) {
    if (is_backed_by_rab()) {
      // Length tracks the resizable buffer: everything past byte_offset().
      if (byte_offset() > buffer().byte_length()) {
        out_of_bounds = true;
        return 0;
      }
      return (buffer().byte_length() - byte_offset()) / element_size();
    }
    // Length-tracking but not RAB-backed: read the backing store's length
    // with seq_cst ordering — presumably because another thread can grow it
    // concurrently (TODO confirm: growable shared buffers).
    if (byte_offset() >
        buffer().GetBackingStore()->byte_length(std::memory_order_seq_cst)) {
      out_of_bounds = true;
      return 0;
    }
    return (buffer().GetBackingStore()->byte_length(std::memory_order_seq_cst) -
            byte_offset()) /
           element_size();
  }
  // Fixed-length array: the stored length, bounds-checked against the buffer
  // only when the buffer is resizable.
  size_t array_length = LengthUnchecked();
  if (is_backed_by_rab()) {
    // The sum can't overflow, since we have managed to allocate the
    // JSTypedArray.
    if (byte_offset() + array_length * element_size() >
        buffer().byte_length()) {
      out_of_bounds = true;
      return 0;
    }
  }
  return array_length;
}
225 
GetLength()226 size_t JSTypedArray::GetLength() const {
227   bool out_of_bounds = false;
228   return GetLengthOrOutOfBounds(out_of_bounds);
229 }
230 
length()231 size_t JSTypedArray::length() const {
232   DCHECK(!is_length_tracking());
233   DCHECK(!is_backed_by_rab());
234   return ReadField<size_t>(kLengthOffset);
235 }
236 
LengthUnchecked()237 size_t JSTypedArray::LengthUnchecked() const {
238   return ReadField<size_t>(kLengthOffset);
239 }
240 
set_length(size_t value)241 void JSTypedArray::set_length(size_t value) {
242   WriteField<size_t>(kLengthOffset, value);
243 }
244 
// Raw read of the external-pointer field (for on-heap arrays this holds a
// cage-compensated offset; see DataPtr()).
DEF_GETTER(JSTypedArray, external_pointer, Address) {
  const Address value = ReadField<Address>(kExternalPointerOffset);
  return value;
}
248 
// Same raw field read as external_pointer(); kept as a separate accessor.
DEF_GETTER(JSTypedArray, external_pointer_raw, Address) {
  const Address value = ReadField<Address>(kExternalPointerOffset);
  return value;
}
252 
set_external_pointer(Isolate * isolate,Address value)253 void JSTypedArray::set_external_pointer(Isolate* isolate, Address value) {
254   DCHECK(IsValidBackingStorePointer(reinterpret_cast<void*>(value)));
255   WriteField<Address>(kExternalPointerOffset, value);
256 }
257 
// Value added to on-heap data offsets stored in the external_pointer field:
// the pointer-compression cage base when compression is enabled, 0 otherwise.
// See DataPtr() and SetOnHeapDataPtr() for how it is consumed/produced.
Address JSTypedArray::ExternalPointerCompensationForOnHeapArray(
    PtrComprCageBase cage_base) {
#ifdef V8_COMPRESS_POINTERS
  return cage_base.address();
#else
  return 0;
#endif
}
266 
GetExternalBackingStoreRefForDeserialization()267 uint32_t JSTypedArray::GetExternalBackingStoreRefForDeserialization() const {
268   DCHECK(!is_on_heap());
269   return static_cast<uint32_t>(ReadField<Address>(kExternalPointerOffset));
270 }
271 
SetExternalBackingStoreRefForSerialization(uint32_t ref)272 void JSTypedArray::SetExternalBackingStoreRefForSerialization(uint32_t ref) {
273   DCHECK(!is_on_heap());
274   WriteField<Address>(kExternalPointerOffset, static_cast<Address>(ref));
275 }
276 
// Strips the cage-base compensation from the external_pointer field before
// serialization, leaving a plain offset into the on-heap backing store.
void JSTypedArray::RemoveExternalPointerCompensationForSerialization(
    Isolate* isolate) {
  DCHECK(is_on_heap());
  // TODO(v8:10391): once we have an external table, avoid the need for
  // compensation by replacing external_pointer and base_pointer fields
  // with one data_pointer field which can point to either external data
  // backing store or into on-heap backing store.
  Address offset =
      external_pointer() - ExternalPointerCompensationForOnHeapArray(isolate);
#ifdef V8_HEAP_SANDBOX
  // Write decompensated offset directly to the external pointer field, thus
  // allowing the offset to be propagated through serialization-deserialization.
  WriteField<ExternalPointer_t>(kExternalPointerOffset, offset);
#else
  set_external_pointer(isolate, offset);
#endif
}
294 
// Address of the array's element data, valid for both representations:
// off-heap (base_pointer == Smi::zero(), external_pointer is the full
// address) and on-heap (base_pointer + compensated external_pointer).
void* JSTypedArray::DataPtr() {
  // Zero-extend Tagged_t to Address according to current compression scheme
  // so that the addition with |external_pointer| (which already contains
  // compensated offset value) will decompress the tagged value.
  // See JSTypedArray::ExternalPointerCompensationForOnHeapArray() for details.
  STATIC_ASSERT(kOffHeapDataPtrEqualsExternalPointer);
  return reinterpret_cast<void*>(external_pointer() +
                                 static_cast<Tagged_t>(base_pointer().ptr()));
}
304 
// Switches the array to an off-heap data pointer at |base| + |offset|.
void JSTypedArray::SetOffHeapDataPtr(Isolate* isolate, void* base,
                                     Address offset) {
  Address address = reinterpret_cast<Address>(base) + offset;
  set_external_pointer(isolate, address);
  // This is the only spot in which the `base_pointer` field can be mutated
  // after object initialization. Note this can happen at most once, when
  // `JSTypedArray::GetBuffer` transitions from an on- to off-heap
  // representation.
  // To play well with Turbofan concurrency requirements, `base_pointer` is set
  // with a release store, after external_pointer has been set.
  set_base_pointer(Smi::zero(), kReleaseStore, SKIP_WRITE_BARRIER);
  // Sanity check: DataPtr() must now reconstruct exactly base + offset.
  DCHECK_EQ(address, reinterpret_cast<Address>(DataPtr()));
}
318 
// Points the array at on-heap data: |base| is the heap object holding the
// elements and |offset| the byte offset within it. external_pointer stores
// the cage-compensated offset so DataPtr() decompresses correctly.
void JSTypedArray::SetOnHeapDataPtr(Isolate* isolate, HeapObject base,
                                    Address offset) {
  set_base_pointer(base);
  set_external_pointer(
      isolate, offset + ExternalPointerCompensationForOnHeapArray(isolate));
  // Sanity check: DataPtr() must now reconstruct exactly base + offset.
  DCHECK_EQ(base.ptr() + offset, reinterpret_cast<Address>(DataPtr()));
}
326 
is_on_heap()327 bool JSTypedArray::is_on_heap() const {
328   // Keep synced with `is_on_heap(AcquireLoadTag)`.
329   DisallowGarbageCollection no_gc;
330   return base_pointer() != Smi::zero();
331 }
332 
is_on_heap(AcquireLoadTag tag)333 bool JSTypedArray::is_on_heap(AcquireLoadTag tag) const {
334   // Keep synced with `is_on_heap()`.
335   // Note: For Turbofan concurrency requirements, it's important that this
336   // function reads only `base_pointer`.
337   DisallowGarbageCollection no_gc;
338   return base_pointer(tag) != Smi::zero();
339 }
340 
// static
// Checks that |receiver| is a JSTypedArray that is neither detached nor (for
// variable-length arrays) out of bounds; otherwise throws a TypeError via
// THROW_NEW_ERROR (which returns early). |method_name| only feeds the error
// message.
MaybeHandle<JSTypedArray> JSTypedArray::Validate(Isolate* isolate,
                                                 Handle<Object> receiver,
                                                 const char* method_name) {
  if (V8_UNLIKELY(!receiver->IsJSTypedArray())) {
    const MessageTemplate message = MessageTemplate::kNotTypedArray;
    THROW_NEW_ERROR(isolate, NewTypeError(message), JSTypedArray);
  }

  Handle<JSTypedArray> array = Handle<JSTypedArray>::cast(receiver);
  if (V8_UNLIKELY(array->WasDetached())) {
    const MessageTemplate message = MessageTemplate::kDetachedOperation;
    Handle<String> operation =
        isolate->factory()->NewStringFromAsciiChecked(method_name);
    THROW_NEW_ERROR(isolate, NewTypeError(message, operation), JSTypedArray);
  }

  // Out-of-bounds RAB-backed arrays are reported like detached ones.
  if (V8_UNLIKELY(array->IsVariableLength())) {
    bool out_of_bounds = false;
    array->GetLengthOrOutOfBounds(out_of_bounds);
    if (out_of_bounds) {
      const MessageTemplate message = MessageTemplate::kDetachedOperation;
      Handle<String> operation =
          isolate->factory()->NewStringFromAsciiChecked(method_name);
      THROW_NEW_ERROR(isolate, NewTypeError(message, operation), JSTypedArray);
    }
  }

  // spec describes to return `buffer`, but it may disrupt current
  // implementations, and it's much useful to return array for now.
  return array;
}
373 
// Raw pointer to the DataView's data.
DEF_GETTER(JSDataView, data_pointer, void*) {
  const Address raw_value = ReadField<Address>(kDataPointerOffset);
  return reinterpret_cast<void*>(raw_value);
}
377 
set_data_pointer(Isolate * isolate,void * value)378 void JSDataView::set_data_pointer(Isolate* isolate, void* value) {
379   DCHECK(IsValidBackingStorePointer(value));
380   WriteField<Address>(kDataPointerOffset, reinterpret_cast<Address>(value));
381 }
382 
383 }  // namespace internal
384 }  // namespace v8
385 
386 #include "src/objects/object-macros-undef.h"
387 
388 #endif  // V8_OBJECTS_JS_ARRAY_BUFFER_INL_H_
389