// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/objects/js-array-buffer.h"

#include "src/base/platform/wrappers.h"
#include "src/execution/protectors-inl.h"
#include "src/logging/counters.h"
#include "src/objects/js-array-buffer-inl.h"
#include "src/objects/property-descriptor.h"

namespace v8 {
namespace internal {

namespace {

// ES#sec-canonicalnumericindexstring
// Returns true if the lookup_key is an element index or a canonical numeric
// index string; sets *is_minus_zero when the key is "-0".
bool CanonicalNumericIndexString(Isolate* isolate,
                                 const PropertyKey& lookup_key,
                                 bool* is_minus_zero) {
  // 1. Assert: Type(argument) is String.
  DCHECK(lookup_key.is_element() || lookup_key.name()->IsString());
  *is_minus_zero = false;
  if (lookup_key.is_element()) return true;

  Handle<String> key = Handle<String>::cast(lookup_key.name());

  // 3. Let n be ! ToNumber(argument).
  Handle<Object> result = String::ToNumber(isolate, key);
  if (result->IsMinusZero()) {
    // 2. If argument is "-0", return -0𝔽.
    // We are not performing the SameValue check for -0 because it will be
    // rejected anyway.
    *is_minus_zero = true;
  } else {
    // 4. If SameValue(! ToString(n), argument) is false, return undefined.
    Handle<String> str = Object::ToString(isolate, result).ToHandleChecked();
    // Avoid treating strings like "2E1" and "20" as the same key.
    if (!str->SameValue(*key)) return false;
  }
  return true;
}
}  // anonymous namespace

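// Initializes the array buffer's flags, embedder fields, and length fields,
// and attaches the given backing store (or installs the empty backing store
// buffer when none is provided).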
void JSArrayBuffer::Setup(SharedFlag shared, ResizableFlag resizable,
                          std::shared_ptr<BackingStore> backing_store) {
  clear_padding();
  set_bit_field(0);
  set_is_shared(shared == SharedFlag::kShared);
  set_is_resizable(resizable == ResizableFlag::kResizable);
  set_is_detachable(shared != SharedFlag::kShared);
  for (int i = 0; i < v8::ArrayBuffer::kEmbedderFieldCount; i++) {
    SetEmbedderField(i, Smi::zero());
  }
  set_extension(nullptr);
  if (!backing_store) {
    set_backing_store(GetIsolate(), EmptyBackingStoreBuffer());
    set_byte_length(0);
    set_max_byte_length(0);
  } else {
    Attach(std::move(backing_store));
  }
  if (shared == SharedFlag::kShared) {
    GetIsolate()->CountUsage(
        v8::Isolate::UseCounterFeature::kSharedArrayBufferConstructed);
  }
}

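// Attaches the backing store to this array buffer: publishes the buffer start
// and lengths, and registers an ArrayBufferExtension with the heap so the
// backing store is kept alive and its memory is accounted for.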
void JSArrayBuffer::Attach(std::shared_ptr<BackingStore> backing_store) {
  DCHECK_NOT_NULL(backing_store);
  DCHECK_EQ(is_shared(), backing_store->is_shared());
  DCHECK_EQ(is_resizable(), backing_store->is_resizable());
  DCHECK_IMPLIES(
      !backing_store->is_wasm_memory() && !backing_store->is_resizable(),
      backing_store->byte_length() == backing_store->max_byte_length());
  DCHECK(!was_detached());
  Isolate* isolate = GetIsolate();

  if (backing_store->IsEmpty()) {
    set_backing_store(isolate, EmptyBackingStoreBuffer());
  } else {
    DCHECK_NE(nullptr, backing_store->buffer_start());
    set_backing_store(isolate, backing_store->buffer_start());
  }

  if (is_shared() && is_resizable()) {
    // GSABs need to read their byte_length from the BackingStore. Maintain the
    // invariant that their byte_length field is always 0.
    set_byte_length(0);
  } else {
    CHECK_LE(backing_store->byte_length(), kMaxByteLength);
    set_byte_length(backing_store->byte_length());
  }
  set_max_byte_length(backing_store->max_byte_length());
  if (backing_store->is_wasm_memory()) set_is_detachable(false);
  if (!backing_store->free_on_destruct()) set_is_external(true);
  ArrayBufferExtension* extension = EnsureExtension();
  size_t bytes = backing_store->PerIsolateAccountingLength();
  extension->set_accounting_length(bytes);
  extension->set_backing_store(std::move(backing_store));
  isolate->heap()->AppendArrayBufferExtension(*this, extension);
}

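// Detaches the array buffer from its backing store. Unless detaching is
// forced for WebAssembly memory, buffers that are not detachable are left
// untouched.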
void JSArrayBuffer::Detach(bool force_for_wasm_memory) {
  if (was_detached()) return;

  if (force_for_wasm_memory) {
    // Skip the is_detachable() check.
  } else if (!is_detachable()) {
    // Not detachable, do nothing.
    return;
  }

  Isolate* const isolate = GetIsolate();
  ArrayBufferExtension* extension = this->extension();

  if (extension) {
    DisallowGarbageCollection disallow_gc;
    isolate->heap()->DetachArrayBufferExtension(*this, extension);
    std::shared_ptr<BackingStore> backing_store = RemoveExtension();
    CHECK_IMPLIES(force_for_wasm_memory, backing_store->is_wasm_memory());
  }

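  // Invalidate the ArrayBufferDetaching protector, since optimized code may
  // depend on no ArrayBuffer ever having been detached.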
  if (Protectors::IsArrayBufferDetachingIntact(isolate)) {
    Protectors::InvalidateArrayBufferDetaching(isolate);
  }

  DCHECK(!is_shared());
  DCHECK(!is_asmjs_memory());
  set_backing_store(isolate, EmptyBackingStoreBuffer());
  set_byte_length(0);
  set_was_detached(true);
}

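// Returns the current byte length of a growable SharedArrayBuffer, read
// atomically from its backing store.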
size_t JSArrayBuffer::GsabByteLength(Isolate* isolate,
                                     Address raw_array_buffer) {
  // TODO(v8:11111): Cache the last seen length in JSArrayBuffer and use it
  // in bounds checks to minimize the need for calling this function.
  DCHECK(FLAG_harmony_rab_gsab);
  DisallowGarbageCollection no_gc;
  DisallowJavascriptExecution no_js(isolate);
  JSArrayBuffer buffer = JSArrayBuffer::cast(Object(raw_array_buffer));
  CHECK(buffer.is_resizable());
  CHECK(buffer.is_shared());
  return buffer.GetBackingStore()->byte_length(std::memory_order_seq_cst);
}

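// Computes the page size and the initial and maximum page counts for a
// resizable backing store. Throws a RangeError (unless should_throw is
// kDontThrow) when a requested length cannot be rounded up to whole pages
// within kMaxByteLength.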
// static
Maybe<bool> JSArrayBuffer::GetResizableBackingStorePageConfiguration(
    Isolate* isolate, size_t byte_length, size_t max_byte_length,
    ShouldThrow should_throw, size_t* page_size, size_t* initial_pages,
    size_t* max_pages) {
  DCHECK_NOT_NULL(page_size);
  DCHECK_NOT_NULL(initial_pages);
  DCHECK_NOT_NULL(max_pages);

  *page_size = AllocatePageSize();

  if (!RoundUpToPageSize(byte_length, *page_size, JSArrayBuffer::kMaxByteLength,
                         initial_pages)) {
    if (should_throw == kDontThrow) return Nothing<bool>();
    THROW_NEW_ERROR_RETURN_VALUE(
        isolate, NewRangeError(MessageTemplate::kInvalidArrayBufferLength),
        Nothing<bool>());
  }

  if (!RoundUpToPageSize(max_byte_length, *page_size,
                         JSArrayBuffer::kMaxByteLength, max_pages)) {
    if (should_throw == kDontThrow) return Nothing<bool>();
    THROW_NEW_ERROR_RETURN_VALUE(
        isolate, NewRangeError(MessageTemplate::kInvalidArrayBufferMaxLength),
        Nothing<bool>());
  }

  return Just(true);
}

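// Returns this buffer's ArrayBufferExtension, allocating one without a
// backing store if it does not exist yet.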
ArrayBufferExtension* JSArrayBuffer::EnsureExtension() {
  ArrayBufferExtension* extension = this->extension();
  if (extension != nullptr) return extension;

  extension = new ArrayBufferExtension(std::shared_ptr<BackingStore>());
  set_extension(extension);
  return extension;
}

std::shared_ptr<BackingStore> JSArrayBuffer::RemoveExtension() {
  ArrayBufferExtension* extension = this->extension();
  DCHECK_NOT_NULL(extension);
  auto result = extension->RemoveBackingStore();
  // Clear the extension pointer so that the next GC frees the extension
  // automatically.
  set_extension(nullptr);
  return result;
}

void JSArrayBuffer::MarkExtension() {
  ArrayBufferExtension* extension = this->extension();
  if (extension) {
    extension->Mark();
  }
}

void JSArrayBuffer::YoungMarkExtension() {
  ArrayBufferExtension* extension = this->extension();
  if (extension) {
    extension->YoungMark();
  }
}

void JSArrayBuffer::YoungMarkExtensionPromoted() {
  ArrayBufferExtension* extension = this->extension();
  if (extension) {
    extension->YoungMarkPromoted();
  }
}

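// Materializes an off-heap ArrayBuffer for this typed array: if the elements
// still live on the V8 heap, a new backing store is allocated, the elements
// are copied into it, and the typed array is switched to the off-heap data.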
Handle<JSArrayBuffer> JSTypedArray::GetBuffer() {
  Isolate* isolate = GetIsolate();
  Handle<JSTypedArray> self(*this, isolate);
  DCHECK(IsTypedArrayOrRabGsabTypedArrayElementsKind(self->GetElementsKind()));
  Handle<JSArrayBuffer> array_buffer(JSArrayBuffer::cast(self->buffer()),
                                     isolate);
  if (!is_on_heap()) {
    // The data is already off-heap, so return the existing array buffer.
    return array_buffer;
  }
  DCHECK(!array_buffer->is_resizable());

  // The existing array buffer should be empty.
  DCHECK(array_buffer->IsEmpty());

  // Allocate a new backing store and attach it to the existing array buffer.
  size_t byte_length = self->byte_length();
  auto backing_store =
      BackingStore::Allocate(isolate, byte_length, SharedFlag::kNotShared,
                             InitializedFlag::kUninitialized);

  if (!backing_store) {
    isolate->heap()->FatalProcessOutOfMemory("JSTypedArray::GetBuffer");
  }

  // Copy the elements into the backing store of the array buffer.
  if (byte_length > 0) {
    memcpy(backing_store->buffer_start(), self->DataPtr(), byte_length);
  }

  // Attach the backing store to the array buffer.
  array_buffer->Setup(SharedFlag::kNotShared, ResizableFlag::kNotResizable,
                      std::move(backing_store));

  // Clear the elements of the typed array.
  self->set_elements(ReadOnlyRoots(isolate).empty_byte_array());
  self->SetOffHeapDataPtr(isolate, array_buffer->backing_store(), 0);
  DCHECK(!self->is_on_heap());

  return array_buffer;
}

// ES#sec-integer-indexed-exotic-objects-defineownproperty-p-desc
// static
Maybe<bool> JSTypedArray::DefineOwnProperty(Isolate* isolate,
                                            Handle<JSTypedArray> o,
                                            Handle<Object> key,
                                            PropertyDescriptor* desc,
                                            Maybe<ShouldThrow> should_throw) {
  DCHECK(key->IsName() || key->IsNumber());
  // 1. If Type(P) is String, then
  PropertyKey lookup_key(isolate, key);
  if (lookup_key.is_element() || key->IsSmi() || key->IsString()) {
    // 1a. Let numericIndex be ! CanonicalNumericIndexString(P)
    // 1b. If numericIndex is not undefined, then
    bool is_minus_zero = false;
    if (key->IsSmi() ||  // Smi keys are definitely canonical
        CanonicalNumericIndexString(isolate, lookup_key, &is_minus_zero)) {
      // 1b i. If IsValidIntegerIndex(O, numericIndex) is false, return false.

      // IsValidIntegerIndex:
      size_t index = lookup_key.index();
      bool out_of_bounds = false;
      size_t length = o->GetLengthOrOutOfBounds(out_of_bounds);
      if (o->WasDetached() || out_of_bounds || index >= length) {
        RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
                       NewTypeError(MessageTemplate::kInvalidTypedArrayIndex));
      }
      if (!lookup_key.is_element() || is_minus_zero) {
        RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
                       NewTypeError(MessageTemplate::kInvalidTypedArrayIndex));
      }

      // 1b ii. If Desc has a [[Configurable]] field and if
      //     Desc.[[Configurable]] is false, return false.
      // 1b iii. If Desc has an [[Enumerable]] field and if Desc.[[Enumerable]]
      //     is false, return false.
      // 1b iv. If IsAccessorDescriptor(Desc) is true, return false.
      // 1b v. If Desc has a [[Writable]] field and if Desc.[[Writable]] is
      //     false, return false.

      if (PropertyDescriptor::IsAccessorDescriptor(desc)) {
        RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
                       NewTypeError(MessageTemplate::kRedefineDisallowed, key));
      }

      if ((desc->has_configurable() && !desc->configurable()) ||
          (desc->has_enumerable() && !desc->enumerable()) ||
          (desc->has_writable() && !desc->writable())) {
        RETURN_FAILURE(isolate, GetShouldThrow(isolate, should_throw),
                       NewTypeError(MessageTemplate::kRedefineDisallowed, key));
      }

      // 1b vi. If Desc has a [[Value]] field, perform
      // ? IntegerIndexedElementSet(O, numericIndex, Desc.[[Value]]).
      if (desc->has_value()) {
        if (!desc->has_configurable()) desc->set_configurable(true);
        if (!desc->has_enumerable()) desc->set_enumerable(true);
        if (!desc->has_writable()) desc->set_writable(true);
        Handle<Object> value = desc->value();
        LookupIterator it(isolate, o, index, LookupIterator::OWN);
        RETURN_ON_EXCEPTION_VALUE(
            isolate,
            DefineOwnPropertyIgnoreAttributes(&it, value, desc->ToAttributes()),
            Nothing<bool>());
      }
      // 1b vii. Return true.
      return Just(true);
    }
  }
  // 4. Return ! OrdinaryDefineOwnProperty(O, P, Desc).
  return OrdinaryDefineOwnProperty(isolate, o, lookup_key, desc, should_throw);
}

ExternalArrayType JSTypedArray::type() {
  switch (map().elements_kind()) {
#define ELEMENTS_KIND_TO_ARRAY_TYPE(Type, type, TYPE, ctype) \
  case TYPE##_ELEMENTS:                                      \
    return kExternal##Type##Array;

    TYPED_ARRAYS(ELEMENTS_KIND_TO_ARRAY_TYPE)
    RAB_GSAB_TYPED_ARRAYS_WITH_TYPED_ARRAY_TYPE(ELEMENTS_KIND_TO_ARRAY_TYPE)
#undef ELEMENTS_KIND_TO_ARRAY_TYPE

    default:
      UNREACHABLE();
  }
}

size_t JSTypedArray::element_size() const {
  switch (map().elements_kind()) {
#define ELEMENTS_KIND_TO_ELEMENT_SIZE(Type, type, TYPE, ctype) \
  case TYPE##_ELEMENTS:                                        \
    return sizeof(ctype);

    TYPED_ARRAYS(ELEMENTS_KIND_TO_ELEMENT_SIZE)
    RAB_GSAB_TYPED_ARRAYS(ELEMENTS_KIND_TO_ELEMENT_SIZE)
#undef ELEMENTS_KIND_TO_ELEMENT_SIZE

    default:
      UNREACHABLE();
  }
}

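// Returns the current element count of a length-tracking typed array backed
// by a growable SharedArrayBuffer, derived from the backing store's current
// byte length.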
size_t JSTypedArray::LengthTrackingGsabBackedTypedArrayLength(
    Isolate* isolate, Address raw_array) {
  // TODO(v8:11111): Cache the last seen length in JSArrayBuffer and use it
  // in bounds checks to minimize the need for calling this function.
  DCHECK(FLAG_harmony_rab_gsab);
  DisallowGarbageCollection no_gc;
  DisallowJavascriptExecution no_js(isolate);
  JSTypedArray array = JSTypedArray::cast(Object(raw_array));
  CHECK(array.is_length_tracking());
  JSArrayBuffer buffer = array.buffer();
  CHECK(buffer.is_resizable());
  CHECK(buffer.is_shared());
  size_t backing_byte_length =
      buffer.GetBackingStore()->byte_length(std::memory_order_seq_cst);
  CHECK_GE(backing_byte_length, array.byte_offset());
  auto element_byte_size = ElementsKindToByteSize(array.GetElementsKind());
  return (backing_byte_length - array.byte_offset()) / element_byte_size;
}

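// Computes the length of a length-tracking or RAB-backed typed array, setting
// out_of_bounds when the byte offset (plus the fixed length, if any) no
// longer fits into the underlying buffer.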
size_t JSTypedArray::GetVariableLengthOrOutOfBounds(bool& out_of_bounds) const {
  DCHECK(!WasDetached());
  if (is_length_tracking()) {
    if (is_backed_by_rab()) {
      if (byte_offset() > buffer().byte_length()) {
        out_of_bounds = true;
        return 0;
      }
      return (buffer().byte_length() - byte_offset()) / element_size();
    }
    if (byte_offset() >
        buffer().GetBackingStore()->byte_length(std::memory_order_seq_cst)) {
      out_of_bounds = true;
      return 0;
    }
    return (buffer().GetBackingStore()->byte_length(std::memory_order_seq_cst) -
            byte_offset()) /
           element_size();
  }
  DCHECK(is_backed_by_rab());
  size_t array_length = LengthUnchecked();
  // The sum can't overflow, since we have managed to allocate the
  // JSTypedArray.
  if (byte_offset() + array_length * element_size() > buffer().byte_length()) {
    out_of_bounds = true;
    return 0;
  }
  return array_length;
}

}  // namespace internal
}  // namespace v8