// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_FIXED_ARRAY_INL_H_
#define V8_OBJECTS_FIXED_ARRAY_INL_H_

#include "src/handles/handles-inl.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/numbers/conversions.h"
#include "src/objects/bigint.h"
#include "src/objects/compressed-slots.h"
#include "src/objects/fixed-array.h"
#include "src/objects/map.h"
#include "src/objects/maybe-object-inl.h"
#include "src/objects/objects-inl.h"
#include "src/objects/oddball.h"
#include "src/objects/slots.h"
#include "src/roots/roots-inl.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

#include "torque-generated/src/objects/fixed-array-tq-inl.inc"

TQ_OBJECT_CONSTRUCTORS_IMPL(FixedArrayBase)
FixedArrayBase::FixedArrayBase(Address ptr,
                               HeapObject::AllowInlineSmiStorage allow_smi)
    : TorqueGeneratedFixedArrayBase(ptr, allow_smi) {}
TQ_OBJECT_CONSTRUCTORS_IMPL(FixedArray)
TQ_OBJECT_CONSTRUCTORS_IMPL(FixedDoubleArray)
TQ_OBJECT_CONSTRUCTORS_IMPL(ArrayList)
TQ_OBJECT_CONSTRUCTORS_IMPL(ByteArray)
ByteArray::ByteArray(Address ptr, HeapObject::AllowInlineSmiStorage allow_smi)
    : TorqueGeneratedByteArray(ptr, allow_smi) {}
TQ_OBJECT_CONSTRUCTORS_IMPL(TemplateList)
TQ_OBJECT_CONSTRUCTORS_IMPL(WeakFixedArray)
TQ_OBJECT_CONSTRUCTORS_IMPL(WeakArrayList)

NEVER_READ_ONLY_SPACE_IMPL(WeakArrayList)

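// The length field is a Smi that is written with release stores and read with
// acquire loads so that concurrent visitors observe a consistent length while
// arrays are being left- or right-trimmed (see the comment at AllocatedSize).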
RELEASE_ACQUIRE_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

RELEASE_ACQUIRE_SMI_ACCESSORS(WeakFixedArray, length, kLengthOffset)

Object FixedArrayBase::unchecked_length(AcquireLoadTag) const {
  return ACQUIRE_READ_FIELD(*this, kLengthOffset);
}

ObjectSlot FixedArray::GetFirstElementAddress() {
  return RawField(OffsetOfElementAt(0));
}

bool FixedArray::ContainsOnlySmisOrHoles() {
  Object the_hole = GetReadOnlyRoots().the_hole_value();
  ObjectSlot current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i, ++current) {
    Object candidate = *current;
    if (!candidate.IsSmi() && candidate != the_hole) return false;
  }
  return true;
}

Object FixedArray::get(int index) const {
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return get(cage_base, index);
}

Object FixedArray::get(PtrComprCageBase cage_base, int index) const {
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  return TaggedField<Object>::Relaxed_Load(cage_base, *this,
                                           OffsetOfElementAt(index));
}

Handle<Object> FixedArray::get(FixedArray array, int index, Isolate* isolate) {
  return handle(array.get(isolate, index), isolate);
}

bool FixedArray::is_the_hole(Isolate* isolate, int index) {
  return get(isolate, index).IsTheHole(isolate);
}

#if !defined(_WIN32) || (defined(_WIN64) && _MSC_VER >= 1930 && __cplusplus >= 201703L)
void FixedArray::set(int index, Smi value) {
  DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  DCHECK(Object(value).IsSmi());
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_FIELD(*this, offset, value);
}
#endif

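// Stores a tagged value with relaxed memory order and then informs the GC via
// the write barrier so that old-to-new and incremental-marking invariants are
// maintained for the stored reference.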
void FixedArray::set(int index, Object value) {
  DCHECK_NE(GetReadOnlyRoots().fixed_cow_array_map(), map());
  DCHECK(IsFixedArray());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_FIELD(*this, offset, value);
  WRITE_BARRIER(*this, offset, value);
}

void FixedArray::set(int index, Object value, WriteBarrierMode mode) {
  DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_FIELD(*this, offset, value);
  CONDITIONAL_WRITE_BARRIER(*this, offset, value, mode);
}

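// Skipping the write barrier is only safe when {value} cannot live in the
// young generation, e.g. Smis and read-only roots; the DCHECK below enforces
// that precondition.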
// static
void FixedArray::NoWriteBarrierSet(FixedArray array, int index, Object value) {
  DCHECK_NE(array.map(), array.GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_LT(static_cast<unsigned>(index),
            static_cast<unsigned>(array.length()));
  DCHECK(!ObjectInYoungGeneration(value));
  int offset = OffsetOfElementAt(index);
  RELAXED_WRITE_FIELD(array, offset, value);
}

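// The accessors below take explicit memory-order tags: the RelaxedLoadTag /
// RelaxedStoreTag variants are for plain concurrent accesses, while the
// AcquireLoadTag / ReleaseStoreTag variants establish ordering between a
// publishing store and readers that observe it.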
Object FixedArray::get(int index, RelaxedLoadTag) const {
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return get(cage_base, index);
}

Object FixedArray::get(PtrComprCageBase cage_base, int index,
                       RelaxedLoadTag) const {
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  return RELAXED_READ_FIELD(*this, OffsetOfElementAt(index));
}

void FixedArray::set(int index, Object value, RelaxedStoreTag,
                     WriteBarrierMode mode) {
  DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  RELAXED_WRITE_FIELD(*this, OffsetOfElementAt(index), value);
  CONDITIONAL_WRITE_BARRIER(*this, OffsetOfElementAt(index), value, mode);
}

void FixedArray::set(int index, Smi value, RelaxedStoreTag tag) {
  DCHECK(Object(value).IsSmi());
  set(index, value, tag, SKIP_WRITE_BARRIER);
}

Object FixedArray::get(int index, AcquireLoadTag) const {
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return get(cage_base, index);
}

Object FixedArray::get(PtrComprCageBase cage_base, int index,
                       AcquireLoadTag) const {
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  return ACQUIRE_READ_FIELD(*this, OffsetOfElementAt(index));
}

void FixedArray::set(int index, Object value, ReleaseStoreTag,
                     WriteBarrierMode mode) {
  DCHECK_NE(map(), GetReadOnlyRoots().fixed_cow_array_map());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  RELEASE_WRITE_FIELD(*this, OffsetOfElementAt(index), value);
  CONDITIONAL_WRITE_BARRIER(*this, OffsetOfElementAt(index), value, mode);
}

void FixedArray::set(int index, Smi value, ReleaseStoreTag tag) {
  DCHECK(Object(value).IsSmi());
  set(index, value, tag, SKIP_WRITE_BARRIER);
}

void FixedArray::set_undefined(int index) {
  set_undefined(GetReadOnlyRoots(), index);
}

void FixedArray::set_undefined(Isolate* isolate, int index) {
  set_undefined(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_undefined(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.undefined_value());
}

void FixedArray::set_null(int index) { set_null(GetReadOnlyRoots(), index); }

void FixedArray::set_null(Isolate* isolate, int index) {
  set_null(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_null(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.null_value());
}

void FixedArray::set_the_hole(int index) {
  set_the_hole(GetReadOnlyRoots(), index);
}

void FixedArray::set_the_hole(Isolate* isolate, int index) {
  set_the_hole(ReadOnlyRoots(isolate), index);
}

void FixedArray::set_the_hole(ReadOnlyRoots ro_roots, int index) {
  FixedArray::NoWriteBarrierSet(*this, index, ro_roots.the_hole_value());
}

void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}

ObjectSlot FixedArray::data_start() { return RawField(OffsetOfElementAt(0)); }

ObjectSlot FixedArray::RawFieldOfElementAt(int index) {
  return RawField(OffsetOfElementAt(index));
}

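// Heap::MoveRange/CopyRange update the destination slots in bulk and emit the
// required write barriers according to {mode}; MoveRange additionally copes
// with overlapping source and destination ranges within the same array.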
void FixedArray::MoveElements(Isolate* isolate, int dst_index, int src_index,
                              int len, WriteBarrierMode mode) {
  if (len == 0) return;
  DCHECK_LE(dst_index + len, length());
  DCHECK_LE(src_index + len, length());
  DisallowGarbageCollection no_gc;
  ObjectSlot dst_slot(RawFieldOfElementAt(dst_index));
  ObjectSlot src_slot(RawFieldOfElementAt(src_index));
  isolate->heap()->MoveRange(*this, dst_slot, src_slot, len, mode);
}

void FixedArray::CopyElements(Isolate* isolate, int dst_index, FixedArray src,
                              int src_index, int len, WriteBarrierMode mode) {
  if (len == 0) return;
  DCHECK_LE(dst_index + len, length());
  DCHECK_LE(src_index + len, src.length());
  DisallowGarbageCollection no_gc;

  ObjectSlot dst_slot(RawFieldOfElementAt(dst_index));
  ObjectSlot src_slot(src.RawFieldOfElementAt(src_index));
  isolate->heap()->CopyRange(*this, dst_slot, src_slot, len, mode);
}

// Due to left- and right-trimming, concurrent visitors need to read the length
// with acquire semantics.
// TODO(ulan): Acquire should not be needed anymore.
inline int FixedArray::AllocatedSize() { return SizeFor(length(kAcquireLoad)); }
inline int WeakFixedArray::AllocatedSize() {
  return SizeFor(length(kAcquireLoad));
}
inline int WeakArrayList::AllocatedSize() { return SizeFor(capacity()); }

// Perform a binary search in a fixed array.
template <SearchMode search_mode, typename T>
int BinarySearch(T* array, Name name, int valid_entries,
                 int* out_insertion_index) {
  DCHECK_IMPLIES(search_mode == VALID_ENTRIES, out_insertion_index == nullptr);
  int low = 0;
  // We have to search on all entries, even when search_mode == VALID_ENTRIES.
  // This is because the InternalIndex might be different from the SortedIndex
  // (i.e. the first item added to {array} could be the last one in sorted
  // order). After doing the binary search and getting the correct internal
  // index, we check that it is below valid_entries, if needed.
  int high = array->number_of_entries() - 1;
  uint32_t hash = name.hash();
  int limit = high;

  DCHECK(low <= high);

  while (low != high) {
    int mid = low + (high - low) / 2;
    Name mid_name = array->GetSortedKey(mid);
    uint32_t mid_hash = mid_name.hash();

    if (mid_hash >= hash) {
      high = mid;
    } else {
      low = mid + 1;
    }
  }

  for (; low <= limit; ++low) {
    int sort_index = array->GetSortedKeyIndex(low);
    Name entry = array->GetKey(InternalIndex(sort_index));
    uint32_t current_hash = entry.hash();
    if (current_hash != hash) {
      // 'search_mode == ALL_ENTRIES' here and below is not needed since
      // 'out_insertion_index != nullptr' implies 'search_mode == ALL_ENTRIES'.
      // Having said that, when creating the template for <VALID_ENTRIES> these
      // ifs can be elided by the C++ compiler if we add 'search_mode ==
      // ALL_ENTRIES'.
      if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
        *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
      }
      return T::kNotFound;
    }
    if (entry == name) {
      if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
        return sort_index;
      }
      return T::kNotFound;
    }
  }

  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
    *out_insertion_index = limit + 1;
  }
  return T::kNotFound;
}

// Perform a linear search in this fixed array. len is the number of entry
// indices that are valid.
template <SearchMode search_mode, typename T>
int LinearSearch(T* array, Name name, int valid_entries,
                 int* out_insertion_index) {
  if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
    uint32_t hash = name.hash();
    int len = array->number_of_entries();
    for (int number = 0; number < len; number++) {
      int sorted_index = array->GetSortedKeyIndex(number);
      Name entry = array->GetKey(InternalIndex(sorted_index));
      uint32_t current_hash = entry.hash();
      if (current_hash > hash) {
        *out_insertion_index = sorted_index;
        return T::kNotFound;
      }
      if (entry == name) return sorted_index;
    }
    *out_insertion_index = len;
    return T::kNotFound;
  } else {
    DCHECK_LE(valid_entries, array->number_of_entries());
    DCHECK_NULL(out_insertion_index);  // Not supported here.
    for (int number = 0; number < valid_entries; number++) {
      if (array->GetKey(InternalIndex(number)) == name) return number;
    }
    return T::kNotFound;
  }
}

template <SearchMode search_mode, typename T>
int Search(T* array, Name name, int valid_entries, int* out_insertion_index,
           bool concurrent_search) {
  SLOW_DCHECK_IMPLIES(!concurrent_search, array->IsSortedNoDuplicates());

  if (valid_entries == 0) {
    if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
      *out_insertion_index = 0;
    }
    return T::kNotFound;
  }

  // Do linear search for small arrays, and for searches in the background
  // thread.
  const int kMaxElementsForLinearSearch = 8;
  if (valid_entries <= kMaxElementsForLinearSearch || concurrent_search) {
    return LinearSearch<search_mode>(array, name, valid_entries,
                                     out_insertion_index);
  }

  return BinarySearch<search_mode>(array, name, valid_entries,
                                   out_insertion_index);
}
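//
// Usage sketch (names are illustrative only): any client type that provides
// GetKey, GetSortedKey, GetSortedKeyIndex, number_of_entries() and kNotFound
// (e.g. DescriptorArray or TransitionArray) can be searched like this:
//
//   int entry = Search<VALID_ENTRIES>(array, name, nof_valid_entries,
//                                     nullptr, /*concurrent_search=*/false);
//   if (entry != T::kNotFound) { /* key found at InternalIndex(entry) */ }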

double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  DCHECK(!is_the_hole(index));
  return ReadField<double>(kHeaderSize + index * kDoubleSize);
}

uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  int offset = kHeaderSize + index * kDoubleSize;
  // Bug(v8:8875): Doubles may be unaligned.
  return base::ReadUnalignedValue<uint64_t>(field_address(offset));
}

Handle<Object> FixedDoubleArray::get(FixedDoubleArray array, int index,
                                     Isolate* isolate) {
  if (array.is_the_hole(index)) {
    return ReadOnlyRoots(isolate).the_hole_value_handle();
  } else {
    return isolate->factory()->NewNumber(array.get_scalar(index));
  }
}

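// NaN payloads are canonicalized to the quiet NaN so that a stored NaN value
// can never alias kHoleNanInt64, the NaN bit pattern reserved for the hole.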
void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) {
    WriteField<double>(offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WriteField<double>(offset, value);
  }
  DCHECK(!is_the_hole(index));
}

void FixedDoubleArray::set_the_hole(Isolate* isolate, int index) {
  set_the_hole(index);
}

void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetReadOnlyRoots().fixed_cow_array_map() &&
         map() != GetReadOnlyRoots().fixed_array_map());
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  int offset = kHeaderSize + index * kDoubleSize;
  base::WriteUnalignedValue<uint64_t>(field_address(offset), kHoleNanInt64);
}

bool FixedDoubleArray::is_the_hole(Isolate* isolate, int index) {
  return is_the_hole(index);
}

bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}

void FixedDoubleArray::MoveElements(Isolate* isolate, int dst_index,
                                    int src_index, int len,
                                    WriteBarrierMode mode) {
  DCHECK_EQ(SKIP_WRITE_BARRIER, mode);
  double* data_start = reinterpret_cast<double*>(field_address(kHeaderSize));
  MemMove(data_start + dst_index, data_start + src_index, len * kDoubleSize);
}

void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}

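// WeakFixedArray elements are MaybeObjects: each slot holds either a strong
// reference, a weak reference, or the cleared weak value.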
MaybeObject WeakFixedArray::Get(int index) const {
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return Get(cage_base, index);
}

MaybeObject WeakFixedArray::Get(PtrComprCageBase cage_base, int index) const {
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(length()));
  return objects(cage_base, index, kRelaxedLoad);
}

void WeakFixedArray::Set(int index, MaybeObject value, WriteBarrierMode mode) {
  set_objects(index, value, mode);
}

Handle<WeakFixedArray> WeakFixedArray::EnsureSpace(Isolate* isolate,
                                                   Handle<WeakFixedArray> array,
                                                   int length) {
  if (array->length() < length) {
    int grow_by = length - array->length();
    array = isolate->factory()->CopyWeakFixedArrayAndGrow(array, grow_by);
  }
  return array;
}

MaybeObjectSlot WeakFixedArray::data_start() {
  return RawMaybeWeakField(kObjectsOffset);
}

MaybeObjectSlot WeakFixedArray::RawFieldOfElementAt(int index) {
  return RawMaybeWeakField(OffsetOfElementAt(index));
}

void WeakFixedArray::CopyElements(Isolate* isolate, int dst_index,
                                  WeakFixedArray src, int src_index, int len,
                                  WriteBarrierMode mode) {
  if (len == 0) return;
  DCHECK_LE(dst_index + len, length());
  DCHECK_LE(src_index + len, src.length());
  DisallowGarbageCollection no_gc;

  MaybeObjectSlot dst_slot(data_start() + dst_index);
  MaybeObjectSlot src_slot(src.data_start() + src_index);
  isolate->heap()->CopyRange(*this, dst_slot, src_slot, len, mode);
}

MaybeObject WeakArrayList::Get(int index) const {
  PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
  return Get(cage_base, index);
}

MaybeObject WeakArrayList::Get(PtrComprCageBase cage_base, int index) const {
  DCHECK_LT(static_cast<unsigned>(index), static_cast<unsigned>(capacity()));
  return objects(cage_base, index, kRelaxedLoad);
}

void WeakArrayList::Set(int index, MaybeObject value, WriteBarrierMode mode) {
  set_objects(index, value, mode);
}

void WeakArrayList::Set(int index, Smi value) {
  Set(index, MaybeObject::FromSmi(value), SKIP_WRITE_BARRIER);
}

MaybeObjectSlot WeakArrayList::data_start() {
  return RawMaybeWeakField(kObjectsOffset);
}

void WeakArrayList::CopyElements(Isolate* isolate, int dst_index,
                                 WeakArrayList src, int src_index, int len,
                                 WriteBarrierMode mode) {
  if (len == 0) return;
  DCHECK_LE(dst_index + len, capacity());
  DCHECK_LE(src_index + len, src.capacity());
  DisallowGarbageCollection no_gc;

  MaybeObjectSlot dst_slot(data_start() + dst_index);
  MaybeObjectSlot src_slot(src.data_start() + src_index);
  isolate->heap()->CopyRange(*this, dst_slot, src_slot, len, mode);
}

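// Returns the next live target of a weak reference, skipping slots whose
// referent has already been cleared by the GC; an empty HeapObject signals
// that the iteration is exhausted.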
HeapObject WeakArrayList::Iterator::Next() {
  if (!array_.is_null()) {
    while (index_ < array_.length()) {
      MaybeObject item = array_.Get(index_++);
      DCHECK(item->IsWeakOrCleared());
      if (!item->IsCleared()) return item->GetHeapObjectAssumeWeak();
    }
    array_ = WeakArrayList();
  }
  return HeapObject();
}

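// ArrayList is a growable list backed by a FixedArray: slot kLengthIndex holds
// the number of used elements as a Smi, and user elements start at
// kFirstIndex. The backing store may be longer than Length() to leave room for
// growth.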
int ArrayList::Length() const {
  if (FixedArray::cast(*this).length() == 0) return 0;
  return Smi::ToInt(FixedArray::cast(*this).get(kLengthIndex));
}

void ArrayList::SetLength(int length) {
  return FixedArray::cast(*this).set(kLengthIndex, Smi::FromInt(length));
}

Object ArrayList::Get(int index) const {
  return FixedArray::cast(*this).get(kFirstIndex + index);
}

Object ArrayList::Get(PtrComprCageBase cage_base, int index) const {
  return FixedArray::cast(*this).get(cage_base, kFirstIndex + index);
}

ObjectSlot ArrayList::Slot(int index) {
  return RawField(OffsetOfElementAt(kFirstIndex + index));
}

void ArrayList::Set(int index, Object obj, WriteBarrierMode mode) {
  FixedArray::cast(*this).set(kFirstIndex + index, obj, mode);
}

void ArrayList::Set(int index, Smi value) {
  DCHECK(Object(value).IsSmi());
  Set(index, value, SKIP_WRITE_BARRIER);
}
void ArrayList::Clear(int index, Object undefined) {
  DCHECK(undefined.IsUndefined());
  FixedArray::cast(*this).set(kFirstIndex + index, undefined,
                              SKIP_WRITE_BARRIER);
}

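// ByteArray::length() is measured in bytes. The typed accessors below index in
// units of the accessed type (int, uint16_t, uint32_t), and the object size is
// rounded up to a multiple of kTaggedSize.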
int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kTaggedSize); }

byte ByteArray::get(int index) const {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length());
  return ReadField<byte>(kHeaderSize + index * kCharSize);
}

void ByteArray::set(int index, byte value) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length());
  WriteField<byte>(kHeaderSize + index * kCharSize, value);
}

void ByteArray::copy_in(int index, const byte* buffer, int slice_length) {
  DCHECK_GE(index, 0);
  DCHECK_GE(slice_length, 0);
  DCHECK_LE(slice_length, kMaxInt - index);
  DCHECK_LE(index + slice_length, length());
  Address dst_addr = field_address(kHeaderSize + index * kCharSize);
  memcpy(reinterpret_cast<void*>(dst_addr), buffer, slice_length);
}

void ByteArray::copy_out(int index, byte* buffer, int slice_length) {
  DCHECK_GE(index, 0);
  DCHECK_GE(slice_length, 0);
  DCHECK_LE(slice_length, kMaxInt - index);
  DCHECK_LE(index + slice_length, length());
  Address src_addr = field_address(kHeaderSize + index * kCharSize);
  memcpy(buffer, reinterpret_cast<void*>(src_addr), slice_length);
}

int ByteArray::get_int(int index) const {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length() / kIntSize);
  return ReadField<int>(kHeaderSize + index * kIntSize);
}

void ByteArray::set_int(int index, int value) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length() / kIntSize);
  WriteField<int>(kHeaderSize + index * kIntSize, value);
}

uint32_t ByteArray::get_uint32(int index) const {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length() / kUInt32Size);
  return ReadField<uint32_t>(kHeaderSize + index * kUInt32Size);
}

void ByteArray::set_uint32(int index, uint32_t value) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length() / kUInt32Size);
  WriteField<uint32_t>(kHeaderSize + index * kUInt32Size, value);
}

uint32_t ByteArray::get_uint32_relaxed(int index) const {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length() / kUInt32Size);
  return RELAXED_READ_UINT32_FIELD(*this, kHeaderSize + index * kUInt32Size);
}

void ByteArray::set_uint32_relaxed(int index, uint32_t value) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length() / kUInt32Size);
  RELAXED_WRITE_UINT32_FIELD(*this, kHeaderSize + index * kUInt32Size, value);
}

uint16_t ByteArray::get_uint16(int index) const {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length() / kUInt16Size);
  return ReadField<uint16_t>(kHeaderSize + index * kUInt16Size);
}

void ByteArray::set_uint16(int index, uint16_t value) {
  DCHECK_GE(index, 0);
  DCHECK_LT(index, length() / kUInt16Size);
  WriteField<uint16_t>(kHeaderSize + index * kUInt16Size, value);
}

void ByteArray::clear_padding() {
  int data_size = length() + kHeaderSize;
  memset(reinterpret_cast<void*>(address() + data_size), 0, Size() - data_size);
}

ByteArray ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return ByteArray::cast(Object(address - kHeaderSize + kHeapObjectTag));
}

int ByteArray::DataSize() const { return RoundUp(length(), kTaggedSize); }

int ByteArray::ByteArraySize() { return SizeFor(length()); }

byte* ByteArray::GetDataStartAddress() {
  return reinterpret_cast<byte*>(address() + kHeaderSize);
}

byte* ByteArray::GetDataEndAddress() {
  return GetDataStartAddress() + length();
}

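// PodArray<T> views a ByteArray as a flat array of trivially copyable T
// values; length() is measured in elements of T, not bytes. Usage sketch
// (element type chosen for illustration only):
//   Handle<PodArray<int32_t>> pods =
//       PodArray<int32_t>::New(isolate, 4, AllocationType::kOld);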
template <class T>
PodArray<T>::PodArray(Address ptr) : ByteArray(ptr) {}

template <class T>
PodArray<T> PodArray<T>::cast(Object object) {
  return PodArray<T>(object.ptr());
}

// static
template <class T>
Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
                                     AllocationType allocation) {
  return Handle<PodArray<T>>::cast(
      isolate->factory()->NewByteArray(length * sizeof(T), allocation));
}

template <class T>
int PodArray<T>::length() const {
  return ByteArray::length() / sizeof(T);
}

int TemplateList::length() const {
  return Smi::ToInt(FixedArray::cast(*this).get(kLengthIndex));
}

Object TemplateList::get(int index) const {
  return FixedArray::cast(*this).get(kFirstElementIndex + index);
}

Object TemplateList::get(PtrComprCageBase cage_base, int index) const {
  return FixedArray::cast(*this).get(cage_base, kFirstElementIndex + index);
}

void TemplateList::set(int index, Object value) {
  FixedArray::cast(*this).set(kFirstElementIndex + index, value);
}

}  // namespace internal
}  // namespace v8

#include "src/base/platform/wrappers.h"
#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_FIXED_ARRAY_INL_H_