// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/constant-array-builder.h"

#include <cmath>
#include <functional>
#include <set>

#include "src/ast/ast-value-factory.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/base/functional.h"
#include "src/execution/isolate.h"
#include "src/heap/local-factory-inl.h"
#include "src/objects/objects-inl.h"

namespace v8 {
namespace internal {
namespace interpreter {

ConstantArrayBuilder::ConstantArraySlice::ConstantArraySlice(
    Zone* zone, size_t start_index, size_t capacity, OperandSize operand_size)
    : start_index_(start_index),
      capacity_(capacity),
      reserved_(0),
      operand_size_(operand_size),
      constants_(zone) {}

void ConstantArrayBuilder::ConstantArraySlice::Reserve() {
  DCHECK_GT(available(), 0u);
  reserved_++;
  DCHECK_LE(reserved_, capacity() - constants_.size());
}

void ConstantArrayBuilder::ConstantArraySlice::Unreserve() {
  DCHECK_GT(reserved_, 0u);
  reserved_--;
}

size_t ConstantArrayBuilder::ConstantArraySlice::Allocate(
    ConstantArrayBuilder::Entry entry, size_t count) {
  DCHECK_GE(available(), count);
  size_t index = constants_.size();
  DCHECK_LT(index, capacity());
  for (size_t i = 0; i < count; ++i) {
    constants_.push_back(entry);
  }
  return index + start_index();
}

ConstantArrayBuilder::Entry& ConstantArrayBuilder::ConstantArraySlice::At(
    size_t index) {
  DCHECK_GE(index, start_index());
  DCHECK_LT(index, start_index() + size());
  return constants_[index - start_index()];
}

const ConstantArrayBuilder::Entry& ConstantArrayBuilder::ConstantArraySlice::At(
    size_t index) const {
  DCHECK_GE(index, start_index());
  DCHECK_LT(index, start_index() + size());
  return constants_[index - start_index()];
}

#if DEBUG
template <typename IsolateT>
void ConstantArrayBuilder::ConstantArraySlice::CheckAllElementsAreUnique(
    IsolateT* isolate) const {
  std::set<Smi> smis;
  std::set<double> heap_numbers;
  std::set<const AstRawString*> strings;
  std::set<const char*> bigints;
  std::set<const Scope*> scopes;
  std::set<Object, Object::Comparer> deferred_objects;
  for (const Entry& entry : constants_) {
    bool duplicate = false;
    switch (entry.tag_) {
      case Entry::Tag::kSmi:
        duplicate = !smis.insert(entry.smi_).second;
        break;
      case Entry::Tag::kHeapNumber:
        duplicate = !heap_numbers.insert(entry.heap_number_).second;
        break;
      case Entry::Tag::kRawString:
        duplicate = !strings.insert(entry.raw_string_).second;
        break;
      case Entry::Tag::kBigInt:
        duplicate = !bigints.insert(entry.bigint_.c_str()).second;
        break;
      case Entry::Tag::kScope:
        duplicate = !scopes.insert(entry.scope_).second;
        break;
      case Entry::Tag::kHandle:
        duplicate = !deferred_objects.insert(*entry.handle_).second;
        break;
      case Entry::Tag::kDeferred:
        UNREACHABLE();  // Should be kHandle at this point.
      case Entry::Tag::kJumpTableSmi:
      case Entry::Tag::kUninitializedJumpTableSmi:
        // TODO(leszeks): Ignore jump tables because they have to be
        // contiguous, so they can contain duplicates.
        break;
#define CASE_TAG(NAME, ...) case Entry::Tag::k##NAME:
      SINGLETON_CONSTANT_ENTRY_TYPES(CASE_TAG)
#undef CASE_TAG
        // Singletons are non-duplicated by definition.
        break;
    }
    if (duplicate) {
      std::ostringstream os;
      os << "Duplicate constant found: " << Brief(*entry.ToHandle(isolate))
         << std::endl;
      // Print all the entries in the slice to help debug duplicates.
      size_t i = start_index();
      for (const Entry& prev_entry : constants_) {
        os << i++ << ": " << Brief(*prev_entry.ToHandle(isolate)) << std::endl;
      }
      FATAL("%s", os.str().c_str());
    }
  }
}
#endif

STATIC_CONST_MEMBER_DEFINITION const size_t ConstantArrayBuilder::k8BitCapacity;
STATIC_CONST_MEMBER_DEFINITION const size_t
    ConstantArrayBuilder::k16BitCapacity;
STATIC_CONST_MEMBER_DEFINITION const size_t
    ConstantArrayBuilder::k32BitCapacity;

ConstantArrayBuilder::ConstantArrayBuilder(Zone* zone)
    : constants_map_(16, base::KeyEqualityMatcher<intptr_t>(),
                     ZoneAllocationPolicy(zone)),
      smi_map_(zone),
      smi_pairs_(zone),
      heap_number_map_(zone) {
  idx_slice_[0] =
      zone->New<ConstantArraySlice>(zone, 0, k8BitCapacity, OperandSize::kByte);
  idx_slice_[1] = zone->New<ConstantArraySlice>(
      zone, k8BitCapacity, k16BitCapacity, OperandSize::kShort);
  idx_slice_[2] = zone->New<ConstantArraySlice>(
      zone, k8BitCapacity + k16BitCapacity, k32BitCapacity, OperandSize::kQuad);
}

size_t ConstantArrayBuilder::size() const {
  size_t i = arraysize(idx_slice_);
  while (i > 0) {
    ConstantArraySlice* slice = idx_slice_[--i];
    if (slice->size() > 0) {
      return slice->start_index() + slice->size();
    }
  }
  return idx_slice_[0]->size();
}

ConstantArrayBuilder::ConstantArraySlice* ConstantArrayBuilder::IndexToSlice(
    size_t index) const {
  for (ConstantArraySlice* slice : idx_slice_) {
    if (index <= slice->max_index()) {
      return slice;
    }
  }
  UNREACHABLE();
}

template <typename IsolateT>
MaybeHandle<Object> ConstantArrayBuilder::At(size_t index,
                                             IsolateT* isolate) const {
  const ConstantArraySlice* slice = IndexToSlice(index);
  DCHECK_LT(index, slice->capacity());
  if (index < slice->start_index() + slice->size()) {
    const Entry& entry = slice->At(index);
    if (!entry.IsDeferred()) return entry.ToHandle(isolate);
  }
  return MaybeHandle<Object>();
}

template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    MaybeHandle<Object> ConstantArrayBuilder::At(size_t index,
                                                 Isolate* isolate) const;
template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    MaybeHandle<Object> ConstantArrayBuilder::At(size_t index,
                                                 LocalIsolate* isolate) const;

template <typename IsolateT>
Handle<FixedArray> ConstantArrayBuilder::ToFixedArray(IsolateT* isolate) {
  Handle<FixedArray> fixed_array = isolate->factory()->NewFixedArrayWithHoles(
      static_cast<int>(size()), AllocationType::kOld);
  int array_index = 0;
  for (const ConstantArraySlice* slice : idx_slice_) {
    DCHECK_EQ(slice->reserved(), 0);
    DCHECK(array_index == 0 ||
           base::bits::IsPowerOfTwo(static_cast<uint32_t>(array_index)));
#if DEBUG
    // Different slices might contain the same element due to reservations, but
    // all elements within a slice should be unique.
    slice->CheckAllElementsAreUnique(isolate);
#endif
    // Copy objects from slice into array.
    for (size_t i = 0; i < slice->size(); ++i) {
      Handle<Object> value =
          slice->At(slice->start_index() + i).ToHandle(isolate);
      fixed_array->set(array_index++, *value);
    }
    // Leave holes where reservations led to unused slots.
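    // (Such holes can arise when a reservation held a low slot while entries
    // were allocated in a higher slice, and the reservation was then discarded
    // or committed to an already-existing entry.) The last slice that holds
    // data is never padded: fixed_array was sized via size(), which ends at
    // that slice's final used element, so once the remaining room is no larger
    // than this slice's padding there is nothing left to copy.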
    size_t padding = slice->capacity() - slice->size();
    if (static_cast<size_t>(fixed_array->length() - array_index) <= padding) {
      break;
    }
    array_index += padding;
  }
  DCHECK_GE(array_index, fixed_array->length());
  return fixed_array;
}

template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Handle<FixedArray> ConstantArrayBuilder::ToFixedArray(Isolate* isolate);
template EXPORT_TEMPLATE_DEFINE(V8_EXPORT_PRIVATE)
    Handle<FixedArray> ConstantArrayBuilder::ToFixedArray(
        LocalIsolate* isolate);

size_t ConstantArrayBuilder::Insert(Smi smi) {
  auto entry = smi_map_.find(smi);
  if (entry == smi_map_.end()) {
    return AllocateReservedEntry(smi);
  }
  return entry->second;
}

size_t ConstantArrayBuilder::Insert(double number) {
  if (std::isnan(number)) return InsertNaN();
  auto entry = heap_number_map_.find(number);
  if (entry == heap_number_map_.end()) {
    index_t index = static_cast<index_t>(AllocateIndex(Entry(number)));
    heap_number_map_[number] = index;
    return index;
  }
  return entry->second;
}

size_t ConstantArrayBuilder::Insert(const AstRawString* raw_string) {
  return constants_map_
      .LookupOrInsert(reinterpret_cast<intptr_t>(raw_string),
                      raw_string->Hash(),
                      [&]() { return AllocateIndex(Entry(raw_string)); })
      ->value;
}

size_t ConstantArrayBuilder::Insert(AstBigInt bigint) {
  return constants_map_
      .LookupOrInsert(reinterpret_cast<intptr_t>(bigint.c_str()),
                      static_cast<uint32_t>(base::hash_value(bigint.c_str())),
                      [&]() { return AllocateIndex(Entry(bigint)); })
      ->value;
}

size_t ConstantArrayBuilder::Insert(const Scope* scope) {
  return constants_map_
      .LookupOrInsert(reinterpret_cast<intptr_t>(scope),
                      static_cast<uint32_t>(base::hash_value(scope)),
                      [&]() { return AllocateIndex(Entry(scope)); })
      ->value;
}

#define INSERT_ENTRY(NAME, LOWER_NAME)              \
  size_t ConstantArrayBuilder::Insert##NAME() {     \
    if (LOWER_NAME##_ < 0) {                        \
      LOWER_NAME##_ = AllocateIndex(Entry::NAME()); \
    }                                               \
    return LOWER_NAME##_;                           \
  }
SINGLETON_CONSTANT_ENTRY_TYPES(INSERT_ENTRY)
#undef INSERT_ENTRY

ConstantArrayBuilder::index_t ConstantArrayBuilder::AllocateIndex(
    ConstantArrayBuilder::Entry entry) {
  return AllocateIndexArray(entry, 1);
}

ConstantArrayBuilder::index_t ConstantArrayBuilder::AllocateIndexArray(
    ConstantArrayBuilder::Entry entry, size_t count) {
  for (size_t i = 0; i < arraysize(idx_slice_); ++i) {
    if (idx_slice_[i]->available() >= count) {
      return static_cast<index_t>(idx_slice_[i]->Allocate(entry, count));
    }
  }
  UNREACHABLE();
}

ConstantArrayBuilder::ConstantArraySlice*
ConstantArrayBuilder::OperandSizeToSlice(OperandSize operand_size) const {
  ConstantArraySlice* slice = nullptr;
  switch (operand_size) {
    case OperandSize::kNone:
      UNREACHABLE();
    case OperandSize::kByte:
      slice = idx_slice_[0];
      break;
    case OperandSize::kShort:
      slice = idx_slice_[1];
      break;
    case OperandSize::kQuad:
      slice = idx_slice_[2];
      break;
  }
  DCHECK(slice->operand_size() == operand_size);
  return slice;
}

size_t ConstantArrayBuilder::InsertDeferred() {
  return AllocateIndex(Entry::Deferred());
}

size_t ConstantArrayBuilder::InsertJumpTable(size_t size) {
  return AllocateIndexArray(Entry::UninitializedJumpTableSmi(), size);
}

void ConstantArrayBuilder::SetDeferredAt(size_t index, Handle<Object> object) {
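  // Fills in the object for a slot previously allocated via InsertDeferred().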
  ConstantArraySlice* slice = IndexToSlice(index);
  return slice->At(index).SetDeferred(object);
}

void ConstantArrayBuilder::SetJumpTableSmi(size_t index, Smi smi) {
  ConstantArraySlice* slice = IndexToSlice(index);
  // Allow others to reuse these Smis, but insert using emplace to avoid
  // overwriting existing values in the Smi map (which may have a smaller
  // operand size).
  smi_map_.emplace(smi, static_cast<index_t>(index));
  return slice->At(index).SetJumpTableSmi(smi);
}

OperandSize ConstantArrayBuilder::CreateReservedEntry() {
  for (size_t i = 0; i < arraysize(idx_slice_); ++i) {
    if (idx_slice_[i]->available() > 0) {
      idx_slice_[i]->Reserve();
      return idx_slice_[i]->operand_size();
    }
  }
  UNREACHABLE();
}

ConstantArrayBuilder::index_t ConstantArrayBuilder::AllocateReservedEntry(
    Smi value) {
  index_t index = static_cast<index_t>(AllocateIndex(Entry(value)));
  smi_map_[value] = index;
  return index;
}

size_t ConstantArrayBuilder::CommitReservedEntry(OperandSize operand_size,
                                                 Smi value) {
  DiscardReservedEntry(operand_size);
  size_t index;
  auto entry = smi_map_.find(value);
  if (entry == smi_map_.end()) {
    index = AllocateReservedEntry(value);
  } else {
    ConstantArraySlice* slice = OperandSizeToSlice(operand_size);
    index = entry->second;
    if (index > slice->max_index()) {
      // The object is already in the constant array, but its index may be
      // too large for the reserved operand_size. So, allocate a duplicate
      // entry that fits the smaller operand size.
      index = AllocateReservedEntry(value);
    }
    DCHECK_LE(index, slice->max_index());
  }
  return index;
}

void ConstantArrayBuilder::DiscardReservedEntry(OperandSize operand_size) {
  OperandSizeToSlice(operand_size)->Unreserve();
}

template <typename IsolateT>
Handle<Object> ConstantArrayBuilder::Entry::ToHandle(IsolateT* isolate) const {
  switch (tag_) {
    case Tag::kDeferred:
      // We shouldn't have any deferred entries by now.
      UNREACHABLE();
    case Tag::kHandle:
      return handle_;
    case Tag::kSmi:
    case Tag::kJumpTableSmi:
      return handle(smi_, isolate);
    case Tag::kUninitializedJumpTableSmi:
      // TODO(leszeks): There's probably a better value we could use here.
      return isolate->factory()->the_hole_value();
    case Tag::kRawString:
      return raw_string_->string();
    case Tag::kHeapNumber:
      return isolate->factory()->template NewNumber<AllocationType::kOld>(
          heap_number_);
    case Tag::kBigInt:
      // This should never fail: the parser will never create a BigInt
      // literal that cannot be allocated.
      return BigIntLiteral(isolate, bigint_.c_str()).ToHandleChecked();
    case Tag::kScope:
      return scope_->scope_info();
#define ENTRY_LOOKUP(Name, name) \
  case Tag::k##Name:             \
    return isolate->factory()->name();
      SINGLETON_CONSTANT_ENTRY_TYPES(ENTRY_LOOKUP);
#undef ENTRY_LOOKUP
  }
  UNREACHABLE();
}

template Handle<Object> ConstantArrayBuilder::Entry::ToHandle(
    Isolate* isolate) const;
template Handle<Object> ConstantArrayBuilder::Entry::ToHandle(
    LocalIsolate* isolate) const;

}  // namespace interpreter
}  // namespace internal
}  // namespace v8
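// Usage sketch (illustrative only): roughly how a client such as the bytecode
// array builder drives this class. The names `zone`, `isolate`, `ast_string`,
// and `some_handle` are placeholders, not identifiers defined in this file.
//
//   ConstantArrayBuilder builder(zone);
//
//   // Inserts are deduplicated and return the constant pool index.
//   size_t a = builder.Insert(Smi::FromInt(42));
//   size_t b = builder.Insert(2.5);          // heap number
//   size_t c = builder.Insert(ast_string);   // const AstRawString*
//
//   // Reservation protocol: reserve capacity first (which fixes the operand
//   // size an index must fit), then either commit a Smi or discard the slot.
//   OperandSize reserved = builder.CreateReservedEntry();
//   size_t d = builder.CommitReservedEntry(reserved, Smi::FromInt(7));
//   // ... or: builder.DiscardReservedEntry(reserved);
//
//   // Deferred entries get an index now and a value later.
//   size_t e = builder.InsertDeferred();
//   builder.SetDeferredAt(e, some_handle);   // Handle<Object>
//
//   // Finally, flatten everything into the function's constant pool.
//   Handle<FixedArray> pool = builder.ToFixedArray(isolate);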