/third_party/node/deps/v8/src/heap/
heap-allocator-inl.h
    AllocateRaw(int size_in_bytes, AllocationOrigin origin, AllocationAlignment alignment)
        forwards to AllocateRaw(size_in_bytes, AllocationType::kOld, origin, alignment)
    AllocateRaw(int size_in_bytes, AllocationType type, AllocationOrigin origin, AllocationAlignment alignment)
        treats the request as large when static_cast<size_t>(size_in_bytes) > large_object_threshold
        dispatches to heap_->tp_heap_->Allocate(size_in_bytes, type, alignment),
        AllocateRawLargeInternal(size_in_bytes, type, origin, alignment),
        new_space()->AllocateRaw(size_in_bytes, alignment, origin),
        old_space()->AllocateRaw(size_in_bytes, alignment, origin),
        code_space()->AllocateRawUnaligned(size_in_bytes),
        space_for_maps()->AllocateRawUnaligned(size_in_bytes),
        or read_only_space()->AllocateRaw(size_in_bytes, alignment)
    AllocateRawData(int size_in_bytes, AllocationType type, AllocationOrigin origin, AllocationAlignment alignment)
    (additional matches omitted)
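A minimal sketch of the dispatch pattern these matches show: requests above a large-object threshold take a dedicated path, otherwise the AllocationType selects a space. The types, the space names, and the malloc stand-ins below are invented for illustration; this is not V8's real API.

    // Placeholder sketch (not V8's real classes) of size- and type-based
    // allocation dispatch: large requests bypass the regular spaces.
    #include <cstddef>
    #include <cstdlib>

    enum class AllocationType { kYoung, kOld, kCode, kMap, kReadOnly };

    struct AllocationResult {
      void* address = nullptr;  // null means the request failed
    };

    class AllocatorSketch {
     public:
      explicit AllocatorSketch(size_t large_object_threshold)
          : large_object_threshold_(large_object_threshold) {}

      AllocationResult AllocateRaw(int size_in_bytes, AllocationType type) {
        if (static_cast<size_t>(size_in_bytes) > large_object_threshold_) {
          return AllocateRawLarge(size_in_bytes, type);
        }
        // Regular-sized objects: each AllocationType maps to one space.
        switch (type) {
          case AllocationType::kYoung:    return AllocateInSpace("new", size_in_bytes);
          case AllocationType::kOld:      return AllocateInSpace("old", size_in_bytes);
          case AllocationType::kCode:     return AllocateInSpace("code", size_in_bytes);
          case AllocationType::kMap:      return AllocateInSpace("map", size_in_bytes);
          case AllocationType::kReadOnly: return AllocateInSpace("read-only", size_in_bytes);
        }
        return {};
      }

     private:
      AllocationResult AllocateInSpace(const char*, int size_in_bytes) {
        // Stand-in for the per-space AllocateRaw/AllocateRawUnaligned calls.
        return {std::malloc(static_cast<size_t>(size_in_bytes))};
      }
      AllocationResult AllocateRawLarge(int size_in_bytes, AllocationType) {
        // Stand-in for the large-object path (AllocateRawLargeInternal).
        return {std::malloc(static_cast<size_t>(size_in_bytes))};
      }
      size_t large_object_threshold_;
    };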
free-list.cc
    FreeListCategory::Free(Address start, size_t size_in_bytes, FreeMode mode, ...)
        available_ += size_in_bytes; owner->IncreaseAvailableBytes(size_in_bytes)
    FreeList::Free(Address start, size_t size_in_bytes, FreeMode mode)
        page->DecreaseAllocatedBytes(size_in_bytes)
        if size_in_bytes < min_block_size_: page->add_wasted_memory(size_in_bytes),
        wasted_bytes_ += size_in_bytes, and the whole block is reported as wasted
        otherwise FreeListCategoryType type = SelectFreeListCategoryType(size_in_bytes)
    GetPageForSize(size_t size_in_bytes)
    Allocate(size_t size_in_bytes, size_t* node_size, AllocationOrigin origin)  (several implementations)
    Free(Address start, size_t size_in_bytes, FreeMode mode)
    (additional matches omitted)
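A small sketch of the bookkeeping pattern visible in FreeList::Free, without V8's category and page machinery: blocks below a minimum linkable size are counted as wasted rather than added to the free list. The class, field names, and constants here are invented for illustration.

    // Hypothetical free-list sketch: too-small blocks become waste,
    // everything else becomes reusable "available" memory.
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct FreeBlock {
      uintptr_t start;
      size_t size_in_bytes;
    };

    class FreeListSketch {
     public:
      explicit FreeListSketch(size_t min_block_size)
          : min_block_size_(min_block_size) {}

      // Returns the number of bytes that could not be made reusable.
      size_t Free(uintptr_t start, size_t size_in_bytes) {
        if (size_in_bytes < min_block_size_) {
          wasted_bytes_ += size_in_bytes;  // too small to link; count as waste
          return size_in_bytes;
        }
        blocks_.push_back({start, size_in_bytes});
        available_ += size_in_bytes;       // now available for reuse
        return 0;
      }

      size_t available() const { return available_; }
      size_t wasted_bytes() const { return wasted_bytes_; }

     private:
      size_t min_block_size_;
      size_t available_ = 0;
      size_t wasted_bytes_ = 0;
      std::vector<FreeBlock> blocks_;
    };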
spaces-inl.h
    AllocateRawAligned(int size_in_bytes, AllocationAlignment alignment)
        int aligned_size = filler_size + size_in_bytes
    AllocateFastUnaligned(int size_in_bytes, AllocationOrigin origin)
        fails if !allocation_info_->CanIncrementTop(size_in_bytes); otherwise
        HeapObject::FromAddress(allocation_info_->IncrementTop(size_in_bytes))
        and MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes)
    AllocateFastAligned(int size_in_bytes, int* result_aligned_size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin)
        int aligned_size_in_bytes = size_in_bytes + filler_size
        MSAN_ALLOCATED_UNINITIALIZED_MEMORY(obj.address(), size_in_bytes)
    SpaceWithLinearArea::AllocateRaw(int size_in_bytes, ...)
    AllocateRawUnaligned(int size_in_bytes, AllocationOrigin origin)
    AllocateRawAligned(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin)
    AllocateRawSlow(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin)
    (additional matches omitted)
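A simplified sketch of the aligned bump-pointer fast path these matches outline: the alignment filler is added to size_in_bytes before the top/limit check, and the object begins after the filler. The struct and its fields are assumptions; the real code also writes a filler object and notifies MSAN.

    // Bump-pointer fast path sketch (names invented); returns 0 when the
    // linear allocation area cannot hold the aligned request.
    #include <cstdint>

    struct LinearAreaSketch {
      uintptr_t top;    // current allocation pointer (top <= limit assumed)
      uintptr_t limit;  // end of the linear allocation area

      uintptr_t AllocateFastAligned(int size_in_bytes, int alignment) {
        int filler_size =
            static_cast<int>((alignment - (top % alignment)) % alignment);
        int aligned_size_in_bytes = size_in_bytes + filler_size;
        if (limit - top < static_cast<uintptr_t>(aligned_size_in_bytes)) {
          return 0;                           // fast path fails; caller goes slow
        }
        uintptr_t object = top + filler_size; // object starts after the filler
        top += aligned_size_in_bytes;         // bump the allocation pointer
        return object;
      }
    };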
free-list.h
    void Free(Address address, size_t size_in_bytes, FreeMode mode, ...)
    // Adds a node to the free list: the block of size {size_in_bytes} starting at {start}
    virtual size_t Free(Address start, size_t size_in_bytes, FreeMode mode)
    // Allocates a free-space node from the free list of at least size_in_bytes bytes;
    // returns null if the allocation request cannot be handled
    virtual V8_WARN_UNUSED_RESULT FreeSpace Allocate(size_t size_in_bytes, size_t* node_size, AllocationOrigin origin)
    V8_EXPORT_PRIVATE virtual Page* GetPageForSize(size_t size_in_bytes) = 0
    // Returns the smallest category in which an object of |size_in_bytes| could fit
    virtual FreeListCategoryType SelectFreeListCategoryType(size_t size_in_bytes) = 0
    size_t Free(Address start, size_t size_in_bytes, FreeMode mode)
    SelectFastAllocationFreeListCategoryType(size_t size_in_bytes)
    (additional matches omitted)
paged-spaces.h
    size_t Free(Address start, size_t size_in_bytes, SpaceAccountingMode mode)
        returns 0 when size_in_bytes == 0; otherwise forwards the block to either
        AccountedFree(start, size_in_bytes) or UnaccountedFree(start, size_in_bytes)
    size_t AccountedFree(Address start, size_t size_in_bytes)
        size_t wasted = free_list_->Free(start, size_in_bytes, kLinkCategory)
        accounting_stats_.DecreaseAllocatedBytes(size_in_bytes, page)
        DCHECK_GE(size_in_bytes, wasted); returns size_in_bytes minus the wasted bytes
    UnaccountedFree(Address start, size_t size_in_bytes)
    (additional matches omitted)
local-heap-inl.h
    LocalHeap::AllocateRaw(int size_in_bytes, AllocationType type, ...)
        bool large_object = size_in_bytes > heap_->MaxRegularHeapObjectSize(type)
        code:   heap()->code_lo_space()->AllocateRawBackground(this, size_in_bytes)
                or code_space_allocator()->AllocateRaw(size_in_bytes, alignment, origin),
                then heap()->ZapCodeObject(object.address(), size_in_bytes)
        old:    heap()->lo_space()->AllocateRawBackground(this, size_in_bytes)
                or old_space_allocator()->AllocateRaw(size_in_bytes, alignment, origin)
        shared: shared_old_space_allocator()->AllocateRaw(size_in_bytes, alignment, origin)
paged-spaces.cc
    Page* PagedSpace::RemovePageSafe(int size_in_bytes)
        Page* page = free_list()->GetPageForSize(size_in_bytes)
    ExpandBackground(size_t size_in_bytes)
        CHECK_LE(size_in_bytes, page->area_size())
        Free(page->area_start() + size_in_bytes, page->area_size() - size_in_bytes, ...)
        AddRangeToActiveSystemPages(page, object_start, object_start + size_in_bytes)
        returns std::make_pair(object_start, size_in_bytes)
    bool PagedSpace::TryAllocationFromFreeListMain(size_t size_in_bytes, ...)
        DCHECK(IsAligned(size_in_bytes, kTaggedSize))
    RefillLabMain(int size_in_bytes, AllocationOrigin origin)  (two implementations)
    TryExpand(int size_in_bytes, AllocationOrigin origin)
    RawRefillLabMain(int size_in_bytes, AllocationOrigin origin)
    ContributeToSweepingMain(int required_freed_bytes, int max_pages, int size_in_bytes, AllocationOrigin origin)
    EnsureAllocation(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin, int* out_max_aligned_size)
    (additional matches omitted)
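A sketch of the "expand and free the tail" pattern shown by the ExpandBackground match: a fresh page is added, the first size_in_bytes are handed to the caller, and the rest of the page area goes straight back to the free list. The page struct and the free_range callback are assumptions made for this illustration.

    // Expand-and-free-the-tail sketch (names invented for illustration).
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <utility>

    struct PageSketch {
      uintptr_t area_start;
      size_t area_size;
    };

    std::pair<uintptr_t, size_t> ExpandForAllocation(
        PageSketch page, size_t size_in_bytes,
        void (*free_range)(uintptr_t start, size_t size)) {
      assert(size_in_bytes <= page.area_size);
      // Everything after the requested object is returned to the free list.
      free_range(page.area_start + size_in_bytes,
                 page.area_size - size_in_bytes);
      return {page.area_start, size_in_bytes};
    }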
read-only-spaces.cc
    void ReadOnlySpace::EnsureSpaceForAllocation(int size_in_bytes)
        returns early if top_ + size_in_bytes <= limit_; DCHECK_GE(size_in_bytes, 0)
    TryAllocateLinearlyAligned(int size_in_bytes, AllocationAlignment alignment)
        Address new_top = current_top + filler_size + size_in_bytes
        int allocated_size = filler_size + size_in_bytes
    AllocateRawAligned(int size_in_bytes, AllocationAlignment alignment)
        int allocation_size = size_in_bytes, then
        object = TryAllocateLinearlyAligned(size_in_bytes, alignment)
    AllocateRawUnaligned(int size_in_bytes)
    AllocateRaw(int size_in_bytes, AllocationAlignment alignment)
    (additional matches omitted)
heap-allocator.cc
    AllocateRawLargeInternal(int size_in_bytes, AllocationType allocation, AllocationOrigin origin, AllocationAlignment alignment)
        DCHECK_GT(size_in_bytes, heap_->MaxRegularHeapObjectSize(allocation))
        routes to new_lo_space()->AllocateRaw(size_in_bytes),
        lo_space()->AllocateRaw(size_in_bytes), or code_lo_space()->AllocateRaw(size_in_bytes)
heap-allocator.h
    AllocateRaw(int size_in_bytes, AllocationType allocation, ...)
    AllocateRaw(int size_in_bytes, AllocationOrigin origin = AllocationOrigin::kRuntime, ...)
    AllocateRawData(int size_in_bytes, AllocationType allocation, ...)
    a further declaration taking (int size_in_bytes, AllocationType allocation, AllocationOrigin origin, ...)
spaces.h
    declaration fragments taking (int size_in_bytes, AllocationAlignment alignment) and (size_t size_in_bytes, ...)
    AllocateRaw(int size_in_bytes, AllocationAlignment alignment, ...)
    AllocateRawUnaligned(int size_in_bytes, AllocationOrigin origin = AllocationOrigin::kRuntime)
    AllocateRawAligned(int size_in_bytes, AllocationAlignment alignment, ...)
    AllocateFastUnaligned(int size_in_bytes, AllocationOrigin origin)
    // ... (object size + alignment filler size) to the size_in_bytes
    AllocateFastAligned(int size_in_bytes, int* aligned_size_in_bytes, ...)
    AllocateRawSlow(int size_in_bytes, AllocationAlignment alignment, ...)
    virtual bool EnsureAllocation(int size_in_bytes, ...)
    (additional matches omitted)
read-only-spaces.h
    AllocationResult AllocateRaw(int size_in_bytes, ...)
    AllocationResult AllocateRawUnaligned(int size_in_bytes)
    AllocationResult AllocateRawAligned(int size_in_bytes, ...)
    HeapObject TryAllocateLinearlyAligned(int size_in_bytes, ...)
    void EnsureSpaceForAllocation(int size_in_bytes)
spaces.cc
    InvokeAllocationObservers(Address soon_object, size_t size_in_bytes, size_t aligned_size_in_bytes, size_t allocation_size)
        // Perform an allocation step when the step is reached. size_in_bytes is the ...
        DCHECK_LE(size_in_bytes, aligned_size_in_bytes)
        DCHECK(size_in_bytes == aligned_size_in_bytes || ...)
        heap_->CreateFillerObjectAt(soon_object, static_cast<int>(size_in_bytes), ...)
        allocation_counter_.InvokeAllocationObservers(soon_object, size_in_bytes, ...)
new-spaces-inl.h
    AllocateRawSynchronized(int size_in_bytes, AllocationAlignment alignment, AllocationOrigin origin)
        returns AllocateRaw(size_in_bytes, alignment, origin)
/third_party/vk-gl-cts/external/amber/src/src/vulkan/
buffer_backed_descriptor.cc
    MoveTransferResourceToBufferOutput()
        auto size_in_bytes = transfer_resource->GetSizeInBytes()
        buffer->SetElementCount(size_in_bytes / buffer->GetFormat()->SizeInBytes())
        buffer->ValuePtr()->resize(size_in_bytes)
        std::memcpy(buffer->ValuePtr()->data(), resource_memory_ptr, size_in_bytes)
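A small sketch of the step shown above: a raw byte size is turned into an element count by dividing by the per-element format size, and the bytes are then copied out. The free function and parameter names are assumptions; it presumes size_in_bytes is a whole multiple of the element size.

    // Derive an element count from a byte size and copy the payload.
    #include <cstdint>
    #include <cstring>
    #include <vector>

    std::vector<uint8_t> CopyOut(const void* resource_memory_ptr,
                                 uint32_t size_in_bytes,
                                 uint32_t format_size_in_bytes,
                                 uint32_t* element_count) {
      *element_count = size_in_bytes / format_size_in_bytes;  // exact division assumed
      std::vector<uint8_t> out(size_in_bytes);
      std::memcpy(out.data(), resource_memory_ptr, size_in_bytes);
      return out;
    }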
push_constant.cc
    GetVkPushConstantRange()
        uint32_t size_in_bytes = it->offset + ...
        assert(size_in_bytes + 3U <= std::numeric_limits<uint32_t>::max())
        range.size = ((size_in_bytes + 3U) / 4U) * 4U
    UpdateMemoryWithInput()
        error message: "Vulkan: UpdateMemoryWithInput BufferInput offset + size_in_bytes ..."
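The rounding in GetVkPushConstantRange reflects Vulkan's requirement that a push-constant range size be a multiple of 4: the byte size is rounded up with integer arithmetic. A self-contained restatement of that formula, with a worked example (the helper name is an assumption):

    // Round a byte size up to the next multiple of 4, as in the match above.
    // Example: size_in_bytes = 10 gives ((10 + 3) / 4) * 4 = 12.
    #include <cstdint>

    constexpr uint32_t RoundUpTo4(uint32_t size_in_bytes) {
      return ((size_in_bytes + 3U) / 4U) * 4U;
    }

    static_assert(RoundUpTo4(10U) == 12U, "10 bytes rounds up to 12");
    static_assert(RoundUpTo4(16U) == 16U, "already-aligned sizes are unchanged");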
transfer_buffer.cc
    TransferBuffer(Device* device, uint32_t size_in_bytes, Format* format)
        : Resource(device, size_in_bytes)
buffer_descriptor.cc
    CreateResourceIfNeeded()
        auto size_in_bytes = ...
        used to construct the transfer resource with (device_, size_in_bytes, amber_buffer->GetFormat())
resource.cc
    Resource::Resource(Device* device, uint32_t size_in_bytes)
        : device_(device), size_in_bytes_(size_in_bytes) {}
/third_party/mesa3d/src/gallium/drivers/r300/
r300_texture_desc.c
    r300_setup_miptree()
        tex->tex.size_in_bytes = 0
        tex->tex.offset_in_bytes[i] = tex->tex.size_in_bytes
        tex->tex.size_in_bytes = tex->tex.offset_in_bytes[i] + size
        debug output includes u_minify(tex->tex.depth0, i), stride, tex->tex.size_in_bytes
    r300_tex_print_info()
        prints tex->b.last_level, tex->tex.size_in_bytes, ...
    r300_texture_desc_init()
        checks tex->buf && tex->tex.size_in_bytes > tex->buf->size (and again without the buf test),
        reporting tex->buf->size against tex->tex.size_in_bytes
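A sketch of the miptree accumulation visible in r300_setup_miptree: each level's offset is the running total, and size_in_bytes then grows by that level's size, so the final value is the whole texture's byte size. The struct, field layout, and level-size input are simplified assumptions.

    // Miptree size accumulation sketch (simplified field names).
    #include <cstddef>

    struct MiptreeSketch {
      static constexpr int kMaxLevels = 16;
      size_t offset_in_bytes[kMaxLevels];
      size_t size_in_bytes;
    };

    void SetupMiptree(MiptreeSketch* tex, const size_t* level_sizes, int levels) {
      tex->size_in_bytes = 0;
      for (int i = 0; i < levels && i < MiptreeSketch::kMaxLevels; ++i) {
        tex->offset_in_bytes[i] = tex->size_in_bytes;  // this level starts here
        tex->size_in_bytes = tex->offset_in_bytes[i] + level_sizes[i];
      }
    }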
/third_party/vk-gl-cts/external/amber/src/android_sample/jni/
amber_script.cc
    ReadContent()
        size_t size_in_bytes = AAsset_getLength(asset)
        // Allocate a memory chunk whose size in bytes is |size_in_bytes|
        std::vector<uint8_t> content(size_in_bytes)
        AAsset_read(asset, content.data(), size_in_bytes)
/third_party/mesa3d/src/amd/common/
ac_rgp.c
    int32_t size_in_bytes;  (chunk header member)
    ac_sqtt_fill_cpu_info(), ac_sqtt_fill_asic_info(), ac_sqtt_fill_api_info(), ac_sqtt_fill_sqtt_desc():
        chunk->header.size_in_bytes = sizeof(*chunk)
    ac_sqtt_fill_code_object(): chunk->header.size_in_bytes = chunk_size
    ac_sqtt_fill_loader_events(): chunk->header.size_in_bytes = (rgp_loader_events->record_count * ...)
    ac_sqtt_fill_pso_correlation(): chunk->header.size_in_bytes = (rgp_pso_correlation->record_count * ...)
    ac_sqtt_fill_sqtt_data(): chunk->header.size_in_bytes = sizeof(*chunk) + size
    ac_sqtt_fill_queue_event_timings(): also sets chunk->header.size_in_bytes
    (additional matches omitted)
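A sketch of the RGP chunk-header pattern these matches show: fixed-size chunks store sizeof(*chunk) in size_in_bytes, while variable-size chunks add their payload to the header size. The struct layouts and helper names are invented for illustration, not the real RGP file format definitions.

    // Chunk-size bookkeeping sketch (invented layouts).
    #include <cstdint>
    #include <cstring>

    struct ChunkHeaderSketch {
      int32_t size_in_bytes;  // total chunk size, header included
    };

    struct FixedChunkSketch {
      ChunkHeaderSketch header;
      int32_t some_field;
    };

    void FillFixedChunk(FixedChunkSketch* chunk) {
      std::memset(chunk, 0, sizeof(*chunk));
      chunk->header.size_in_bytes = static_cast<int32_t>(sizeof(*chunk));
    }

    void FillVariableChunk(ChunkHeaderSketch* header, int32_t payload_size) {
      // Variable-size chunks: header plus trailing payload bytes.
      header->size_in_bytes = static_cast<int32_t>(sizeof(*header)) + payload_size;
    }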
/third_party/vk-gl-cts/external/amber/src/src/ |
verifier.cc
    CopyBitsOfMemoryToBuffer()
        const uint32_t size_in_bytes = (src_bit_offset + bits + 7) / kBitsPerByte
        assert(size_in_bytes <= kBitsPerByte)
        copy loop: for (uint32_t i = 0; i < size_in_bytes; ++i)
    ProbeSSBO()
        size_t size_in_bytes = buffer_element_count * fmt->SizeInBytes()
        fails if (elem_count * fmt->SizeInBytes()) + offset > size_in_bytes,
        reporting std::to_string(size_in_bytes) + " bytes"
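The bit-to-byte conversion used in CopyBitsOfMemoryToBuffer rounds the covered bit span up to whole bytes. A standalone restatement with worked examples (the helper name is an assumption): an offset of 3 bits plus 12 bits spans (3 + 12 + 7) / 8 = 2 bytes.

    // Bytes needed to cover a bit range starting at src_bit_offset.
    #include <cstdint>

    constexpr uint32_t kBitsPerByte = 8;

    constexpr uint32_t BytesCoveringBits(uint32_t src_bit_offset, uint32_t bits) {
      return (src_bit_offset + bits + 7) / kBitsPerByte;
    }

    static_assert(BytesCoveringBits(3, 12) == 2, "15 bits fit in 2 bytes");
    static_assert(BytesCoveringBits(0, 8) == 1, "exactly one byte");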
/third_party/mesa3d/src/gallium/drivers/radeonsi/
radeon_vcn_enc_1_2.c
    radeon_enc_nalu_sps(), radeon_enc_nalu_sps_hevc(), radeon_enc_nalu_prefix(),
    radeon_enc_nalu_sei(), radeon_enc_nalu_pps(), radeon_enc_nalu_pps_hevc(),
    radeon_enc_nalu_vps(), radeon_enc_nalu_aud_hevc():
        each reserves a dword with
        uint32_t *size_in_bytes = &enc->cs.current.buf[enc->cs.current.cdw++]
        (unsigned * in radeon_enc_nalu_sei) and later backfills it with
        *size_in_bytes = (enc->bits_output + 7) / 8
    (additional matches omitted)
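A sketch of the pattern repeated in these NALU emitters: reserve one dword in the command stream for the size, emit the bitstream, then backfill that slot with the byte count by rounding bits up to bytes. The command-stream type and function names below are stand-ins, not the driver's real structures.

    // Reserve-then-backfill sketch for a NALU size slot.
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct EncoderSketch {
      std::vector<uint32_t> cs;  // stand-in for the command stream buffer
      uint32_t bits_output = 0;  // bits emitted for the current NALU
    };

    void EmitNaluSketch(EncoderSketch* enc, uint32_t payload_bits) {
      std::size_t size_slot = enc->cs.size();
      enc->cs.push_back(0);                // reserve one dword for the size
      // ... bitstream emission would append further dwords here ...
      enc->bits_output = payload_bits;
      enc->cs[size_slot] = (enc->bits_output + 7) / 8;  // backfill byte count
    }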
radeon_vcn_enc_3_0.c
    radeon_enc_nalu_pps_hevc()
        uint32_t *size_in_bytes; ... size_in_bytes = &enc->cs.current.buf[enc->cs.current.cdw++]
        *size_in_bytes = (enc->bits_output + 7) / 8