/arkcompiler/runtime_core/libpandabase/mem/
arena-inl.h
    31  startPos_(ToVoidPtr(AlignUp(ToUintPtr(buff), GetAlignmentInBytes(start_alignment)))),  in Arena()
    35  ASSERT(ToUintPtr(buff) == AlignUp(ToUintPtr(buff), GetAlignmentInBytes(ARENA_DEFAULT_ALIGNMENT)));  in Arena()
    53  curPos_ = ToVoidPtr(ToUintPtr(ret) + size);  in Alloc()
    61  ASSERT(AlignUp(ToUintPtr(curPos_), GetAlignmentInBytes(alignment)) == ToUintPtr(curPos_));  in AlignedAlloc()
    64  uintptr_t new_cur_pos = ToUintPtr(curPos_) + size;  in AlignedAlloc()
    65  if (new_cur_pos <= (ToUintPtr(buff_) + size_)) {  in AlignedAlloc()
    92  ASSERT(ToUintPtr(curPos_) >= ToUintPtr(GetStartPo  in GetFreeSize()
    [all...]

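The arena hits above all follow the same bump-pointer pattern: align the current position, advance it, and bounds-check against the backing buffer. Below is a minimal self-contained sketch of that pattern, assuming power-of-two alignments; SketchArena and the inline ToUintPtr/ToVoidPtr/AlignUp stand-ins are illustrative, not the libpandabase implementations.

    #include <cstddef>
    #include <cstdint>

    // Simplified stand-ins for the libpandabase helpers seen in the hits above.
    inline uintptr_t ToUintPtr(void *p) { return reinterpret_cast<uintptr_t>(p); }
    inline void *ToVoidPtr(uintptr_t v) { return reinterpret_cast<void *>(v); }
    // Rounds value up to a power-of-two alignment.
    inline uintptr_t AlignUp(uintptr_t value, size_t alignment)
    {
        return (value + alignment - 1U) & ~(static_cast<uintptr_t>(alignment) - 1U);
    }

    // Hypothetical arena: bump allocation over one fixed buffer, in the spirit of Arena::AlignedAlloc.
    class SketchArena {
    public:
        SketchArena(void *buff, size_t size) : buff_(buff), size_(size), curPos_(buff) {}

        void *AlignedAlloc(size_t size, size_t alignment)
        {
            // Round the current position up to the requested alignment.
            uintptr_t aligned = AlignUp(ToUintPtr(curPos_), alignment);
            uintptr_t newCurPos = aligned + size;
            // Bounds check against the end of the backing buffer (cf. line 65 in arena-inl.h).
            if (newCurPos > ToUintPtr(buff_) + size_) {
                return nullptr;
            }
            curPos_ = ToVoidPtr(newCurPos);
            return ToVoidPtr(aligned);
        }

    private:
        void *buff_;
        size_t size_;
        void *curPos_;
    };

    // Usage: alignas(16) char backing[256]; SketchArena a(backing, sizeof(backing)); void *p = a.AlignedAlloc(24, 8);
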
mmap_mem_pool-inl.h
    48  Pool new_pool(element_size - size, ToVoidPtr(ToUintPtr(element_mem) + size));  in PopFreePool()
    70  ASSERT(ToUintPtr(prev_pool->GetMem()) + prev_pool->GetSize() == ToUintPtr(mmap_pool->GetMem()));  in PushFreePool()
    80  ASSERT(ToUintPtr(mmap_pool->GetMem()) + mmap_pool->GetSize() == ToUintPtr(next_pool->GetMem()));  in PushFreePool()
    131 ASSERT((ToUintPtr(mem) == PANDA_32BITS_HEAP_START_ADDRESS) || (object_space_size == 0));  in MmapMemPool()
    132 ASSERT(ToUintPtr(mem) + object_space_size <= PANDA_32BITS_HEAP_END_OBJECTS_ADDRESS);  in MmapMemPool()
    139 ASSERT(AlignUp(ToUintPtr(mem), PANDA_POOL_ALIGNMENT_IN_BYTES) == ToUintPtr(mem));  in MmapMemPool()
    140 min_object_memory_addr_ = ToUintPtr(me  in MmapMemPool()
    [all...]

pool_map.cpp
    26  ASSERT(ToUintPtr(pool_addr) % POOL_MAP_GRANULARITY == 0);  in AddPoolToMap()
    30  size_t last_map_num = AddrToMapNum(ToVoidPtr(ToUintPtr(pool_addr) + pool_size - 1U));  in AddPoolToMap()
    39  ASSERT(ToUintPtr(pool_addr) % POOL_MAP_GRANULARITY == 0);  in RemovePoolFromMap()
    42  size_t last_map_num = AddrToMapNum(ToVoidPtr(ToUintPtr(pool_addr) + pool_size - 1U));  in RemovePoolFromMap()
    61  if (ToUintPtr(addr) > (POOL_MAP_COVERAGE - 1U)) {  in GetSpaceType()

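The pool_map.cpp hits translate pool addresses into map-cell indices at a fixed granularity. The sketch below shows that bookkeeping under assumed values; POOL_MAP_GRANULARITY, the byte-vector map, and this AddPoolToMap signature are illustrative only.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    inline uintptr_t ToUintPtr(const void *p) { return reinterpret_cast<uintptr_t>(p); }

    // Illustrative granularity; the real POOL_MAP_GRANULARITY comes from the runtime configuration.
    constexpr size_t POOL_MAP_GRANULARITY = 256U * 1024U;

    // Index of the map cell that covers an address.
    inline size_t AddrToMapNum(const void *addr)
    {
        return ToUintPtr(addr) / POOL_MAP_GRANULARITY;
    }

    // Mark every map cell covered by [pool_addr, pool_addr + pool_size) with a space type.
    // The map must already be large enough to cover the last cell.
    inline void AddPoolToMap(std::vector<uint8_t> &map, const void *pool_addr, size_t pool_size, uint8_t space_type)
    {
        // Pools must start on a granularity boundary (cf. line 26 in pool_map.cpp).
        assert(ToUintPtr(pool_addr) % POOL_MAP_GRANULARITY == 0);
        size_t first_map_num = AddrToMapNum(pool_addr);
        // The last byte of the pool decides the last covered cell (cf. line 30).
        size_t last_map_num = AddrToMapNum(reinterpret_cast<const void *>(ToUintPtr(pool_addr) + pool_size - 1U));
        for (size_t i = first_map_num; i <= last_map_num; ++i) {
            map.at(i) = space_type;
        }
    }
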
/arkcompiler/runtime_core/static_core/libpandabase/mem/ |
arena-inl.h
    31  startPos_(ToVoidPtr(AlignUp(ToUintPtr(buff), GetAlignmentInBytes(startAlignment)))),  in Arena()
    35  ASSERT(ToUintPtr(buff) == AlignUp(ToUintPtr(buff), GetAlignmentInBytes(ARENA_DEFAULT_ALIGNMENT)));  in Arena()
    53  curPos_ = ToVoidPtr(ToUintPtr(ret) + size);  in Alloc()
    61  ASSERT(AlignUp(ToUintPtr(curPos_), GetAlignmentInBytes(alignment)) == ToUintPtr(curPos_));  in AlignedAlloc()
    64  uintptr_t newCurPos = ToUintPtr(curPos_) + size;  in AlignedAlloc()
    65  if (newCurPos <= (ToUintPtr(buff_) + size_)) {  in AlignedAlloc()
    92  ASSERT(ToUintPtr(curPos_) >= ToUintPtr(GetStartPo  in GetFreeSize()
    [all...]

stack_like_allocator-inl.h
    47  endAddr_ = ToVoidPtr(ToUintPtr(startAddr_) + MAX_SIZE);  in StackLikeAllocator()
    49  ASSERT(AlignUp(ToUintPtr(freePointer_), GetAlignmentInBytes(ALIGNMENT)) == ToUintPtr(freePointer_));  in StackLikeAllocator()
    72  uintptr_t newCurPos = ToUintPtr(freePointer_) + size;  in Alloc()
    73  if (LIKELY(newCurPos <= ToUintPtr(endAddr_))) {  in Alloc()
    81  ASSERT(AlignUp(ToUintPtr(ret), GetAlignmentInBytes(ALIGNMENT)) == ToUintPtr(ret));  in Alloc()
    93  ASSERT(ToUintPtr(mem) == AlignUp(ToUintPtr(mem), GetAlignmentInBytes(ALIGNMENT)));  in Free()
    95  if ((ToUintPtr(me  in Free()
    [all...]

stack_like_allocator.h
    53  ASSERT(ToUintPtr(freePointer_) >= ToUintPtr(startAddr_));  in GetAllocatedSize()
    54  return ToUintPtr(freePointer_) - ToUintPtr(startAddr_);  in GetAllocatedSize()
    60  reservedEndAddr_ = ToVoidPtr(ToUintPtr(startAddr_) + size);  in SetReservedMemorySize()
    76  return ToUintPtr(allocatedEndAddr_) - ToUintPtr(startAddr_);  in GetFullMemorySize()

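Taken together, the two stack_like_allocator entries show a LIFO bump allocator whose bookkeeping is pure pointer arithmetic: Alloc advances a free pointer, Free rolls it back, and the allocated size is the distance travelled. A hedged, self-contained approximation (the class name and the omitted alignment handling are simplifications):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    inline uintptr_t ToUintPtr(void *p) { return reinterpret_cast<uintptr_t>(p); }
    inline void *ToVoidPtr(uintptr_t v) { return reinterpret_cast<void *>(v); }

    // Hypothetical LIFO allocator over a preallocated range [startAddr_, endAddr_).
    class SketchStackLikeAllocator {
    public:
        SketchStackLikeAllocator(void *start, size_t capacity)
            : startAddr_(start), endAddr_(ToVoidPtr(ToUintPtr(start) + capacity)), freePointer_(start) {}

        void *Alloc(size_t size)
        {
            uintptr_t newCurPos = ToUintPtr(freePointer_) + size;
            if (newCurPos > ToUintPtr(endAddr_)) {  // cf. line 73: bounds check against endAddr_
                return nullptr;
            }
            void *ret = freePointer_;
            freePointer_ = ToVoidPtr(newCurPos);
            return ret;
        }

        void Free(void *mem)
        {
            // LIFO discipline: freeing rolls the free pointer back to the freed block.
            assert(ToUintPtr(mem) >= ToUintPtr(startAddr_) && ToUintPtr(mem) < ToUintPtr(freePointer_));
            freePointer_ = mem;
        }

        size_t GetAllocatedSize() const
        {
            // Allocated size is the distance the free pointer has advanced (cf. lines 53-54).
            assert(ToUintPtr(freePointer_) >= ToUintPtr(startAddr_));
            return ToUintPtr(freePointer_) - ToUintPtr(startAddr_);
        }

    private:
        void *startAddr_;
        void *endAddr_;
        void *freePointer_;
    };
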
mmap_mem_pool-inl.h
    58  Pool newPool(elementSize - size, ToVoidPtr(ToUintPtr(elementMem) + size));  in PopFreePool()
    67  uintptr_t poolStart = ToUintPtr(pool.GetMem());  in PopFreePool()
    91  ASSERT(ToUintPtr(prevPool->GetMem()) + prevPool->GetSize() == ToUintPtr(mmapPool->GetMem()));  in PushFreePool()
    103 ASSERT(ToUintPtr(mmapPool->GetMem()) + mmapPool->GetSize() == ToUintPtr(nextPool->GetMem()));  in PushFreePool()
    175 ASSERT((ToUintPtr(mem) < PANDA_32BITS_HEAP_END_OBJECTS_ADDRESS) || (objectSpaceSize == 0));
    176 ASSERT(ToUintPtr(mem) + objectSpaceSize <= PANDA_32BITS_HEAP_END_OBJECTS_ADDRESS);
    183 ASSERT(AlignUp(ToUintPtr(mem), PANDA_POOL_ALIGNMENT_IN_BYTES) == ToUintPtr(me
    [all...]

pool_map.cpp
    23  ASSERT((ToUintPtr(poolAddr) & POOL_MAP_GRANULARITY_MASK) == 0);  in AddPoolToMap()
    27  MapNumType lastMapNum = AddrToMapNum(ToVoidPtr(ToUintPtr(poolAddr) + poolSize - 1U));  in AddPoolToMap()
    36  ASSERT((ToUintPtr(poolAddr) & POOL_MAP_GRANULARITY_MASK) == 0);  in RemovePoolFromMap()
    39  MapNumType lastMapNum = AddrToMapNum(ToVoidPtr(ToUintPtr(poolAddr) + poolSize - 1U));  in RemovePoolFromMap()
    58  if (ToUintPtr(addr) > (POOL_MAP_COVERAGE - 1U)) {  in GetSpaceType()

mem.h
    108 constexpr uintptr_t ToUintPtr(T *val)  in ToUintPtr() function
    113 constexpr uintptr_t ToUintPtr(std::nullptr_t)  in ToUintPtr() function
    115 return ToUintPtr(static_cast<void *>(nullptr));  in ToUintPtr()
    131 return static_cast<ObjectPointerType>(ToUintPtr(ptr));  in ToObjPtr()
    244 return IsAddressInObjectsHeap(ToUintPtr(address));  in IsAddressInObjectsHeap()
    249 return address == ToUintPtr(nullptr) || IsAddressInObjectsHeap(address);  in IsAddressInObjectsHeapOrNull()
    255 return IsAddressInObjectsHeapOrNull(ToUintPtr(address));  in IsAddressInObjectsHeapOrNull()
    261 ASSERT(IsAddressInObjectsHeapOrNull(ToUintPtr(val)));  in ToObjPtrType()
    262 return static_cast<ObjectPointerType>(ToUintPtr(val));  in ToObjPtrType()
    267 return static_cast<ObjectPointerType>(ToUintPtr(nullpt  in ToObjPtrType()
    [all...]

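These hits are the ToUintPtr/ToObjPtr definitions themselves in mem.h. A self-contained approximation follows; ObjectPointerType is assumed to be a 32-bit compressed pointer here and the heap-range assertions are omitted, so treat it as a sketch rather than the real header.

    #include <cstddef>
    #include <cstdint>

    // ObjectPointerType is assumed to be a 32-bit compressed object pointer here; the real
    // definition depends on the build configuration.
    using ObjectPointerType = uint32_t;

    template <class T>
    constexpr uintptr_t ToUintPtr(T *val)
    {
        return reinterpret_cast<uintptr_t>(val);
    }

    constexpr uintptr_t ToUintPtr(std::nullptr_t)
    {
        return 0U;  // mem.h spells this as ToUintPtr(static_cast<void *>(nullptr))
    }

    inline void *ToVoidPtr(uintptr_t val)
    {
        return reinterpret_cast<void *>(val);
    }

    template <class T>
    ObjectPointerType ToObjPtr(T *ptr)
    {
        // Truncating cast; the real code first asserts the address lies inside the objects heap.
        return static_cast<ObjectPointerType>(ToUintPtr(ptr));
    }
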
/arkcompiler/ets_runtime/ecmascript/tests/ |
barrier_test.cpp
    42  NewToEdenBeforeCopy.emplace(ToUintPtr(mem));  in HWTEST_F_L0()
    46  LocalToShareBeforeCopy.emplace(ToUintPtr(mem));  in HWTEST_F_L0()
    50  JSTaggedValue* to = reinterpret_cast<JSTaggedValue*>(ToUintPtr(dstArray->GetData()));  in HWTEST_F_L0()
    51  JSTaggedValue* from = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArray->GetData()));  in HWTEST_F_L0()
    56  EXPECT_TRUE(NewToEdenBeforeCopy.count(ToUintPtr(mem)));  in HWTEST_F_L0()
    60  EXPECT_TRUE(LocalToShareBeforeCopy.count(ToUintPtr(mem)));  in HWTEST_F_L0()
    85  NewToEdenBeforeCopy.emplace(ToUintPtr(mem));  in HWTEST_F_L0()
    89  LocalToShareBeforeCopy.emplace(ToUintPtr(mem));  in HWTEST_F_L0()
    93  JSTaggedValue* to = reinterpret_cast<JSTaggedValue*>(ToUintPtr(dstArray->GetData()));  in HWTEST_F_L0()
    94  JSTaggedValue* from = reinterpret_cast<JSTaggedValue*>(ToUintPtr(srcArra  in HWTEST_F_L0()
    [all...]

/arkcompiler/runtime_core/static_core/runtime/mem/ |
tlab.h
    228 ASSERT(ToUintPtr(curFreePosition_) >= ToUintPtr(memoryStartAddr_));  in GetOccupiedSize()
    229 return ToUintPtr(curFreePosition_) - ToUintPtr(memoryStartAddr_);  in GetOccupiedSize()
    234 return MemRange(ToUintPtr(memoryStartAddr_), ToUintPtr(curFreePosition_) - 1);  in GetMemRangeForOccupiedMemory()
    261 ASSERT(ToUintPtr(memoryEndAddr_) >= ToUintPtr(memoryStartAddr_));  in GetSize()
    262 return ToUintPtr(memoryEndAddr_) - ToUintPtr(memoryStartAddr  in GetSize()
    [all...]

tlab.cpp
    37  ASSERT(ToUintPtr(address) == AlignUp(ToUintPtr(address), DEFAULT_ALIGNMENT_IN_BYTES));  in Fill()
    39  memoryEndAddr_ = ToVoidPtr(ToUintPtr(address) + size);  in Fill()
    64  ASSERT(ToUintPtr(curFreePosition_) == AlignUp(ToUintPtr(curFreePosition_), DEFAULT_ALIGNMENT_IN_BYTES));  in Alloc()
    67  curFreePosition_ = ToVoidPtr(ToUintPtr(curFreePosition_) + requestedSize);  in Alloc()
    82  curPtr = ToVoidPtr(AlignUp(ToUintPtr(curPtr) + objectSize, DEFAULT_ALIGNMENT_IN_BYTES));  in IterateOverObjects()
    95  void *endPtr = ToVoidPtr(std::min(ToUintPtr(curFreePosition_), memRange.GetEndAddress() + 1));  in IterateOverObjectsInRange()
    96  void *startIteratePos = ToVoidPtr(std::max(ToUintPtr(currentPtr), memRange.GetStartAddress()));  in IterateOverObjectsInRange()
    99  currentPtr = ToVoidPtr(AlignUp(ToUintPtr(currentPt  in IterateOverObjectsInRange()
    [all...]

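The tlab.h and tlab.cpp entries describe a thread-local allocation buffer: Fill() points it at an aligned chunk, Alloc() bump-allocates until the end address, and the occupied size is the distance the free position has moved. A compact sketch under assumed alignment (SketchTLAB and the DEFAULT_ALIGNMENT_IN_BYTES value are illustrative):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    inline uintptr_t ToUintPtr(void *p) { return reinterpret_cast<uintptr_t>(p); }
    inline void *ToVoidPtr(uintptr_t v) { return reinterpret_cast<void *>(v); }
    inline uintptr_t AlignUp(uintptr_t v, size_t a) { return (v + a - 1U) & ~(static_cast<uintptr_t>(a) - 1U); }

    constexpr size_t DEFAULT_ALIGNMENT_IN_BYTES = 8U;  // assumption; the real value is target-defined

    // Hypothetical thread-local allocation buffer: Fill() points it at a memory chunk,
    // Alloc() bump-allocates aligned pieces until the buffer is exhausted.
    class SketchTLAB {
    public:
        void Fill(void *address, size_t size)
        {
            // The buffer must start aligned (cf. tlab.cpp line 37).
            assert(ToUintPtr(address) == AlignUp(ToUintPtr(address), DEFAULT_ALIGNMENT_IN_BYTES));
            memoryStartAddr_ = address;
            memoryEndAddr_ = ToVoidPtr(ToUintPtr(address) + size);
            curFreePosition_ = address;
        }

        void *Alloc(size_t size)
        {
            size_t requestedSize = AlignUp(size, DEFAULT_ALIGNMENT_IN_BYTES);
            uintptr_t newPos = ToUintPtr(curFreePosition_) + requestedSize;
            if (newPos > ToUintPtr(memoryEndAddr_)) {
                return nullptr;  // caller falls back to a slower allocation path
            }
            void *ret = curFreePosition_;
            curFreePosition_ = ToVoidPtr(newPos);
            return ret;
        }

        size_t GetOccupiedSize() const
        {
            // cf. tlab.h lines 228-229: the occupied size is how far the free position has moved.
            return ToUintPtr(curFreePosition_) - ToUintPtr(memoryStartAddr_);
        }

    private:
        void *memoryStartAddr_ {nullptr};
        void *memoryEndAddr_ {nullptr};
        void *curFreePosition_ {nullptr};
    };
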
bump-allocator-inl.h
    70  ASSERT(ToUintPtr(arena_.GetArenaEnd()) == ToUintPtr(mem));  in ExpandMemory()
    126 uintptr_t endOfArena = ToUintPtr(arena_.GetArenaEnd());  in CreateNewTLAB()
    169 curPtr = ToVoidPtr(AlignUp(ToUintPtr(curPtr) + objectSize, DEFAULT_ALIGNMENT_IN_BYTES));  in IterateOverObjects()
    190 ASSERT(ToUintPtr(rightBorder) >= ToUintPtr(leftBorder));  in IterateOverObjectsInRange()
    193 ASSERT(ToUintPtr(rightBorder) - ToUintPtr(leftBorder) == (CrossingMapSingleton::GetCrossingMapGranularity() - 1U));  in IterateOverObjectsInRange()
    194 ASSERT((ToUintPtr(rightBorder) & (~(CrossingMapSingleton::GetCrossingMapGranularity() - 1U))) ==  in IterateOverObjectsInRange()
    195 (ToUintPtr(leftBorde  in IterateOverObjectsInRange()
    [all...]

freelist_allocator-inl.h
    74  uintptr_t memoryPointer = ToUintPtr(memoryBlock->GetMemory());  in Alloc()
    108 uintptr_t paddingSize = memoryPointer - ToUintPtr(memoryBlock->GetMemory());  in Alloc()
    124 ToUintPtr(memoryBlock) + memoryBlock->GetSize() + sizeof(MemoryBlockHeader) - memoryPointer;  in Alloc()
    162 ToUintPtr(memoryHeader) + memoryHeader->GetSize() + sizeof(MemoryBlockHeader) - ToUintPtr(mem);  in FreeUnsafe()
    168 prevSize = ToUintPtr(prevUsedHeader) + prevUsedHeader->GetSize() + sizeof(MemoryBlockHeader) -  in FreeUnsafe()
    169 ToUintPtr(prevUsedHeader->GetMemory());  in FreeUnsafe()
    289 ASSERT((ToUintPtr(mem) & (sizeof(MemoryBlockHeader) - 1)) == 0U);  in AddMemoryPool()
    387 ASSERT(ToUintPtr(rightBorder) >= ToUintPtr(leftBorde  in IterateOverObjectsInRange()
    [all...]

humongous_obj_allocator-inl.h
    121 auto memHeader = static_cast<MemoryPoolHeader *>(ToVoidPtr(ToUintPtr(mem) & PAGE_SIZE_MASK));  in FreeUnsafe()
    171 if (AlignUp(ToUintPtr(mem), PAGE_SIZE) != ToUintPtr(mem)) {  in AddMemoryPool()
    186 uintptr_t allocAddr = ToUintPtr(memoryPool->GetMemory());  in ReleaseUnusedPagesOnAlloc()
    187 uintptr_t poolAddr = ToUintPtr(memoryPool);  in ReleaseUnusedPagesOnAlloc()
    243 ASSERT(ToUintPtr(rightBorder) >= ToUintPtr(leftBorder));  in IterateOverObjectsInRange()
    246 ASSERT(ToUintPtr(rightBorder) - ToUintPtr(leftBorder) == (CrossingMapSingleton::GetCrossingMapGranularity() - 1U));  in IterateOverObjectsInRange()
    247 ASSERT((ToUintPtr(rightBorde  in IterateOverObjectsInRange()
    [all...]

object_helpers-inl.h
    44  ASSERT(ToUintPtr(cls) + offset >= ToUintPtr(object));  in TraverseClass()
    46  uint32_t objOffset = ToUintPtr(cls) + offset - ToUintPtr(object);  in TraverseClass()
    109 ASSERT(IsAligned(ToUintPtr(begin), DEFAULT_ALIGNMENT_IN_BYTES));  in TraverseArray()
    121 auto offset = ToUintPtr(p) - ToUintPtr(array);  in TraverseArray()
    224 ASSERT(IsAligned(ToUintPtr(begin), DEFAULT_ALIGNMENT_IN_BYTES));  in TraverseArray()
    236 auto offset = ToUintPtr(p) - ToUintPtr(arra  in TraverseArray()
    [all...]

region_allocator.h
    41  auto *region = reinterpret_cast<Region *>(((ToUintPtr(object)) & ~DEFAULT_REGION_MASK));  in ObjectToRegion()
    42  ASSERT(ToUintPtr(PoolManager::GetMmapMemPool()->GetStartAddrPoolForAddr(object)) == ToUintPtr(region));  in ObjectToRegion()
    54  return ((ToUintPtr(o1) ^ ToUintPtr(o2)) >> regionSizeBits) == 0;  in IsSameRegion()
    221 if (region->Intersect(ToUintPtr(begin), ToUintPtr(end))) {  in IterateOverObjectsInRange()
    223 if (ToUintPtr(begin) <= ToUintPtr(obj) && ToUintPtr(ob  in IterateOverObjectsInRange()
    [all...]

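The region_allocator.h hits rely on regions being size-aligned, so the region header is recovered by masking off the low bits of an object address, and two objects share a region exactly when their addresses agree above the region-offset bits. Sketch with assumed region geometry (the 256 KB size and the Region stub are illustrative):

    #include <cstddef>
    #include <cstdint>

    inline uintptr_t ToUintPtr(const void *p) { return reinterpret_cast<uintptr_t>(p); }

    // Illustrative geometry: 256 KB regions, each aligned to its own size.
    constexpr size_t REGION_SIZE_BITS = 18U;
    constexpr uintptr_t DEFAULT_REGION_MASK = (static_cast<uintptr_t>(1U) << REGION_SIZE_BITS) - 1U;

    // Stand-in for the region header that sits at the start of every region-aligned block.
    struct Region { /* header fields elided */ };

    inline Region *ObjectToRegion(const void *object)
    {
        // Clearing the low bits of an object address yields its region base (cf. line 41).
        return reinterpret_cast<Region *>(ToUintPtr(object) & ~DEFAULT_REGION_MASK);
    }

    inline bool IsSameRegion(const void *o1, const void *o2, size_t regionSizeBits = REGION_SIZE_BITS)
    {
        // Two addresses share a region iff they agree on every bit above the region offset (cf. line 54).
        return ((ToUintPtr(o1) ^ ToUintPtr(o2)) >> regionSizeBits) == 0;
    }
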
/arkcompiler/ets_runtime/ecmascript/mem/ |
visitor.h
    58  visitor(root, ObjectSlot(ToUintPtr(root)),  in IterateBody()
    59  ObjectSlot(ToUintPtr(root) + hclassEnd), VisitObjectArea::NORMAL);  in IterateBody()
    61  visitor(root, ObjectSlot(ToUintPtr(root) + hclassEnd),  in IterateBody()
    62  ObjectSlot(ToUintPtr(root) + size), VisitObjectArea::RAW_DATA);  in IterateBody()
    82  visitor(root, ObjectSlot(ToUintPtr(root) + startOffset),  in IterateBody()
    83  ObjectSlot(ToUintPtr(root) + endOffset), area);  in IterateBody()
    109 visitor(root, ObjectSlot(ToUintPtr(root)),
    110 ObjectSlot(ToUintPtr(root) + hclassEnd), VisitObjectArea::NORMAL);
    131 visitor(root, ObjectSlot(ToUintPtr(root) + start),  in IteratorRange()
    132 ObjectSlot(ToUintPtr(roo  in IteratorRange()
    [all...]

region-inl.h
    197 return set->TestBit(ToUintPtr(this), addr);  in TestNewToEden()
    209 return set->TestBit(ToUintPtr(this), addr);  in TestOldToNew()
    220 return packedData_.localToShareSet_->TestBit(ToUintPtr(this), addr);  in TestLocalToShare()
    240 set->Insert(ToUintPtr(this), addr);  in InsertCrossRegionRSet()
    246 set->AtomicInsert(ToUintPtr(this), addr);  in AtomicInsertCrossRegionRSet()
    264 set->Insert(ToUintPtr(this), addr);  in InsertLocalToShareRSet()
    276 set->AtomicInsert(ToUintPtr(this), addr);  in AtomicInsertLocalToShareRSet()
    282 packedData_.localToShareSet_->ClearRange(ToUintPtr(this), start, end);  in ClearLocalToShareRSetInRange()
    289 packedData_.localToShareSet_->AtomicClearRange(ToUintPtr(this), start, end);  in AtomicClearLocalToShareRSetInRange()
    304 sweepingLocalToShareRSet_->AtomicClearRange(ToUintPtr(thi  in AtomicClearSweepingLocalToShareRSetInRange()
    [all...]

/arkcompiler/runtime_core/static_core/libpandabase/tests/ |
mmap_test.cpp
    80  EXPECT_GE(ToUintPtr(result), ToUintPtr(DEFAULT_MMAP_TEST_HINT)) << "mmaped address can't be less then hint address";  in TEST_F()
    81  EXPECT_LE(ToUintPtr(result) + DEFAULT_MMAP_TEST_SIZE, 4_GB) << "mmaped sapce must be placed into first 4GB";  in TEST_F()
    103 EXPECT_GE(ToUintPtr(result), ToUintPtr(DEFAULT_MMAP_TEST_HINT)) << "mmaped address can't be less then hint address";  in TEST_F()
    104 EXPECT_LE(ToUintPtr(result) + DEFAULT_MMAP_TEST_SIZE, 4_GB) << "mmaped sapce must be placed into first 4GB";  in TEST_F()
    106 os::mem::ReleasePages(ToUintPtr(result), ToUintPtr(result) + DEFAULT_MMAP_TEST_SIZE);  in TEST_F()
    145 ASSERT_NE(ToUintPtr(result), curAddr);  in TEST_F()
    147 ASSERT_TRUE(ToUintPtr(resul  in TEST_F()
    [all...]

/arkcompiler/ets_runtime/ecmascript/dfx/stackinfo/tests/ |
js_stackinfo_test.cpp
    56  uintptr_t ToUintPtr(FrameType frame)  in ToUintPtr() function
    77  if (i == ToUintPtr(FrameType::OPTIMIZED_ENTRY_FRAME) ||  in HWTEST_F_L0()
    78  i == ToUintPtr(FrameType::ASM_INTERPRETER_ENTRY_FRAME)) {  in HWTEST_F_L0()
    96  if (i == ToUintPtr(FrameType::ASM_INTERPRETER_FRAME) ||  in HWTEST_F_L0()
    97  i == ToUintPtr(FrameType::INTERPRETER_CONSTRUCTOR_FRAME) ||  in HWTEST_F_L0()
    98  i == ToUintPtr(FrameType::INTERPRETER_FRAME) ||  in HWTEST_F_L0()
    99  i == ToUintPtr(FrameType::INTERPRETER_FAST_NEW_FRAME)) {  in HWTEST_F_L0()
    117 if (i == ToUintPtr(FrameType::FASTJIT_FUNCTION_FRAME) ||  in HWTEST_F_L0()
    118 i == ToUintPtr(FrameType::FASTJIT_FAST_CALL_FUNCTION_FRAME)) {  in HWTEST_F_L0()
    136 if (i == ToUintPtr(FrameTyp  in HWTEST_F_L0()
    [all...]

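This test defines its own ToUintPtr overload so FrameType enumerators can be compared against a plain loop counter. A trimmed-down sketch; the enumerator list is a subset of the hits and the underlying values are assumptions:

    #include <cstdint>

    // A subset of the FrameType enumerators referenced by the test; the real enum is larger
    // and its underlying values are fixed by the runtime, so these are placeholders.
    enum class FrameType : uintptr_t {
        OPTIMIZED_ENTRY_FRAME,
        ASM_INTERPRETER_ENTRY_FRAME,
        ASM_INTERPRETER_FRAME,
        INTERPRETER_FRAME,
    };

    // Overload in the spirit of js_stackinfo_test.cpp line 56: lets an enum value be compared
    // against a plain uintptr_t loop counter.
    uintptr_t ToUintPtr(FrameType frame)
    {
        return static_cast<uintptr_t>(frame);
    }

    bool IsEntryFrame(uintptr_t i)
    {
        return i == ToUintPtr(FrameType::OPTIMIZED_ENTRY_FRAME) ||
               i == ToUintPtr(FrameType::ASM_INTERPRETER_ENTRY_FRAME);
    }
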
/arkcompiler/runtime_core/static_core/runtime/include/ |
class-inl.h
    665 ASSERT(ToUintPtr(object) < ToUintPtr(this) && ToUintPtr(this) < ToUintPtr(object) + object->ObjectSize());
    666 auto newOffset = offset + (ToUintPtr(this) - ToUintPtr(object));
    694 ASSERT(ToUintPtr(object) < ToUintPtr(this) && ToUintPtr(this) < ToUintPtr(objec
    [all...]

/arkcompiler/runtime_core/static_core/runtime/mem/gc/heap-space-misc/ |
crossing_map.h
    242 ASSERT(ToUintPtr(addr) >= startAddr_);  in GetMapNumFromAddr()
    243 size_t mapNum = (ToUintPtr(addr) - startAddr_) / CROSSING_MAP_GRANULARITY;  in GetMapNumFromAddr()
    256 ASSERT(ToUintPtr(addr) >= startAddr_);
    257 size_t offset = (ToUintPtr(addr) - startAddr_) % CROSSING_MAP_GRANULARITY;
    273 (ToUintPtr(GetStaticArrayElement(staticArrayNum)) + relativeMapNum * sizeof(CrossingMapElement))));
    280 static_cast<StaticArrayPtr>(ToVoidPtr((ToUintPtr(staticArray_) + staticArrayNum * sizeof(StaticArrayPtr))));
    287 void *element = ToVoidPtr(ToUintPtr(staticArray_) + staticArrayNum * sizeof(StaticArrayPtr));
    293 ASSERT(ToUintPtr(addr) >= startAddr_);
    294 size_t staticArrayNum = (ToUintPtr(addr) - startAddr_) / CROSSING_MAP_STATIC_ARRAY_GRANULARITY;

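The crossing map divides the covered address range into fixed-size cells and derives a cell index (and an offset within the cell) from any address. A sketch with an assumed granularity; the real constants and the static-array indirection are omitted:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    inline uintptr_t ToUintPtr(const void *p) { return reinterpret_cast<uintptr_t>(p); }

    // Illustrative cell size; the real CROSSING_MAP_GRANULARITY is a GC configuration constant.
    constexpr size_t CROSSING_MAP_GRANULARITY = 4U * 1024U;

    // Index of the crossing-map cell that covers addr, counted from the start of the range.
    inline size_t GetMapNumFromAddr(uintptr_t startAddr, const void *addr)
    {
        assert(ToUintPtr(addr) >= startAddr);  // cf. line 242
        return (ToUintPtr(addr) - startAddr) / CROSSING_MAP_GRANULARITY;
    }

    // Byte offset of addr inside its cell (cf. line 257).
    inline size_t GetOffsetInCell(uintptr_t startAddr, const void *addr)
    {
        assert(ToUintPtr(addr) >= startAddr);
        return (ToUintPtr(addr) - startAddr) % CROSSING_MAP_GRANULARITY;
    }
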
/arkcompiler/runtime_core/static_core/runtime/tests/ |
tlab_test.cpp
    57  auto tlabBuffOffs = AlignUp(ToUintPtr(mem) + sizeof(mem::TLAB), DEFAULT_ALIGNMENT_IN_BYTES) - ToUintPtr(mem);  in CreateNewTLAB()
    58  auto newTlab = new (mem) TLAB(ToVoidPtr(ToUintPtr(mem) + tlabBuffOffs), TLAB_TEST_SIZE - tlabBuffOffs);  in CreateNewTLAB()
    77  auto freePointerAddr = static_cast<uintptr_t *>(ToVoidPtr(ToUintPtr(tlab) + TLAB::TLABFreePointerOffset()));  in TEST_F()
    78  auto endAddr = static_cast<uintptr_t *>(ToVoidPtr(ToUintPtr(tlab) + TLAB::TLABEndAddrOffset()));  in TEST_F()
    84  ASSERT_TRUE(ToUintPtr(mem) == oldFreePointer);  in TEST_F()

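CreateNewTLAB in this test carves one raw buffer into a header followed by an aligned payload: the payload offset is the header size rounded up to the default alignment, and the header is placement-constructed over the front of the buffer. A hedged sketch with a stand-in header type (SketchHeader replaces mem::TLAB):

    #include <cstddef>
    #include <cstdint>
    #include <new>

    inline uintptr_t ToUintPtr(void *p) { return reinterpret_cast<uintptr_t>(p); }
    inline void *ToVoidPtr(uintptr_t v) { return reinterpret_cast<void *>(v); }
    inline uintptr_t AlignUp(uintptr_t v, size_t a) { return (v + a - 1U) & ~(static_cast<uintptr_t>(a) - 1U); }

    constexpr size_t DEFAULT_ALIGNMENT_IN_BYTES = 8U;  // assumption; the real value is target-defined

    // Stand-in for the header object; the test placement-constructs mem::TLAB here instead.
    struct SketchHeader {
        SketchHeader(void *buff, size_t size) : buff_(buff), size_(size) {}
        void *buff_;
        size_t size_;
    };

    // Carve a raw buffer into [header | aligned payload], as CreateNewTLAB does (lines 57-58).
    // The buffer must be larger than sizeof(SketchHeader) plus the alignment slack.
    SketchHeader *PlaceHeaderWithPayload(void *mem, size_t totalSize)
    {
        // Payload starts at the first aligned address after the header.
        uintptr_t offs = AlignUp(ToUintPtr(mem) + sizeof(SketchHeader), DEFAULT_ALIGNMENT_IN_BYTES) - ToUintPtr(mem);
        return new (mem) SketchHeader(ToVoidPtr(ToUintPtr(mem) + offs), totalSize - offs);
    }
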
/arkcompiler/ets_runtime/ecmascript/ |
frames.cpp
    581 end = ToUintPtr(prevFrame);  in GetPrevFrame()
    586 end = ToUintPtr(prevFrame);  in GetPrevFrame()
    591 end = ToUintPtr(prevFrame);  in GetPrevFrame()
    682 uintptr_t jsFuncSlot = ToUintPtr(jsFuncPtr);  in GCIterate()
    689 uintptr_t start = ToUintPtr(argv); // argv  in GCIterate()
    690 uintptr_t end = ToUintPtr(argv + argc);  in GCIterate()
    695 auto machineCodeSlot = ObjectSlot(ToUintPtr(it.GetMachineCodeSlot()));  in GCIterate()
    752 uintptr_t start = ToUintPtr(argv); // argv  in GCIterate()
    753 uintptr_t end = ToUintPtr(argv + argc);  in GCIterate()
    758 auto machineCodeSlot = ObjectSlot(ToUintPtr(i  in GCIterate()
    [all...]

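In GCIterate the frame's argument area is handed to the visitor as a raw [start, end) address range computed from argv and argc. The sketch below walks such a range slot by slot; JSTaggedType, the visitor signature, and VisitArgumentSlots are simplified assumptions, not the ecmascript API:

    #include <cstddef>
    #include <cstdint>
    #include <functional>

    // Stand-in for the 64-bit tagged value word stored in each argument slot.
    using JSTaggedType = uint64_t;

    inline uintptr_t ToUintPtr(const JSTaggedType *p) { return reinterpret_cast<uintptr_t>(p); }

    // Walk every slot of a frame's argument area, expressed as a raw address range the way
    // GCIterate derives it from argv/argc (cf. frames.cpp lines 689-690).
    void VisitArgumentSlots(const JSTaggedType *argv, size_t argc,
                            const std::function<void(uintptr_t slotAddr)> &visitor)
    {
        uintptr_t start = ToUintPtr(argv);       // address of the first argument slot
        uintptr_t end = ToUintPtr(argv + argc);  // one past the last argument slot
        for (uintptr_t slot = start; slot < end; slot += sizeof(JSTaggedType)) {
            visitor(slot);
        }
    }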