/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mempool.h"
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <mutex>
#include "securec.h"
#include "mpl_logging.h"

namespace maple {
MemPoolCtrler memPoolCtrler;
bool MemPoolCtrler::freeMemInTime = false;

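// Recycle the mem blocks of a dying or reset mem pool: fixed-size blocks are chained
// back onto the controller's free list for reuse, big blocks are freed immediately.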
void MemPoolCtrler::FreeMemBlocks(const MemPool &pool, MemBlock *fixedMemHead, MemBlock *bigMemHead)
{
    (void)(pool);

    MemBlock *fixedTail = nullptr;

    if (fixedMemHead != nullptr) {
        fixedTail = fixedMemHead;
        while (fixedTail->nextMemBlock != nullptr) {
            fixedTail = fixedTail->nextMemBlock;
        }
    }

    while (bigMemHead != nullptr) {
        auto *cur = bigMemHead;
        bigMemHead = bigMemHead->nextMemBlock;
        free(cur->startPtr);
        delete cur;
    }

    if (fixedTail != nullptr) {
        fixedTail->nextMemBlock = fixedFreeMemBlocks;
        DEBUG_ASSERT(fixedTail->nextMemBlock != fixedTail, "error");
        fixedFreeMemBlocks = fixedMemHead;
    }
}

// Destructor, free all allocated memory
MemPoolCtrler::~MemPoolCtrler()
{
    FreeMem();
}

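// Free all cached fixed-size mem blocks and let the system memory manager
// release the underlying memory.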
void MemPoolCtrler::FreeFixedSizeMemBlockMemory()
{
    FreeMem();
    sysMemoryMgr->ReleaseMemory();
}

// Allocate a new memory pool and register it with the controller
MemPool *MemPoolCtrler::NewMemPool(const std::string &name, bool isLocalPool)
{
    MemPool *memPool = nullptr;

    if (isLocalPool) {
        memPool = new ThreadLocalMemPool(*this, name);
    } else {
        memPool = new ThreadShareMemPool(*this, name);
    }

    return memPool;
}

// This function will be removed soon; DO NOT call it, just use delete memPool
void MemPoolCtrler::DeleteMemPool(MemPool *memPool) const
{
    delete memPool;
}

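// Delete the MemBlock headers cached on the free list; the memory they point to
// comes from sysMemoryMgr and is released through it.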
void MemPoolCtrler::FreeMem()
{
    while (fixedFreeMemBlocks != nullptr) {
        MemBlock *arena = fixedFreeMemBlocks;
        fixedFreeMemBlocks = fixedFreeMemBlocks->nextMemBlock;
        delete arena;
    }
}

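// Dispatch to the fixed-size or the big-block allocator based on the requested size.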
MemBlock *MemPoolCtrler::AllocMemBlock(const MemPool &pool, size_t size)
{
    if (size <= kMemBlockSizeMin) {
        return AllocFixMemBlock(pool);
    } else {
        return AllocBigMemBlock(pool, size);
    }
}

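// Pop a fixed-size block from the free list if one is available; otherwise request
// a kMemBlockMalloc-byte chunk from the system memory manager and carve it into
// kMemBlockSizeMin-byte blocks.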
MemBlock *MemPoolCtrler::AllocFixMemBlock(const MemPool &pool)
{
    (void)(pool);
    MemBlock *ret = nullptr;

    if (fixedFreeMemBlocks != nullptr) {
        ret = fixedFreeMemBlocks;
        fixedFreeMemBlocks = fixedFreeMemBlocks->nextMemBlock;
        return ret;
    }

    uint8_t *ptr = sysMemoryMgr->RealAllocMemory(kMemBlockMalloc);
    // push all blocks but one onto the free list; the last one is returned to the caller
    for (size_t i = 0; i < kMemBlockMalloc / kMemBlockSizeMin - 1; ++i) {
        auto *block = new MemBlock(ptr, kMemBlockSizeMin);
        ptr += kMemBlockSizeMin;
        block->nextMemBlock = fixedFreeMemBlocks;
        fixedFreeMemBlocks = block;
    }

    return new MemBlock(ptr, kMemBlockSizeMin);
}

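// Big blocks are malloc'ed individually and are not recycled through the free list.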
MemBlock *MemPoolCtrler::AllocBigMemBlock(const MemPool &pool, size_t size) const
{
    DEBUG_ASSERT(size > kMemBlockSizeMin, "Big memory block must be bigger than fixed memory block");
    (void)(pool);

    uint8_t *block = reinterpret_cast<uint8_t *>(malloc(size));
    CHECK_FATAL(block != nullptr, "malloc failed");
    return new MemBlock(block, size);
}

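// Destructor, return all mem blocks owned by this pool to the controller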
MemPool::~MemPool()
{
    ctrler.FreeMemBlocks(*this, fixedMemHead, bigMemHead);
}

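// Bump-pointer allocation: serve the request from the current mem block if it
// still has room, otherwise grab a new block from the controller.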
void *MemPool::Malloc(size_t size)
{
    size = BITS_ALIGN(size);
    DEBUG_ASSERT(endPtr >= curPtr, "endPtr should >= curPtr");
    if (size > static_cast<size_t>(endPtr - curPtr)) {
        return AllocNewMemBlock(size);
    }
    uint8_t *retPtr = curPtr;
    curPtr += size;
    return retPtr;
}

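// Give all mem blocks back to the controller and reset the pool to its empty state.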
void MemPool::ReleaseContainingMem()
{
    ctrler.FreeMemBlocks(*this, fixedMemHead, bigMemHead);

    fixedMemHead = nullptr;
    bigMemHead = nullptr;
    endPtr = nullptr;
    curPtr = nullptr;
}

// Allocate size bytes from the memory pool, then zero them
void *MemPool::Calloc(size_t size)
{
    void *p = Malloc(BITS_ALIGN(size));
    DEBUG_ASSERT(p != nullptr, "ERROR: Calloc error");
    errno_t eNum = memset_s(p, BITS_ALIGN(size), 0, BITS_ALIGN(size));
    CHECK_FATAL(eNum == EOK, "memset_s failed");
    return p;
}

// Reallocate to newSize bytes: allocate a new buffer and copy min(oldSize, newSize) bytes over
void *MemPool::Realloc(const void *ptr, size_t oldSize, size_t newSize)
{
    void *result = Malloc(newSize);
    DEBUG_ASSERT(result != nullptr, "ERROR: Realloc error");
    size_t copySize = ((newSize > oldSize) ? oldSize : newSize);
    if (copySize != 0 && ptr != nullptr) {
        errno_t eNum = memcpy_s(result, copySize, ptr, copySize);
        CHECK_FATAL(eNum == EOK, "memcpy_s failed");
    }
    return result;
}

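// Get a fresh mem block from the controller, link it at the head of the matching
// list (fixed or big), and point curPtr/endPtr at the new block.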
uint8_t *MemPool::AllocNewMemBlock(size_t size)
{
    MemBlock **head = nullptr;
    MemBlock *newMemBlock = ctrler.AllocMemBlock(*this, size);
    if (newMemBlock->memSize <= kMemBlockSizeMin) {
        head = &fixedMemHead;
    } else {
        head = &bigMemHead;
    }

    newMemBlock->nextMemBlock = *head;
    *head = newMemBlock;
    CHECK_FATAL(newMemBlock->nextMemBlock != newMemBlock, "error");

    curPtr = newMemBlock->startPtr + size;
    endPtr = newMemBlock->startPtr + newMemBlock->memSize;
    DEBUG_ASSERT(curPtr <= endPtr, "must be");

    return newMemBlock->startPtr;
}

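// Stack-style bump allocation: small requests are served from the fixed-size block
// chain, large requests from the big-block chain, each with its own current/end pointer.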
void *StackMemPool::Malloc(size_t size)
{
    size = BITS_ALIGN(size);
    uint8_t **curPtrPtr = nullptr;
    uint8_t *curEndPtr = nullptr;
    if (size <= kMemBlockSizeMin) {
        curPtrPtr = &curPtr;
        curEndPtr = endPtr;
    } else {
        curPtrPtr = &bigCurPtr;
        curEndPtr = bigEndPtr;
    }
    uint8_t *retPtr = *curPtrPtr;
    DEBUG_ASSERT(curEndPtr >= *curPtrPtr, "endPtr should >= curPtr");
    if (size > static_cast<size_t>(curEndPtr - *curPtrPtr)) {
        retPtr = AllocTailMemBlock(size);
    }
    *curPtrPtr = retPtr + size;
    return retPtr;
}

// Unlike the normal mem pool, the scoped (stack) mem pool never uses a big mem block for a small size
MemBlock *StackMemPool::AllocMemBlockBySize(size_t size)
{
    if (size <= kMemBlockSizeMin) {
        return ctrler.AllocFixMemBlock(*this);
    } else {
        return ctrler.AllocBigMemBlock(*this, size);
    }
}

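// Rewind the pool to the marks recorded by a scoped allocator: restore the stack-top
// blocks and the current/end pointers for both the fixed and the big block chains,
// falling back to the list heads when no mark was taken.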
void StackMemPool::ResetStackTop(const LocalMapleAllocator *alloc, uint8_t *fixedCurPtrMark,
                                 MemBlock *fixedStackTopMark, uint8_t *bigCurPtrMark,
                                 MemBlock *bigStackTopMark) noexcept
{
    CheckTopAllocator(alloc);
    PopAllocator();

    if (fixedStackTopMark != nullptr) {
        fixedMemStackTop = fixedStackTopMark;
        curPtr = fixedCurPtrMark;
        endPtr = fixedMemStackTop->EndPtr();
    } else if (fixedMemHead != nullptr) {
        fixedMemStackTop = fixedMemHead;
        curPtr = fixedMemStackTop->startPtr;
        endPtr = fixedMemStackTop->EndPtr();
    }

    if (bigStackTopMark != nullptr) {
        bigMemStackTop = bigStackTopMark;
        bigCurPtr = bigCurPtrMark;
        bigEndPtr = bigMemStackTop->EndPtr();
    } else if (bigMemHead != nullptr) {
        bigMemStackTop = bigMemHead;
        bigCurPtr = bigMemStackTop->startPtr;
        bigEndPtr = bigMemStackTop->EndPtr();
    }
}

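// Advance the stack top to the next usable mem block: reuse the next block in the
// chain when it is large enough, otherwise allocate a new block and splice it in
// right after the current top.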
uint8_t *StackMemPool::AllocTailMemBlock(size_t size)
{
    MemBlock **head = nullptr;
    MemBlock **stackTop = nullptr;
    uint8_t **endPtrPtr = nullptr;

    if (size <= kMemBlockSizeMin) {
        head = &fixedMemHead;
        stackTop = &fixedMemStackTop;
        endPtrPtr = &endPtr;
    } else {
        head = &bigMemHead;
        stackTop = &bigMemStackTop;
        endPtrPtr = &bigEndPtr;
    }

    if (*stackTop == nullptr) {
        MemBlock *newMemBlock = AllocMemBlockBySize(size);
        *stackTop = newMemBlock;
        *head = newMemBlock;
        (*stackTop)->nextMemBlock = nullptr;
    } else {
        if ((*stackTop)->nextMemBlock != nullptr && (*stackTop)->nextMemBlock->memSize >= size) {
            *stackTop = (*stackTop)->nextMemBlock;
        } else {
            MemBlock *newMemBlock = AllocMemBlockBySize(size);
            auto *tmp = (*stackTop)->nextMemBlock;
            (*stackTop)->nextMemBlock = newMemBlock;
            *stackTop = newMemBlock;
            newMemBlock->nextMemBlock = tmp;
        }
    }
    *endPtrPtr = (*stackTop)->EndPtr();
    return (*stackTop)->startPtr;
}
}  // namespace maple