
Searched refs:pool (Results 1 - 25 of 700) sorted by relevance


/third_party/node/deps/openssl/openssl/crypto/rand/
rand_pool.c
20 * Allocate memory and initialize a new random pool
25 RAND_POOL *pool = OPENSSL_zalloc(sizeof(*pool)); in ossl_rand_pool_new() local
28 if (pool == NULL) { in ossl_rand_pool_new()
33 pool->min_len = min_len; in ossl_rand_pool_new()
34 pool->max_len = (max_len > RAND_POOL_MAX_LENGTH) ? in ossl_rand_pool_new()
36 pool->alloc_len = min_len < min_alloc_size ? min_alloc_size : min_len; in ossl_rand_pool_new()
37 if (pool->alloc_len > pool->max_len) in ossl_rand_pool_new()
38 pool in ossl_rand_pool_new()
68 RAND_POOL *pool = OPENSSL_zalloc(sizeof(*pool)); ossl_rand_pool_attach() local
94 ossl_rand_pool_free(RAND_POOL *pool) ossl_rand_pool_free() argument
118 ossl_rand_pool_buffer(RAND_POOL *pool) ossl_rand_pool_buffer() argument
126 ossl_rand_pool_entropy(RAND_POOL *pool) ossl_rand_pool_entropy() argument
134 ossl_rand_pool_length(RAND_POOL *pool) ossl_rand_pool_length() argument
145 ossl_rand_pool_detach(RAND_POOL *pool) ossl_rand_pool_detach() argument
157 ossl_rand_pool_reattach(RAND_POOL *pool, unsigned char *buffer) ossl_rand_pool_reattach() argument
180 ossl_rand_pool_entropy_available(RAND_POOL *pool) ossl_rand_pool_entropy_available() argument
196 ossl_rand_pool_entropy_needed(RAND_POOL *pool) ossl_rand_pool_entropy_needed() argument
205 rand_pool_grow(RAND_POOL *pool, size_t len) rand_pool_grow() argument
246 ossl_rand_pool_bytes_needed(RAND_POOL *pool, unsigned int entropy_factor) ossl_rand_pool_bytes_needed() argument
291 ossl_rand_pool_bytes_remaining(RAND_POOL *pool) ossl_rand_pool_bytes_remaining() argument
305 ossl_rand_pool_add(RAND_POOL *pool, const unsigned char *buffer, size_t len, size_t entropy) ossl_rand_pool_add() argument
360 ossl_rand_pool_add_begin(RAND_POOL *pool, size_t len) ossl_rand_pool_add_begin() argument
399 ossl_rand_pool_add_end(RAND_POOL *pool, size_t len, size_t entropy) ossl_rand_pool_add_end() argument
[all...]
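
The hits above are OpenSSL's internal RAND_POOL accumulator: ossl_rand_pool_new() sizes the buffer, ossl_rand_pool_add() appends collected bytes together with an entropy estimate, and ossl_rand_pool_free() releases everything. Those symbols are library-internal, so rather than guess at unexported signatures, the sketch below re-implements the same accumulate-bytes-plus-entropy pattern as a standalone C program; every name in it is invented for illustration, and only the add() semantics mirror the ossl_rand_pool_add(pool, buffer, len, entropy) shape shown in the listing.

```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Simplified analogue of a RAND_POOL-style accumulator: collect bytes
 * plus a caller-supplied entropy estimate.  Names are illustrative,
 * not OpenSSL's. */
struct rand_pool {
    unsigned char *buf;
    size_t len;        /* bytes currently stored */
    size_t max_len;    /* capacity of buf */
    size_t entropy;    /* accumulated entropy estimate, in bits */
};

static struct rand_pool *pool_new(size_t max_len)
{
    struct rand_pool *pool = calloc(1, sizeof(*pool));
    if (!pool)
        return NULL;
    pool->buf = malloc(max_len);
    if (!pool->buf) {
        free(pool);
        return NULL;
    }
    pool->max_len = max_len;
    return pool;
}

/* Mirrors the shape of ossl_rand_pool_add(pool, buffer, len, entropy):
 * append bytes and credit the entropy the caller claims they carry. */
static int pool_add(struct rand_pool *pool, const unsigned char *buffer,
                    size_t len, size_t entropy)
{
    if (len > pool->max_len - pool->len)
        return 0;                        /* would overflow the pool */
    memcpy(pool->buf + pool->len, buffer, len);
    pool->len += len;
    pool->entropy += entropy;
    return 1;
}

static void pool_free(struct rand_pool *pool)
{
    if (!pool)
        return;
    free(pool->buf);
    free(pool);
}

int main(void)
{
    struct rand_pool *pool = pool_new(64);
    unsigned char seed[16] = {0};        /* pretend this came from the OS */

    if (!pool)
        return 1;
    pool_add(pool, seed, sizeof(seed), 8 * sizeof(seed));
    printf("collected %zu bytes, %zu bits of claimed entropy\n",
           pool->len, pool->entropy);
    pool_free(pool);
    return 0;
}
```
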
/third_party/openssl/crypto/rand/
rand_pool.c
20 * Allocate memory and initialize a new random pool
25 RAND_POOL *pool = OPENSSL_zalloc(sizeof(*pool)); in ossl_rand_pool_new() local
28 if (pool == NULL) { in ossl_rand_pool_new()
33 pool->min_len = min_len; in ossl_rand_pool_new()
34 pool->max_len = (max_len > RAND_POOL_MAX_LENGTH) ? in ossl_rand_pool_new()
36 pool->alloc_len = min_len < min_alloc_size ? min_alloc_size : min_len; in ossl_rand_pool_new()
37 if (pool->alloc_len > pool->max_len) in ossl_rand_pool_new()
38 pool in ossl_rand_pool_new()
68 RAND_POOL *pool = OPENSSL_zalloc(sizeof(*pool)); ossl_rand_pool_attach() local
94 ossl_rand_pool_free(RAND_POOL *pool) ossl_rand_pool_free() argument
118 ossl_rand_pool_buffer(RAND_POOL *pool) ossl_rand_pool_buffer() argument
126 ossl_rand_pool_entropy(RAND_POOL *pool) ossl_rand_pool_entropy() argument
134 ossl_rand_pool_length(RAND_POOL *pool) ossl_rand_pool_length() argument
145 ossl_rand_pool_detach(RAND_POOL *pool) ossl_rand_pool_detach() argument
157 ossl_rand_pool_reattach(RAND_POOL *pool, unsigned char *buffer) ossl_rand_pool_reattach() argument
180 ossl_rand_pool_entropy_available(RAND_POOL *pool) ossl_rand_pool_entropy_available() argument
196 ossl_rand_pool_entropy_needed(RAND_POOL *pool) ossl_rand_pool_entropy_needed() argument
205 rand_pool_grow(RAND_POOL *pool, size_t len) rand_pool_grow() argument
246 ossl_rand_pool_bytes_needed(RAND_POOL *pool, unsigned int entropy_factor) ossl_rand_pool_bytes_needed() argument
291 ossl_rand_pool_bytes_remaining(RAND_POOL *pool) ossl_rand_pool_bytes_remaining() argument
305 ossl_rand_pool_add(RAND_POOL *pool, const unsigned char *buffer, size_t len, size_t entropy) ossl_rand_pool_add() argument
360 ossl_rand_pool_add_begin(RAND_POOL *pool, size_t len) ossl_rand_pool_add_begin() argument
399 ossl_rand_pool_add_end(RAND_POOL *pool, size_t len, size_t entropy) ossl_rand_pool_add_end() argument
[all...]
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_mem_pool.c
31 #define pool_dbg(pool, format, ...) \
32 dev_dbg(pool->kbdev->dev, "%s-pool [%zu/%zu]: " format, \
33 (pool->next_pool) ? "kctx" : "kbdev", \
34 kbase_mem_pool_size(pool), \
35 kbase_mem_pool_max_size(pool), \
41 static size_t kbase_mem_pool_capacity(struct kbase_mem_pool *pool) in kbase_mem_pool_capacity() argument
43 ssize_t max_size = kbase_mem_pool_max_size(pool); in kbase_mem_pool_capacity()
44 ssize_t cur_size = kbase_mem_pool_size(pool); in kbase_mem_pool_capacity()
49 static bool kbase_mem_pool_is_full(struct kbase_mem_pool *pool) in kbase_mem_pool_is_full() argument
54 kbase_mem_pool_is_empty(struct kbase_mem_pool *pool) kbase_mem_pool_is_empty() argument
59 kbase_mem_pool_add_locked(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_add_locked() argument
70 kbase_mem_pool_add(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_add() argument
77 kbase_mem_pool_add_list_locked(struct kbase_mem_pool *pool, struct list_head *page_list, size_t nr_pages) kbase_mem_pool_add_list_locked() argument
88 kbase_mem_pool_add_list(struct kbase_mem_pool *pool, struct list_head *page_list, size_t nr_pages) kbase_mem_pool_add_list() argument
96 kbase_mem_pool_remove_locked(struct kbase_mem_pool *pool) kbase_mem_pool_remove_locked() argument
114 kbase_mem_pool_remove(struct kbase_mem_pool *pool) kbase_mem_pool_remove() argument
125 kbase_mem_pool_sync_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_sync_page() argument
133 kbase_mem_pool_zero_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_zero_page() argument
153 kbase_mem_alloc_page(struct kbase_mem_pool *pool) kbase_mem_alloc_page() argument
187 kbase_mem_pool_free_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_free_page() argument
206 kbase_mem_pool_shrink_locked(struct kbase_mem_pool *pool, size_t nr_to_shrink) kbase_mem_pool_shrink_locked() argument
222 kbase_mem_pool_shrink(struct kbase_mem_pool *pool, size_t nr_to_shrink) kbase_mem_pool_shrink() argument
234 kbase_mem_pool_grow(struct kbase_mem_pool *pool, size_t nr_to_grow) kbase_mem_pool_grow() argument
271 kbase_mem_pool_trim(struct kbase_mem_pool *pool, size_t new_size) kbase_mem_pool_trim() argument
295 kbase_mem_pool_set_max_size(struct kbase_mem_pool *pool, size_t max_size) kbase_mem_pool_set_max_size() argument
317 struct kbase_mem_pool *pool; kbase_mem_pool_reclaim_count_objects() local
336 struct kbase_mem_pool *pool; kbase_mem_pool_reclaim_scan_objects() local
358 kbase_mem_pool_init(struct kbase_mem_pool *pool, const struct kbase_mem_pool_config *config, unsigned int order, int group_id, struct kbase_device *kbdev, struct kbase_mem_pool *next_pool) kbase_mem_pool_init() argument
395 kbase_mem_pool_mark_dying(struct kbase_mem_pool *pool) kbase_mem_pool_mark_dying() argument
402 kbase_mem_pool_term(struct kbase_mem_pool *pool) kbase_mem_pool_term() argument
456 kbase_mem_pool_alloc(struct kbase_mem_pool *pool) kbase_mem_pool_alloc() argument
473 kbase_mem_pool_alloc_locked(struct kbase_mem_pool *pool) kbase_mem_pool_alloc_locked() argument
488 kbase_mem_pool_free(struct kbase_mem_pool *pool, struct page *p, bool dirty) kbase_mem_pool_free() argument
510 kbase_mem_pool_free_locked(struct kbase_mem_pool *pool, struct page *p, bool dirty) kbase_mem_pool_free_locked() argument
529 kbase_mem_pool_alloc_pages(struct kbase_mem_pool *pool, size_t nr_4k_pages, struct tagged_addr *pages, bool partial_allowed) kbase_mem_pool_alloc_pages() argument
613 kbase_mem_pool_alloc_pages_locked(struct kbase_mem_pool *pool, size_t nr_4k_pages, struct tagged_addr *pages) kbase_mem_pool_alloc_pages_locked() argument
656 kbase_mem_pool_add_array(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pages, bool zero, bool sync) kbase_mem_pool_add_array() argument
696 kbase_mem_pool_add_array_locked(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pages, bool zero, bool sync) kbase_mem_pool_add_array_locked() argument
738 kbase_mem_pool_free_pages(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pages, bool dirty, bool reclaimed) kbase_mem_pool_free_pages() argument
789 kbase_mem_pool_free_pages_locked(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pages, bool dirty, bool reclaimed) kbase_mem_pool_free_pages_locked() argument
[all...]
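
The Mali kbase memory pool above keeps freed GPU pages on a lock-protected list (bounded by max_size) so later allocations can skip the page allocator, with an optional next_pool fallback. The following is a user-space analogue of that bounded free-list pattern using pthreads; it is not the kbase driver API, and all names are made up for the sketch.

```c
#include <pthread.h>
#include <stdlib.h>

/* User-space analogue of a kbase-style memory pool: a bounded,
 * lock-protected free list that recycles fixed-size buffers ("pages").
 * Names and layout are illustrative only; block_size must be at least
 * sizeof(struct block) so a cached buffer can hold the list link. */
struct block { struct block *next; };

struct mem_pool {
    pthread_mutex_t lock;
    struct block *free_list;
    size_t cur_size;      /* buffers currently cached */
    size_t max_size;      /* cache no more than this many */
    size_t block_size;
};

static void mem_pool_init(struct mem_pool *pool, size_t max_size, size_t block_size)
{
    pthread_mutex_init(&pool->lock, NULL);
    pool->free_list = NULL;
    pool->cur_size = 0;
    pool->max_size = max_size;
    pool->block_size = block_size;
}

/* Like kbase_mem_pool_alloc(): take from the pool when possible,
 * otherwise fall back to the underlying allocator. */
static void *mem_pool_alloc(struct mem_pool *pool)
{
    struct block *b;

    pthread_mutex_lock(&pool->lock);
    b = pool->free_list;
    if (b) {
        pool->free_list = b->next;
        pool->cur_size--;
    }
    pthread_mutex_unlock(&pool->lock);
    return b ? (void *)b : malloc(pool->block_size);
}

/* Like kbase_mem_pool_free(): return to the pool while below max_size,
 * otherwise release the memory for real. */
static void mem_pool_free(struct mem_pool *pool, void *p)
{
    pthread_mutex_lock(&pool->lock);
    if (pool->cur_size < pool->max_size) {
        struct block *b = p;
        b->next = pool->free_list;
        pool->free_list = b;
        pool->cur_size++;
        p = NULL;                 /* now owned by the pool */
    }
    pthread_mutex_unlock(&pool->lock);
    free(p);                      /* no-op when the buffer was cached */
}

int main(void)
{
    struct mem_pool pool;

    mem_pool_init(&pool, 8, 4096);    /* cache at most eight 4 KiB buffers */
    void *page = mem_pool_alloc(&pool);
    mem_pool_free(&pool, page);       /* goes back onto the free list */
    return 0;
}
```

The real driver additionally syncs and zeroes pages, spills to a device-wide next_pool before hitting the allocator, and registers a shrinker; those steps are omitted here.
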
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_mem_pool.c
25 #define pool_dbg(pool, format, ...) \
26 dev_dbg(pool->kbdev->dev, "%s-pool [%zu/%zu]: " format, (pool->next_pool) ? "kctx" : "kbdev", \
27 kbase_mem_pool_size(pool), kbase_mem_pool_max_size(pool), ##__VA_ARGS__)
32 static inline void kbase_mem_pool_lock(struct kbase_mem_pool *pool) in kbase_mem_pool_lock() argument
34 spin_lock(&pool->pool_lock); in kbase_mem_pool_lock()
37 static inline void kbase_mem_pool_unlock(struct kbase_mem_pool *pool) in kbase_mem_pool_unlock() argument
39 spin_unlock(&pool in kbase_mem_pool_unlock()
42 kbase_mem_pool_capacity(struct kbase_mem_pool *pool) kbase_mem_pool_capacity() argument
50 kbase_mem_pool_is_full(struct kbase_mem_pool *pool) kbase_mem_pool_is_full() argument
55 kbase_mem_pool_is_empty(struct kbase_mem_pool *pool) kbase_mem_pool_is_empty() argument
60 kbase_mem_pool_add_locked(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_add_locked() argument
70 kbase_mem_pool_add(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_add() argument
77 kbase_mem_pool_add_list_locked(struct kbase_mem_pool *pool, struct list_head *page_list, size_t nr_pages) kbase_mem_pool_add_list_locked() argument
87 kbase_mem_pool_add_list(struct kbase_mem_pool *pool, struct list_head *page_list, size_t nr_pages) kbase_mem_pool_add_list() argument
94 kbase_mem_pool_remove_locked(struct kbase_mem_pool *pool) kbase_mem_pool_remove_locked() argument
113 kbase_mem_pool_remove(struct kbase_mem_pool *pool) kbase_mem_pool_remove() argument
124 kbase_mem_pool_sync_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_sync_page() argument
131 kbase_mem_pool_zero_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_zero_page() argument
183 kbase_mem_pool_free_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_free_page() argument
195 kbase_mem_pool_shrink_locked(struct kbase_mem_pool *pool, size_t nr_to_shrink) kbase_mem_pool_shrink_locked() argument
210 kbase_mem_pool_shrink(struct kbase_mem_pool *pool, size_t nr_to_shrink) kbase_mem_pool_shrink() argument
221 kbase_mem_pool_grow(struct kbase_mem_pool *pool, size_t nr_to_grow) kbase_mem_pool_grow() argument
237 kbase_mem_pool_trim(struct kbase_mem_pool *pool, size_t new_size) kbase_mem_pool_trim() argument
254 kbase_mem_pool_set_max_size(struct kbase_mem_pool *pool, size_t max_size) kbase_mem_pool_set_max_size() argument
274 struct kbase_mem_pool *pool; kbase_mem_pool_reclaim_count_objects() local
283 struct kbase_mem_pool *pool; kbase_mem_pool_reclaim_scan_objects() local
308 kbase_mem_pool_init(struct kbase_mem_pool *pool, size_t max_size, struct kbase_device *kbdev, struct kbase_mem_pool *next_pool) kbase_mem_pool_init() argument
339 kbase_mem_pool_term(struct kbase_mem_pool *pool) kbase_mem_pool_term() argument
385 kbase_mem_pool_alloc(struct kbase_mem_pool *pool) kbase_mem_pool_alloc() argument
402 kbase_mem_pool_free(struct kbase_mem_pool *pool, struct page *p, bool dirty) kbase_mem_pool_free() argument
424 kbase_mem_pool_alloc_pages(struct kbase_mem_pool *pool, size_t nr_pages, phys_addr_t *pages) kbase_mem_pool_alloc_pages() argument
470 kbase_mem_pool_add_array(struct kbase_mem_pool *pool, size_t nr_pages, phys_addr_t *pages, bool zero, bool sync) kbase_mem_pool_add_array() argument
509 kbase_mem_pool_free_pages(struct kbase_mem_pool *pool, size_t nr_pages, phys_addr_t *pages, bool dirty, bool reclaimed) kbase_mem_pool_free_pages() argument
[all...]
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_mem_pool.c
27 #define pool_dbg(pool, format, ...) \
28 dev_dbg(pool->kbdev->dev, "%s-pool [%zu/%zu]: " format, \
29 (pool->next_pool) ? "kctx" : "kbdev", \
30 kbase_mem_pool_size(pool), \
31 kbase_mem_pool_max_size(pool), \
37 static inline void kbase_mem_pool_lock(struct kbase_mem_pool *pool) in kbase_mem_pool_lock() argument
39 spin_lock(&pool->pool_lock); in kbase_mem_pool_lock()
42 static inline void kbase_mem_pool_unlock(struct kbase_mem_pool *pool) in kbase_mem_pool_unlock() argument
44 spin_unlock(&pool in kbase_mem_pool_unlock()
47 kbase_mem_pool_capacity(struct kbase_mem_pool *pool) kbase_mem_pool_capacity() argument
55 kbase_mem_pool_is_full(struct kbase_mem_pool *pool) kbase_mem_pool_is_full() argument
60 kbase_mem_pool_is_empty(struct kbase_mem_pool *pool) kbase_mem_pool_is_empty() argument
65 kbase_mem_pool_add_locked(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_add_locked() argument
76 kbase_mem_pool_add(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_add() argument
83 kbase_mem_pool_add_list_locked(struct kbase_mem_pool *pool, struct list_head *page_list, size_t nr_pages) kbase_mem_pool_add_list_locked() argument
94 kbase_mem_pool_add_list(struct kbase_mem_pool *pool, struct list_head *page_list, size_t nr_pages) kbase_mem_pool_add_list() argument
102 kbase_mem_pool_remove_locked(struct kbase_mem_pool *pool) kbase_mem_pool_remove_locked() argument
120 kbase_mem_pool_remove(struct kbase_mem_pool *pool) kbase_mem_pool_remove() argument
131 kbase_mem_pool_sync_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_sync_page() argument
140 kbase_mem_pool_zero_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_zero_page() argument
194 kbase_mem_pool_free_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_free_page() argument
207 kbase_mem_pool_shrink_locked(struct kbase_mem_pool *pool, size_t nr_to_shrink) kbase_mem_pool_shrink_locked() argument
223 kbase_mem_pool_shrink(struct kbase_mem_pool *pool, size_t nr_to_shrink) kbase_mem_pool_shrink() argument
235 kbase_mem_pool_grow(struct kbase_mem_pool *pool, size_t nr_to_grow) kbase_mem_pool_grow() argument
251 kbase_mem_pool_trim(struct kbase_mem_pool *pool, size_t new_size) kbase_mem_pool_trim() argument
266 kbase_mem_pool_set_max_size(struct kbase_mem_pool *pool, size_t max_size) kbase_mem_pool_set_max_size() argument
288 struct kbase_mem_pool *pool; kbase_mem_pool_reclaim_count_objects() local
298 struct kbase_mem_pool *pool; kbase_mem_pool_reclaim_scan_objects() local
323 kbase_mem_pool_init(struct kbase_mem_pool *pool, size_t max_size, struct kbase_device *kbdev, struct kbase_mem_pool *next_pool) kbase_mem_pool_init() argument
356 kbase_mem_pool_term(struct kbase_mem_pool *pool) kbase_mem_pool_term() argument
402 kbase_mem_pool_alloc(struct kbase_mem_pool *pool) kbase_mem_pool_alloc() argument
419 kbase_mem_pool_free(struct kbase_mem_pool *pool, struct page *p, bool dirty) kbase_mem_pool_free() argument
441 kbase_mem_pool_alloc_pages(struct kbase_mem_pool *pool, size_t nr_pages, phys_addr_t *pages) kbase_mem_pool_alloc_pages() argument
488 kbase_mem_pool_add_array(struct kbase_mem_pool *pool, size_t nr_pages, phys_addr_t *pages, bool zero, bool sync) kbase_mem_pool_add_array() argument
526 kbase_mem_pool_free_pages(struct kbase_mem_pool *pool, size_t nr_pages, phys_addr_t *pages, bool dirty, bool reclaimed) kbase_mem_pool_free_pages() argument
[all...]
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_mem_pool.c
32 #define pool_dbg(pool, format, ...) \
33 dev_dbg(pool->kbdev->dev, "%s-pool [%zu/%zu]: " format, (pool->next_pool) ? "kctx" : "kbdev", \
34 kbase_mem_pool_size(pool), kbase_mem_pool_max_size(pool), ##__VA_ARGS__)
39 static size_t kbase_mem_pool_capacity(struct kbase_mem_pool *pool) in kbase_mem_pool_capacity() argument
41 ssize_t max_size = kbase_mem_pool_max_size(pool); in kbase_mem_pool_capacity()
42 ssize_t cur_size = kbase_mem_pool_size(pool); in kbase_mem_pool_capacity()
47 static bool kbase_mem_pool_is_full(struct kbase_mem_pool *pool) in kbase_mem_pool_is_full() argument
52 kbase_mem_pool_is_empty(struct kbase_mem_pool *pool) kbase_mem_pool_is_empty() argument
57 kbase_mem_pool_add_locked(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_add_locked() argument
67 kbase_mem_pool_add(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_add() argument
74 kbase_mem_pool_add_list_locked(struct kbase_mem_pool *pool, struct list_head *page_list, size_t nr_pages) kbase_mem_pool_add_list_locked() argument
84 kbase_mem_pool_add_list(struct kbase_mem_pool *pool, struct list_head *page_list, size_t nr_pages) kbase_mem_pool_add_list() argument
91 kbase_mem_pool_remove_locked(struct kbase_mem_pool *pool) kbase_mem_pool_remove_locked() argument
110 kbase_mem_pool_remove(struct kbase_mem_pool *pool) kbase_mem_pool_remove() argument
121 kbase_mem_pool_sync_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_sync_page() argument
127 kbase_mem_pool_zero_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_zero_page() argument
146 kbase_mem_alloc_page(struct kbase_mem_pool *pool) kbase_mem_alloc_page() argument
185 kbase_mem_pool_free_page(struct kbase_mem_pool *pool, struct page *p) kbase_mem_pool_free_page() argument
202 kbase_mem_pool_shrink_locked(struct kbase_mem_pool *pool, size_t nr_to_shrink) kbase_mem_pool_shrink_locked() argument
217 kbase_mem_pool_shrink(struct kbase_mem_pool *pool, size_t nr_to_shrink) kbase_mem_pool_shrink() argument
228 kbase_mem_pool_grow(struct kbase_mem_pool *pool, size_t nr_to_grow) kbase_mem_pool_grow() argument
264 kbase_mem_pool_trim(struct kbase_mem_pool *pool, size_t new_size) kbase_mem_pool_trim() argument
290 kbase_mem_pool_set_max_size(struct kbase_mem_pool *pool, size_t max_size) kbase_mem_pool_set_max_size() argument
310 struct kbase_mem_pool *pool; kbase_mem_pool_reclaim_count_objects() local
328 struct kbase_mem_pool *pool; kbase_mem_pool_reclaim_scan_objects() local
361 kbase_mem_pool_init(struct kbase_mem_pool *pool, const struct kbase_mem_pool_config *config, unsigned int order, int group_id, struct kbase_device *kbdev, struct kbase_mem_pool *next_pool) kbase_mem_pool_init() argument
399 kbase_mem_pool_mark_dying(struct kbase_mem_pool *pool) kbase_mem_pool_mark_dying() argument
406 kbase_mem_pool_term(struct kbase_mem_pool *pool) kbase_mem_pool_term() argument
460 kbase_mem_pool_alloc(struct kbase_mem_pool *pool) kbase_mem_pool_alloc() argument
477 kbase_mem_pool_alloc_locked(struct kbase_mem_pool *pool) kbase_mem_pool_alloc_locked() argument
492 kbase_mem_pool_free(struct kbase_mem_pool *pool, struct page *p, bool dirty) kbase_mem_pool_free() argument
514 kbase_mem_pool_free_locked(struct kbase_mem_pool *pool, struct page *p, bool dirty) kbase_mem_pool_free_locked() argument
533 kbase_mem_pool_alloc_pages(struct kbase_mem_pool *pool, size_t nr_4k_pages, struct tagged_addr *pages, bool partial_allowed) kbase_mem_pool_alloc_pages() argument
613 kbase_mem_pool_alloc_pages_locked(struct kbase_mem_pool *pool, size_t nr_4k_pages, struct tagged_addr *pages) kbase_mem_pool_alloc_pages_locked() argument
652 kbase_mem_pool_add_array(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pages, bool zero, bool sync) kbase_mem_pool_add_array() argument
692 kbase_mem_pool_add_array_locked(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pages, bool zero, bool sync) kbase_mem_pool_add_array_locked() argument
734 kbase_mem_pool_free_pages(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pages, bool dirty, bool reclaimed) kbase_mem_pool_free_pages() argument
784 kbase_mem_pool_free_pages_locked(struct kbase_mem_pool *pool, size_t nr_pages, struct tagged_addr *pages, bool dirty, bool reclaimed) kbase_mem_pool_free_pages_locked() argument
[all...]
/third_party/skia/tests/
SkBlockAllocatorTest.cpp
19 static size_t ScratchBlockSize(SkSBlockAllocator<N>& pool) { in ScratchBlockSize() argument
20 return (size_t) pool->scratchBlockSize(); in ScratchBlockSize()
26 static int block_count(const SkSBlockAllocator<N>& pool) { in block_count() argument
28 for (const Block* b : pool->blocks()) { in block_count()
36 static Block* get_block(SkSBlockAllocator<N>& pool, int blockIndex) { in get_block() argument
39 for (Block* b: pool->blocks()) { in get_block()
55 static size_t total_size(SkSBlockAllocator<N>& pool) { in total_size() argument
56 return pool->totalSize() - BlockAllocatorTestAccess::ScratchBlockSize(pool); in total_size()
60 static size_t add_block(SkSBlockAllocator<N>& pool) { in add_block() argument
70 alloc_byte(SkSBlockAllocator<N>& pool) alloc_byte() argument
414 SkSBlockAllocator<256> pool; DEF_TEST() local
451 SkSBlockAllocator<256> pool; DEF_TEST() local
621 run_owning_block_test(skiatest::Reporter* r, SkBlockAllocator* pool) run_owning_block_test() argument
638 run_owning_block_tests(skiatest::Reporter* r, SkBlockAllocator* pool) run_owning_block_tests() argument
[all...]
/third_party/skia/third_party/externals/microhttpd/src/microhttpd/
memorypool.c
22 * @brief memory pool
47 * Handle for a memory pool. Pools are not reentrant and must not be
54 * Pointer to the pool's memory
59 * Size of the pool.
74 * #MHD_NO if pool was malloc'ed, #MHD_YES if mmapped (VirtualAlloc'ed for W32).
81 * Create a memory pool.
83 * @param max maximum size of the pool
89 struct MemoryPool *pool; in MHD_pool_create() local
91 pool = malloc (sizeof (struct MemoryPool)); in MHD_pool_create()
92 if (NULL == pool) in MHD_pool_create()
135 MHD_pool_destroy(struct MemoryPool *pool) MHD_pool_destroy() argument
165 MHD_pool_allocate(struct MemoryPool *pool, size_t size, int from_end) MHD_pool_allocate() argument
208 MHD_pool_reallocate(struct MemoryPool *pool, void *old, size_t old_size, size_t new_size) MHD_pool_reallocate() argument
262 MHD_pool_reset(struct MemoryPool *pool, void *keep, size_t size) MHD_pool_reset() argument
[all...]
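
memorypool.c is libmicrohttpd's per-connection arena: MHD_pool_create() grabs one region (malloc'ed or mmapped), MHD_pool_allocate() carves pieces from the front or the back, MHD_pool_reallocate() grows the most recent allocation in place when it can, and MHD_pool_reset() keeps one region while discarding the rest between requests. These functions live in the private memorypool.h header, so the sketch below only illustrates the calling pattern under the assumption that it is compiled inside the microhttpd tree; the sizes and the returned-pointer behaviour of MHD_pool_reset() are taken from this vendored version and should be treated as assumptions.

```c
/* Calling-pattern sketch for libmicrohttpd's private per-connection
 * pool (memorypool.h); builds only inside the microhttpd source tree.
 * Sizes are arbitrary. */
#include "memorypool.h"

static void example_connection_cycle(void)
{
    struct MemoryPool *pool = MHD_pool_create(32 * 1024);
    if (NULL == pool)
        return;

    /* Scratch space taken from the front of the pool (from_end = 0). */
    char *hdr_buf = MHD_pool_allocate(pool, 1024, 0);

    /* Longer-lived data taken from the end (from_end = 1) so the front
     * of the pool stays free for incremental allocations. */
    char *read_buf = MHD_pool_allocate(pool, 4096, 1);

    /* Grow the read buffer, in place when possible. */
    read_buf = MHD_pool_reallocate(pool, read_buf, 4096, 8192);

    /* Between requests on a keep-alive connection: discard everything
     * except the read buffer; it may be moved to the start of the
     * pool, so keep using the returned pointer. */
    read_buf = MHD_pool_reset(pool, read_buf, 8192);

    (void)hdr_buf;
    (void)read_buf;
    MHD_pool_destroy(pool);
}
```
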
/third_party/mesa3d/src/gallium/frontends/nine/
threadpool.c
44 struct threadpool *pool = data; in threadpool_worker() local
46 pthread_mutex_lock(&pool->m); in threadpool_worker()
48 while (!pool->shutdown) { in threadpool_worker()
52 while (!pool->workqueue && !pool->shutdown) in threadpool_worker()
53 pthread_cond_wait(&pool->new_work, &pool->m); in threadpool_worker()
55 if (pool->shutdown) in threadpool_worker()
62 task = pool->workqueue; in threadpool_worker()
63 pool in threadpool_worker()
90 struct threadpool *pool = calloc(1, sizeof(*pool)); _mesa_threadpool_create() local
109 _mesa_threadpool_destroy(struct NineSwapChain9 *swapchain, struct threadpool *pool) _mesa_threadpool_destroy() argument
142 _mesa_threadpool_queue_task(struct threadpool *pool, threadpool_task_func work, void *data) _mesa_threadpool_queue_task() argument
184 _mesa_threadpool_wait_for_task(struct threadpool *pool, struct threadpool_task **task_handle) _mesa_threadpool_wait_for_task() argument
[all...]
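
The Gallium Nine thread pool above is the classic worker loop: lock the pool mutex, wait on a condition variable while the work queue is empty and the pool is not shutting down, pop a task, and run it with the lock dropped. Below is a self-contained pthread re-implementation of that loop, not the Mesa API; names are invented, tasks are queued LIFO for brevity, and unlike the snippet the worker drains remaining work before honouring shutdown.

```c
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* Minimal worker-queue pattern following the loop visible in
 * threadpool_worker(): wait for work, pop a task, run it unlocked. */
struct task {
    void (*work)(void *);
    void *data;
    struct task *next;
};

struct threadpool {
    pthread_mutex_t m;
    pthread_cond_t new_work;
    struct task *workqueue;
    int shutdown;
};

static void *worker(void *arg)
{
    struct threadpool *pool = arg;

    pthread_mutex_lock(&pool->m);
    for (;;) {
        /* Sleep until there is work or a shutdown request. */
        while (!pool->workqueue && !pool->shutdown)
            pthread_cond_wait(&pool->new_work, &pool->m);
        if (!pool->workqueue && pool->shutdown)
            break;                        /* drained and asked to stop */

        struct task *t = pool->workqueue;
        pool->workqueue = t->next;

        /* Run the task without holding the pool lock. */
        pthread_mutex_unlock(&pool->m);
        t->work(t->data);
        free(t);
        pthread_mutex_lock(&pool->m);
    }
    pthread_mutex_unlock(&pool->m);
    return NULL;
}

static void queue_task(struct threadpool *pool, void (*work)(void *), void *data)
{
    struct task *t = malloc(sizeof(*t));
    t->work = work;
    t->data = data;
    pthread_mutex_lock(&pool->m);
    t->next = pool->workqueue;            /* LIFO, for brevity */
    pool->workqueue = t;
    pthread_cond_signal(&pool->new_work);
    pthread_mutex_unlock(&pool->m);
}

static void say_hello(void *data)
{
    printf("task: %s\n", (const char *)data);
}

int main(void)
{
    struct threadpool pool = {
        .m = PTHREAD_MUTEX_INITIALIZER,
        .new_work = PTHREAD_COND_INITIALIZER,
    };
    pthread_t thr;

    pthread_create(&thr, NULL, worker, &pool);
    queue_task(&pool, say_hello, (void *)"hello from the pool");

    /* Flag shutdown, wake the worker, wait for it to drain and exit. */
    pthread_mutex_lock(&pool.m);
    pool.shutdown = 1;
    pthread_cond_broadcast(&pool.new_work);
    pthread_mutex_unlock(&pool.m);
    pthread_join(thr, NULL);
    return 0;
}
```
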
/third_party/ffmpeg/libavfilter/
framepool.c
58 FFFramePool *pool; variable
62 pool = av_mallocz(sizeof(FFFramePool));
63 if (!pool)
66 pool->type = AVMEDIA_TYPE_VIDEO;
67 pool->width = width;
68 pool->height = height;
69 pool->format = format;
70 pool->align = align;
76 if (!pool->linesize[0]) {
77 ret = av_image_fill_linesizes(pool
110 ff_frame_pool_uninit(&pool); global() variable
121 FFFramePool *pool; global() variable
148 ff_frame_pool_uninit(&pool); global() variable
152 ff_frame_pool_get_video_config(FFFramePool *pool, int *width, int *height, enum AVPixelFormat *format, int *align) ff_frame_pool_get_video_config() argument
171 ff_frame_pool_get_audio_config(FFFramePool *pool, int *channels, int *nb_samples, enum AVSampleFormat *format, int *align) ff_frame_pool_get_audio_config() argument
190 ff_frame_pool_get(FFFramePool *pool) ff_frame_pool_get() argument
283 ff_frame_pool_uninit(FFFramePool **pool) ff_frame_pool_uninit() argument
[all...]
/third_party/mesa3d/src/panfrost/vulkan/
panvk_mempool.c
32 * In "owned" mode, a single parent owns the entire pool, and the pool owns all
34 * panvk_pool_get_bo_handles. Freeing occurs at the level of an entire pool.
35 * This is useful for streaming uploads, where the batch owns the pool.
37 * In "unowned" mode, the pool is freestanding. It does not track created BOs
44 panvk_pool_alloc_backing(struct panvk_pool *pool, size_t bo_sz) in panvk_pool_alloc_backing() argument
48 /* If there's a free BO in our BO pool, let's pick it. */ in panvk_pool_alloc_backing()
49 if (pool->bo_pool && bo_sz == pool->base.slab_size && in panvk_pool_alloc_backing()
50 util_dynarray_num_elements(&pool in panvk_pool_alloc_backing()
75 panvk_pool_alloc_aligned(struct panvk_pool *pool, size_t sz, unsigned alignment) panvk_pool_alloc_aligned() argument
103 panvk_pool_init(struct panvk_pool *pool, struct panfrost_device *dev, struct panvk_bo_pool *bo_pool, unsigned create_flags, size_t slab_size, const char *label, bool prealloc) panvk_pool_init() argument
120 panvk_pool_reset(struct panvk_pool *pool) panvk_pool_reset() argument
142 panvk_pool_cleanup(struct panvk_pool *pool) panvk_pool_cleanup() argument
150 panvk_pool_get_bo_handles(struct panvk_pool *pool, uint32_t *handles) panvk_pool_get_bo_handles() argument
[all...]
/foundation/communication/dsoftbus/components/nstackx/fillp/src/public/src/
H A Ddympool.c24 DympoolType *pool = FILLP_NULL_PTR; in DympCreatePool() local
26 FILLP_LOGERR("Error to create pool initSize:%d,maxSize:%d,itemSize:%d", initSize, maxSize, itemSize); in DympCreatePool()
30 pool = SpungeAlloc(1, sizeof(DympoolType), SPUNGE_ALLOC_TYPE_MALLOC); in DympCreatePool()
31 if (pool == FILLP_NULL_PTR) { in DympCreatePool()
36 pool->maxSize = maxSize; in DympCreatePool()
37 pool->itemSize = itemSize; in DympCreatePool()
38 pool->currentSize = 0; in DympCreatePool()
39 pool->itemOperaCb.createCb = itemOperaCb->createCb; in DympCreatePool()
40 pool->itemOperaCb.destroyCb = itemOperaCb->destroyCb; in DympCreatePool()
41 pool in DympCreatePool()
75 DympDestroyPool(DympoolType *pool) DympDestroyPool() argument
110 DympSetConsSafe(DympoolType *pool, FILLP_BOOL safe) DympSetConsSafe() argument
115 DympSetProdSafe(DympoolType *pool, FILLP_BOOL safe) DympSetProdSafe() argument
120 DympExpandMemory(DympoolType *pool, int stepSizeWork) DympExpandMemory() argument
176 DympAskMoreMemory(DympoolType *pool, int stepSize, int throttleGrow) DympAskMoreMemory() argument
204 DympAlloc(DympoolType *pool, void **data, int throttleGrow) DympAlloc() argument
[all...]
/device/soc/rockchip/common/vendor/drivers/dma-buf/heaps/
page_pool.c
3 * DMA BUF page pool system
7 * Based on the ION page pool code
21 static inline struct page *dmabuf_page_pool_alloc_pages(struct dmabuf_page_pool *pool) in dmabuf_page_pool_alloc_pages() argument
26 return alloc_pages(pool->gfp_mask, pool->order); in dmabuf_page_pool_alloc_pages()
29 static inline void dmabuf_page_pool_free_pages(struct dmabuf_page_pool *pool, struct page *page) in dmabuf_page_pool_free_pages() argument
31 __free_pages(page, pool->order); in dmabuf_page_pool_free_pages()
34 static void dmabuf_page_pool_add(struct dmabuf_page_pool *pool, struct page *page) in dmabuf_page_pool_add() argument
44 mutex_lock(&pool->mutex); in dmabuf_page_pool_add()
45 list_add_tail(&page->lru, &pool in dmabuf_page_pool_add()
51 dmabuf_page_pool_remove(struct dmabuf_page_pool *pool, int index) dmabuf_page_pool_remove() argument
67 dmabuf_page_pool_fetch(struct dmabuf_page_pool *pool) dmabuf_page_pool_fetch() argument
79 dmabuf_page_pool_alloc(struct dmabuf_page_pool *pool) dmabuf_page_pool_alloc() argument
95 dmabuf_page_pool_free(struct dmabuf_page_pool *pool, struct page *page) dmabuf_page_pool_free() argument
105 dmabuf_page_pool_total(struct dmabuf_page_pool *pool, bool high) dmabuf_page_pool_total() argument
118 struct dmabuf_page_pool *pool = kmalloc(sizeof(*pool), GFP_KERNEL); dmabuf_page_pool_create() local
141 dmabuf_page_pool_destroy(struct dmabuf_page_pool *pool) dmabuf_page_pool_destroy() argument
162 dmabuf_page_pool_do_shrink(struct dmabuf_page_pool *pool, gfp_t gfp_mask, int nr_to_scan) dmabuf_page_pool_do_shrink() argument
199 struct dmabuf_page_pool *pool; dmabuf_page_pool_shrink() local
[all...]
/device/soc/rockchip/rk3588/kernel/drivers/dma-buf/heaps/
page_pool.c
3 * DMA BUF page pool system
7 * Based on the ION page pool code
22 struct page *dmabuf_page_pool_alloc_pages(struct dmabuf_page_pool *pool) in dmabuf_page_pool_alloc_pages() argument
26 return alloc_pages(pool->gfp_mask, pool->order); in dmabuf_page_pool_alloc_pages()
29 static inline void dmabuf_page_pool_free_pages(struct dmabuf_page_pool *pool, in dmabuf_page_pool_free_pages() argument
32 __free_pages(page, pool->order); in dmabuf_page_pool_free_pages()
35 static void dmabuf_page_pool_add(struct dmabuf_page_pool *pool, struct page *page) in dmabuf_page_pool_add() argument
44 mutex_lock(&pool->mutex); in dmabuf_page_pool_add()
45 list_add_tail(&page->lru, &pool in dmabuf_page_pool_add()
52 dmabuf_page_pool_remove(struct dmabuf_page_pool *pool, int index) dmabuf_page_pool_remove() argument
69 dmabuf_page_pool_fetch(struct dmabuf_page_pool *pool) dmabuf_page_pool_fetch() argument
80 dmabuf_page_pool_alloc(struct dmabuf_page_pool *pool) dmabuf_page_pool_alloc() argument
95 dmabuf_page_pool_free(struct dmabuf_page_pool *pool, struct page *page) dmabuf_page_pool_free() argument
104 dmabuf_page_pool_total(struct dmabuf_page_pool *pool, bool high) dmabuf_page_pool_total() argument
116 struct dmabuf_page_pool *pool = kmalloc(sizeof(*pool), GFP_KERNEL); dmabuf_page_pool_create() local
138 dmabuf_page_pool_destroy(struct dmabuf_page_pool *pool) dmabuf_page_pool_destroy() argument
158 dmabuf_page_pool_do_shrink(struct dmabuf_page_pool *pool, gfp_t gfp_mask, int nr_to_scan) dmabuf_page_pool_do_shrink() argument
192 struct dmabuf_page_pool *pool; dmabuf_page_pool_shrink() local
[all...]
/third_party/mesa3d/src/util/
slab.c
48 * - a pointer to the child pool to which this element belongs, or
62 /* Next page in the same child pool. */
83 * pool has been destroyed). Mark the element as freed and free the whole page
99 * Create a parent pool for the allocation of same-sized objects.
123 * Create a child pool linked to the given parent.
125 void slab_create_child(struct slab_child_pool *pool, in slab_create_child() argument
128 pool->parent = parent; in slab_create_child()
129 pool->pages = NULL; in slab_create_child()
130 pool->free = NULL; in slab_create_child()
131 pool in slab_create_child()
140 slab_destroy_child(struct slab_child_pool *pool) slab_destroy_child() argument
177 slab_add_new_page(struct slab_child_pool *pool) slab_add_new_page() argument
207 slab_alloc(struct slab_child_pool *pool) slab_alloc() argument
238 slab_zalloc(struct slab_child_pool *pool) slab_zalloc() argument
255 slab_free(struct slab_child_pool *pool, void *ptr) slab_free() argument
[all...]
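
Mesa's util/slab.c is a two-level allocator for same-sized objects: one slab_parent_pool is shared by several slab_child_pools, slab_alloc()/slab_zalloc() pop elements from a child's free list, and slab_free() may legally be called on a different child (or thread) than the one that allocated the element. A minimal usage sketch follows; it assumes the Mesa tree's util/slab.h is on the include path, and the slab_create_parent() parameters (item size, items per page) are recalled from that header rather than shown in the listing, so treat them as an assumption.

```c
/* Usage sketch for Mesa's util/slab allocator (src/util/slab.h); only
 * builds inside the Mesa tree.  The slab_create_parent() parameters
 * (item size, items per page) are assumed, not shown in the listing. */
#include "util/slab.h"

struct my_transfer {
    int level;
    unsigned usage;
};

void slab_example(void)
{
    struct slab_parent_pool parent;
    struct slab_child_pool child;

    slab_create_parent(&parent, sizeof(struct my_transfer), 64);
    slab_create_child(&child, &parent);

    struct my_transfer *xfer = slab_zalloc(&child);   /* zero-initialized */
    if (xfer) {
        xfer->level = 1;
        slab_free(&child, xfer);   /* may be called from another thread */
    }

    slab_destroy_child(&child);
    slab_destroy_parent(&parent);
}
```
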
/third_party/mesa3d/src/gallium/drivers/llvmpipe/
lp_cs_tpool.c
27 * compute shader thread pool.
38 struct lp_cs_tpool *pool = data; in lp_cs_tpool_worker() local
42 mtx_lock(&pool->m); in lp_cs_tpool_worker()
44 while (!pool->shutdown) { in lp_cs_tpool_worker()
48 while (list_is_empty(&pool->workqueue) && !pool->shutdown) in lp_cs_tpool_worker()
49 cnd_wait(&pool->new_work, &pool->m); in lp_cs_tpool_worker()
51 if (pool->shutdown) in lp_cs_tpool_worker()
54 task = list_first_entry(&pool in lp_cs_tpool_worker()
89 struct lp_cs_tpool *pool = CALLOC_STRUCT(lp_cs_tpool); lp_cs_tpool_create() local
110 lp_cs_tpool_destroy(struct lp_cs_tpool *pool) lp_cs_tpool_destroy() argument
130 lp_cs_tpool_queue_task(struct lp_cs_tpool *pool, lp_cs_tpool_task_func work, void *data, int num_iters) lp_cs_tpool_queue_task() argument
169 lp_cs_tpool_wait_for_task(struct lp_cs_tpool *pool, struct lp_cs_tpool_task **task_handle) lp_cs_tpool_wait_for_task() argument
[all...]
/third_party/mesa3d/src/gallium/drivers/r600/
compute_memory_pool.c
48 static void compute_memory_shadow(struct compute_memory_pool* pool,
51 static void compute_memory_defrag(struct compute_memory_pool *pool,
55 static int compute_memory_promote_item(struct compute_memory_pool *pool,
59 static void compute_memory_move_item(struct compute_memory_pool *pool,
64 static void compute_memory_transfer(struct compute_memory_pool* pool,
70 * Creates a new pool.
75 struct compute_memory_pool* pool = (struct compute_memory_pool*) in compute_memory_pool_new() local
77 if (!pool) in compute_memory_pool_new()
82 pool->screen = rscreen; in compute_memory_pool_new()
83 pool in compute_memory_pool_new()
98 compute_memory_pool_init(struct compute_memory_pool * pool, unsigned initial_size_in_dw) compute_memory_pool_init() argument
113 compute_memory_pool_delete(struct compute_memory_pool* pool) compute_memory_pool_delete() argument
132 compute_memory_grow_defrag_pool(struct compute_memory_pool *pool, struct pipe_context *pipe, int new_size_in_dw) compute_memory_grow_defrag_pool() argument
194 compute_memory_shadow(struct compute_memory_pool* pool, struct pipe_context * pipe, int device_to_host) compute_memory_shadow() argument
215 compute_memory_finalize_pending(struct compute_memory_pool* pool, struct pipe_context * pipe) compute_memory_finalize_pending() argument
325 compute_memory_defrag(struct compute_memory_pool *pool, struct pipe_resource *src, struct pipe_resource *dst, struct pipe_context *pipe) compute_memory_defrag() argument
355 compute_memory_promote_item(struct compute_memory_pool *pool, struct compute_memory_item *item, struct pipe_context *pipe, int64_t start_in_dw) compute_memory_promote_item() argument
404 compute_memory_demote_item(struct compute_memory_pool *pool, struct compute_memory_item *item, struct pipe_context *pipe) compute_memory_demote_item() argument
466 compute_memory_move_item(struct compute_memory_pool *pool, struct pipe_resource *src, struct pipe_resource *dst, struct compute_memory_item *item, uint64_t new_start_in_dw, struct pipe_context *pipe) compute_memory_move_item() argument
560 compute_memory_free(struct compute_memory_pool* pool, int64_t id) compute_memory_free() argument
598 compute_memory_alloc( struct compute_memory_pool* pool, int64_t size_in_dw) compute_memory_alloc() argument
631 compute_memory_transfer( struct compute_memory_pool* pool, struct pipe_context * pipe, int device_to_host, struct compute_memory_item* chunk, void* data, int offset_in_chunk, int size) compute_memory_transfer() argument
[all...]
/third_party/vk-gl-cts/framework/delibs/depool/
deMemPool.c
21 * \brief Memory pool management.
48 * Represent a page of memory allocate by a memory pool.
69 * \brief Memory pool.
71 * A pool of memory from which individual memory allocations can be made.
73 * but rather all of the memory allocated from a pool is freed when the pool
76 * The pools can be arranged into a hierarchy. If a pool with children is
78 * the pool itself.
82 * creating the root pool with the deMemPool_createFailingRoot() function.
92 deMemPool* firstChild; /*!< Pointer to first child pool i
171 deMemPool* pool; createPoolInternal() local
243 deMemPool* pool = createPoolInternal(DE_NULL); deMemPool_createRoot() local
283 deMemPool* pool; deMemPool_create() local
298 deMemPool_destroy(deMemPool* pool) deMemPool_destroy() argument
378 deMemPool_getNumChildren(const deMemPool* pool) deMemPool_getNumChildren() argument
390 deMemPool_getNumAllocatedBytes(const deMemPool* pool, deBool recurse) deMemPool_getNumAllocatedBytes() argument
408 deMemPool_getCapacity(const deMemPool* pool, deBool recurse) deMemPool_getCapacity() argument
426 deMemPool_allocInternal(deMemPool* pool, size_t numBytes, deUint32 alignBytes) deMemPool_allocInternal() argument
499 deMemPool_alloc(deMemPool* pool, size_t numBytes) deMemPool_alloc() argument
517 deMemPool_alignedAlloc(deMemPool* pool, size_t numBytes, deUint32 alignBytes) deMemPool_alignedAlloc() argument
536 deMemPool_memDup(deMemPool* pool, const void* ptr, size_t numBytes) deMemPool_memDup() argument
550 deMemPool_strDup(deMemPool* pool, const char* str) deMemPool_strDup() argument
566 deMemPool_strnDup(deMemPool* pool, const char* str, int maxLength) deMemPool_strnDup() argument
583 deMemPool_getMaxNumAllocatedBytes(const deMemPool* pool) deMemPool_getMaxNumAllocatedBytes() argument
589 deMemPool_getMaxCapacity(const deMemPool* pool) deMemPool_getMaxCapacity() argument
[all...]
/foundation/communication/dhcp/services/dhcp_server/src/
dhcp_address_pool.cpp
85 int CheckIpAvailability(DhcpAddressPool *pool, uint8_t macAddr[DHCP_HWADDR_LENGTH], uint32_t distIp) in CheckIpAvailability() argument
87 if (!pool) { in CheckIpAvailability()
88 DHCP_LOGE("pool pointer is null."); in CheckIpAvailability()
95 AddressBinding *lease = GetLease(pool, distIp); in CheckIpAvailability()
98 if (distIp == pool->serverId || distIp == pool->gateway) { in CheckIpAvailability()
104 if (IsReservedIp(pool, distIp) && !same) { in CheckIpAvailability()
128 DhcpAddressPool *pool, uint8_t macAddr[DHCP_HWADDR_LENGTH], uint32_t distIp, int *outOfRange) in CheckRangeAvailability()
130 if (!pool || !pool in CheckRangeAvailability()
127 CheckRangeAvailability( DhcpAddressPool *pool, uint8_t macAddr[DHCP_HWADDR_LENGTH], uint32_t distIp, int *outOfRange) CheckRangeAvailability() argument
180 AddressDistribute(DhcpAddressPool *pool, uint8_t macAddr[DHCP_HWADDR_LENGTH]) AddressDistribute() argument
223 InitAddressPool(DhcpAddressPool *pool, const char *ifname, PDhcpOptionList options) InitAddressPool() argument
255 FreeAddressPool(DhcpAddressPool *pool) FreeAddressPool() argument
301 IsReservedIp(DhcpAddressPool *pool, uint32_t ipAddress) IsReservedIp() argument
398 AddLease(DhcpAddressPool *pool, AddressBinding *lease) AddLease() argument
421 GetLease(DhcpAddressPool *pool, uint32_t ipAddress) GetLease() argument
439 UpdateLease(DhcpAddressPool *pool, AddressBinding *lease) UpdateLease() argument
458 RemoveLease(DhcpAddressPool *pool, AddressBinding *lease) RemoveLease() argument
478 LoadBindingRecoders(DhcpAddressPool *pool) LoadBindingRecoders() argument
517 SaveBindingRecoders(const DhcpAddressPool *pool, int force) SaveBindingRecoders() argument
565 DeleteMacInLease(DhcpAddressPool *pool, AddressBinding *lease) DeleteMacInLease() argument
[all...]
/third_party/mesa3d/src/gallium/drivers/panfrost/
pan_mempool.c
35 * In "owned" mode, a single parent owns the entire pool, and the pool owns all
37 * panfrost_pool_get_bo_handles. Freeing occurs at the level of an entire pool.
38 * This is useful for streaming uploads, where the batch owns the pool.
40 * In "unowned" mode, the pool is freestanding. It does not track created BOs
47 panfrost_pool_alloc_backing(struct panfrost_pool *pool, size_t bo_sz) in panfrost_pool_alloc_backing() argument
55 struct panfrost_bo *bo = panfrost_bo_create(pool->base.dev, bo_sz, in panfrost_pool_alloc_backing()
56 pool->base.create_flags, pool->base.label); in panfrost_pool_alloc_backing()
58 if (pool in panfrost_pool_alloc_backing()
70 panfrost_pool_init(struct panfrost_pool *pool, void *memctx, struct panfrost_device *dev, unsigned create_flags, size_t slab_size, const char *label, bool prealloc, bool owned) panfrost_pool_init() argument
87 panfrost_pool_cleanup(struct panfrost_pool *pool) panfrost_pool_cleanup() argument
101 panfrost_pool_get_bo_handles(struct panfrost_pool *pool, uint32_t *handles) panfrost_pool_get_bo_handles() argument
124 panfrost_pool_alloc_aligned(struct panfrost_pool *pool, size_t sz, unsigned alignment) panfrost_pool_alloc_aligned() argument
[all...]
/third_party/mesa3d/src/broadcom/vulkan/
v3dv_query.c
120 struct v3dv_query_pool *pool, in kperfmon_create()
123 for (uint32_t i = 0; i < pool->perfmon.nperfmons; i++) { in kperfmon_create()
124 assert(i * DRM_V3D_MAX_PERF_COUNTERS < pool->perfmon.ncounters); in kperfmon_create()
127 .ncounters = MIN2(pool->perfmon.ncounters - in kperfmon_create()
132 &pool->perfmon.counters[i * DRM_V3D_MAX_PERF_COUNTERS], in kperfmon_create()
141 pool->queries[query].perf.kperfmon_ids[i] = req.id; in kperfmon_create()
147 struct v3dv_query_pool *pool, in kperfmon_destroy()
151 if (!pool->queries[query].perf.kperfmon_ids[0]) in kperfmon_destroy()
154 for (uint32_t i = 0; i < pool->perfmon.nperfmons; i++) { in kperfmon_destroy()
156 .id = pool in kperfmon_destroy()
119 kperfmon_create(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t query) kperfmon_create() argument
146 kperfmon_destroy(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t query) kperfmon_destroy() argument
183 struct v3dv_query_pool *pool = v3dv_CreateQueryPool() local
380 write_occlusion_query_result(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t query, bool do_64bit, void *data, uint32_t slot) write_occlusion_query_result() argument
401 write_timestamp_query_result(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t query, bool do_64bit, void *data, uint32_t slot) write_timestamp_query_result() argument
417 write_performance_query_result(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t query, bool do_64bit, void *data, uint32_t slot) write_performance_query_result() argument
473 write_query_result(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t query, bool do_64bit, void *data, uint32_t slot) write_query_result() argument
496 query_is_available(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t query, bool do_wait, bool *available) query_is_available() argument
525 get_query_result_count(struct v3dv_query_pool *pool) get_query_result_count() argument
539 v3dv_get_query_pool_results(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t first, uint32_t count, void *data, VkDeviceSize stride, VkQueryResultFlags flags) v3dv_get_query_pool_results() argument
667 v3dv_reset_query_pools(struct v3dv_device *device, struct v3dv_query_pool *pool, uint32_t first, uint32_t count) v3dv_reset_query_pools() argument
[all...]
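
v3dv_query.c is Broadcom's implementation of the standard Vulkan query-pool entry points (creation, reset, vkGetQueryPoolResults), plus a performance-query path built on kernel perfmon objects. The portable side of that API looks like the sketch below; it uses only core Vulkan 1.0 calls, assumes a valid VkDevice, and leaves the command-buffer recording that would actually begin and end the query as a comment, so it is a usage outline rather than anything v3dv-specific.

```c
#include <vulkan/vulkan.h>

/* Create a one-entry occlusion query pool and read a 64-bit result.
 * Assumes `device` is a valid VkDevice and that, in real code, a
 * command buffer resetting/beginning/ending query 0 is recorded and
 * submitted between creation and the results call. */
VkResult read_one_occlusion_result(VkDevice device, uint64_t *out_samples)
{
    VkQueryPoolCreateInfo info = {
        .sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
        .queryType = VK_QUERY_TYPE_OCCLUSION,
        .queryCount = 1,
    };
    VkQueryPool pool;
    VkResult res = vkCreateQueryPool(device, &info, NULL, &pool);
    if (res != VK_SUCCESS)
        return res;

    /* ... vkCmdResetQueryPool / vkCmdBeginQuery / vkCmdEndQuery and a
     * queue submission would go here ... */

    res = vkGetQueryPoolResults(device, pool,
                                0, 1,                  /* firstQuery, queryCount */
                                sizeof(*out_samples), out_samples,
                                sizeof(*out_samples),  /* stride */
                                VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);

    vkDestroyQueryPool(device, pool, NULL);
    return res;
}
```
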
/third_party/mesa3d/src/gallium/drivers/r300/compiler/
memory_pool.c
38 void memory_pool_init(struct memory_pool * pool) in memory_pool_init() argument
40 memset(pool, 0, sizeof(struct memory_pool)); in memory_pool_init()
44 void memory_pool_destroy(struct memory_pool * pool) in memory_pool_destroy() argument
46 while(pool->blocks) { in memory_pool_destroy()
47 struct memory_block * block = pool->blocks; in memory_pool_destroy()
48 pool->blocks = block->next; in memory_pool_destroy()
53 static void refill_pool(struct memory_pool * pool) in refill_pool() argument
55 unsigned int blocksize = pool->total_allocated; in refill_pool()
62 newblock->next = pool->blocks; in refill_pool()
63 pool in refill_pool()
71 memory_pool_malloc(struct memory_pool * pool, unsigned int bytes) memory_pool_malloc() argument
[all...]
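
memory_pool.c in the r300 compiler is a bump allocator: memory_pool_malloc() carves allocations out of progressively larger blocks and nothing is freed individually; memory_pool_destroy() releases every block at once. The calls below mirror the signatures visible in the listing and assume the compiler's memory_pool.h is available, so this is a usage sketch rather than standalone code.

```c
/* Usage sketch for the r300 compiler's bump allocator, based on the
 * signatures visible above; assumes memory_pool.h from the Mesa tree. */
#include "memory_pool.h"

void build_something(void)
{
    struct memory_pool pool;

    memory_pool_init(&pool);

    /* Allocations are never freed one by one... */
    int *counts = memory_pool_malloc(&pool, 16 * sizeof(int));
    char *name  = memory_pool_malloc(&pool, 32);
    (void)counts;
    (void)name;

    /* ...the whole pool is torn down in a single call. */
    memory_pool_destroy(&pool);
}
```
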
/third_party/libunwind/libunwind/src/mi/
mempool.c
69 free_object (struct mempool *pool, void *object) in free_object() argument
73 obj->next = pool->free_list; in free_object()
74 pool->free_list = obj; in free_object()
75 ++pool->num_free; in free_object()
79 add_memory (struct mempool *pool, char *mem, size_t size, size_t obj_size) in add_memory() argument
84 free_object (pool, obj); in add_memory()
88 expand (struct mempool *pool) in expand() argument
93 size = pool->chunk_size; in expand()
97 size = UNW_ALIGN(pool->obj_size, pg_size); in expand()
102 size = pool in expand()
110 mempool_init(struct mempool *pool, size_t obj_size, size_t reserve) mempool_init() argument
137 mempool_alloc(struct mempool *pool) mempool_alloc() argument
158 mempool_free(struct mempool *pool, void *object) mempool_free() argument
[all...]
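
libunwind's mempool hands out fixed-size objects from pre-expanded chunks so the unwinder can allocate without hitting malloc on every frame; per the listing, mempool_init(pool, obj_size, reserve) sets it up and mempool_alloc()/mempool_free() recycle objects through a free list. mempool.h is an internal libunwind header, so the sketch below only shows the calling pattern; the record type and the zero reserve value are assumptions.

```c
/* Calling-pattern sketch for libunwind's internal object pool
 * (src/mi/mempool.c); the header is private to libunwind, and the
 * record type and zero reserve value are assumptions. */
#include "mempool.h"

struct frame_record {
    unsigned long ip;
    unsigned long sp;
};

static struct mempool frame_pool;

void mempool_example(void)
{
    /* obj_size = record size, reserve = objects to pre-allocate. */
    mempool_init(&frame_pool, sizeof(struct frame_record), 0);

    struct frame_record *fr = mempool_alloc(&frame_pool);
    if (fr) {
        fr->ip = 0;
        fr->sp = 0;
        mempool_free(&frame_pool, fr);
    }
}
```
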
/foundation/filemanagement/dfs_service/services/distributedfiledaemon/test/unittest/network/
session_pool_test.cpp
135 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); in HWTEST_F() local
139 pool->OccupySession(TEST_SESSION_ID, 1); in HWTEST_F()
161 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); in HWTEST_F() local
165 pool->occupySession_.erase(TEST_SESSION_ID); in HWTEST_F()
166 pool->occupySession_.insert(make_pair(TEST_SESSION_ID, LINK_TYPE_AP)); in HWTEST_F()
167 bool flag = pool->FindSession(TEST_SESSION_ID); in HWTEST_F()
169 pool->occupySession_.erase(TEST_SESSION_ID); in HWTEST_F()
191 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); in HWTEST_F() local
195 pool->occupySession_.erase(TEST_SESSION_ID_TWO); in HWTEST_F()
196 bool flag = pool in HWTEST_F()
221 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
249 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
281 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
307 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
353 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
379 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
426 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
466 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
494 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
520 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
552 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
583 shared_ptr<SessionPool> pool = make_shared<SessionPool>(kernelTalker); HWTEST_F() local
[all...]
/third_party/ffmpeg/libavutil/
buffer.c
263 AVBufferPool *pool = av_mallocz(sizeof(*pool)); in av_buffer_pool_init2() local
264 if (!pool) in av_buffer_pool_init2()
267 ff_mutex_init(&pool->mutex, NULL); in av_buffer_pool_init2()
269 pool->size = size; in av_buffer_pool_init2()
270 pool->opaque = opaque; in av_buffer_pool_init2()
271 pool->alloc2 = alloc; in av_buffer_pool_init2()
272 pool->alloc = av_buffer_alloc; // fallback in av_buffer_pool_init2()
273 pool->pool_free = pool_free; in av_buffer_pool_init2()
275 atomic_init(&pool in av_buffer_pool_init2()
282 AVBufferPool *pool = av_mallocz(sizeof(*pool)); av_buffer_pool_init() local
296 buffer_pool_flush(AVBufferPool *pool) buffer_pool_flush() argument
311 buffer_pool_free(AVBufferPool *pool) buffer_pool_free() argument
324 AVBufferPool *pool; av_buffer_pool_uninit() local
342 AVBufferPool *pool = buf->pool; pool_release_buffer() local
358 pool_alloc_buffer(AVBufferPool *pool) pool_alloc_buffer() argument
387 av_buffer_pool_get(AVBufferPool *pool) av_buffer_pool_get() argument
[all...]
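
buffer.c also implements FFmpeg's public AVBufferPool: av_buffer_pool_init() creates a thread-safe pool of fixed-size, reference-counted buffers, av_buffer_pool_get() hands one out (recycling buffers whose last reference was dropped), and av_buffer_pool_uninit() marks the pool to be freed once every outstanding buffer returns. A minimal sketch against the public libavutil API follows; error handling is abbreviated and the 4096-byte size is arbitrary.

```c
#include <libavutil/buffer.h>
#include <stdio.h>

int main(void)
{
    /* Pool of 4096-byte buffers; NULL selects the default allocator. */
    AVBufferPool *pool = av_buffer_pool_init(4096, NULL);
    if (!pool)
        return 1;

    /* Reuses a previously returned buffer when one is available,
     * allocates a fresh one otherwise. */
    AVBufferRef *buf = av_buffer_pool_get(pool);
    if (buf) {
        printf("got a %zu-byte pooled buffer\n", (size_t)buf->size);
        av_buffer_unref(&buf);          /* hands the buffer back to the pool */
    }

    /* The pool itself is freed once every outstanding buffer returns. */
    av_buffer_pool_uninit(&pool);
    return 0;
}
```

Build against libavutil, for example: cc demo.c $(pkg-config --cflags --libs libavutil).
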

Completed in 12 milliseconds
