/kernel/linux/linux-6.6/drivers/scsi/lpfc/ |
H A D | lpfc_mem.c | 45 #define LPFC_MBUF_POOL_SIZE 64 /* max elements in MBUF safety pool */ 46 #define LPFC_MEM_POOL_SIZE 64 /* max elem in non-DMA safety pool */ 47 #define LPFC_DEVICE_DATA_POOL_SIZE 64 /* max elements in device data pool */ 48 #define LPFC_RRQ_POOL_SIZE 256 /* max elements in non-DMA pool */ 49 #define LPFC_MBX_POOL_SIZE 256 /* max elements in MBX non-DMA pool */ 88 struct lpfc_dma_pool *pool = &phba->lpfc_mbuf_safety_pool; in lpfc_mem_alloc() local 98 pool->elements = kmalloc_array(LPFC_MBUF_POOL_SIZE, in lpfc_mem_alloc() 101 if (!pool->elements) in lpfc_mem_alloc() 104 pool->max_count = 0; in lpfc_mem_alloc() 105 pool in lpfc_mem_alloc() 218 struct lpfc_dma_pool *pool = &phba->lpfc_mbuf_safety_pool; lpfc_mem_free() local 382 struct lpfc_dma_pool *pool = &phba->lpfc_mbuf_safety_pool; lpfc_mbuf_alloc() local 415 struct lpfc_dma_pool *pool = &phba->lpfc_mbuf_safety_pool; __lpfc_mbuf_free() local [all...] |
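The lpfc snippet shows a driver allocating a small table of spare buffers (a "safety pool" of LPFC_MBUF_POOL_SIZE elements via kmalloc_array) alongside its normal DMA pool. The usual purpose of such a safety pool is to let allocations still succeed when the primary allocator is exhausted. Below is a minimal, generic sketch of that fallback pattern; all names (safety_pool, SAFETY_POOL_SIZE, buf_alloc, ...) are hypothetical and this is not lpfc's actual code.

```c
/*
 * Generic "safety pool" sketch: keep a small table of preallocated buffers
 * so a request can still be satisfied when the primary allocator fails.
 * Names and sizes are hypothetical, not lpfc's API.
 */
#include <stdlib.h>

#define SAFETY_POOL_SIZE 64            /* cf. LPFC_MBUF_POOL_SIZE */
#define BUF_SIZE         1024

struct safety_pool {
    void *elements[SAFETY_POOL_SIZE];  /* spare buffers */
    int   current_count;               /* how many spares are free */
};

static int safety_pool_init(struct safety_pool *pool)
{
    pool->current_count = 0;
    for (int i = 0; i < SAFETY_POOL_SIZE; i++) {
        void *buf = malloc(BUF_SIZE);

        if (!buf) {                    /* unwind on failure */
            while (pool->current_count > 0)
                free(pool->elements[--pool->current_count]);
            return -1;
        }
        pool->elements[pool->current_count++] = buf;
    }
    return 0;
}

/* Try the normal allocator first; fall back to a spare from the pool. */
static void *buf_alloc(struct safety_pool *pool)
{
    void *buf = malloc(BUF_SIZE);

    if (!buf && pool->current_count > 0)
        buf = pool->elements[--pool->current_count];
    return buf;
}

/* Refill the safety pool before handing buffers back to the allocator. */
static void buf_free(struct safety_pool *pool, void *buf)
{
    if (pool->current_count < SAFETY_POOL_SIZE)
        pool->elements[pool->current_count++] = buf;
    else
        free(buf);
}
```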
/third_party/skia/src/gpu/vk/ |
H A D | GrVkResourceProvider.cpp | 415 for (const GrVkCommandPool* pool : fActiveCommandPools) { in findOrCreateCommandPool() 416 SkASSERT(pool != result); in findOrCreateCommandPool() 418 for (const GrVkCommandPool* pool : fAvailableCommandPools) { in findOrCreateCommandPool() 419 SkASSERT(pool != result); in findOrCreateCommandPool() 439 GrVkCommandPool* pool = fActiveCommandPools[i]; in checkCommandBuffers() local 440 if (!pool->isOpen()) { in checkCommandBuffers() 441 GrVkPrimaryCommandBuffer* buffer = pool->getPrimaryCommandBuffer(); in checkCommandBuffers() 444 // This passes ownership of the pool to the backgroundReset call. The pool should in checkCommandBuffers() 447 this->backgroundReset(pool); in checkCommandBuffers() 455 GrVkCommandPool* pool = fActiveCommandPools[i]; forceSyncAllCommandBuffers() local 466 GrVkCommandPool* pool = fActiveCommandPools[i]; addFinishedProcToActiveCommandBuffers() local 538 backgroundReset(GrVkCommandPool* pool) backgroundReset() argument 559 reset(GrVkCommandPool* pool) reset() argument [all...] |
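The Skia GrVkResourceProvider snippet moves command pools between active and available lists and hands finished pools to backgroundReset() rather than freeing individual command buffers. Below is a plain Vulkan C sketch of the underlying API calls such a recycler sits on (vkCreateCommandPool, vkAllocateCommandBuffers, vkResetCommandPool); it is not Skia's GrVkCommandPool API, and `device` / `queue_family_index` are assumed to exist.

```c
/*
 * Pool-recycling sketch in plain Vulkan C: command buffers come from a
 * VkCommandPool, and the whole pool is reset once its work has completed,
 * instead of freeing buffers one by one.
 */
#include <vulkan/vulkan.h>

VkCommandPool create_pool(VkDevice device, uint32_t queue_family_index)
{
    VkCommandPoolCreateInfo info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
        .queueFamilyIndex = queue_family_index,
    };
    VkCommandPool pool = VK_NULL_HANDLE;

    vkCreateCommandPool(device, &info, NULL, &pool);
    return pool;
}

VkCommandBuffer get_primary_buffer(VkDevice device, VkCommandPool pool)
{
    VkCommandBufferAllocateInfo info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .commandPool = pool,
        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = 1,
    };
    VkCommandBuffer buffer = VK_NULL_HANDLE;

    vkAllocateCommandBuffers(device, &info, &buffer);
    return buffer;
}

/* Once the GPU is done with the pool's buffers, recycle them in one call. */
void recycle_pool(VkDevice device, VkCommandPool pool)
{
    vkResetCommandPool(device, pool, 0);
}
```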
/kernel/linux/linux-5.10/drivers/net/ethernet/ti/ |
H A D | davinci_cpdma.c | 100 struct cpdma_desc_pool *pool; member 192 struct cpdma_desc_pool *pool = ctlr->pool; in cpdma_desc_pool_destroy() local 194 if (!pool) in cpdma_desc_pool_destroy() 197 WARN(gen_pool_size(pool->gen_pool) != gen_pool_avail(pool->gen_pool), in cpdma_desc_pool_destroy() 199 gen_pool_size(pool->gen_pool), in cpdma_desc_pool_destroy() 200 gen_pool_avail(pool->gen_pool)); in cpdma_desc_pool_destroy() 201 if (pool->cpumap) in cpdma_desc_pool_destroy() 202 dma_free_coherent(ctlr->dev, pool in cpdma_desc_pool_destroy() 215 struct cpdma_desc_pool *pool; cpdma_desc_pool_create() local 279 desc_phys(struct cpdma_desc_pool *pool, struct cpdma_desc __iomem *desc) desc_phys() argument 288 desc_from_phys(struct cpdma_desc_pool *pool, dma_addr_t dma) desc_from_phys() argument 294 cpdma_desc_alloc(struct cpdma_desc_pool *pool) cpdma_desc_alloc() argument 300 cpdma_desc_free(struct cpdma_desc_pool *pool, struct cpdma_desc __iomem *desc, int num_desc) cpdma_desc_free() argument 378 struct cpdma_desc_pool *pool = ctlr->pool; cpdma_chan_on() local 985 struct cpdma_desc_pool *pool = ctlr->pool; __cpdma_chan_submit() local 1182 struct cpdma_desc_pool *pool = ctlr->pool; cpdma_check_free_tx_desc() local 1198 struct cpdma_desc_pool *pool = ctlr->pool; __cpdma_chan_free() local 1225 struct cpdma_desc_pool *pool = ctlr->pool; __cpdma_chan_process() local 1315 struct cpdma_desc_pool *pool = ctlr->pool; cpdma_chan_stop() local [all...] |
H A D | k3-cppi-desc-pool.h |
  2  /* TI K3 CPPI5 descriptors pool
 15  void k3_cppi_desc_pool_destroy(struct k3_cppi_desc_pool *pool);
 23  k3_cppi_desc_pool_virt2dma(struct k3_cppi_desc_pool *pool, void *addr);
 25  k3_cppi_desc_pool_dma2virt(struct k3_cppi_desc_pool *pool, dma_addr_t dma);
 26  void *k3_cppi_desc_pool_alloc(struct k3_cppi_desc_pool *pool);
 27  void k3_cppi_desc_pool_free(struct k3_cppi_desc_pool *pool, void *addr);
 28  size_t k3_cppi_desc_pool_avail(struct k3_cppi_desc_pool *pool); |
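The header above exposes the CPPI descriptor-pool accessors (alloc/free, virt-to-DMA translation, availability). A hedged usage sketch follows, using only the calls listed in the header; the pool itself is assumed to come from the create call that the listing truncates, and `struct my_hw_desc` is a hypothetical descriptor layout.

```c
/*
 * Usage sketch for the k3-cppi descriptor pool accessors declared above.
 * `pool` is assumed to be created elsewhere; the descriptor layout is
 * hypothetical.
 */
#include <linux/types.h>
#include <linux/errno.h>
#include "k3-cppi-desc-pool.h"

struct my_hw_desc;                      /* hypothetical hardware descriptor */

static int queue_one_descriptor(struct k3_cppi_desc_pool *pool)
{
    struct my_hw_desc *desc;
    dma_addr_t desc_dma;

    if (!k3_cppi_desc_pool_avail(pool))
        return -ENOMEM;

    desc = k3_cppi_desc_pool_alloc(pool);        /* CPU-visible descriptor */
    if (!desc)
        return -ENOMEM;

    /* Hardware rings take the DMA address of the descriptor. */
    desc_dma = k3_cppi_desc_pool_virt2dma(pool, desc);

    /* ... fill the descriptor and push desc_dma to the ring ... */

    return 0;
}

/* On completion, translate the DMA address back and return the element. */
static void complete_one_descriptor(struct k3_cppi_desc_pool *pool,
                                    dma_addr_t desc_dma)
{
    void *desc = k3_cppi_desc_pool_dma2virt(pool, desc_dma);

    k3_cppi_desc_pool_free(pool, desc);
}
```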
/kernel/linux/linux-6.6/drivers/net/ethernet/ti/ |
H A D | davinci_cpdma.c | 100 struct cpdma_desc_pool *pool; member 192 struct cpdma_desc_pool *pool = ctlr->pool; in cpdma_desc_pool_destroy() local 194 if (!pool) in cpdma_desc_pool_destroy() 197 WARN(gen_pool_size(pool->gen_pool) != gen_pool_avail(pool->gen_pool), in cpdma_desc_pool_destroy() 199 gen_pool_size(pool->gen_pool), in cpdma_desc_pool_destroy() 200 gen_pool_avail(pool->gen_pool)); in cpdma_desc_pool_destroy() 201 if (pool->cpumap) in cpdma_desc_pool_destroy() 202 dma_free_coherent(ctlr->dev, pool in cpdma_desc_pool_destroy() 215 struct cpdma_desc_pool *pool; cpdma_desc_pool_create() local 279 desc_phys(struct cpdma_desc_pool *pool, struct cpdma_desc __iomem *desc) desc_phys() argument 288 desc_from_phys(struct cpdma_desc_pool *pool, dma_addr_t dma) desc_from_phys() argument 294 cpdma_desc_alloc(struct cpdma_desc_pool *pool) cpdma_desc_alloc() argument 300 cpdma_desc_free(struct cpdma_desc_pool *pool, struct cpdma_desc __iomem *desc, int num_desc) cpdma_desc_free() argument 378 struct cpdma_desc_pool *pool = ctlr->pool; cpdma_chan_on() local 985 struct cpdma_desc_pool *pool = ctlr->pool; __cpdma_chan_submit() local 1182 struct cpdma_desc_pool *pool = ctlr->pool; cpdma_check_free_tx_desc() local 1198 struct cpdma_desc_pool *pool = ctlr->pool; __cpdma_chan_free() local 1225 struct cpdma_desc_pool *pool = ctlr->pool; __cpdma_chan_process() local 1315 struct cpdma_desc_pool *pool = ctlr->pool; cpdma_chan_stop() local [all...] |
H A D | k3-cppi-desc-pool.h |
  2  /* TI K3 CPPI5 descriptors pool
 15  void k3_cppi_desc_pool_destroy(struct k3_cppi_desc_pool *pool);
 23  k3_cppi_desc_pool_virt2dma(struct k3_cppi_desc_pool *pool, void *addr);
 25  k3_cppi_desc_pool_dma2virt(struct k3_cppi_desc_pool *pool, dma_addr_t dma);
 26  void *k3_cppi_desc_pool_alloc(struct k3_cppi_desc_pool *pool);
 27  void k3_cppi_desc_pool_free(struct k3_cppi_desc_pool *pool, void *addr);
 28  size_t k3_cppi_desc_pool_avail(struct k3_cppi_desc_pool *pool); |
/third_party/vk-gl-cts/framework/delibs/depool/ |
H A D | dePoolArray.c | 21 * \brief Memory pool array class. 38 * \brief Create a new pool array. 39 * \param pool Pool to allocate memory from. 43 dePoolArray* dePoolArray_create (deMemPool* pool, int elementSize) in dePoolArray_create() argument 46 dePoolArray* arr = DE_POOL_NEW(pool, dePoolArray); in dePoolArray_create() 52 arr->pool = pool; in dePoolArray_create() 78 void** newPageTable = (void**)deMemPool_alloc(arr->pool, (size_t)newPageTableCapacity * sizeof(void*)); in dePoolArray_reserve() 115 /* Allocate the rest of the needed pages from the pool. */ in dePoolArray_reserve() 118 void* newPage = deMemPool_alloc(arr->pool, (size_ in dePoolArray_reserve() 159 deMemPool* pool = deMemPool_createRoot(DE_NULL, 0); dePoolArray_selfTest() local [all...] |
H A D | dePoolStringBuilder.c | 21 * \brief Memory pool management. 38 deMemPool* pool; member 44 dePoolStringBuilder* dePoolStringBuilder_create (deMemPool* pool) in dePoolStringBuilder_create() argument 46 dePoolStringBuilder* builder = DE_POOL_NEW(pool, dePoolStringBuilder); in dePoolStringBuilder_create() 50 builder->pool = pool; in dePoolStringBuilder_create() 60 StringBlock* block = DE_POOL_NEW(builder->pool, StringBlock); in dePoolStringBuilder_appendString() 62 char* blockStr = (char*)deMemPool_alloc(builder->pool, len + 1); in dePoolStringBuilder_appendString() 115 return dePoolStringBuilder_dupToPool(builder, builder->pool); in dePoolStringBuilder_dupToString() 118 char* dePoolStringBuilder_dupToPool (dePoolStringBuilder* builder, deMemPool* pool) in dePoolStringBuilder_dupToPool() argument [all...] |
/third_party/mesa3d/src/gallium/drivers/zink/ |
H A D | zink_descriptors.c | 56 struct zink_descriptor_pool *pool; member 90 struct zink_descriptor_pool *pool[ZINK_DESCRIPTOR_TYPES]; member 111 pipe_reference(NULL, &zds->pool->reference); in batch_add_desc_set() 219 for (unsigned i = 0; i < zds->pool->key->layout->num_bindings; i++) { in descriptor_set_invalidate() 220 for (unsigned j = 0; j < zds->pool->key->layout->bindings[i].descriptorCount; j++) { in descriptor_set_invalidate() 221 switch (zds->pool->type) { in descriptor_set_invalidate() 271 descriptor_pool_free(struct zink_screen *screen, struct zink_descriptor_pool *pool) in descriptor_pool_free() argument 273 if (!pool) in descriptor_pool_free() 275 if (pool->descpool) in descriptor_pool_free() 276 VKSCR(DestroyDescriptorPool)(screen->dev, pool in descriptor_pool_free() 295 descriptor_pool_delete(struct zink_context *ctx, struct zink_descriptor_pool *pool) descriptor_pool_delete() argument 308 struct zink_descriptor_pool *pool = rzalloc(NULL, struct zink_descriptor_pool); descriptor_pool_create() local 597 struct zink_descriptor_pool *pool = he->data; descriptor_pool_get() local 602 struct zink_descriptor_pool *pool = descriptor_pool_create(zink_screen(ctx->base.screen), type, pool_key); descriptor_pool_get() local 617 zink_descriptor_util_alloc_sets(struct zink_screen *screen, VkDescriptorSetLayout dsl, VkDescriptorPool pool, VkDescriptorSet *sets, unsigned num_sets) zink_descriptor_util_alloc_sets() argument 643 struct zink_descriptor_pool *pool = push_set ? ctx->dd->push_pool[is_compute] : pdd_cached(pg)->pool[type]; allocate_desc_set() local 791 struct zink_descriptor_pool *pool = push_set ? ctx->dd->push_pool[is_compute] : pdd_cached(pg)->pool[type]; zink_descriptor_set_get() local 941 struct zink_descriptor_pool *pool = zds->pool; zink_descriptor_set_recycle() local 1133 struct zink_descriptor_pool *pool = descriptor_pool_get(ctx, i, pool_key); zink_descriptor_program_init() local 1595 struct zink_descriptor_pool *pool = zds->pool; zink_batch_descriptor_reset() local [all...] |
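The zink code above keeps reference-counted descriptor pools per program and allocates descriptor sets in batches through zink_descriptor_util_alloc_sets(). The sketch below shows the raw Vulkan calls such a cache is built on (vkCreateDescriptorPool plus a single vkAllocateDescriptorSets for a batch); it is not zink's helper API, and `device`, `layout`, and the descriptor counts are assumptions for illustration.

```c
/*
 * Plain Vulkan C sketch: create a VkDescriptorPool sized for a batch of
 * sets, then allocate the whole batch with one call. Every set uses the
 * same layout, mirroring the batched allocation in the snippet.
 */
#include <vulkan/vulkan.h>

#define SETS_PER_POOL 100

VkResult alloc_set_batch(VkDevice device, VkDescriptorSetLayout layout,
                         VkDescriptorPool *out_pool, VkDescriptorSet *sets)
{
    VkDescriptorPoolSize size = {
        .type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
        .descriptorCount = SETS_PER_POOL,
    };
    VkDescriptorPoolCreateInfo pool_info = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
        .maxSets = SETS_PER_POOL,
        .poolSizeCount = 1,
        .pPoolSizes = &size,
    };
    VkDescriptorSetLayout layouts[SETS_PER_POOL];
    VkDescriptorSetAllocateInfo alloc_info = {
        .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
        .descriptorSetCount = SETS_PER_POOL,
    };
    VkResult result;

    result = vkCreateDescriptorPool(device, &pool_info, NULL, out_pool);
    if (result != VK_SUCCESS)
        return result;

    for (int i = 0; i < SETS_PER_POOL; i++)
        layouts[i] = layout;            /* same layout for the whole batch */
    alloc_info.descriptorPool = *out_pool;
    alloc_info.pSetLayouts = layouts;

    return vkAllocateDescriptorSets(device, &alloc_info, sets);
}
```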
/kernel/linux/linux-5.10/sound/core/seq/ |
H A D | seq_fifo.c | 26 f->pool = snd_seq_pool_new(poolsize); in snd_seq_fifo_new() 27 if (f->pool == NULL) { in snd_seq_fifo_new() 31 if (snd_seq_pool_init(f->pool) < 0) { in snd_seq_fifo_new() 32 snd_seq_pool_delete(&f->pool); in snd_seq_fifo_new() 60 if (f->pool) in snd_seq_fifo_delete() 61 snd_seq_pool_mark_closing(f->pool); in snd_seq_fifo_delete() 72 if (f->pool) { in snd_seq_fifo_delete() 73 snd_seq_pool_done(f->pool); in snd_seq_fifo_delete() 74 snd_seq_pool_delete(&f->pool); in snd_seq_fifo_delete() 112 err = snd_seq_event_dup(f->pool, even in snd_seq_fifo_event_in() [all...] |
/kernel/linux/linux-6.6/sound/core/seq/ |
H A D | seq_fifo.c | 26 f->pool = snd_seq_pool_new(poolsize); in snd_seq_fifo_new() 27 if (f->pool == NULL) { in snd_seq_fifo_new() 31 if (snd_seq_pool_init(f->pool) < 0) { in snd_seq_fifo_new() 32 snd_seq_pool_delete(&f->pool); in snd_seq_fifo_new() 60 if (f->pool) in snd_seq_fifo_delete() 61 snd_seq_pool_mark_closing(f->pool); in snd_seq_fifo_delete() 72 if (f->pool) { in snd_seq_fifo_delete() 73 snd_seq_pool_done(f->pool); in snd_seq_fifo_delete() 74 snd_seq_pool_delete(&f->pool); in snd_seq_fifo_delete() 112 err = snd_seq_event_dup(f->pool, even in snd_seq_fifo_event_in() [all...] |
/kernel/linux/linux-5.10/drivers/infiniband/hw/i40iw/ |
H A D | i40iw_pble.c | 56 * i40iw_destroy_pble_pool - destroy pool during module unload 66 if (pinfo->pool) { in i40iw_destroy_pble_pool() 73 gen_pool_destroy(pinfo->pool); in i40iw_destroy_pble_pool() 99 pble_rsrc->pinfo.pool = gen_pool_create(pble_rsrc->pinfo.pool_shift, -1); in i40iw_hmc_init_pble() 101 if (!pble_rsrc->pinfo.pool) in i40iw_hmc_init_pble() 385 if (gen_pool_add_virt(pble_rsrc->pinfo.pool, (unsigned long)chunk->vaddr, in add_pble_pool() 424 struct gen_pool *pool; in free_lvl2() local 429 pool = pble_rsrc->pinfo.pool; in free_lvl2() 433 gen_pool_free(pool, lea in free_lvl2() 451 get_lvl2_pble(struct i40iw_hmc_pble_rsrc *pble_rsrc, struct i40iw_pble_alloc *palloc, struct gen_pool *pool) get_lvl2_pble() argument 513 struct gen_pool *pool; get_lvl1_pble() local 538 get_lvl1_lvl2_pble(struct i40iw_sc_dev *dev, struct i40iw_hmc_pble_rsrc *pble_rsrc, struct i40iw_pble_alloc *palloc, struct gen_pool *pool) get_lvl1_lvl2_pble() argument 563 struct gen_pool *pool; i40iw_get_pble() local 601 struct gen_pool *pool; i40iw_free_pble() local [all...] |
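Both the i40iw PBLE code above and the davinci_cpdma descriptor pool earlier are built on the kernel's genalloc API (gen_pool_create, gen_pool_add_virt, gen_pool_alloc/free). A minimal sketch of that API follows; the backing region, its size, and the allocation order are made up for illustration, and error handling is reduced to the essentials.

```c
/*
 * Minimal genalloc sketch: create a pool, hand it an already-mapped region,
 * carve allocations out of it, and destroy it once everything is returned.
 */
#include <linux/genalloc.h>
#include <linux/kernel.h>

#define POOL_ORDER 6                      /* minimum allocation: 64 bytes */
#define POOL_SIZE  (64 * 1024)

static struct gen_pool *example_pool_create(void *vaddr, phys_addr_t paddr)
{
    struct gen_pool *pool;

    pool = gen_pool_create(POOL_ORDER, -1 /* any NUMA node */);
    if (!pool)
        return NULL;

    /* Register the region with the pool, as add_pble_pool() does above. */
    if (gen_pool_add_virt(pool, (unsigned long)vaddr, paddr, POOL_SIZE, -1)) {
        gen_pool_destroy(pool);
        return NULL;
    }
    return pool;
}

static void example_pool_use(struct gen_pool *pool)
{
    unsigned long addr;

    addr = gen_pool_alloc(pool, 256);     /* carve 256 bytes out of the pool */
    if (!addr)
        return;

    /* ... use the buffer ... */

    gen_pool_free(pool, addr, 256);

    /* Like cpdma_desc_pool_destroy(), destruction expects everything freed. */
    WARN_ON(gen_pool_size(pool) != gen_pool_avail(pool));
    gen_pool_destroy(pool);
}
```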
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/display/dc/dcn10/ |
H A D | dcn10_hw_sequencer_debug.c | 112 struct resource_pool *pool = dc->res_pool; in dcn10_get_hubp_states() local 133 for (i = 0; i < pool->pipe_count; i++) { in dcn10_get_hubp_states() 134 struct hubp *hubp = pool->hubps[i]; in dcn10_get_hubp_states() 190 struct resource_pool *pool = dc->res_pool; in dcn10_get_rq_states() local 203 for (i = 0; i < pool->pipe_count; i++) { in dcn10_get_rq_states() 204 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state); in dcn10_get_rq_states() 212 pool->hubps[i]->inst, rq_regs->drq_expansion_mode, rq_regs->prq_expansion_mode, rq_regs->mrq_expansion_mode, in dcn10_get_rq_states() 232 struct resource_pool *pool = dc->res_pool; in dcn10_get_dlg_states() local 248 for (i = 0; i < pool->pipe_count; i++) { in dcn10_get_dlg_states() 249 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool in dcn10_get_dlg_states() 289 struct resource_pool *pool = dc->res_pool; dcn10_get_ttu_states() local 329 struct resource_pool *pool = dc->res_pool; dcn10_get_cm_states() local 384 struct resource_pool *pool = dc->res_pool; dcn10_get_mpcc_states() local 415 struct resource_pool *pool = dc->res_pool; dcn10_get_otg_states() local 490 struct resource_pool *pool = dc->res_pool; dcn10_clear_otpc_underflow() local 506 struct resource_pool *pool = dc->res_pool; dcn10_clear_hubp_underflow() local [all...] |
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/display/dc/dcn10/ |
H A D | dcn10_hw_sequencer_debug.c | 113 struct resource_pool *pool = dc->res_pool; in dcn10_get_hubp_states() local 134 for (i = 0; i < pool->pipe_count; i++) { in dcn10_get_hubp_states() 135 struct hubp *hubp = pool->hubps[i]; in dcn10_get_hubp_states() 191 struct resource_pool *pool = dc->res_pool; in dcn10_get_rq_states() local 204 for (i = 0; i < pool->pipe_count; i++) { in dcn10_get_rq_states() 205 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state); in dcn10_get_rq_states() 213 pool->hubps[i]->inst, rq_regs->drq_expansion_mode, rq_regs->prq_expansion_mode, rq_regs->mrq_expansion_mode, in dcn10_get_rq_states() 233 struct resource_pool *pool = dc->res_pool; in dcn10_get_dlg_states() local 249 for (i = 0; i < pool->pipe_count; i++) { in dcn10_get_dlg_states() 250 struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool in dcn10_get_dlg_states() 290 struct resource_pool *pool = dc->res_pool; dcn10_get_ttu_states() local 330 struct resource_pool *pool = dc->res_pool; dcn10_get_cm_states() local 385 struct resource_pool *pool = dc->res_pool; dcn10_get_mpcc_states() local 416 struct resource_pool *pool = dc->res_pool; dcn10_get_otg_states() local 491 struct resource_pool *pool = dc->res_pool; dcn10_clear_otpc_underflow() local 507 struct resource_pool *pool = dc->res_pool; dcn10_clear_hubp_underflow() local [all...] |
/third_party/curl/lib/ |
H A D | bufq.c | 159 void Curl_bufcp_init(struct bufc_pool *pool, in Curl_bufcp_init() argument 164 memset(pool, 0, sizeof(*pool)); in Curl_bufcp_init() 165 pool->chunk_size = chunk_size; in Curl_bufcp_init() 166 pool->spare_max = spare_max; in Curl_bufcp_init() 169 static CURLcode bufcp_take(struct bufc_pool *pool, in bufcp_take() argument 174 if(pool->spare) { in bufcp_take() 175 chunk = pool->spare; in bufcp_take() 176 pool->spare = chunk->next; in bufcp_take() 177 --pool in bufcp_take() 193 bufcp_put(struct bufc_pool *pool, struct buf_chunk *chunk) bufcp_put() argument 207 Curl_bufcp_free(struct bufc_pool *pool) Curl_bufcp_free() argument 213 bufq_init(struct bufq *q, struct bufc_pool *pool, size_t chunk_size, size_t max_chunks, int opts) bufq_init() argument 236 Curl_bufq_initp(struct bufq *q, struct bufc_pool *pool, size_t max_chunks, int opts) Curl_bufq_initp() argument [all...] |
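The bufq.c snippet shows a chunk pool (struct bufc_pool) that hands out fixed-size buffer chunks, keeps a bounded list of spares, and lets several bufq instances share it through Curl_bufq_initp(). A hedged init/teardown sketch follows, using the functions visible in the snippet; the chunk sizes and limits are arbitrary, and Curl_bufq_free() comes from bufq.h rather than from the lines shown above.

```c
/*
 * Sketch of sharing one chunk pool between two buffer queues, following
 * the pool functions shown above. Sizes are arbitrary.
 */
#include "bufq.h"

static void pooled_bufq_example(void)
{
    struct bufc_pool pool;
    struct bufq q1, q2;

    /* 16 KiB chunks, keep at most 8 spare chunks parked in the pool. */
    Curl_bufcp_init(&pool, 16 * 1024, 8);

    /* Two queues share the pool; each may hold up to 4 chunks. */
    Curl_bufq_initp(&q1, &pool, 4, 0 /* no special BUFQ_OPT_* flags */);
    Curl_bufq_initp(&q2, &pool, 4, 0);

    /* ... write into and read from the queues ... */

    Curl_bufq_free(&q1);
    Curl_bufq_free(&q2);
    Curl_bufcp_free(&pool);    /* releases the spare chunks */
}
```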
/kernel/linux/linux-6.6/drivers/net/ethernet/stmicro/stmmac/ |
H A D | stmmac_xdp.c | 10 struct xsk_buff_pool *pool, u16 queue) in stmmac_xdp_enable_pool() 21 frame_size = xsk_pool_get_rx_frame_size(pool); in stmmac_xdp_enable_pool() 22 /* XDP ZC does not span multiple frame, make sure XSK pool buffer in stmmac_xdp_enable_pool() 28 err = xsk_pool_dma_map(pool, priv->device, STMMAC_RX_DMA_ATTR); in stmmac_xdp_enable_pool() 30 netdev_err(priv->dev, "Failed to map xsk pool\n"); in stmmac_xdp_enable_pool() 61 struct xsk_buff_pool *pool; in stmmac_xdp_disable_pool() local 68 pool = xsk_get_pool_from_qid(priv->dev, queue); in stmmac_xdp_disable_pool() 69 if (!pool) in stmmac_xdp_disable_pool() 81 xsk_pool_dma_unmap(pool, STMMAC_RX_DMA_ATTR); in stmmac_xdp_disable_pool() 95 int stmmac_xdp_setup_pool(struct stmmac_priv *priv, struct xsk_buff_pool *pool, in stmmac_xdp_setup_pool() argument 9 stmmac_xdp_enable_pool(struct stmmac_priv *priv, struct xsk_buff_pool *pool, u16 queue) stmmac_xdp_enable_pool() argument [all...] |
/third_party/eudev/src/shared/ |
H A D | mempool.c |
 27  struct pool { struct
 28  struct pool *next;
 54  struct pool *p; in mempool_alloc_tile()
 58  size = PAGE_ALIGN(ALIGN(sizeof(struct pool)) + n*mp->tile_size); in mempool_alloc_tile()
 59  n = (size - ALIGN(sizeof(struct pool))) / mp->tile_size; in mempool_alloc_tile()
 74  return ((uint8_t*) mp->first_pool) + ALIGN(sizeof(struct pool)) + i*mp->tile_size; in mempool_alloc_tile()
 94  struct pool *p = mp->first_pool; in mempool_drop()
 96  struct pool *n; in mempool_drop()
 |
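The eudev mempool above chains page-aligned blocks together, places a small header (struct pool) at the front of each block, and slices the remainder into fixed-size tiles. Below is a self-contained, generic sketch of that layout with a freelist for returned tiles; the names and sizes are hypothetical and it simplifies the alignment handling of the real code.

```c
/*
 * Generic tile-pool sketch: each block holds a header followed by
 * fixed-size tiles; freed tiles are pushed onto a freelist threaded
 * through the tiles themselves.
 */
#include <stdlib.h>
#include <stdint.h>

#define TILE_SIZE  64
#define POOL_BYTES 4096

struct pool {
    struct pool *next;       /* all blocks ever allocated, for teardown */
    unsigned n_tiles;
    unsigned n_used;
};

struct mempool {
    struct pool *first_pool;
    void *freelist;          /* singly linked through the freed tiles */
};

static void *mempool_alloc_tile(struct mempool *mp)
{
    struct pool *p;

    /* Reuse a returned tile if any are on the freelist. */
    if (mp->freelist) {
        void *t = mp->freelist;
        mp->freelist = *(void **)t;
        return t;
    }

    p = mp->first_pool;
    if (!p || p->n_used >= p->n_tiles) {
        p = malloc(POOL_BYTES);
        if (!p)
            return NULL;
        p->next = mp->first_pool;
        p->n_tiles = (POOL_BYTES - sizeof(struct pool)) / TILE_SIZE;
        p->n_used = 0;
        mp->first_pool = p;
    }

    /* Tiles live immediately after the pool header. */
    return (uint8_t *)p + sizeof(struct pool) + (p->n_used++) * TILE_SIZE;
}

static void mempool_free_tile(struct mempool *mp, void *t)
{
    *(void **)t = mp->freelist;  /* push onto the freelist */
    mp->freelist = t;
}
```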
/third_party/node/deps/v8/tools/testrunner/local/ |
H A D | pool_test.py |
 15  from testrunner.local.pool import Pool
 28  pool = Pool(3)
 29  for result in pool.imap_unordered(Run, [[x] for x in range(0, 10)]):
 38  pool = Pool(3)
 40  for result in pool.imap_unordered(Run, [[x] for x in range(0, 12)]):
 52  pool = Pool(3)
 53  for result in pool.imap_unordered(Run, [[x] for x in range(0, 10)]):
 59  pool.add([result.value + 20])
 |
/kernel/linux/linux-5.10/drivers/scsi/megaraid/ |
H A D | megaraid_mm.c | 207 * Return the kioc to free pool in mraid_mm_ioctl() 502 * First we search for a pool with smallest buffer that is >= @xferlen. If 503 * that pool has no free buffer, we will try for the next bigger size. If none 505 * @xferlen and attach it the pool. 510 mm_dmapool_t *pool; in mraid_mm_attach_buf() local 527 pool = &adp->dma_pool_list[i]; in mraid_mm_attach_buf() 529 if (xferlen > pool->buf_size) in mraid_mm_attach_buf() 535 spin_lock_irqsave(&pool->lock, flags); in mraid_mm_attach_buf() 537 if (!pool->in_use) { in mraid_mm_attach_buf() 539 pool in mraid_mm_attach_buf() 636 mm_dmapool_t *pool; mraid_mm_dealloc_kioc() local 1070 mm_dmapool_t *pool; mraid_mm_setup_dma_pools() local 1186 mm_dmapool_t *pool; mraid_mm_teardown_dma_pools() local [all...] |
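The megaraid_mm comment above spells out the buffer-attach policy: search for the pool whose buffer size is the smallest one still >= the transfer length; if that pool's buffer is busy, try the next bigger class; if nothing fits, allocate a dedicated buffer. A generic sketch of that best-fit-with-fallback search follows; the structure names and the single-buffer-per-class simplification are hypothetical, not megaraid's types.

```c
/*
 * Size-class selection sketch: classes are held in ascending order of
 * buf_size; the smallest fitting class that is free wins, larger classes
 * are fallbacks, and NULL means "allocate a one-off buffer".
 */
#include <stddef.h>
#include <stdbool.h>

#define MAX_DMA_POOLS 4

struct dma_pool_slot {
    size_t buf_size;     /* size class, ascending order */
    bool   in_use;       /* single buffer per class in this sketch */
    void  *vaddr;
};

static struct dma_pool_slot *
attach_buf(struct dma_pool_slot pools[MAX_DMA_POOLS], size_t xferlen)
{
    for (int i = 0; i < MAX_DMA_POOLS; i++) {
        struct dma_pool_slot *slot = &pools[i];

        /* Skip classes that are too small for this transfer. */
        if (xferlen > slot->buf_size)
            continue;

        /* First fitting class that is free wins; otherwise try bigger. */
        if (!slot->in_use) {
            slot->in_use = true;
            return slot;
        }
    }
    return NULL;    /* all fitting classes busy: caller allocates its own */
}
```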
/kernel/linux/linux-6.6/drivers/scsi/megaraid/ |
H A D | megaraid_mm.c | 207 * Return the kioc to free pool in mraid_mm_ioctl() 502 * First we search for a pool with smallest buffer that is >= @xferlen. If 503 * that pool has no free buffer, we will try for the next bigger size. If none 505 * @xferlen and attach it the pool. 510 mm_dmapool_t *pool; in mraid_mm_attach_buf() local 527 pool = &adp->dma_pool_list[i]; in mraid_mm_attach_buf() 529 if (xferlen > pool->buf_size) in mraid_mm_attach_buf() 535 spin_lock_irqsave(&pool->lock, flags); in mraid_mm_attach_buf() 537 if (!pool->in_use) { in mraid_mm_attach_buf() 539 pool in mraid_mm_attach_buf() 636 mm_dmapool_t *pool; mraid_mm_dealloc_kioc() local 1070 mm_dmapool_t *pool; mraid_mm_setup_dma_pools() local 1186 mm_dmapool_t *pool; mraid_mm_teardown_dma_pools() local [all...] |
/kernel/linux/linux-5.10/drivers/dma/ |
H A D | coh901318.h | 46 * coh901318_pool_create() - Creates an dma pool for lli:s 47 * @pool: pool handle 49 * @lli_nbr: number of lli:s in the pool 53 int coh901318_pool_create(struct coh901318_pool *pool, 58 * coh901318_pool_destroy() - Destroys the dma pool 59 * @pool: pool handle 62 int coh901318_pool_destroy(struct coh901318_pool *pool); 67 * @pool [all...] |
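The coh901318 header documents a per-device pool of lli (linked-list item) descriptors. Fixed-size, DMA-coherent descriptor pools like this are commonly layered on the kernel's dmapool API; the sketch below shows that underlying API, not the coh901318_pool_* functions themselves, and the device pointer, element size, and alignment are placeholders.

```c
/*
 * dmapool sketch: a pool of small, equally sized DMA-coherent descriptors
 * allocated and freed by (virtual address, DMA handle) pairs.
 */
#include <linux/dmapool.h>
#include <linux/device.h>
#include <linux/gfp.h>
#include <linux/errno.h>

#define LLI_SIZE  32
#define LLI_ALIGN 32

static struct dma_pool *lli_pool_create(struct device *dev)
{
    /* name, device, element size, alignment, boundary (0 = none) */
    return dma_pool_create("lli_pool", dev, LLI_SIZE, LLI_ALIGN, 0);
}

static int lli_alloc_and_free(struct dma_pool *pool)
{
    dma_addr_t phy;
    void *lli;

    lli = dma_pool_alloc(pool, GFP_KERNEL, &phy);   /* one descriptor */
    if (!lli)
        return -ENOMEM;

    /* ... fill the descriptor, hand `phy` to the DMA controller ... */

    dma_pool_free(pool, lli, phy);
    dma_pool_destroy(pool);
    return 0;
}
```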
/kernel/linux/linux-6.6/drivers/net/ethernet/microchip/sparx5/ |
H A D | sparx5_pool.c | 20 /* Release resource from pool. 23 int sparx5_pool_put(struct sparx5_pool_entry *pool, int size, u32 id) in sparx5_pool_put() argument 27 e_itr = (pool + sparx5_pool_id_to_idx(id)); in sparx5_pool_put() 34 /* Get resource from pool. 37 int sparx5_pool_get(struct sparx5_pool_entry *pool, int size, u32 *id) in sparx5_pool_get() argument 42 for (i = 0, e_itr = pool; i < size; i++, e_itr++) { in sparx5_pool_get() 52 /* Get resource from pool that matches index. 55 int sparx5_pool_get_with_idx(struct sparx5_pool_entry *pool, int size, u32 idx, in sparx5_pool_get_with_idx() argument 61 for (i = 0, e_itr = pool; i < size; i++, e_itr++) { in sparx5_pool_get_with_idx() 75 e_itr = (pool in sparx5_pool_get_with_idx() [all...] |
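sparx5_pool.c implements a small indexed pool: get() hands back the first free index (or a specific requested one) and put() releases it. The entries themselves are not shown above; the sketch below assumes a simple per-entry reference count, and all names are hypothetical rather than the sparx5 types.

```c
/*
 * Indexed pool sketch: a fixed array of reference-counted entries, where
 * "get" claims the first free slot and "put" releases a slot by index.
 */
#include <errno.h>

struct idx_pool_entry {
    unsigned int ref_cnt;
};

static int idx_pool_get(struct idx_pool_entry *pool, int size,
                        unsigned int *id)
{
    for (int i = 0; i < size; i++) {
        if (pool[i].ref_cnt == 0) {
            pool[i].ref_cnt = 1;   /* claim the slot */
            *id = (unsigned int)i;
            return 0;
        }
    }
    return -ENOSPC;                /* every index is in use */
}

static int idx_pool_put(struct idx_pool_entry *pool, int size,
                        unsigned int id)
{
    if (id >= (unsigned int)size || pool[id].ref_cnt == 0)
        return -EINVAL;
    pool[id].ref_cnt--;            /* free again when the count hits zero */
    return 0;
}
```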
/kernel/linux/linux-5.10/drivers/staging/media/atomisp/pci/hmm/ |
H A D | hmm_dynamic_pool.c | 21 * This file contains functions for dynamic memory pool management 34 * dynamic memory pool ops. 36 static unsigned int get_pages_from_dynamic_pool(void *pool, in get_pages_from_dynamic_pool() argument 43 struct hmm_dynamic_pool_info *dypool_info = pool; in get_pages_from_dynamic_pool() 73 static void free_pages_to_dynamic_pool(void *pool, in free_pages_to_dynamic_pool() argument 79 struct hmm_dynamic_pool_info *dypool_info = pool; in free_pages_to_dynamic_pool() 140 static int hmm_dynamic_pool_init(void **pool, unsigned int pool_size) in hmm_dynamic_pool_init() argument 166 *pool = dypool_info; in hmm_dynamic_pool_init() 171 static void hmm_dynamic_pool_exit(void **pool) in hmm_dynamic_pool_exit() argument 173 struct hmm_dynamic_pool_info *dypool_info = *pool; in hmm_dynamic_pool_exit() 218 hmm_dynamic_pool_inited(void *pool) hmm_dynamic_pool_inited() argument [all...] |
H A D | hmm_reserved_pool.c | 21 * This file contains functions for reserved memory pool management 33 * reserved memory pool ops. 35 static unsigned int get_pages_from_reserved_pool(void *pool, in get_pages_from_reserved_pool() argument 43 struct hmm_reserved_pool_info *repool_info = pool; in get_pages_from_reserved_pool() 65 static void free_pages_to_reserved_pool(void *pool, in free_pages_to_reserved_pool() argument 69 struct hmm_reserved_pool_info *repool_info = pool; in free_pages_to_reserved_pool() 112 static int hmm_reserved_pool_init(void **pool, unsigned int pool_size) in hmm_reserved_pool_init() argument 186 *pool = repool_info; in hmm_reserved_pool_init() 194 static void hmm_reserved_pool_exit(void **pool) in hmm_reserved_pool_exit() argument 199 struct hmm_reserved_pool_info *repool_info = *pool; in hmm_reserved_pool_exit() 237 hmm_reserved_pool_inited(void *pool) hmm_reserved_pool_inited() argument [all...] |
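The two atomisp hmm files above expose a dynamic and a reserved page pool through the same set of operations taking an opaque `void *pool` (init, exit, get pages, free pages, inited). A sketch of that ops-table indirection follows; the struct layout and names are hypothetical, not the atomisp types.

```c
/*
 * Pool-ops sketch: each backend (dynamic, reserved, ...) fills the same
 * operations table over an opaque handle, so callers can switch backends
 * without knowing their layout.
 */
#include <stddef.h>

struct page_pool_ops {
    int          (*init)(void **pool, unsigned int pool_size);
    void         (*exit)(void **pool);
    unsigned int (*get_pages)(void *pool, void **pages, unsigned int count);
    void         (*free_pages)(void *pool, void **pages, unsigned int count);
    int          (*inited)(void *pool);
};

struct page_pool {
    const struct page_pool_ops *ops;
    void *priv;                       /* backend-specific state */
};

/* Callers go through the ops table and never see the backend's internals. */
static unsigned int pool_get_pages(struct page_pool *pp,
                                   void **pages, unsigned int count)
{
    if (!pp->ops->inited(pp->priv))
        return 0;
    return pp->ops->get_pages(pp->priv, pages, count);
}
```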
/third_party/skia/third_party/externals/swiftshader/src/Vulkan/ |
H A D | VkQueryPool.cpp | 84 : pool(reinterpret_cast<Query *>(mem)) in QueryPool() 102 new(&pool[i]) Query(type); in QueryPool() 110 pool[i].~Query(); in destroy() 113 vk::freeHostMemory(pool, pAllocator); in destroy() 134 auto &query = pool[i]; in getResults() 194 pool[query].start(); in begin() 200 pool[query].finish(); 210 pool[i].reset(); 219 pool[query].start(); 220 pool[quer [all...] |
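The SwiftShader file above implements the driver side of VkQueryPool (an array of Query objects with begin/end, reset, and getResults paths). From the application side, the matching Vulkan C calls are vkCreateQueryPool, vkCmdBeginQuery/vkCmdEndQuery, vkCmdResetQueryPool, and vkGetQueryPoolResults; the sketch below uses those, with `device` and `cmd` assumed to exist and be in a recording state.

```c
/*
 * Query-pool usage sketch: create a pool of occlusion queries, record a
 * begin/end pair around some draws, then read the 64-bit results back.
 */
#include <vulkan/vulkan.h>

#define QUERY_COUNT 8

VkQueryPool make_query_pool(VkDevice device)
{
    VkQueryPoolCreateInfo info = {
        .sType = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
        .queryType = VK_QUERY_TYPE_OCCLUSION,
        .queryCount = QUERY_COUNT,
    };
    VkQueryPool pool = VK_NULL_HANDLE;

    vkCreateQueryPool(device, &info, NULL, &pool);
    return pool;
}

void record_query(VkCommandBuffer cmd, VkQueryPool pool, uint32_t query)
{
    /* Queries must be reset before reuse, matching the reset path above. */
    vkCmdResetQueryPool(cmd, pool, query, 1);
    vkCmdBeginQuery(cmd, pool, query, 0);
    /* ... vkCmdDraw*() calls being measured ... */
    vkCmdEndQuery(cmd, pool, query);
}

VkResult read_results(VkDevice device, VkQueryPool pool, uint64_t *results)
{
    return vkGetQueryPoolResults(device, pool, 0, QUERY_COUNT,
                                 QUERY_COUNT * sizeof(uint64_t), results,
                                 sizeof(uint64_t),
                                 VK_QUERY_RESULT_64_BIT |
                                 VK_QUERY_RESULT_WAIT_BIT);
}
```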