Lines Matching defs:pool
686 static void kdesc_fill_pool(struct knav_pool *pool)
691 region = pool->region;
692 pool->desc_size = region->desc_size;
693 for (i = 0; i < pool->num_desc; i++) {
694 int index = pool->region_offset + i;
698 dma_size = ALIGN(pool->desc_size, SMP_CACHE_BYTES);
699 dma_sync_single_for_device(pool->dev, dma_addr, dma_size,
701 knav_queue_push(pool->queue, dma_addr, dma_size, 0);
706 static void kdesc_empty_pool(struct knav_pool *pool)
713 if (!pool->queue)
717 dma = knav_queue_pop(pool->queue, &size);
720 desc = knav_pool_desc_dma_to_virt(pool, dma);
722 dev_dbg(pool->kdev->dev,
727 WARN_ON(i != pool->num_desc);
728 knav_queue_close(pool->queue);
735 struct knav_pool *pool = ph;
736 return pool->region->dma_start + (virt - pool->region->virt_start);
742 struct knav_pool *pool = ph;
743 return pool->region->virt_start + (dma - pool->region->dma_start);
748 * knav_pool_create() - Create a pool of descriptors
749 * @name - name to give the pool handle
750 @num_desc - number of descriptors in the pool
754 * Returns a pool handle on success.
761 struct knav_pool *pool, *pi;
773 pool = devm_kzalloc(kdev->dev, sizeof(*pool), GFP_KERNEL);
774 if (!pool) {
775 dev_err(kdev->dev, "out of memory allocating pool\n");
792 pool->queue = knav_queue_open(name, KNAV_QUEUE_GP, 0);
793 if (IS_ERR_OR_NULL(pool->queue)) {
795 "failed to open queue for pool(%s), error %ld\n",
796 name, PTR_ERR(pool->queue));
797 ret = PTR_ERR(pool->queue);
801 pool->name = kstrndup(name, KNAV_NAME_SIZE - 1, GFP_KERNEL);
802 pool->kdev = kdev;
803 pool->dev = kdev->dev;
808 dev_err(kdev->dev, "out of descs in region(%d) for pool(%s)\n",
831 pool->region = region;
832 pool->num_desc = num_desc;
833 pool->region_offset = last_offset;
835 list_add_tail(&pool->list, &kdev->pools);
836 list_add_tail(&pool->region_inst, node);
838 dev_err(kdev->dev, "pool(%s) create failed: fragmented desc pool in region(%d)\n",
845 kdesc_fill_pool(pool);
846 return pool;
851 kfree(pool->name);
852 devm_kfree(kdev->dev, pool);
858 * knav_pool_destroy() - Free a pool of descriptors
859 * @pool - pool handle
863 struct knav_pool *pool = ph;
865 if (!pool)
868 if (!pool->region)
871 kdesc_empty_pool(pool);
874 pool->region->used_desc -= pool->num_desc;
875 list_del(&pool->region_inst);
876 list_del(&pool->list);
879 kfree(pool->name);
880 devm_kfree(kdev->dev, pool);
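knav_pool_create() and knav_pool_destroy() above are the pool lifecycle entry points exported to client drivers. A minimal usage sketch, assuming the prototypes exported through include/linux/soc/ti/knav_qmss.h; the pool name "example-tx-pool", the 512-descriptor count and region id 0 are illustration values only:

    #include <linux/device.h>
    #include <linux/err.h>
    #include <linux/soc/ti/knav_qmss.h>

    /* Hypothetical probe-time helper: carve 512 descriptors for this
     * device out of QMSS descriptor region 0. */
    static void *example_pool_setup(struct device *dev)
    {
    	void *pool;

    	pool = knav_pool_create("example-tx-pool", 512, 0);
    	if (IS_ERR_OR_NULL(pool)) {
    		dev_err(dev, "descriptor pool create failed\n");
    		return NULL;
    	}
    	return pool;
    }

    /* Hypothetical remove-time counterpart: drains the descriptors back
     * to the region and frees the handle. */
    static void example_pool_teardown(void *pool)
    {
    	knav_pool_destroy(pool);
    }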
886 * knav_pool_desc_get() - Get a descriptor from the pool
887 * @pool - pool handle
889 Returns a descriptor from the pool.
893 struct knav_pool *pool = ph;
898 dma = knav_queue_pop(pool->queue, &size);
901 data = knav_pool_desc_dma_to_virt(pool, dma);
907 * knav_pool_desc_put() - return a descriptor to the pool
908 * @pool - pool handle
912 struct knav_pool *pool = ph;
914 dma = knav_pool_desc_virt_to_dma(pool, desc);
915 knav_queue_push(pool->queue, dma, pool->region->desc_size, 0);
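knav_pool_desc_get() and knav_pool_desc_put() form the per-descriptor allocate/free pair on top of the pool's GP queue. A hedged sketch of that cycle, with the descriptor-formatting step left as a placeholder:

    #include <linux/err.h>
    #include <linux/soc/ti/knav_qmss.h>

    /* Hypothetical transmit-path helper: pop a free descriptor off the
     * pool's queue, use it, then hand it back. */
    static int example_desc_cycle(void *pool)
    {
    	void *desc;

    	desc = knav_pool_desc_get(pool);
    	if (IS_ERR(desc))	/* an empty pool shows up as an ERR_PTR() */
    		return PTR_ERR(desc);

    	/* ... fill in the hardware descriptor fields here ... */

    	knav_pool_desc_put(pool, desc);	/* back onto the free queue */
    	return 0;
    }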
921 * @pool - pool handle
932 struct knav_pool *pool = ph;
933 *dma = knav_pool_desc_virt_to_dma(pool, desc);
934 size = min(size, pool->region->desc_size);
937 dma_sync_single_for_device(pool->dev, *dma, size, DMA_TO_DEVICE);
948 * @pool - pool handle
957 struct knav_pool *pool = ph;
961 desc_sz = min(dma_sz, pool->region->desc_size);
962 desc = knav_pool_desc_dma_to_virt(pool, dma);
963 dma_sync_single_for_cpu(pool->dev, dma, desc_sz, DMA_FROM_DEVICE);
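knav_pool_desc_map() and knav_pool_desc_unmap() wrap the virt/dma translation and cache maintenance shown above for hand-off to and from the hardware. A sketch under the same header assumption; the actual hardware queue push/pop is left as a placeholder comment:

    #include <linux/dma-mapping.h>
    #include <linux/soc/ti/knav_qmss.h>

    /* Hypothetical submit path: translate the CPU pointer to a DMA
     * address and sync the descriptor toward the device before queuing. */
    static int example_desc_to_hw(void *pool, void *desc, unsigned int desc_sz)
    {
    	unsigned int dma_sz;
    	dma_addr_t dma;
    	int ret;

    	ret = knav_pool_desc_map(pool, desc, desc_sz, &dma, &dma_sz);
    	if (ret)
    		return ret;

    	/* ... hand dma/dma_sz to the hardware transmit queue here ... */
    	return 0;
    }

    /* Hypothetical completion path: sync for the CPU and translate the
     * DMA address popped from the completion queue back to a pointer. */
    static void *example_desc_from_hw(void *pool, dma_addr_t dma, unsigned int dma_sz)
    {
    	return knav_pool_desc_unmap(pool, dma, dma_sz);
    }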
970 * knav_pool_count() - Get the number of descriptors in the pool.
971 @pool - pool handle
972 Returns the number of elements in the pool.
976 struct knav_pool *pool = ph;
977 return knav_queue_get_count(pool->queue);
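Because knav_pool_count() just reads the occupancy of the pool's free-descriptor queue, callers can use it as a cheap availability check; a hypothetical one-liner:

    #include <linux/types.h>
    #include <linux/soc/ti/knav_qmss.h>

    /* Hypothetical helper: true when at least 'needed' free descriptors
     * are still sitting on the pool's queue. */
    static bool example_pool_has_room(void *pool, int needed)
    {
    	return knav_pool_count(pool) >= needed;
    }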
987 struct knav_pool *pool;
1029 pool = devm_kzalloc(kdev->dev, sizeof(*pool), GFP_KERNEL);
1030 if (!pool) {
1031 dev_err(kdev->dev, "out of memory allocating dummy pool\n");
1034 pool->num_desc = 0;
1035 pool->region_offset = region->num_desc;
1036 list_add(&pool->region_inst, &region->pools);
1351 struct knav_pool *pool, *tmp;
1358 list_for_each_entry_safe(pool, tmp, &region->pools, region_inst)
1359 knav_pool_destroy(pool);