
Searched refs:shmem (Results 1 - 25 of 55) sorted by relevance


/kernel/linux/linux-5.10/drivers/gpu/drm/
drm_gem_shmem_helper.c
23 * This library provides helpers for GEM objects backed by shmem buffers
41 struct drm_gem_shmem_object *shmem; in __drm_gem_shmem_create() local
50 obj = kzalloc(sizeof(*shmem), GFP_KERNEL); in __drm_gem_shmem_create()
68 shmem = to_drm_gem_shmem_obj(obj); in __drm_gem_shmem_create()
69 mutex_init(&shmem->pages_lock); in __drm_gem_shmem_create()
70 mutex_init(&shmem->vmap_lock); in __drm_gem_shmem_create()
71 INIT_LIST_HEAD(&shmem->madv_list); in __drm_gem_shmem_create()
85 return shmem; in __drm_gem_shmem_create()
99 * This function creates a shmem GEM object.
112 * drm_gem_shmem_free_object - Free resources associated with a shmem GEM object
121 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_free_object() local
147 drm_gem_shmem_get_pages_locked(struct drm_gem_shmem_object *shmem) drm_gem_shmem_get_pages_locked() argument
177 drm_gem_shmem_get_pages(struct drm_gem_shmem_object *shmem) drm_gem_shmem_get_pages() argument
193 drm_gem_shmem_put_pages_locked(struct drm_gem_shmem_object *shmem) drm_gem_shmem_put_pages_locked() argument
215 drm_gem_shmem_put_pages(struct drm_gem_shmem_object *shmem) drm_gem_shmem_put_pages() argument
236 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_pin() local
253 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_unpin() local
261 drm_gem_shmem_vmap_locked(struct drm_gem_shmem_object *shmem) drm_gem_shmem_vmap_locked() argument
319 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_vmap() local
333 drm_gem_shmem_vunmap_locked(struct drm_gem_shmem_object *shmem) drm_gem_shmem_vunmap_locked() argument
367 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_vunmap() local
380 struct drm_gem_shmem_object *shmem; drm_gem_shmem_create_with_handle() local
406 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_madvise() local
424 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_purge_locked() local
454 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_purge() local
481 struct drm_gem_shmem_object *shmem; drm_gem_shmem_create_object_cached() local
513 struct drm_gem_shmem_object *shmem; drm_gem_shmem_dumb_create() local
536 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_fault() local
565 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_vm_open() local
587 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_vm_close() local
613 struct drm_gem_shmem_object *shmem; drm_gem_shmem_mmap() local
660 const struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_print_info() local
686 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_get_sg_table() local
713 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_get_pages_sgt() local
769 struct drm_gem_shmem_object *shmem; drm_gem_shmem_prime_import_sg_table() local
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/
drm_gem_shmem_helper.c
31 * This library provides helpers for GEM objects backed by shmem buffers
55 struct drm_gem_shmem_object *shmem; in __drm_gem_shmem_create() local
65 shmem = to_drm_gem_shmem_obj(obj); in __drm_gem_shmem_create()
67 shmem = kzalloc(sizeof(*shmem), GFP_KERNEL); in __drm_gem_shmem_create()
68 if (!shmem) in __drm_gem_shmem_create()
70 obj = &shmem->base; in __drm_gem_shmem_create()
78 shmem->map_wc = false; /* dma-buf mappings use always writecombine */ in __drm_gem_shmem_create()
91 INIT_LIST_HEAD(&shmem->madv_list); in __drm_gem_shmem_create()
105 return shmem; in __drm_gem_shmem_create()
138 drm_gem_shmem_free(struct drm_gem_shmem_object *shmem) drm_gem_shmem_free() argument
168 drm_gem_shmem_get_pages(struct drm_gem_shmem_object *shmem) drm_gem_shmem_get_pages() argument
207 drm_gem_shmem_put_pages(struct drm_gem_shmem_object *shmem) drm_gem_shmem_put_pages() argument
231 drm_gem_shmem_pin_locked(struct drm_gem_shmem_object *shmem) drm_gem_shmem_pin_locked() argument
242 drm_gem_shmem_unpin_locked(struct drm_gem_shmem_object *shmem) drm_gem_shmem_unpin_locked() argument
259 drm_gem_shmem_pin(struct drm_gem_shmem_object *shmem) drm_gem_shmem_pin() argument
283 drm_gem_shmem_unpin(struct drm_gem_shmem_object *shmem) drm_gem_shmem_unpin() argument
310 drm_gem_shmem_vmap(struct drm_gem_shmem_object *shmem, struct iosys_map *map) drm_gem_shmem_vmap() argument
377 drm_gem_shmem_vunmap(struct drm_gem_shmem_object *shmem, struct iosys_map *map) drm_gem_shmem_vunmap() argument
406 struct drm_gem_shmem_object *shmem; drm_gem_shmem_create_with_handle() local
427 drm_gem_shmem_madvise(struct drm_gem_shmem_object *shmem, int madv) drm_gem_shmem_madvise() argument
440 drm_gem_shmem_purge(struct drm_gem_shmem_object *shmem) drm_gem_shmem_purge() argument
513 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_fault() local
542 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_vm_open() local
564 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_vm_close() local
591 drm_gem_shmem_mmap(struct drm_gem_shmem_object *shmem, struct vm_area_struct *vma) drm_gem_shmem_mmap() argument
635 drm_gem_shmem_print_info(const struct drm_gem_shmem_object *shmem, struct drm_printer *p, unsigned int indent) drm_gem_shmem_print_info() argument
661 drm_gem_shmem_get_sg_table(struct drm_gem_shmem_object *shmem) drm_gem_shmem_get_sg_table() argument
671 drm_gem_shmem_get_pages_sgt_locked(struct drm_gem_shmem_object *shmem) drm_gem_shmem_get_pages_sgt_locked() argument
724 drm_gem_shmem_get_pages_sgt(struct drm_gem_shmem_object *shmem) drm_gem_shmem_get_pages_sgt() argument
760 struct drm_gem_shmem_object *shmem; drm_gem_shmem_prime_import_sg_table() local
[all...]
/kernel/linux/linux-6.6/include/drm/
drm_gem_shmem_helper.h
21 * struct drm_gem_shmem_object - GEM object backed by shmem
91 * @map_wc: map object write-combined (instead of using shmem defaults).
100 void drm_gem_shmem_free(struct drm_gem_shmem_object *shmem);
102 void drm_gem_shmem_put_pages(struct drm_gem_shmem_object *shmem);
103 int drm_gem_shmem_pin(struct drm_gem_shmem_object *shmem);
104 void drm_gem_shmem_unpin(struct drm_gem_shmem_object *shmem);
105 int drm_gem_shmem_vmap(struct drm_gem_shmem_object *shmem,
107 void drm_gem_shmem_vunmap(struct drm_gem_shmem_object *shmem,
109 int drm_gem_shmem_mmap(struct drm_gem_shmem_object *shmem, struct vm_area_struct *vma);
111 int drm_gem_shmem_madvise(struct drm_gem_shmem_object *shmem, int madv);
113 drm_gem_shmem_is_purgeable(struct drm_gem_shmem_object *shmem) drm_gem_shmem_is_purgeable() argument
143 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_object_free() local
160 const struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_object_print_info() local
174 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_object_pin() local
188 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_object_unpin() local
205 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_object_get_sg_table() local
224 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_object_vmap() local
240 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_object_vunmap() local
258 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); drm_gem_shmem_object_mmap() local
[all...]
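
A minimal usage sketch (not one of the indexed files), assuming a driver that already holds a struct drm_device and following the linux-6.6 signatures listed above; example_create_and_map() is an invented name, and the reservation lock is taken around vmap/vunmap as the 6.6 helpers expect:

#include <linux/dma-resv.h>
#include <linux/err.h>
#include <linux/iosys-map.h>
#include <drm/drm_gem.h>
#include <drm/drm_gem_shmem_helper.h>

/* Illustrative only: create a shmem-backed BO, zero it through a vmap, then drop it. */
static int example_create_and_map(struct drm_device *dev, size_t size)
{
	struct drm_gem_shmem_object *shmem;
	struct iosys_map map;
	int ret;

	shmem = drm_gem_shmem_create(dev, size);	/* shmem-backed GEM object */
	if (IS_ERR(shmem))
		return PTR_ERR(shmem);

	dma_resv_lock(shmem->base.resv, NULL);		/* vmap/vunmap expect the resv lock held */
	ret = drm_gem_shmem_vmap(shmem, &map);
	if (!ret) {
		iosys_map_memset(&map, 0, 0, size);	/* use the kernel mapping */
		drm_gem_shmem_vunmap(shmem, &map);
	}
	dma_resv_unlock(shmem->base.resv);

	drm_gem_object_put(&shmem->base);		/* release the only reference */
	return ret;
}
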
/kernel/linux/linux-6.6/drivers/firmware/arm_scmi/
shmem.c
35 void shmem_tx_prepare(struct scmi_shared_mem __iomem *shmem, in shmem_tx_prepare() argument
55 spin_until_cond((ioread32(&shmem->channel_status) & in shmem_tx_prepare()
58 if (!(ioread32(&shmem->channel_status) & in shmem_tx_prepare()
67 iowrite32(0x0, &shmem->channel_status); in shmem_tx_prepare()
69 &shmem->flags); in shmem_tx_prepare()
70 iowrite32(sizeof(shmem->msg_header) + xfer->tx.len, &shmem->length); in shmem_tx_prepare()
71 iowrite32(pack_scmi_header(&xfer->hdr), &shmem->msg_header); in shmem_tx_prepare()
73 memcpy_toio(shmem->msg_payload, xfer->tx.buf, xfer->tx.len); in shmem_tx_prepare()
76 u32 shmem_read_header(struct scmi_shared_mem __iomem *shmem) in shmem_read_header() argument
81 shmem_fetch_response(struct scmi_shared_mem __iomem *shmem, struct scmi_xfer *xfer) shmem_fetch_response() argument
94 shmem_fetch_notification(struct scmi_shared_mem __iomem *shmem, size_t max_len, struct scmi_xfer *xfer) shmem_fetch_notification() argument
106 shmem_clear_channel(struct scmi_shared_mem __iomem *shmem) shmem_clear_channel() argument
111 shmem_poll_done(struct scmi_shared_mem __iomem *shmem, struct scmi_xfer *xfer) shmem_poll_done() argument
126 shmem_channel_free(struct scmi_shared_mem __iomem *shmem) shmem_channel_free() argument
[all...]
mailbox.c
25 * @shmem: Transmit/Receive shared memory area
32 struct scmi_shared_mem __iomem *shmem; member
41 shmem_tx_prepare(smbox->shmem, m, smbox->cinfo); in tx_prepare()
57 if (cl->knows_txdone && !shmem_channel_free(smbox->shmem)) { in rx_callback()
62 scmi_rx_callback(smbox->cinfo, shmem_read_header(smbox->shmem), NULL); in rx_callback()
93 * 'mboxes' and 'shmem', then determine which mailbox channel indexes are
105 num_sh = of_count_phandle_with_args(np, "shmem", NULL); in mailbox_chan_validate()
108 /* Bail out if mboxes and shmem descriptors are inconsistent */ in mailbox_chan_validate()
117 /* Bail out if provided shmem descriptors do not refer distinct areas */ in mailbox_chan_validate()
121 np_tx = of_parse_phandle(np, "shmem", in mailbox_chan_validate()
165 struct device_node *shmem; mailbox_chan_setup() local
[all...]
common.h
304 /* shmem related declarations */
307 void shmem_tx_prepare(struct scmi_shared_mem __iomem *shmem,
309 u32 shmem_read_header(struct scmi_shared_mem __iomem *shmem);
310 void shmem_fetch_response(struct scmi_shared_mem __iomem *shmem,
312 void shmem_fetch_notification(struct scmi_shared_mem __iomem *shmem,
314 void shmem_clear_channel(struct scmi_shared_mem __iomem *shmem);
315 bool shmem_poll_done(struct scmi_shared_mem __iomem *shmem,
317 bool shmem_channel_free(struct scmi_shared_mem __iomem *shmem);
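
These are the building blocks the individual SCMI transports in this directory (mailbox.c, smc.c, optee.c) are written against. A hedged sketch of a polling-mode send path using the linux-6.6 three-argument shmem_tx_prepare(); example_send_polled() and ring_doorbell() are invented placeholders, not driver functions:

/* Assumes it lives inside an arm_scmi transport that includes this common.h. */
static void ring_doorbell(void)
{
	/* Placeholder: mailbox kick, SMC call, OP-TEE invocation, ... */
}

static int example_send_polled(struct scmi_chan_info *cinfo,
			       struct scmi_shared_mem __iomem *shmem,
			       struct scmi_xfer *xfer)
{
	shmem_tx_prepare(shmem, xfer, cinfo);	/* write header and payload into shmem */

	ring_doorbell();			/* tell the platform a command is pending */

	while (!shmem_poll_done(shmem, xfer))	/* wait for the platform to complete the transfer */
		cpu_relax();

	shmem_fetch_response(shmem, xfer);	/* copy status and return payload back out */
	return 0;
}
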
smc.c
24 * The shmem address is split into 4K page and offset.
27 * This however limits the shmem address to 44 bit.
45 * @shmem: Transmit/Receive shared memory area
51 * @param_page: 4K page number of the shmem channel
52 * @param_offset: Offset within the 4K page of the shmem channel
58 struct scmi_shared_mem __iomem *shmem; member
59 /* Protect access to shmem area */
73 shmem_read_header(scmi_info->shmem), NULL); in smc_msg_done_isr()
80 struct device_node *np = of_parse_phandle(of_node, "shmem", 0); in smc_chan_available()
141 np = of_parse_phandle(cdev->of_node, "shmem", in smc_chan_setup()
[all...]
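
The 44-bit limit noted in the smc.c excerpt follows directly from that split: a 32-bit page number plus a 12-bit offset within a 4 KiB page. A hedged illustration of the arithmetic (the EXAMPLE_* names are invented, not the driver's macros):

#include <linux/types.h>

#define EXAMPLE_SHMEM_SHIFT	12				/* 4 KiB pages */
#define EXAMPLE_SHMEM_MASK	((1UL << EXAMPLE_SHMEM_SHIFT) - 1)

/* 32-bit page number + 12-bit in-page offset, so usable addresses top out at 44 bits. */
static inline u32 example_shmem_page(phys_addr_t addr)
{
	return (u32)(addr >> EXAMPLE_SHMEM_SHIFT);
}

static inline u32 example_shmem_offset(phys_addr_t addr)
{
	return (u32)(addr & EXAMPLE_SHMEM_MASK);
}
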
optee.c
89 * When set, OP-TEE supports command using SMT header protocol (SCMI shmem) in
112 * @shmem: Virtual base address of the shared memory
114 * @tee_shm: TEE shared memory handle @req or NULL if using IOMEM shmem
125 struct scmi_shared_mem __iomem *shmem; member
344 shmem_clear_channel(channel->req.shmem); in scmi_optee_clear_channel()
354 dev_err(channel->cinfo->dev, "shmem allocation failed\n"); in setup_dynamic_shmem()
374 np = of_parse_phandle(cinfo->dev->of_node, "shmem", 0); in setup_static_shmem()
375 if (!of_device_is_compatible(np, "arm,scmi-shmem")) { in setup_static_shmem()
388 channel->req.shmem = devm_ioremap(dev, res.start, size); in setup_static_shmem()
389 if (!channel->req.shmem) { in setup_static_shmem()
[all...]
/kernel/linux/linux-5.10/drivers/firmware/arm_scmi/
shmem.c
32 void shmem_tx_prepare(struct scmi_shared_mem __iomem *shmem, in shmem_tx_prepare() argument
41 spin_until_cond(ioread32(&shmem->channel_status) & in shmem_tx_prepare()
44 iowrite32(0x0, &shmem->channel_status); in shmem_tx_prepare()
46 &shmem->flags); in shmem_tx_prepare()
47 iowrite32(sizeof(shmem->msg_header) + xfer->tx.len, &shmem->length); in shmem_tx_prepare()
48 iowrite32(pack_scmi_header(&xfer->hdr), &shmem->msg_header); in shmem_tx_prepare()
50 memcpy_toio(shmem->msg_payload, xfer->tx.buf, xfer->tx.len); in shmem_tx_prepare()
53 u32 shmem_read_header(struct scmi_shared_mem __iomem *shmem) in shmem_read_header() argument
55 return ioread32(&shmem->msg_header); in shmem_read_header()
58 shmem_fetch_response(struct scmi_shared_mem __iomem *shmem, struct scmi_xfer *xfer) shmem_fetch_response() argument
71 shmem_fetch_notification(struct scmi_shared_mem __iomem *shmem, size_t max_len, struct scmi_xfer *xfer) shmem_fetch_notification() argument
83 shmem_clear_channel(struct scmi_shared_mem __iomem *shmem) shmem_clear_channel() argument
88 shmem_poll_done(struct scmi_shared_mem __iomem *shmem, struct scmi_xfer *xfer) shmem_poll_done() argument
[all...]
mailbox.c
24 * @shmem: Transmit/Receive shared memory area
30 struct scmi_shared_mem __iomem *shmem; member
39 shmem_tx_prepare(smbox->shmem, m); in tx_prepare()
46 scmi_rx_callback(smbox->cinfo, shmem_read_header(smbox->shmem)); in rx_callback()
61 num_sh = of_count_phandle_with_args(np, "shmem", NULL); in mailbox_chan_validate()
62 /* Bail out if mboxes and shmem descriptors are inconsistent */ in mailbox_chan_validate()
72 np_tx = of_parse_phandle(np, "shmem", 0); in mailbox_chan_validate()
73 np_rx = of_parse_phandle(np, "shmem", 1); in mailbox_chan_validate()
76 dev_warn(cdev, "Invalid shmem descriptor for '%s'\n", in mailbox_chan_validate()
94 struct device_node *shmem; in mailbox_chan_setup() local
[all...]
smc.c
23 * @shmem: Transmit/Receive shared memory area
30 struct scmi_shared_mem __iomem *shmem; member
37 struct device_node *np = of_parse_phandle(dev->of_node, "shmem", 0); in smc_chan_available()
63 np = of_parse_phandle(cdev->of_node, "shmem", 0); in smc_chan_setup()
72 scmi_info->shmem = devm_ioremap(dev, res.start, size); in smc_chan_setup()
73 if (!scmi_info->shmem) { in smc_chan_setup()
111 shmem_tx_prepare(scmi_info->shmem, xfer); in smc_send_message()
114 scmi_rx_callback(scmi_info->cinfo, shmem_read_header(scmi_info->shmem)); in smc_send_message()
129 shmem_fetch_response(scmi_info->shmem, xfer); in smc_fetch_response()
137 return shmem_poll_done(scmi_info->shmem, xfer); in smc_poll_done()
[all...]
common.h
253 /* shmem related declarations */
256 void shmem_tx_prepare(struct scmi_shared_mem __iomem *shmem,
258 u32 shmem_read_header(struct scmi_shared_mem __iomem *shmem);
259 void shmem_fetch_response(struct scmi_shared_mem __iomem *shmem,
261 void shmem_fetch_notification(struct scmi_shared_mem __iomem *shmem,
263 void shmem_clear_channel(struct scmi_shared_mem __iomem *shmem);
264 bool shmem_poll_done(struct scmi_shared_mem __iomem *shmem,
/kernel/linux/linux-5.10/drivers/gpu/drm/virtio/
virtgpu_object.c
71 struct virtio_gpu_object_shmem *shmem = to_virtio_gpu_shmem(bo); in virtio_gpu_cleanup_object() local
73 if (shmem->pages) { in virtio_gpu_cleanup_object()
74 if (shmem->mapped) { in virtio_gpu_cleanup_object()
76 shmem->pages, DMA_TO_DEVICE, 0); in virtio_gpu_cleanup_object()
77 shmem->mapped = 0; in virtio_gpu_cleanup_object()
80 sg_free_table(shmem->pages); in virtio_gpu_cleanup_object()
81 kfree(shmem->pages); in virtio_gpu_cleanup_object()
82 shmem->pages = NULL; in virtio_gpu_cleanup_object()
126 struct virtio_gpu_object_shmem *shmem; in virtio_gpu_create_object() local
129 shmem = kzalloc(sizeof(*shmem), GFP_KERNEL); in virtio_gpu_create_object()
145 struct virtio_gpu_object_shmem *shmem = to_virtio_gpu_shmem(bo); virtio_gpu_object_shmem_init() local
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/panfrost/
panfrost_gem_shrinker.c
23 struct drm_gem_shmem_object *shmem; in panfrost_gem_shrinker_count() local
29 list_for_each_entry(shmem, &pfdev->shrinker_list, madv_list) { in panfrost_gem_shrinker_count()
30 if (drm_gem_shmem_is_purgeable(shmem)) in panfrost_gem_shrinker_count()
31 count += shmem->base.size >> PAGE_SHIFT; in panfrost_gem_shrinker_count()
41 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); in panfrost_gem_purge() local
51 if (!mutex_trylock(&shmem->pages_lock)) in panfrost_gem_purge()
58 mutex_unlock(&shmem->pages_lock); in panfrost_gem_purge()
70 struct drm_gem_shmem_object *shmem, *tmp; in panfrost_gem_shrinker_scan() local
76 list_for_each_entry_safe(shmem, tmp, &pfdev->shrinker_list, madv_list) { in panfrost_gem_shrinker_scan()
79 if (drm_gem_shmem_is_purgeable(shmem) in panfrost_gem_shrinker_scan()
[all...]
panfrost_gem.c
237 struct drm_gem_shmem_object *shmem; in panfrost_gem_create() local
244 shmem = drm_gem_shmem_create(dev, size); in panfrost_gem_create()
245 if (IS_ERR(shmem)) in panfrost_gem_create()
246 return ERR_CAST(shmem); in panfrost_gem_create()
248 bo = to_panfrost_bo(&shmem->base); in panfrost_gem_create()
/kernel/linux/linux-6.6/drivers/gpu/drm/panfrost/
panfrost_gem_shrinker.c
23 struct drm_gem_shmem_object *shmem; in panfrost_gem_shrinker_count() local
29 list_for_each_entry(shmem, &pfdev->shrinker_list, madv_list) { in panfrost_gem_shrinker_count()
30 if (drm_gem_shmem_is_purgeable(shmem)) in panfrost_gem_shrinker_count()
31 count += shmem->base.size >> PAGE_SHIFT; in panfrost_gem_shrinker_count()
41 struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj); in panfrost_gem_purge() local
51 if (!dma_resv_trylock(shmem->base.resv)) in panfrost_gem_purge()
58 dma_resv_unlock(shmem->base.resv); in panfrost_gem_purge()
70 struct drm_gem_shmem_object *shmem, *tmp; in panfrost_gem_shrinker_scan() local
76 list_for_each_entry_safe(shmem, tmp, &pfdev->shrinker_list, madv_list) { in panfrost_gem_shrinker_scan()
79 if (drm_gem_shmem_is_purgeable(shmem) in panfrost_gem_shrinker_scan()
[all...]
panfrost_gem.c
240 struct drm_gem_shmem_object *shmem; in panfrost_gem_create() local
247 shmem = drm_gem_shmem_create(dev, size); in panfrost_gem_create()
248 if (IS_ERR(shmem)) in panfrost_gem_create()
249 return ERR_CAST(shmem); in panfrost_gem_create()
251 bo = to_panfrost_bo(&shmem->base); in panfrost_gem_create()
/kernel/linux/linux-5.10/include/drm/
drm_gem_shmem_helper.h
21 * struct drm_gem_shmem_object - GEM object backed by shmem
112 int drm_gem_shmem_get_pages(struct drm_gem_shmem_object *shmem);
113 void drm_gem_shmem_put_pages(struct drm_gem_shmem_object *shmem);
121 static inline bool drm_gem_shmem_is_purgeable(struct drm_gem_shmem_object *shmem) in drm_gem_shmem_is_purgeable() argument
123 return (shmem->madv > 0) && in drm_gem_shmem_is_purgeable()
124 !shmem->vmap_use_count && shmem->sgt && in drm_gem_shmem_is_purgeable()
125 !shmem->base.dma_buf && !shmem->base.import_attach; in drm_gem_shmem_is_purgeable()
156 * DRM_GEM_SHMEM_DRIVER_OPS - Default shmem GEM operations
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/microsoft/mana/
shm_channel.c
81 /* shmem reads as 0xFFFFFFFF in the reset case */ in mana_smc_poll_register()
145 u64 *shmem; in mana_smc_setup_hwc() local
185 shmem = (u64 *)ptr; in mana_smc_setup_hwc()
187 *shmem = frame_addr & PAGE_FRAME_L48_MASK; in mana_smc_setup_hwc()
193 shmem = (u64 *)ptr; in mana_smc_setup_hwc()
195 *shmem = frame_addr & PAGE_FRAME_L48_MASK; in mana_smc_setup_hwc()
201 shmem = (u64 *)ptr; in mana_smc_setup_hwc()
203 *shmem = frame_addr & PAGE_FRAME_L48_MASK; in mana_smc_setup_hwc()
209 shmem = (u64 *)ptr; in mana_smc_setup_hwc()
211 *shmem = frame_addr & PAGE_FRAME_L48_MASK; in mana_smc_setup_hwc()
[all...]
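
The repeated writes above hand each queue's DMA address to the hardware through the shared-memory channel, keeping only the low 48 bits. Assuming PAGE_FRAME_L48_MASK is the low-48-bit mask its name suggests, each slot write amounts to the following; example_store_frame() and EXAMPLE_L48_MASK are invented for illustration:

#include <linux/types.h>

#define EXAMPLE_L48_MASK	0x0000FFFFFFFFFFFFULL	/* assumed: low 48 bits */

/* Store one page-frame address in the next 64-bit shared-memory slot. */
static u64 *example_store_frame(u64 *shmem_slot, u64 frame_addr)
{
	*shmem_slot = frame_addr & EXAMPLE_L48_MASK;	/* upper 16 bits are dropped */
	return shmem_slot + 1;
}
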
/kernel/linux/linux-5.10/drivers/net/arcnet/
com90xx.c
44 * shmem are left in the list at Stage 5, they must correspond to each
58 static int com90xx_found(int ioaddr, int airq, u_long shmem, void __iomem *);
86 static int io; /* use the insmod io= irq= shmem= options */
88 static int shmem; variable
93 module_param(shmem, int, 0);
107 if (!io && !irq && !shmem && !*device && com90xx_skip_probe) in com90xx_probe()
131 if (shmem) in com90xx_probe()
132 shmems[numshmems++] = shmem; in com90xx_probe()
203 /* Stage 3: abandon any shmem addresses that don't have the signature in com90xx_probe()
243 * sure no "mirror" shmem area in com90xx_probe()
460 com90xx_found(int ioaddr, int airq, u_long shmem, void __iomem *p) com90xx_found() argument
[all...]
arc-rimi.c
65 * need to be passed a specific shmem address, IRQ, and node ID.
72 pr_info("Given: node %02Xh, shmem %lXh, irq %d\n", in arcrimi_probe()
78 pr_err("No autoprobe for RIM I; you must specify the shmem and irq!\n"); in arcrimi_probe()
126 unsigned long first_mirror, last_mirror, shmem; in arcrimi_found() local
146 shmem = dev->mem_start; in arcrimi_found()
159 check_mirror(shmem - MIRROR_SIZE, MIRROR_SIZE) == 0 && in arcrimi_found()
160 check_mirror(shmem - 2 * MIRROR_SIZE, MIRROR_SIZE) == 1) in arcrimi_found()
163 first_mirror = shmem - mirror_size; in arcrimi_found()
168 last_mirror = shmem + mirror_size; in arcrimi_found()
194 release_mem_region(shmem, MIRROR_SIZE); in arcrimi_found()
[all...]
/kernel/linux/linux-6.6/drivers/net/arcnet/
com90xx.c
44 * shmem are left in the list at Stage 5, they must correspond to each
58 static int com90xx_found(int ioaddr, int airq, u_long shmem, void __iomem *);
86 static int io; /* use the insmod io= irq= shmem= options */
88 static int shmem; variable
93 module_param(shmem, int, 0);
107 if (!io && !irq && !shmem && !*device && com90xx_skip_probe) in com90xx_probe()
131 if (shmem) in com90xx_probe()
132 shmems[numshmems++] = shmem; in com90xx_probe()
203 /* Stage 3: abandon any shmem addresses that don't have the signature in com90xx_probe()
243 * sure no "mirror" shmem area in com90xx_probe()
460 com90xx_found(int ioaddr, int airq, u_long shmem, void __iomem *p) com90xx_found() argument
[all...]
arc-rimi.c
65 * need to be passed a specific shmem address, IRQ, and node ID.
72 pr_info("Given: node %02Xh, shmem %lXh, irq %d\n", in arcrimi_probe()
78 pr_err("No autoprobe for RIM I; you must specify the shmem and irq!\n"); in arcrimi_probe()
126 unsigned long first_mirror, last_mirror, shmem; in arcrimi_found() local
146 shmem = dev->mem_start; in arcrimi_found()
159 check_mirror(shmem - MIRROR_SIZE, MIRROR_SIZE) == 0 && in arcrimi_found()
160 check_mirror(shmem - 2 * MIRROR_SIZE, MIRROR_SIZE) == 1) in arcrimi_found()
163 first_mirror = shmem - mirror_size; in arcrimi_found()
168 last_mirror = shmem + mirror_size; in arcrimi_found()
194 release_mem_region(shmem, MIRROR_SIZE); in arcrimi_found()
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/virtio/
virtgpu_object.c
123 struct virtio_gpu_object_shmem *shmem; in virtio_gpu_create_object() local
126 shmem = kzalloc(sizeof(*shmem), GFP_KERNEL); in virtio_gpu_create_object()
127 if (!shmem) in virtio_gpu_create_object()
130 dshmem = &shmem->base.base; in virtio_gpu_create_object()
/kernel/linux/linux-6.6/drivers/gpu/drm/lima/
lima_gem.c
113 struct drm_gem_shmem_object *shmem; in lima_gem_create_handle() local
118 shmem = drm_gem_shmem_create(dev, size); in lima_gem_create_handle()
119 if (IS_ERR(shmem)) in lima_gem_create_handle()
120 return PTR_ERR(shmem); in lima_gem_create_handle()
122 obj = &shmem->base; in lima_gem_create_handle()
136 struct sg_table *sgt = drm_gem_shmem_get_pages_sgt(shmem); in lima_gem_create_handle()
