
Searched refs:dmaobj (Results 1 - 16 of 16) sorted by relevance

/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/engine/dma/
usernv04.c
44 struct nv04_dmaobj *dmaobj = nv04_dmaobj(base); in nv04_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in nv04_dmaobj_bind()
46 u64 offset = dmaobj->base.start & 0xfffff000; in nv04_dmaobj_bind()
47 u64 adjust = dmaobj->base.start & 0x00000fff; in nv04_dmaobj_bind()
48 u32 length = dmaobj->base.limit - dmaobj->base.start; in nv04_dmaobj_bind()
51 if (dmaobj->clone) { in nv04_dmaobj_bind()
54 if (!dmaobj->base.start) in nv04_dmaobj_bind()
65 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0 | (adjust << 20)); in nv04_dmaobj_bind()
67 nvkm_wo32(*pgpuobj, 0x08, dmaobj in nv04_dmaobj_bind()
85 struct nv04_dmaobj *dmaobj; nv04_dmaobj_new() local
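
Taken together, the usernv04.c hits above outline the legacy NV04 handling: the DMA window start is split into a page-aligned offset and a sub-page adjust, the length is derived from limit - start, and the adjust is folded into the first descriptor word. The sketch below is a standalone, illustrative model of just the fields visible in those hits; the names are made up and the word at 0x08 (which consumes the offset) is truncated in the listing, so it is not reproduced.

/* Illustrative model of the NV04-style address split seen in usernv04.c above.
 * Plain C with made-up names; only fields visible in the hits are reproduced. */
#include <stdint.h>

void demo_nv04_pack_word0(uint32_t flags0, uint64_t start, uint64_t limit,
                          uint32_t *word0, uint32_t *length)
{
    uint64_t offset = start & 0xfffff000;        /* page-aligned part of start */
    uint64_t adjust = start & 0x00000fff;        /* sub-page remainder */

    *length = (uint32_t)(limit - start);         /* u32 length = limit - start */
    *word0  = flags0 | (uint32_t)(adjust << 20); /* word 0x00: flags0 | adjust << 20 */

    (void)offset; /* consumed by the word at 0x08, truncated in the listing above */
}
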
user.c
51 struct nvkm_dmaobj *dmaobj = nvkm_dmaobj(base); in nvkm_dmaobj_bind() local
52 return dmaobj->func->bind(dmaobj, gpuobj, align, pgpuobj); in nvkm_dmaobj_bind()
70 struct nvkm_dmaobj *dmaobj) in nvkm_dmaobj_ctor()
84 nvkm_object_ctor(&nvkm_dmaobj_func, oclass, &dmaobj->object); in nvkm_dmaobj_ctor()
85 dmaobj->func = func; in nvkm_dmaobj_ctor()
86 dmaobj->dma = dma; in nvkm_dmaobj_ctor()
94 dmaobj->target = args->v0.target; in nvkm_dmaobj_ctor()
95 dmaobj->access = args->v0.access; in nvkm_dmaobj_ctor()
96 dmaobj in nvkm_dmaobj_ctor()
68 nvkm_dmaobj_ctor(const struct nvkm_dmaobj_func *func, struct nvkm_dma *dma, const struct nvkm_oclass *oclass, void **pdata, u32 *psize, struct nvkm_dmaobj *dmaobj) nvkm_dmaobj_ctor() argument
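
The user.c hits sketch the shared constructor path: nvkm_dmaobj_ctor() initialises the object with nvkm_object_ctor(), records func and dma, then copies target and access out of the versioned class arguments. A minimal standalone model of that copy step follows; the struct layouts are invented stand-ins, and the start/limit copies correspond to hit lines that are truncated in the listing, so they are an assumption.

/* Minimal standalone model of the constructor pattern visible in user.c above.
 * All types here are illustrative stand-ins, not the kernel's definitions. */
#include <stdint.h>

struct demo_dma_v0 { uint8_t target, access; uint64_t start, limit; };
struct demo_dmaobj { uint8_t target, access; uint64_t start, limit; };

void demo_dmaobj_ctor(struct demo_dmaobj *dmaobj, const struct demo_dma_v0 *args)
{
    dmaobj->target = args->target;  /* mirrors dmaobj->target = args->v0.target */
    dmaobj->access = args->access;  /* mirrors dmaobj->access = args->v0.access */
    dmaobj->start  = args->start;   /* assumed: the truncated hit lines that follow */
    dmaobj->limit  = args->limit;
}
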
usergf100.c
44 struct gf100_dmaobj *dmaobj = gf100_dmaobj(base); in gf100_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gf100_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gf100_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit)); in gf100_dmaobj_bind()
53 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start)); in gf100_dmaobj_bind()
54 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 | in gf100_dmaobj_bind()
55 upper_32_bits(dmaobj->base.start)); in gf100_dmaobj_bind()
57 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5); in gf100_dmaobj_bind()
77 struct gf100_dmaobj *dmaobj; in gf100_dmaobj_new() local
81 if (!(dmaobj in gf100_dmaobj_new()
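
The gf100 bind hits (and the identical nv50 hits below) show the descriptor layout directly: flags0 at 0x00, the low 32 bits of limit and start at 0x04/0x08, the upper bits of both packed into 0x0c, and flags5 at 0x14. The following standalone sketch models that packing with a plain word buffer; it is not the kernel's nvkm_wo32() interface and the names are illustrative.

/* Illustrative packing of the gf100/nv50-style descriptor words shown above. */
#include <stdint.h>

static inline uint32_t lo32(uint64_t v) { return (uint32_t)v; }
static inline uint32_t hi32(uint64_t v) { return (uint32_t)(v >> 32); }

void demo_gf100_pack(uint32_t flags0, uint32_t flags5,
                     uint64_t start, uint64_t limit, uint32_t obj[6])
{
    obj[0x00 / 4] = flags0;                          /* 0x00 */
    obj[0x04 / 4] = lo32(limit);                     /* 0x04: low bits of limit */
    obj[0x08 / 4] = lo32(start);                     /* 0x08: low bits of start */
    obj[0x0c / 4] = hi32(limit) << 24 | hi32(start); /* 0x0c: upper bits of both */
    obj[0x10 / 4] = 0;                               /* 0x10: not referenced in the hits */
    obj[0x14 / 4] = flags5;                          /* 0x14 */
}
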
usernv50.c
44 struct nv50_dmaobj *dmaobj = nv50_dmaobj(base); in nv50_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in nv50_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in nv50_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit)); in nv50_dmaobj_bind()
53 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start)); in nv50_dmaobj_bind()
54 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 | in nv50_dmaobj_bind()
55 upper_32_bits(dmaobj->base.start)); in nv50_dmaobj_bind()
57 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5); in nv50_dmaobj_bind()
77 struct nv50_dmaobj *dmaobj; in nv50_dmaobj_new() local
81 if (!(dmaobj in nv50_dmaobj_new()
usergv100.c
41 struct gv100_dmaobj *dmaobj = gv100_dmaobj(base); in gv100_dmaobj_bind() local
42 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gv100_dmaobj_bind()
43 u64 start = dmaobj->base.start >> 8; in gv100_dmaobj_bind()
44 u64 limit = dmaobj->base.limit >> 8; in gv100_dmaobj_bind()
50 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gv100_dmaobj_bind()
74 struct gv100_dmaobj *dmaobj; in gv100_dmaobj_new() local
78 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))) in gv100_dmaobj_new()
80 *pdmaobj = &dmaobj->base; in gv100_dmaobj_new()
83 &data, &size, &dmaobj in gv100_dmaobj_new()
usergf119.c
43 struct gf119_dmaobj *dmaobj = gf119_dmaobj(base); in gf119_dmaobj_bind() local
44 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gf119_dmaobj_bind()
50 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gf119_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x04, dmaobj->base.start >> 8); in gf119_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x08, dmaobj->base.limit >> 8); in gf119_dmaobj_bind()
75 struct gf119_dmaobj *dmaobj; in gf119_dmaobj_new() local
79 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))) in gf119_dmaobj_new()
81 *pdmaobj = &dmaobj->base; in gf119_dmaobj_new()
84 &data, &size, &dmaobj in gf119_dmaobj_new()
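
The gv100 and gf119 hits above show a later format in which start and limit are kept in 256-byte units (shifted right by 8); gf119 writes them straight after flags0 at 0x04 and 0x08, while gv100's remaining writes are truncated in the listing. A matching standalone sketch, modeling only the gf119 words that are actually visible:

/* Illustrative packing of the gf119-style descriptor words shown above,
 * with addresses stored in 256-byte units. Standalone C; names are made up. */
#include <stdint.h>

void demo_gf119_pack(uint32_t flags0, uint64_t start, uint64_t limit,
                     uint32_t obj[3])
{
    obj[0] = flags0;                 /* 0x00 */
    obj[1] = (uint32_t)(start >> 8); /* 0x04: start in 256-byte units */
    obj[2] = (uint32_t)(limit >> 8); /* 0x08: limit in 256-byte units */
}
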
base.c
37 struct nvkm_dmaobj *dmaobj = NULL; in nvkm_dma_oclass_new() local
40 ret = dma->func->class_new(dma, oclass, data, size, &dmaobj); in nvkm_dma_oclass_new()
41 if (dmaobj) in nvkm_dma_oclass_new()
42 *pobject = &dmaobj->object; in nvkm_dma_oclass_new()
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/engine/dma/
usernv04.c
44 struct nv04_dmaobj *dmaobj = nv04_dmaobj(base); in nv04_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in nv04_dmaobj_bind()
46 u64 offset = dmaobj->base.start & 0xfffff000; in nv04_dmaobj_bind()
47 u64 adjust = dmaobj->base.start & 0x00000fff; in nv04_dmaobj_bind()
48 u32 length = dmaobj->base.limit - dmaobj->base.start; in nv04_dmaobj_bind()
51 if (dmaobj->clone) { in nv04_dmaobj_bind()
54 if (!dmaobj->base.start) in nv04_dmaobj_bind()
65 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0 | (adjust << 20)); in nv04_dmaobj_bind()
67 nvkm_wo32(*pgpuobj, 0x08, dmaobj in nv04_dmaobj_bind()
85 struct nv04_dmaobj *dmaobj; nv04_dmaobj_new() local
usergf100.c
44 struct gf100_dmaobj *dmaobj = gf100_dmaobj(base); in gf100_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gf100_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gf100_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit)); in gf100_dmaobj_bind()
53 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start)); in gf100_dmaobj_bind()
54 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 | in gf100_dmaobj_bind()
55 upper_32_bits(dmaobj->base.start)); in gf100_dmaobj_bind()
57 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5); in gf100_dmaobj_bind()
77 struct gf100_dmaobj *dmaobj; in gf100_dmaobj_new() local
81 if (!(dmaobj in gf100_dmaobj_new()
user.c
50 struct nvkm_dmaobj *dmaobj = nvkm_dmaobj(base); in nvkm_dmaobj_bind() local
51 return dmaobj->func->bind(dmaobj, gpuobj, align, pgpuobj); in nvkm_dmaobj_bind()
69 struct nvkm_dmaobj *dmaobj) in nvkm_dmaobj_ctor()
79 nvkm_object_ctor(&nvkm_dmaobj_func, oclass, &dmaobj->object); in nvkm_dmaobj_ctor()
80 dmaobj->func = func; in nvkm_dmaobj_ctor()
81 dmaobj->dma = dma; in nvkm_dmaobj_ctor()
89 dmaobj->target = args->v0.target; in nvkm_dmaobj_ctor()
90 dmaobj->access = args->v0.access; in nvkm_dmaobj_ctor()
91 dmaobj in nvkm_dmaobj_ctor()
67 nvkm_dmaobj_ctor(const struct nvkm_dmaobj_func *func, struct nvkm_dma *dma, const struct nvkm_oclass *oclass, void **pdata, u32 *psize, struct nvkm_dmaobj *dmaobj) nvkm_dmaobj_ctor() argument
usernv50.c
44 struct nv50_dmaobj *dmaobj = nv50_dmaobj(base); in nv50_dmaobj_bind() local
45 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in nv50_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in nv50_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x04, lower_32_bits(dmaobj->base.limit)); in nv50_dmaobj_bind()
53 nvkm_wo32(*pgpuobj, 0x08, lower_32_bits(dmaobj->base.start)); in nv50_dmaobj_bind()
54 nvkm_wo32(*pgpuobj, 0x0c, upper_32_bits(dmaobj->base.limit) << 24 | in nv50_dmaobj_bind()
55 upper_32_bits(dmaobj->base.start)); in nv50_dmaobj_bind()
57 nvkm_wo32(*pgpuobj, 0x14, dmaobj->flags5); in nv50_dmaobj_bind()
77 struct nv50_dmaobj *dmaobj; in nv50_dmaobj_new() local
81 if (!(dmaobj in nv50_dmaobj_new()
usergv100.c
41 struct gv100_dmaobj *dmaobj = gv100_dmaobj(base); in gv100_dmaobj_bind() local
42 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gv100_dmaobj_bind()
43 u64 start = dmaobj->base.start >> 8; in gv100_dmaobj_bind()
44 u64 limit = dmaobj->base.limit >> 8; in gv100_dmaobj_bind()
50 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gv100_dmaobj_bind()
74 struct gv100_dmaobj *dmaobj; in gv100_dmaobj_new() local
78 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))) in gv100_dmaobj_new()
80 *pdmaobj = &dmaobj->base; in gv100_dmaobj_new()
83 &data, &size, &dmaobj in gv100_dmaobj_new()
usergf119.c
43 struct gf119_dmaobj *dmaobj = gf119_dmaobj(base); in gf119_dmaobj_bind() local
44 struct nvkm_device *device = dmaobj->base.dma->engine.subdev.device; in gf119_dmaobj_bind()
50 nvkm_wo32(*pgpuobj, 0x00, dmaobj->flags0); in gf119_dmaobj_bind()
51 nvkm_wo32(*pgpuobj, 0x04, dmaobj->base.start >> 8); in gf119_dmaobj_bind()
52 nvkm_wo32(*pgpuobj, 0x08, dmaobj->base.limit >> 8); in gf119_dmaobj_bind()
75 struct gf119_dmaobj *dmaobj; in gf119_dmaobj_new() local
79 if (!(dmaobj = kzalloc(sizeof(*dmaobj), GFP_KERNEL))) in gf119_dmaobj_new()
81 *pdmaobj = &dmaobj->base; in gf119_dmaobj_new()
84 &data, &size, &dmaobj in gf119_dmaobj_new()
base.c
37 struct nvkm_dmaobj *dmaobj = NULL; in nvkm_dma_oclass_new() local
40 ret = dma->func->class_new(dma, oclass, data, size, &dmaobj); in nvkm_dma_oclass_new()
41 if (dmaobj) in nvkm_dma_oclass_new()
42 *pobject = &dmaobj->object; in nvkm_dma_oclass_new()
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
chan.c
361 struct nvkm_dmaobj *dmaobj; in nvkm_fifo_chan_ctor() local
378 dmaobj = nvkm_dmaobj_search(client, push); in nvkm_fifo_chan_ctor()
379 if (IS_ERR(dmaobj)) in nvkm_fifo_chan_ctor()
380 return PTR_ERR(dmaobj); in nvkm_fifo_chan_ctor()
382 ret = nvkm_object_bind(&dmaobj->object, chan->inst, -16, in nvkm_fifo_chan_ctor()
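
The fifo hits show the consumer side: the channel constructor resolves the userspace-supplied push-buffer handle to a dmaobj with nvkm_dmaobj_search(), propagates lookup errors, and then binds the object into the channel's instance memory with nvkm_object_bind() at an alignment of -16 (the 6.6 hit below shows the same bind with the &chan->push destination). The standalone sketch below models that lookup-then-bind flow; every type and helper in it is an invented stand-in, not the nvkm API.

/* Standalone model of the lookup-then-bind flow in the chan.c hits above. */
#include <stddef.h>
#include <errno.h>

struct demo_dmaobj { int handle; };
struct demo_chan   { struct demo_dmaobj *push; };

/* Stand-in for nvkm_dmaobj_search(): resolve a handle, NULL if unknown. */
static struct demo_dmaobj *demo_dmaobj_search(struct demo_dmaobj *table,
                                              size_t n, int handle)
{
    for (size_t i = 0; i < n; i++)
        if (table[i].handle == handle)
            return &table[i];
    return NULL;
}

int demo_chan_ctor(struct demo_chan *chan, struct demo_dmaobj *table,
                   size_t n, int push_handle)
{
    struct demo_dmaobj *dmaobj = demo_dmaobj_search(table, n, push_handle);
    if (!dmaobj)         /* the kernel code returns PTR_ERR() here */
        return -ENOENT;

    chan->push = dmaobj; /* the kernel code instead calls nvkm_object_bind(..., -16, &chan->push) */
    return 0;
}
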
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
chan.c
348 struct nvkm_dmaobj *dmaobj, u64 offset, u64 length, in nvkm_chan_new_()
360 (!func->ramfc->ctxdma != !dmaobj) || in nvkm_chan_new_()
366 func->userd->bar < 0, userd, func->ramfc->ctxdma, dmaobj, in nvkm_chan_new_()
433 ret = nvkm_object_bind(&dmaobj->object, chan->inst, -16, &chan->push); in nvkm_chan_new_()
346 nvkm_chan_new_(const struct nvkm_chan_func *func, struct nvkm_runl *runl, int runq, struct nvkm_cgrp *cgrp, const char *name, bool priv, u32 devm, struct nvkm_vmm *vmm, struct nvkm_dmaobj *dmaobj, u64 offset, u64 length, struct nvkm_memory *userd, u64 ouserd, struct nvkm_chan **pchan) nvkm_chan_new_() argument
