Lines Matching defs:ops
78 struct drm_gpuva_ops *ops;
453 struct drm_gpuva_ops *ops,
462 drm_gpuva_for_each_op_from_reverse(op, ops) {
493 drm_gpuva_for_each_op(op, ops) {
554 struct drm_gpuva_ops *ops,
557 struct drm_gpuva_op *last = drm_gpuva_last_op(ops);
563 nouveau_uvmm_sm_prepare_unwind(uvmm, new, ops, last, &args);
569 struct drm_gpuva_ops *ops)
571 struct drm_gpuva_op *last = drm_gpuva_last_op(ops);
573 nouveau_uvmm_sm_prepare_unwind(uvmm, new, ops, last, NULL);
610 struct drm_gpuva_ops *ops,
618 drm_gpuva_for_each_op(op, ops) {
718 if (op != drm_gpuva_first_op(ops))
719 nouveau_uvmm_sm_prepare_unwind(uvmm, new, ops,
729 struct drm_gpuva_ops *ops,
739 return nouveau_uvmm_sm_prepare(uvmm, new, ops, &args);
745 struct drm_gpuva_ops *ops)
747 return nouveau_uvmm_sm_prepare(uvmm, new, ops, NULL);
821 struct drm_gpuva_ops *ops)
825 drm_gpuva_for_each_op(op, ops) {
847 struct drm_gpuva_ops *ops)
849 return nouveau_uvmm_sm(uvmm, new, ops);
855 struct drm_gpuva_ops *ops)
857 return nouveau_uvmm_sm(uvmm, new, ops);
863 struct drm_gpuva_ops *ops, bool unmap)
867 drm_gpuva_for_each_op(op, ops) {
916 struct drm_gpuva_ops *ops)
918 nouveau_uvmm_sm_cleanup(uvmm, new, ops, false);
924 struct drm_gpuva_ops *ops)
926 nouveau_uvmm_sm_cleanup(uvmm, new, ops, true);
1022 list_for_each_op(op, &bind_job->ops) {
1091 list_for_each_op(op, &bind_job->ops) {
1116 bind_link_gpuvas(struct drm_gpuva_ops *ops, struct nouveau_uvma_prealloc *new)
1120 drm_gpuva_for_each_op(op, ops) {
1151 list_for_each_op(op, &bind_job->ops) {
1179 list_for_each_op(op, &bind_job->ops) {
1197 op->ops = drm_gpuva_sm_unmap_ops_create(&uvmm->umgr,
1200 if (IS_ERR(op->ops)) {
1201 ret = PTR_ERR(op->ops);
1206 op->ops);
1208 drm_gpuva_ops_free(&uvmm->umgr, op->ops);
1209 op->ops = NULL;
1243 op->ops = drm_gpuva_sm_map_ops_create(&uvmm->umgr,
1248 if (IS_ERR(op->ops)) {
1249 ret = PTR_ERR(op->ops);
1254 reg, op->ops,
1259 drm_gpuva_ops_free(&uvmm->umgr, op->ops);
1260 op->ops = NULL;
1267 op->ops = drm_gpuva_sm_unmap_ops_create(&uvmm->umgr,
1270 if (IS_ERR(op->ops)) {
1271 ret = PTR_ERR(op->ops);
1276 op->ops);
1278 drm_gpuva_ops_free(&uvmm->umgr, op->ops);
1279 op->ops = NULL;
1293 list_for_each_op(op, &bind_job->ops) {
1296 if (IS_ERR_OR_NULL(op->ops))
1299 drm_gpuva_for_each_op(va_op, op->ops) {
1308 op = list_last_op(&bind_job->ops);
1315 list_for_each_op(op, &bind_job->ops) {
1318 if (IS_ERR_OR_NULL(op->ops))
1321 drm_gpuva_for_each_op(va_op, op->ops) {
1336 op = list_last_op(&bind_job->ops);
1362 list_for_each_op(op, &bind_job->ops) {
1367 bind_link_gpuvas(op->ops, &op->new);
1384 list_for_each_op_from_reverse(op, &bind_job->ops) {
1393 op->ops);
1397 op->ops,
1403 op->ops);
1407 drm_gpuva_ops_free(&uvmm->umgr, op->ops);
1408 op->ops = NULL;
1438 list_for_each_op(op, &bind_job->ops) {
1444 ret = nouveau_uvmm_sm_map(uvmm, &op->new, op->ops);
1451 ret = nouveau_uvmm_sm_unmap(uvmm, &op->new, op->ops);
1474 list_for_each_op(op, &bind_job->ops) {
1477 /* When nouveau_uvmm_bind_job_submit() fails op->ops and op->reg
1485 if (!IS_ERR_OR_NULL(op->ops))
1487 op->ops);
1500 if (!IS_ERR_OR_NULL(op->ops))
1502 op->ops);
1505 if (!IS_ERR_OR_NULL(op->ops))
1507 op->ops);
1511 if (!IS_ERR_OR_NULL(op->ops))
1512 drm_gpuva_ops_free(&uvmm->umgr, op->ops);
1525 /* Remove and free ops after removing the bind job from the job list to
1528 list_for_each_op_safe(op, next, &bind_job->ops) {
1586 bind_job_ops_free(struct list_head *ops)
1590 list_for_each_op_safe(op, next, ops) {
1609 INIT_LIST_HEAD(&job->ops);
1617 list_add_tail(&op->entry, &job->ops);
1633 args.ops = &nouveau_bind_job_ops;
1644 bind_job_ops_free(&job->ops);
1694 u64 ops = req->op_ptr;
1701 args->op.s = u_memcpya(ops, opc,
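
The matches above repeat one lifecycle for struct drm_gpuva_ops: an ops list is created from the GPUVA manager with drm_gpuva_sm_map_ops_create() or drm_gpuva_sm_unmap_ops_create(), checked with IS_ERR(), walked with drm_gpuva_for_each_op() (or the reverse iterator when unwinding), and finally handed back with drm_gpuva_ops_free(). The condensed sketch below is illustrative only: demo_unmap_range() is a hypothetical wrapper, not a function from this file, the header and field names follow the pre-drm_gpuvm naming used by these matches (uvmm->umgr being the embedded drm_gpuva_manager), and the per-op handling is reduced to comments.

/*
 * Illustrative sketch of the create / iterate / free pattern seen in the
 * matches above. demo_unmap_range() is hypothetical; it is not part of the
 * listed source.
 */
#include <linux/err.h>
#include <drm/drm_gpuva_mgr.h>

#include "nouveau_uvmm.h"

static int demo_unmap_range(struct nouveau_uvmm *uvmm, u64 addr, u64 range)
{
	struct drm_gpuva_ops *ops;
	struct drm_gpuva_op *op;

	/* Build the list of steps needed to unmap [addr, addr + range). */
	ops = drm_gpuva_sm_unmap_ops_create(&uvmm->umgr, addr, range);
	if (IS_ERR(ops))
		return PTR_ERR(ops);

	/* Walk the steps in order; unwinding would use the reverse iterator. */
	drm_gpuva_for_each_op(op, ops) {
		switch (op->op) {
		case DRM_GPUVA_OP_UNMAP:
			/* Tear down the mapping described by op->unmap. */
			break;
		case DRM_GPUVA_OP_REMAP:
			/* Split/shrink a mapping straddling the range bounds. */
			break;
		default:
			break;
		}
	}

	/* The ops list is always returned to the manager that created it. */
	drm_gpuva_ops_free(&uvmm->umgr, ops);
	return 0;
}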