Lines matching defs:vgpu (matched lines in the i915 GVT-g dmabuf code; the number before each fragment is its line number in the source file)

39 static int vgpu_pin_dma_address(struct intel_vgpu *vgpu,
45 if (intel_gvt_hypervisor_dma_pin_guest_page(vgpu, dma_addr))
51 static void vgpu_unpin_dma_address(struct intel_vgpu *vgpu,
54 intel_gvt_hypervisor_dma_unmap_guest_page(vgpu, dma_addr);
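
The two helpers at lines 39-54 read as thin wrappers over the hypervisor page-pinning hooks. A minimal sketch consistent with the matched fragments; the size parameter is inferred from the PAGE_SIZE argument at line 92, and the error handling outside the quoted lines is an assumption:

    static int vgpu_pin_dma_address(struct intel_vgpu *vgpu,
                                    unsigned long size,
                                    dma_addr_t dma_addr)
    {
        /* Ask the hypervisor backend to pin the guest page backing
         * dma_addr; collapse any failure into a plain errno. */
        if (intel_gvt_hypervisor_dma_pin_guest_page(vgpu, dma_addr))
            return -EINVAL;
        return 0;
    }

    static void vgpu_unpin_dma_address(struct intel_vgpu *vgpu,
                                       dma_addr_t dma_addr)
    {
        /* Release the pin taken above. */
        intel_gvt_hypervisor_dma_unmap_guest_page(vgpu, dma_addr);
    }
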
61 struct intel_vgpu *vgpu;
73 vgpu = fb_info->obj->vgpu;
74 if (drm_WARN_ON(&dev_priv->drm, !vgpu))
92 if (vgpu_pin_dma_address(vgpu, PAGE_SIZE, dma_addr)) {
111 vgpu_unpin_dma_address(vgpu, dma_addr);
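
Lines 61-111 sit in the GEM object's get_pages hook (vgpu_gem_get_pages() in the upstream driver): each page of the guest framebuffer is pinned one GGTT entry at a time, and a failure unwinds the pins taken so far. A condensed sketch; the sg_table setup, the gtt_entries pointer, and the GEN8_DECODE_PTE() decode are assumptions drawn from how i915 reads GGTT PTEs:

    /* Condensed body of the get_pages hook; declarations elided. */
    vgpu = fb_info->obj->vgpu;
    if (drm_WARN_ON(&dev_priv->drm, !vgpu))
        return -ENODEV;

    for_each_sg(st->sgl, sg, page_num, i) {
        dma_addr_t dma_addr =
            GEN8_DECODE_PTE(readq(&gtt_entries[i]));   /* assumed decode */

        /* Pin one guest page; bail out and unwind on failure. */
        if (vgpu_pin_dma_address(vgpu, PAGE_SIZE, dma_addr)) {
            ret = -EINVAL;
            goto out;
        }
        sg_dma_address(sg) = dma_addr;
        sg_dma_len(sg) = PAGE_SIZE;
    }
    return 0;

    out:
    /* Error path ending at line 111: unpin everything pinned so far. */
    for_each_sg(st->sgl, sg, i, j)
        vgpu_unpin_dma_address(vgpu, sg_dma_address(sg));
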
129 struct intel_vgpu *vgpu = obj->vgpu;
133 vgpu_unpin_dma_address(vgpu,
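
The matching teardown at lines 129-133 runs from the put_pages hook and simply unpins each scatterlist entry. Sketch; the loop bound and the sg_table free are assumptions:

    /* Condensed body of the put_pages hook. */
    struct intel_vgpu *vgpu = obj->vgpu;
    struct scatterlist *sg;
    int i;

    for_each_sg(pages->sgl, sg, fb_info->size, i)
        vgpu_unpin_dma_address(vgpu, sg_dma_address(sg));

    sg_free_table(pages);
    kfree(pages);
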
145 struct intel_vgpu *vgpu = obj->vgpu;
149 if (vgpu && vgpu->active && !list_empty(&vgpu->dmabuf_obj_list_head)) {
150 list_for_each(pos, &vgpu->dmabuf_obj_list_head) {
155 intel_gvt_hypervisor_put_vfio_device(vgpu);
156 idr_remove(&vgpu->object_idr,
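
Lines 145-156 belong to the kref release callback: while the vGPU is still active, the dying object is located on dmabuf_obj_list_head, the vfio device reference taken at creation is dropped, and the id is removed from object_idr. A sketch; the container_of() plumbing and the orphan branch are assumptions consistent with the NULL-vgpu comment at line 194:

    static void dmabuf_gem_object_free(struct kref *kref)
    {
        struct intel_vgpu_dmabuf_obj *obj =
            container_of(kref, struct intel_vgpu_dmabuf_obj, kref);
        struct intel_vgpu *vgpu = obj->vgpu;
        struct intel_vgpu_dmabuf_obj *dmabuf_obj;
        struct list_head *pos;

        if (vgpu && vgpu->active &&
            !list_empty(&vgpu->dmabuf_obj_list_head)) {
            list_for_each(pos, &vgpu->dmabuf_obj_list_head) {
                dmabuf_obj = container_of(pos,
                        struct intel_vgpu_dmabuf_obj, list);
                if (dmabuf_obj != obj)
                    continue;
                intel_gvt_hypervisor_put_vfio_device(vgpu);
                idr_remove(&vgpu->object_idr,
                           dmabuf_obj->dmabuf_id);
                list_del(pos);
                kfree(dmabuf_obj->info);
                kfree(dmabuf_obj);
                break;
            }
        } else {
            /* Orphaned after the vGPU went away: just free it. */
            kfree(obj->info);
            kfree(obj);
        }
    }
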
186 struct intel_vgpu *vgpu = obj->vgpu;
188 if (vgpu) {
189 mutex_lock(&vgpu->dmabuf_lock);
192 mutex_unlock(&vgpu->dmabuf_lock);
194 /* vgpu is NULL, as it has been removed already */
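
Lines 186-194 are the GEM release path: the kref is dropped under dmabuf_lock while the vGPU exists, and lock-free once the vGPU has been torn down and obj->vgpu cleared. Sketch; clearing base.dma_buf is an assumption:

    /* Condensed GEM object release path. */
    if (vgpu) {
        mutex_lock(&vgpu->dmabuf_lock);
        gem_obj->base.dma_buf = NULL;
        kref_put(&obj->kref, dmabuf_gem_object_free);
        mutex_unlock(&vgpu->dmabuf_lock);
    } else {
        /* vgpu is NULL, as it has been removed already */
        gem_obj->base.dma_buf = NULL;
        kref_put(&obj->kref, dmabuf_gem_object_free);
    }
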
265 struct intel_vgpu *vgpu,
276 ret = intel_vgpu_decode_primary_plane(vgpu, &p);
306 ret = intel_vgpu_decode_cursor_plane(vgpu, &c);
342 if (!intel_gvt_ggtt_validate_range(vgpu, info->start, info->size)) {
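
Lines 265-342 span vgpu_get_plane_info(): the guest's primary or cursor plane registers are decoded into a struct intel_vgpu_fb_info, and a framebuffer whose GGTT range does not validate is rejected. A skeleton of the dispatch; the field copying from the decoded plane into *info is elided and the exact error codes are assumptions:

    static int vgpu_get_plane_info(struct drm_device *dev,
                                   struct intel_vgpu *vgpu,
                                   struct intel_vgpu_fb_info *info,
                                   int plane_id)
    {
        struct intel_vgpu_primary_plane_format p;
        struct intel_vgpu_cursor_plane_format c;
        int ret;

        if (plane_id == DRM_PLANE_TYPE_PRIMARY) {
            ret = intel_vgpu_decode_primary_plane(vgpu, &p);
            if (ret)
                return ret;
            /* ... copy p into *info ... */
        } else if (plane_id == DRM_PLANE_TYPE_CURSOR) {
            ret = intel_vgpu_decode_cursor_plane(vgpu, &c);
            if (ret)
                return ret;
            /* ... copy c into *info ... */
        } else {
            return -EINVAL;
        }

        /* Refuse framebuffers outside the vGPU's GGTT range. */
        if (!intel_gvt_ggtt_validate_range(vgpu, info->start,
                                           info->size))
            return -EFAULT;

        return 0;
    }
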
351 pick_dmabuf_by_info(struct intel_vgpu *vgpu,
359 list_for_each(pos, &vgpu->dmabuf_obj_list_head) {
383 pick_dmabuf_by_num(struct intel_vgpu *vgpu, u32 id)
389 list_for_each(pos, &vgpu->dmabuf_obj_list_head) {
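
Both pick_* helpers (lines 351-389) are linear scans of dmabuf_obj_list_head: pick_dmabuf_by_info() compares the freshly decoded framebuffer against each cached entry, pick_dmabuf_by_num() matches on the dmabuf id. Sketch of the by-num variant; the caller-holds-the-lock convention is inferred from the mutex_lock() calls around both lookups:

    static struct intel_vgpu_dmabuf_obj *
    pick_dmabuf_by_num(struct intel_vgpu *vgpu, u32 id)
    {
        struct intel_vgpu_dmabuf_obj *dmabuf_obj;
        struct list_head *pos;

        /* Caller holds vgpu->dmabuf_lock. */
        list_for_each(pos, &vgpu->dmabuf_obj_list_head) {
            dmabuf_obj = container_of(pos,
                    struct intel_vgpu_dmabuf_obj, list);
            if (dmabuf_obj->dmabuf_id == id)
                return dmabuf_obj;
        }
        return NULL;
    }
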
419 int intel_vgpu_query_plane(struct intel_vgpu *vgpu, void *args)
421 struct drm_device *dev = &vgpu->gvt->gt->i915->drm;
434 ret = vgpu_get_plane_info(dev, vgpu, &fb_info,
439 mutex_lock(&vgpu->dmabuf_lock);
441 dmabuf_obj = pick_dmabuf_by_info(vgpu, &fb_info);
455 gvt_dbg_dpy("vgpu%d: re-use dmabuf_obj ref %d, id %d\n",
456 vgpu->id, kref_read(&dmabuf_obj->kref),
458 mutex_unlock(&vgpu->dmabuf_lock);
462 mutex_unlock(&vgpu->dmabuf_lock);
475 gvt_vgpu_err("allocate intel vgpu fb info failed\n");
483 dmabuf_obj->vgpu = vgpu;
485 ret = idr_alloc(&vgpu->object_idr, dmabuf_obj, 1, 0, GFP_NOWAIT);
495 mutex_lock(&vgpu->dmabuf_lock);
496 if (intel_gvt_hypervisor_get_vfio_device(vgpu)) {
498 mutex_unlock(&vgpu->dmabuf_lock);
501 mutex_unlock(&vgpu->dmabuf_lock);
506 mutex_lock(&vgpu->dmabuf_lock);
507 list_add_tail(&dmabuf_obj->list, &vgpu->dmabuf_obj_list_head);
508 mutex_unlock(&vgpu->dmabuf_lock);
510 gvt_dbg_dpy("vgpu%d: %s new dmabuf_obj ref %d, id %d\n", vgpu->id,
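
intel_vgpu_query_plane() (lines 419-510) is the lookup-or-create path: decode the current plane, try to reuse a cached dmabuf_obj, and only on a miss allocate a new one, publish it in object_idr, take the vfio device reference, and append it to the list. A condensed flow; gfx_plane_info as the ioctl argument name, the out_free label, and the error unwinding are assumptions:

    /* Hit path: reuse the cached object for this framebuffer. */
    ret = vgpu_get_plane_info(dev, vgpu, &fb_info,
                              gfx_plane_info->drm_plane_type);
    if (ret != 0)
        return ret;

    mutex_lock(&vgpu->dmabuf_lock);
    dmabuf_obj = pick_dmabuf_by_info(vgpu, &fb_info);
    if (dmabuf_obj) {
        kref_get(&dmabuf_obj->kref);    /* another user of the same fb */
        gfx_plane_info->dmabuf_id = dmabuf_obj->dmabuf_id;
        mutex_unlock(&vgpu->dmabuf_lock);
        return 0;
    }
    mutex_unlock(&vgpu->dmabuf_lock);

    /* Miss path: create, register, and publish a new object. */
    dmabuf_obj = kmalloc(sizeof(*dmabuf_obj), GFP_KERNEL);
    /* ... allocate dmabuf_obj->info and copy fb_info into it ... */
    dmabuf_obj->vgpu = vgpu;

    ret = idr_alloc(&vgpu->object_idr, dmabuf_obj, 1, 0, GFP_NOWAIT);
    /* ... ret < 0 unwinds; otherwise ret becomes the dmabuf id ... */
    dmabuf_obj->dmabuf_id = ret;
    kref_init(&dmabuf_obj->kref);

    mutex_lock(&vgpu->dmabuf_lock);
    if (intel_gvt_hypervisor_get_vfio_device(vgpu)) {
        mutex_unlock(&vgpu->dmabuf_lock);
        goto out_free;    /* assumed unwind label */
    }
    mutex_unlock(&vgpu->dmabuf_lock);

    mutex_lock(&vgpu->dmabuf_lock);
    list_add_tail(&dmabuf_obj->list, &vgpu->dmabuf_obj_list_head);
    mutex_unlock(&vgpu->dmabuf_lock);
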
525 int intel_vgpu_get_dmabuf(struct intel_vgpu *vgpu, unsigned int dmabuf_id)
527 struct drm_device *dev = &vgpu->gvt->gt->i915->drm;
534 mutex_lock(&vgpu->dmabuf_lock);
536 dmabuf_obj = pick_dmabuf_by_num(vgpu, dmabuf_id);
573 mutex_unlock(&vgpu->dmabuf_lock);
575 gvt_dbg_dpy("vgpu%d: dmabuf:%d, dmabuf ref %d, fd:%d\n"
577 vgpu->id, dmabuf_obj->dmabuf_id,
592 mutex_unlock(&vgpu->dmabuf_lock);
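
intel_vgpu_get_dmabuf() (lines 525-592) turns a previously returned id back into a file descriptor: resolve the dmabuf_obj under dmabuf_lock, export a dma-buf fd for it, and take an extra kref that the fd's release will drop. Sketch; the GEM-object creation and prime export in the middle are assumptions about what the elided lines do:

    mutex_lock(&vgpu->dmabuf_lock);

    dmabuf_obj = pick_dmabuf_by_num(vgpu, dmabuf_id);
    if (dmabuf_obj == NULL) {
        mutex_unlock(&vgpu->dmabuf_lock);
        return -EINVAL;
    }

    /* Create a GEM object from the cached fb_info, export it as a
     * dma-buf, and install it as dmabuf_fd (details elided). */

    kref_get(&dmabuf_obj->kref);    /* the fd now holds a reference */
    mutex_unlock(&vgpu->dmabuf_lock);

    return dmabuf_fd;
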
596 void intel_vgpu_dmabuf_cleanup(struct intel_vgpu *vgpu)
601 mutex_lock(&vgpu->dmabuf_lock);
602 list_for_each_safe(pos, n, &vgpu->dmabuf_obj_list_head) {
605 dmabuf_obj->vgpu = NULL;
607 idr_remove(&vgpu->object_idr, dmabuf_obj->dmabuf_id);
608 intel_gvt_hypervisor_put_vfio_device(vgpu);
618 mutex_unlock(&vgpu->dmabuf_lock);
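
Finally, intel_vgpu_dmabuf_cleanup() (lines 596-618) tears the whole cache down when the vGPU is destroyed: under dmabuf_lock each object is orphaned (vgpu = NULL, which is what the release path at line 194 later observes), unregistered from object_idr, and its vfio reference dropped. Sketch; the initref handling for the creation-time reference is an assumption based on the kref scheme:

    void intel_vgpu_dmabuf_cleanup(struct intel_vgpu *vgpu)
    {
        struct intel_vgpu_dmabuf_obj *dmabuf_obj;
        struct list_head *pos, *n;

        mutex_lock(&vgpu->dmabuf_lock);
        list_for_each_safe(pos, n, &vgpu->dmabuf_obj_list_head) {
            dmabuf_obj = container_of(pos,
                    struct intel_vgpu_dmabuf_obj, list);
            /* Orphan the object so later kref_put()s skip the
             * per-vGPU bookkeeping. */
            dmabuf_obj->vgpu = NULL;

            idr_remove(&vgpu->object_idr, dmabuf_obj->dmabuf_id);
            intel_gvt_hypervisor_put_vfio_device(vgpu);
            list_del(pos);

            /* Drop the creation-time reference if still held
             * (initref is an assumed flag). */
            if (dmabuf_obj->initref) {
                dmabuf_obj->initref = false;
                kref_put(&dmabuf_obj->kref, dmabuf_gem_object_free);
            }
        }
        mutex_unlock(&vgpu->dmabuf_lock);
    }
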