Lines Matching refs:robj

40 struct radeon_bo *robj = gem_to_radeon_bo(gobj);
42 if (robj) {
43 radeon_mn_unregister(robj);
44 radeon_bo_unref(&robj);
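
Taken together, lines 40-44 are the GEM free callback: the driver recovers its radeon_bo wrapper from the generic drm_gem_object, tears down any userptr MMU-notifier registration, and drops the final buffer reference. The listing appears to come from drivers/gpu/drm/radeon/radeon_gem.c in a kernel around the 5.10-5.13 range; the sketches below assume that file and the driver's usual headers ("radeon.h", drm/drm_gem.h), and everything not on a listed line (function names, signatures, braces, error paths) is a reconstruction, not a quote.

/* Sketch only: context around listed lines 40-44; names outside the
 * listed lines are assumptions. */
static void radeon_gem_object_free(struct drm_gem_object *gobj)
{
	struct radeon_bo *robj = gem_to_radeon_bo(gobj);	/* line 40: container_of back to the radeon_bo */

	if (robj) {						/* line 42 */
		radeon_mn_unregister(robj);			/* line 43: detach userptr MMU notifier, if any */
		radeon_bo_unref(&robj);				/* line 44: drop the last reference; TTM frees the BO */
	}
}
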
53 struct radeon_bo *robj;
75 flags, NULL, NULL, &robj);
87 *obj = &robj->tbo.base;
88 robj->pid = task_pid_nr(current);
91 list_add_tail(&robj->list, &rdev->gem.objects);
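
Lines 53-91 belong to the buffer-allocation helper: a radeon_bo is created through radeon_bo_create(), its embedded GEM object is handed back to the caller, the allocating task's PID is recorded so the owner can be reported later (e.g. in debugfs), and the object is linked onto the per-device gem.objects list. A sketch of how those lines plausibly fit together; locking and error handling outside the listed lines are assumptions.

/* Sketch only: context around listed lines 53-91 of radeon_gem.c. */
int radeon_gem_object_create(struct radeon_device *rdev, unsigned long size,
			     int alignment, int initial_domain,
			     u32 flags, bool kernel,
			     struct drm_gem_object **obj)
{
	struct radeon_bo *robj;					/* line 53 */
	int r;

	*obj = NULL;
	r = radeon_bo_create(rdev, size, alignment, kernel, initial_domain,
			     flags, NULL, NULL, &robj);		/* line 75 closes this call */
	if (r)
		return r;

	*obj = &robj->tbo.base;					/* line 87: hand back the embedded GEM object */
	robj->pid = task_pid_nr(current);			/* line 88: remember the allocating task */

	mutex_lock(&rdev->gem.mutex);
	list_add_tail(&robj->list, &rdev->gem.objects);		/* line 91: track the BO per device */
	mutex_unlock(&rdev->gem.mutex);

	return 0;
}
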
100 struct radeon_bo *robj;
105 robj = gem_to_radeon_bo(gobj);
118 r = dma_resv_wait_timeout_rcu(robj->tbo.base.resv, true, true, 30 * HZ);
127 if (domain == RADEON_GEM_DOMAIN_VRAM && robj->prime_shared_count) {
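
Lines 100-127 come from the set-domain path: after resolving the radeon_bo, a request for CPU access waits (up to 30 seconds) for all fences on the buffer's reservation object, and a request to move a prime-shared buffer into VRAM is rejected, since a BO exported via dma-buf has to stay where the importer can reach it. Sketch of the surrounding helper; the function name, domain selection, and error handling are assumptions.

/* Sketch only: context around listed lines 100-127. */
static int radeon_gem_set_domain(struct drm_gem_object *gobj,
				 uint32_t rdomain, uint32_t wdomain)
{
	struct radeon_bo *robj;					/* line 100 */
	uint32_t domain;
	long r;

	robj = gem_to_radeon_bo(gobj);				/* line 105 */

	/* prefer the write domain, fall back to the read domain */
	domain = wdomain ? wdomain : rdomain;
	if (!domain)
		return 0;

	if (domain == RADEON_GEM_DOMAIN_CPU) {
		/* CPU access: wait for the object to go idle first */
		r = dma_resv_wait_timeout_rcu(robj->tbo.base.resv, true, true, 30 * HZ);	/* line 118 */
		if (!r)
			return -EBUSY;
		if (r < 0 && r != -EINTR)
			return r;
	}

	if (domain == RADEON_GEM_DOMAIN_VRAM && robj->prime_shared_count) {	/* line 127 */
		/* dma-buf shared BOs cannot be migrated to VRAM */
		return -EINVAL;
	}
	return 0;
}
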
414 struct radeon_bo *robj;
420 robj = gem_to_radeon_bo(gobj);
421 if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm)) {
425 *offset_p = radeon_bo_mmap_offset(robj);
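
Lines 414-425 implement the dumb-buffer mmap-offset lookup: userptr BOs, whose pages belong to a user address space, must not be mapped through the GEM mmap path and are rejected; otherwise the fake offset from the buffer's VMA node is returned. Sketch; the function name and the lookup/put calls around the listed lines are assumptions.

/* Sketch only: context around listed lines 414-425. */
int radeon_mode_dumb_mmap(struct drm_file *filp, struct drm_device *dev,
			  uint32_t handle, uint64_t *offset_p)
{
	struct drm_gem_object *gobj;
	struct radeon_bo *robj;					/* line 414 */

	gobj = drm_gem_object_lookup(filp, handle);
	if (gobj == NULL)
		return -ENOENT;

	robj = gem_to_radeon_bo(gobj);				/* line 420 */
	if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm)) {	/* line 421 */
		/* userptr BOs are only reachable through their original mapping */
		drm_gem_object_put(gobj);
		return -EPERM;
	}
	*offset_p = radeon_bo_mmap_offset(robj);		/* line 425: fake offset for mmap() */
	drm_gem_object_put(gobj);
	return 0;
}
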
443 struct radeon_bo *robj;
451 robj = gem_to_radeon_bo(gobj);
453 r = dma_resv_test_signaled_rcu(robj->tbo.base.resv, true);
459 cur_placement = READ_ONCE(robj->tbo.mem.mem_type);
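
Lines 443-459 are the non-blocking busy check: dma_resv_test_signaled_rcu() reports whether every fence on the reservation object has signaled, and the buffer's current TTM memory type is read without locking so it can be translated back into a RADEON_GEM_DOMAIN_* value for userspace. Sketch; the ioctl wrapper and args handling are assumptions.

/* Sketch only: context around listed lines 443-459. */
int radeon_gem_busy_ioctl(struct drm_device *dev, void *data,
			  struct drm_file *filp)
{
	struct drm_radeon_gem_busy *args = data;
	struct drm_gem_object *gobj;
	struct radeon_bo *robj;					/* line 443 */
	uint32_t cur_placement = 0;
	int r;

	gobj = drm_gem_object_lookup(filp, args->handle);
	if (gobj == NULL)
		return -ENOENT;
	robj = gem_to_radeon_bo(gobj);				/* line 451 */

	r = dma_resv_test_signaled_rcu(robj->tbo.base.resv, true);	/* line 453 */
	r = r ? 0 : -EBUSY;					/* all fences signaled -> not busy */

	cur_placement = READ_ONCE(robj->tbo.mem.mem_type);	/* line 459: unlocked snapshot */
	args->domain = radeon_mem_type_to_domain(cur_placement);
	drm_gem_object_put(gobj);
	return r;
}
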
471 struct radeon_bo *robj;
480 robj = gem_to_radeon_bo(gobj);
482 ret = dma_resv_wait_timeout_rcu(robj->tbo.base.resv, true, true, 30 * HZ);
489 cur_placement = READ_ONCE(robj->tbo.mem.mem_type);
492 robj->rdev->asic->mmio_hdp_flush(rdev);
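
Lines 471-492 are the blocking variant: the wait-idle ioctl waits up to 30 seconds for all fences on the buffer, and if the BO currently lives in VRAM it additionally flushes the HDP cache through the ASIC's mmio_hdp_flush hook so CPU reads observe what the GPU wrote. Sketch; the error mapping and ioctl boilerplate are assumptions.

/* Sketch only: context around listed lines 471-492. */
int radeon_gem_wait_idle_ioctl(struct drm_device *dev, void *data,
			       struct drm_file *filp)
{
	struct radeon_device *rdev = dev->dev_private;
	struct drm_radeon_gem_wait_idle *args = data;
	struct drm_gem_object *gobj;
	struct radeon_bo *robj;					/* line 471 */
	uint32_t cur_placement = 0;
	int r = 0;
	long ret;

	gobj = drm_gem_object_lookup(filp, args->handle);
	if (gobj == NULL)
		return -ENOENT;
	robj = gem_to_radeon_bo(gobj);				/* line 480 */

	ret = dma_resv_wait_timeout_rcu(robj->tbo.base.resv, true, true, 30 * HZ);	/* line 482 */
	if (ret == 0)
		r = -EBUSY;
	else if (ret < 0)
		r = ret;

	/* flush the HDP cache so the CPU sees up-to-date VRAM contents */
	cur_placement = READ_ONCE(robj->tbo.mem.mem_type);	/* line 489 */
	if (rdev->asic->mmio_hdp_flush &&
	    radeon_mem_type_to_domain(cur_placement) == RADEON_GEM_DOMAIN_VRAM)
		robj->rdev->asic->mmio_hdp_flush(rdev);		/* line 492 */

	drm_gem_object_put(gobj);
	return r;
}
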
503 struct radeon_bo *robj;
510 robj = gem_to_radeon_bo(gobj);
511 r = radeon_bo_set_tiling_flags(robj, args->tiling_flags, args->pitch);
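
Lines 503-511 simply forward the tiling request: the handle is resolved to a radeon_bo and radeon_bo_set_tiling_flags() records the tiling mode and pitch, which the driver uses later when programming surface registers. Sketch; the lookup and cleanup around the listed lines are assumptions.

/* Sketch only: context around listed lines 503-511. */
int radeon_gem_set_tiling_ioctl(struct drm_device *dev, void *data,
				struct drm_file *filp)
{
	struct drm_radeon_gem_set_tiling *args = data;
	struct drm_gem_object *gobj;
	struct radeon_bo *robj;					/* line 503 */
	int r;

	gobj = drm_gem_object_lookup(filp, args->handle);
	if (gobj == NULL)
		return -ENOENT;
	robj = gem_to_radeon_bo(gobj);				/* line 510 */
	r = radeon_bo_set_tiling_flags(robj, args->tiling_flags, args->pitch);	/* line 511 */
	drm_gem_object_put(gobj);
	return r;
}
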
712 struct radeon_bo *robj;
719 robj = gem_to_radeon_bo(gobj);
722 if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm))
725 r = radeon_bo_reserve(robj, false);
731 args->value = robj->initial_domain;
734 robj->initial_domain = args->value & (RADEON_GEM_DOMAIN_VRAM |
742 radeon_bo_unreserve(robj);
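
Lines 712-742 are the RADEON_GEM_OP ioctl, which reads or rewrites a buffer's preferred placement: userptr BOs are refused, the BO is reserved before its fields are touched, GET returns initial_domain as-is, and SET masks the user value down to the three valid placement bits (VRAM, GTT, CPU). Sketch; the switch on args->op and the cleanup labels are assumptions.

/* Sketch only: context around listed lines 712-742. */
int radeon_gem_op_ioctl(struct drm_device *dev, void *data,
			struct drm_file *filp)
{
	struct drm_radeon_gem_op *args = data;
	struct drm_gem_object *gobj;
	struct radeon_bo *robj;					/* line 712 */
	int r;

	gobj = drm_gem_object_lookup(filp, args->handle);
	if (gobj == NULL)
		return -ENOENT;
	robj = gem_to_radeon_bo(gobj);				/* line 719 */

	r = -EPERM;
	if (radeon_ttm_tt_has_userptr(robj->rdev, robj->tbo.ttm))	/* line 722 */
		goto out;

	r = radeon_bo_reserve(robj, false);			/* line 725: lock the BO */
	if (unlikely(r))
		goto out;

	switch (args->op) {
	case RADEON_GEM_OP_GET_INITIAL_DOMAIN:
		args->value = robj->initial_domain;		/* line 731 */
		break;
	case RADEON_GEM_OP_SET_INITIAL_DOMAIN:
		robj->initial_domain = args->value & (RADEON_GEM_DOMAIN_VRAM |	/* line 734 */
						      RADEON_GEM_DOMAIN_GTT |
						      RADEON_GEM_DOMAIN_CPU);
		break;
	default:
		r = -EINVAL;
	}

	radeon_bo_unreserve(robj);				/* line 742 */
out:
	drm_gem_object_put(gobj);
	return r;
}
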