Lines matching refs: v3d
22 v3d_init_core(struct v3d_dev *v3d, int core)
30 if (v3d->ver < 40)
42 v3d_init_hw_state(struct v3d_dev *v3d)
44 v3d_init_core(v3d, 0);
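The matches at 22-44 are the hardware init path: v3d_init_hw_state() just delegates to a per-core init for core 0. A minimal sketch of how the matched lines fit together; the MISCCFG write and its comment are assumptions filled in around the confirmed version check:

    static void
    v3d_init_core(struct v3d_dev *v3d, int core)
    {
            /* Pre-4.0 hardware wants OVRTMUOUT set so the TMU output
             * type comes from the sampler uniform config rather than
             * the hardware default (assumed detail; only the version
             * check appears in the matches).
             */
            if (v3d->ver < 40)
                    V3D_CORE_WRITE(core, V3D_CTL_MISCCFG,
                                   V3D_MISCCFG_OVRTMUOUT);
    }

    static void
    v3d_init_hw_state(struct v3d_dev *v3d)
    {
            v3d_init_core(v3d, 0);
    }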
48 v3d_idle_axi(struct v3d_dev *v3d, int core)
61 v3d_idle_gca(struct v3d_dev *v3d)
63 if (v3d->ver >= 41)
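v3d_idle_gca() bails out early on 4.1+ parts. A sketch of that shape, assuming (as the early return suggests) that the GCA block only exists on older hardware; the shutdown register sequence itself is not in the matches and is elided:

    static void
    v3d_idle_gca(struct v3d_dev *v3d)
    {
            /* No GCA on 4.1+ hardware, so nothing to idle. */
            if (v3d->ver >= 41)
                    return;

            /* ... GCA safe-shutdown register write and ack poll
             * (elided; not in the matches) ...
             */
    }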
76 v3d_reset_by_bridge(struct v3d_dev *v3d)
99 v3d_reset_v3d(struct v3d_dev *v3d)
101 if (v3d->reset)
102 reset_control_reset(v3d->reset);
104 v3d_reset_by_bridge(v3d);
106 v3d_init_hw_state(v3d);
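The matches at 99-106 give the whole reset dispatch: use the reset controller when the DT provided one, otherwise fall back to poking the bridge, then reprogram the state the reset wiped. A sketch, with the else branch inferred from the fallback at line 104:

    static void
    v3d_reset_v3d(struct v3d_dev *v3d)
    {
            if (v3d->reset)
                    reset_control_reset(v3d->reset);
            else
                    v3d_reset_by_bridge(v3d);

            /* Reset clears hardware state, so rebuild it. */
            v3d_init_hw_state(v3d);
    }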
110 v3d_reset(struct v3d_dev *v3d)
112 struct drm_device *dev = &v3d->drm;
121 v3d_idle_axi(v3d, 0);
123 v3d_idle_gca(v3d);
124 v3d_reset_v3d(v3d);
126 v3d_mmu_set_page_table(v3d);
127 v3d_irq_reset(v3d);
129 v3d_perfmon_stop(v3d, v3d->active_perfmon, false);
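v3d_reset() at 110-129 is the full hang-recovery sequence: quiesce the buses, reset, then restore everything the reset lost. A sketch assembling the matched calls in order; tracepoints between them are elided and the error message text is assumed:

    void
    v3d_reset(struct v3d_dev *v3d)
    {
            struct drm_device *dev = &v3d->drm;

            DRM_DEV_ERROR(dev->dev, "Resetting GPU for hang.\n");

            v3d_idle_axi(v3d, 0);
            v3d_idle_gca(v3d);
            v3d_reset_v3d(v3d);

            /* The MMU and IRQ setup do not survive the reset. */
            v3d_mmu_set_page_table(v3d);
            v3d_irq_reset(v3d);

            /* Stop the active perf monitor, if any. */
            v3d_perfmon_stop(v3d, v3d->active_perfmon, false);
    }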
135 v3d_flush_l3(struct v3d_dev *v3d)
137 if (v3d->ver < 41) {
143 if (v3d->ver < 33) {
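v3d_flush_l3() has two version gates: only pre-4.1 parts have a GCA-driven L3 flush at all, and pre-3.3 parts additionally need the flush bit cleared again by hand. A sketch with assumed GCA register names; only the two version checks are confirmed by the matches:

    static void
    v3d_flush_l3(struct v3d_dev *v3d)
    {
            if (v3d->ver < 41) {
                    u32 gca_ctrl = V3D_GCA_READ(V3D_GCA_CACHE_CTRL);

                    V3D_GCA_WRITE(V3D_GCA_CACHE_CTRL,
                                  gca_ctrl | V3D_GCA_CACHE_CTRL_FLUSH);

                    /* Pre-3.3 hardware needs the bit dropped again. */
                    if (v3d->ver < 33)
                            V3D_GCA_WRITE(V3D_GCA_CACHE_CTRL,
                                          gca_ctrl & ~V3D_GCA_CACHE_CTRL_FLUSH);
            }
    }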
154 v3d_invalidate_l2c(struct v3d_dev *v3d, int core)
156 if (v3d->ver > 32)
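v3d_invalidate_l2c() is the inverse gate: the L2C only exists up through 3.2, so newer parts return early. A sketch, with the L2CACTL write an assumption around the confirmed check:

    static void
    v3d_invalidate_l2c(struct v3d_dev *v3d, int core)
    {
            /* No L2C on hardware newer than 3.2. */
            if (v3d->ver > 32)
                    return;

            V3D_CORE_WRITE(core, V3D_CTL_L2CACTL,
                           V3D_L2CACTL_L2CCLR |
                           V3D_L2CACTL_L2CENA);
    }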
166 v3d_flush_l2t(struct v3d_dev *v3d, int core)
175 mutex_lock(&v3d->cache_clean_lock);
179 mutex_unlock(&v3d->cache_clean_lock);
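The lock/unlock pair at 175/179 brackets v3d_flush_l2t()'s single trigger write: the flush itself doesn't need to be waited on, but it must not race with a synchronous clean holding the same lock (see v3d_clean_caches() below). A sketch with an assumed register write between the confirmed lock calls:

    static void
    v3d_flush_l2t(struct v3d_dev *v3d, int core)
    {
            mutex_lock(&v3d->cache_clean_lock);
            V3D_CORE_WRITE(core, V3D_CTL_L2TCACTL,
                           V3D_L2TCACTL_L2TFLS |
                           V3D_SET_FIELD(V3D_L2TCACTL_FLM_FLUSH,
                                         V3D_L2TCACTL_FLM));
            mutex_unlock(&v3d->cache_clean_lock);
    }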
191 v3d_clean_caches(struct v3d_dev *v3d)
193 struct drm_device *dev = &v3d->drm;
204 mutex_lock(&v3d->cache_clean_lock);
214 mutex_unlock(&v3d->cache_clean_lock);
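v3d_clean_caches() takes the same lock at 204-214, but unlike the flush it holds it across a completion wait. A sketch; the TMU write-combiner flush and the polling loops are elided since they don't appear in the matches, and the tracepoint names are assumed:

    void
    v3d_clean_caches(struct v3d_dev *v3d)
    {
            struct drm_device *dev = &v3d->drm;
            int core = 0;

            trace_v3d_cache_clean_begin(dev);       /* name assumed */

            /* ... TMU write-combiner flush and completion wait
             * (elided; not in the matches) ...
             */

            mutex_lock(&v3d->cache_clean_lock);
            V3D_CORE_WRITE(core, V3D_CTL_L2TCACTL,
                           V3D_L2TCACTL_L2TFLS |
                           V3D_SET_FIELD(V3D_L2TCACTL_FLM_CLEAN,
                                         V3D_L2TCACTL_FLM));
            /* ... poll until V3D_L2TCACTL_L2TFLS clears (elided) ... */
            mutex_unlock(&v3d->cache_clean_lock);

            trace_v3d_cache_clean_end(dev);
    }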
221 v3d_invalidate_slices(struct v3d_dev *v3d, int core)
231 v3d_invalidate_caches(struct v3d_dev *v3d)
238 v3d_flush_l3(v3d);
239 v3d_invalidate_l2c(v3d, 0);
240 v3d_flush_l2t(v3d, 0);
241 v3d_invalidate_slices(v3d, 0);
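The four calls at 238-241 invalidate from the outside in: L3 first, then L2C, L2T, and finally the per-core slice caches, so concurrent traffic can't refill an inner cache with stale data after an outer level has already been invalidated. As a single unit:

    void
    v3d_invalidate_caches(struct v3d_dev *v3d)
    {
            /* Outside in: L3, L2C, L2T, then slice caches. */
            v3d_flush_l3(v3d);
            v3d_invalidate_l2c(v3d, 0);
            v3d_flush_l2t(v3d, 0);
            v3d_invalidate_slices(v3d, 0);
    }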
249 * to v3d, so we don't attach dma-buf fences to them.
400 v3d_job_init(struct v3d_dev *v3d, struct drm_file *file_priv,
411 DRM_ERROR("Cannot allocate memory for v3d job.");
416 job->v3d = v3d;
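v3d_job_init() at 400-416 is the common allocator for all job types: each ioctl passes a pointer to its own (differently sized) job struct, and the helper fills in the shared v3d_job base. A condensed sketch; the parameter list here is partly assumed, and the real function also takes sync/queue arguments and sets up refcounting, scheduler entities, and in-fences, none of which appear in the matches:

    static int
    v3d_job_init(struct v3d_dev *v3d, struct drm_file *file_priv,
                 void **container, size_t size, void (*free)(struct kref *ref))
    {
            struct v3d_job *job;

            *container = kcalloc(1, size, GFP_KERNEL);
            if (!*container) {
                    DRM_ERROR("Cannot allocate memory for v3d job.");
                    return -ENOMEM;
            }

            job = *container;
            job->v3d = v3d;

            /* ... refcount, scheduler entity, and in-fence setup
             * (elided) ...
             */

            return 0;
    }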
664 struct v3d_dev *v3d = to_v3d_dev(dev);
675 trace_v3d_submit_cl_ioctl(&v3d->drm, args->rcl_start, args->rcl_end);
695 ret = v3d_job_init(v3d, file_priv, (void *)&render, sizeof(*render),
705 ret = v3d_job_init(v3d, file_priv, (void *)&bin, sizeof(*bin),
719 ret = v3d_job_init(v3d, file_priv, (void *)&clean_job, sizeof(*clean_job),
748 mutex_lock(&v3d->sched_lock);
774 mutex_unlock(&v3d->sched_lock);
792 mutex_unlock(&v3d->sched_lock);
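In the CL-submit ioctl, all the jobs for one submission (bin, render, and an optional cache-clean job) are pushed while sched_lock is held at 748-774 so their fences chain in submission order; the second unlock at 792 is the error path. A sketch of the pattern, with v3d_push_job() standing in for the driver's push helper (its exact signature varies across kernel versions):

    mutex_lock(&v3d->sched_lock);
    if (bin)
            v3d_push_job(&bin->base);       /* helper name assumed */
    v3d_push_job(&render->base);
    if (clean_job)
            v3d_push_job(&clean_job->base);
    mutex_unlock(&v3d->sched_lock);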
818 struct v3d_dev *v3d = to_v3d_dev(dev);
825 trace_v3d_submit_tfu_ioctl(&v3d->drm, args->iia);
840 ret = v3d_job_init(v3d, file_priv, (void *)&job, sizeof(*job),
877 mutex_lock(&v3d->sched_lock);
879 mutex_unlock(&v3d->sched_lock);
911 struct v3d_dev *v3d = to_v3d_dev(dev);
920 trace_v3d_submit_csd_ioctl(&v3d->drm, args->cfg[5], args->cfg[6]);
925 if (!v3d_has_csd(v3d)) {
943 ret = v3d_job_init(v3d, file_priv, (void *)&job, sizeof(*job),
948 ret = v3d_job_init(v3d, file_priv, (void *)&clean_job, sizeof(*clean_job),
973 mutex_lock(&v3d->sched_lock);
982 mutex_unlock(&v3d->sched_lock);
997 mutex_unlock(&v3d->sched_lock);
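The CSD ioctl follows the same push-under-sched_lock shape but gates on hardware support first, since only some parts have a compute shader dispatcher. A sketch of the early check at 925, assuming v3d_has_csd() wraps a hardware-version test and that the debug message reads roughly as shown:

    /* Reject compute submits on hardware without a CSD. */
    if (!v3d_has_csd(v3d)) {
            DRM_DEBUG("Attempting CSD submit on non-CSD hardware\n");
            return -EINVAL;
    }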
1012 struct v3d_dev *v3d = to_v3d_dev(dev);
1017 v3d->queue[i].fence_context = dma_fence_context_alloc(1);
1019 spin_lock_init(&v3d->mm_lock);
1020 spin_lock_init(&v3d->job_lock);
1021 ret = drmm_mutex_init(dev, &v3d->bo_lock);
1024 ret = drmm_mutex_init(dev, &v3d->reset_lock);
1027 ret = drmm_mutex_init(dev, &v3d->sched_lock);
1030 ret = drmm_mutex_init(dev, &v3d->cache_clean_lock);
1038 drm_mm_init(&v3d->mm, 1, pt_size / sizeof(u32) - 1);
1040 v3d->pt = dma_alloc_wc(v3d->drm.dev, pt_size,
1041 &v3d->pt_paddr,
1043 if (!v3d->pt) {
1044 drm_mm_takedown(&v3d->mm);
1045 dev_err(v3d->drm.dev,
1050 v3d_init_hw_state(v3d);
1051 v3d_mmu_set_page_table(v3d);
1053 ret = v3d_sched_init(v3d);
1055 drm_mm_takedown(&v3d->mm);
1056 dma_free_coherent(v3d->drm.dev, 4096 * 1024, (void *)v3d->pt,
1057 v3d->pt_paddr);
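v3d_gem_init() at 1012-1057 sets up one fence context per queue, the locks, a drm_mm allocator over the MMU's address space, and the page table itself. Two details stand out in the matches: drm_mm_init() starts at 1 rather than 0 (parts of the hardware treat address 0 as special), and the range is pt_size / sizeof(u32) - 1 because each 4-byte PTE maps one page. A sketch of the allocation path; pt_size, the GFP flags, and the error message text are assumptions:

    int
    v3d_gem_init(struct drm_device *dev)
    {
            struct v3d_dev *v3d = to_v3d_dev(dev);
            u32 pt_size = 4096 * 1024;      /* assumed; implied by the
                                             * free at 1056 */
            int ret;

            /* ... fence contexts, spinlocks, drmm mutexes (see the
             * matches at 1017-1030) ...
             */

            /* Don't hand out address 0: some hardware treats it as
             * "disabled".
             */
            drm_mm_init(&v3d->mm, 1, pt_size / sizeof(u32) - 1);

            v3d->pt = dma_alloc_wc(v3d->drm.dev, pt_size, &v3d->pt_paddr,
                                   GFP_KERNEL | __GFP_NOWARN | __GFP_ZERO);
            if (!v3d->pt) {
                    drm_mm_takedown(&v3d->mm);
                    dev_err(v3d->drm.dev,
                            "Failed to allocate page tables\n");
                    return -ENOMEM;
            }

            v3d_init_hw_state(v3d);
            v3d_mmu_set_page_table(v3d);

            ret = v3d_sched_init(v3d);
            if (ret) {
                    /* Unwind in reverse order on scheduler failure. */
                    drm_mm_takedown(&v3d->mm);
                    dma_free_coherent(v3d->drm.dev, pt_size,
                                      (void *)v3d->pt, v3d->pt_paddr);
                    return ret;
            }

            return 0;
    }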
1066 struct v3d_dev *v3d = to_v3d_dev(dev);
1068 v3d_sched_fini(v3d);
1073 WARN_ON(v3d->bin_job);
1074 WARN_ON(v3d->render_job);
1076 drm_mm_takedown(&v3d->mm);
1078 dma_free_coherent(v3d->drm.dev, 4096 * 1024, (void *)v3d->pt,
1079 v3d->pt_paddr);
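v3d_gem_destroy() at 1066-1079 unwinds in the reverse order: stop the scheduler so nothing new can be queued, assert that no bin or render job was still in flight, then tear down the allocator and free the page table. Assembled from the matched lines, with only the comment added:

    void
    v3d_gem_destroy(struct drm_device *dev)
    {
            struct v3d_dev *v3d = to_v3d_dev(dev);

            v3d_sched_fini(v3d);

            /* Jobs should have drained before unregistering. */
            WARN_ON(v3d->bin_job);
            WARN_ON(v3d->render_job);

            drm_mm_takedown(&v3d->mm);

            dma_free_coherent(v3d->drm.dev, 4096 * 1024, (void *)v3d->pt,
                              v3d->pt_paddr);
    }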