/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/backend/gpu/
mali_kbase_jm_hw.c
    106  if (!kbdev->hwaccess.backend.slot_rb[js].job_chain_flag) {  in kbase_job_hw_submit()
    109  kbdev->hwaccess.backend.slot_rb[js].job_chain_flag = true;  in kbase_job_hw_submit()
    112  kbdev->hwaccess.backend.slot_rb[js].job_chain_flag = false;  in kbase_job_hw_submit()
    147  kbdev->hwaccess.backend.slot_rb[js].last_context = katom->kctx;  in kbase_job_hw_submit()
    379  if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) == KBASE_RESET_GPU_COMMITTED) {  in kbase_job_done()
    665  * kbdev->hwaccess.backend.reset_waitq is signalled
    739  wait_event(kbdev->hwaccess.backend.reset_wait,  in kbase_jm_wait_for_zero_jobs()
    740  atomic_read(&kbdev->hwaccess.backend.reset_gpu) == KBASE_RESET_GPU_NOT_PENDING);  in kbase_jm_wait_for_zero_jobs()
    771  kbdev->hwaccess.backend.reset_workq = alloc_workqueue("Mali reset workqueue", 0, 1);  in kbase_job_slot_init()
    772  if (kbdev->hwaccess  in kbase_job_slot_init()
    [all...]
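The job_chain_flag hits above show kbase_job_hw_submit() flipping a per-slot boolean on every submission: taken one way it is set true, taken the other way it is set false. Below is a minimal plain-C sketch of that alternation; the select_job_chain() helper and the idea that the flag picks between two pre-built job chains are illustrative assumptions, only the flip itself is visible in these hits.

    #include <stdbool.h>
    #include <stdio.h>

    struct slot { bool job_chain_flag; };

    /* Hypothetical helper: choose one of two alternatives, then invert the
     * per-slot flag so the next submission chooses the other. */
    static int select_job_chain(struct slot *s)
    {
        int which = s->job_chain_flag ? 1 : 0;
        s->job_chain_flag = !s->job_chain_flag;
        return which;
    }

    int main(void)
    {
        struct slot s = { .job_chain_flag = false };
        for (int i = 0; i < 4; i++)
            printf("submit %d uses chain %d\n", i, select_job_chain(&s));
        return 0;
    }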
mali_kbase_js_backend.c
    38   struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in timer_callback_should_run()
    96   kbdev = container_of(backend, struct kbase_device, hwaccess.backend);  in timer_callback()
    255  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_ctx_count_changed()
    287  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_init()
    299  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_term()
    306  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_suspend()
    315  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_resume()
    324  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timeouts_changed()
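timer_callback() above receives only a pointer to the embedded backend data and walks back to the enclosing device with container_of(backend, struct kbase_device, hwaccess.backend). A self-contained userspace sketch of that idiom follows; the struct layouts are cut-down stand-ins for the real kbase types, while the macro matches the usual kernel definition.

    #include <stddef.h>
    #include <stdio.h>

    /* Userspace stand-in for the kernel macro. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    /* Hypothetical cut-down shapes of the real structs. */
    struct kbase_backend_data { int timer_running; };
    struct kbase_hwaccess     { struct kbase_backend_data backend; };
    struct kbase_device       { int id; struct kbase_hwaccess hwaccess; };

    int main(void)
    {
        struct kbase_device dev = { .id = 7 };
        struct kbase_backend_data *backend = &dev.hwaccess.backend;

        /* The callback only gets `backend`; recover the device it lives in. */
        struct kbase_device *kbdev =
            container_of(backend, struct kbase_device, hwaccess.backend);

        printf("recovered device id = %d\n", kbdev->id); /* prints 7 */
        return 0;
    }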
mali_kbase_jm_as.c
    65   if (kbdev->hwaccess.active_kctx == kctx) {  in kbase_backend_use_ctx_sched()
    200  if (kbdev->hwaccess.active_kctx == kctx) {  in kbase_backend_use_ctx()
mali_kbase_jm_rb.c
    51    struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[katom->slot_nr];  in kbase_gpu_enqueue_atom()
    77    struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_gpu_dequeue_atom()
    102   struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_gpu_inspect()
    120   struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_backend_inspect_tail()
    286   if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) != KBASE_RESET_GPU_NOT_PENDING) {  in kbase_backend_slot_free()
    621   struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_gpu_rmu_workaround()
    1279  kbdev->hwaccess.backend.slot_rb[js].last_context = next_katom->kctx;  in kbase_gpu_complete_hw()
    1285  kbdev->hwaccess.backend.slot_rb[js].last_context = 0;  in kbase_gpu_complete_hw()
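kbase_gpu_enqueue_atom(), kbase_gpu_dequeue_atom() and kbase_gpu_inspect() above all start by resolving &kbdev->hwaccess.backend.slot_rb[js], which suggests a small per-slot ring buffer of in-flight atoms. A minimal sketch under that assumption; the depth of 2, the field names and the helpers are hypothetical, only the enqueue/dequeue/inspect split comes from the hits.

    #include <stdio.h>

    #define SLOT_RB_SIZE 2 /* hypothetical depth */

    struct katom { int id; };

    struct slot_rb {
        struct katom *entries[SLOT_RB_SIZE];
        unsigned read_idx, write_idx; /* monotonic; wrapped on access */
    };

    static int rb_enqueue(struct slot_rb *rb, struct katom *a)
    {
        if (rb->write_idx - rb->read_idx >= SLOT_RB_SIZE)
            return -1; /* slot full */
        rb->entries[rb->write_idx++ % SLOT_RB_SIZE] = a;
        return 0;
    }

    static struct katom *rb_dequeue(struct slot_rb *rb)
    {
        if (rb->read_idx == rb->write_idx)
            return NULL; /* slot empty */
        return rb->entries[rb->read_idx++ % SLOT_RB_SIZE];
    }

    /* Peek at the idx-th oldest atom without removing it. */
    static struct katom *rb_inspect(const struct slot_rb *rb, unsigned idx)
    {
        if (rb->read_idx + idx >= rb->write_idx)
            return NULL;
        return rb->entries[(rb->read_idx + idx) % SLOT_RB_SIZE];
    }

    int main(void)
    {
        struct slot_rb rb = { 0 };
        struct katom a = { 1 }, b = { 2 };

        rb_enqueue(&rb, &a);
        rb_enqueue(&rb, &b);
        printf("head=%d next=%d\n",
               rb_inspect(&rb, 0)->id, rb_inspect(&rb, 1)->id);
        printf("dequeued=%d\n", rb_dequeue(&rb)->id);
        return 0;
    }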
mali_kbase_gpu.c
    100  init_waitqueue_head(&kbdev->hwaccess.backend.reset_wait);  in kbase_backend_late_init()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/backend/gpu/
mali_kbase_jm_hw.c
    113  if (!kbdev->hwaccess.backend.slot_rb[js].job_chain_flag) {  in kbase_job_hw_submit()
    116  kbdev->hwaccess.backend.slot_rb[js].job_chain_flag =  in kbase_job_hw_submit()
    120  kbdev->hwaccess.backend.slot_rb[js].job_chain_flag =  in kbase_job_hw_submit()
    169  kbdev->hwaccess.backend.slot_rb[js].last_context = katom->kctx;  in kbase_job_hw_submit()
    442  if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) ==  in kbase_job_done()
    766  * kbdev->hwaccess.backend.reset_waitq is signalled
    841  wait_event(kbdev->hwaccess.backend.reset_wait,  in kbase_jm_wait_for_zero_jobs()
    842  atomic_read(&kbdev->hwaccess.backend.reset_gpu)  in kbase_jm_wait_for_zero_jobs()
    874  kbdev->hwaccess.backend.reset_workq = alloc_workqueue(  in kbase_job_slot_init()
    876  if (NULL == kbdev->hwaccess  in kbase_job_slot_init()
    [all...]
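Hits 841-842 above show the waiter side of the reset handshake: kbase_jm_wait_for_zero_jobs() sleeps on reset_wait until reset_gpu drops back to KBASE_RESET_GPU_NOT_PENDING, and the bifrost entries further down show the worker side doing atomic_set() followed by wake_up(). A runnable userspace pthread analogue of that handshake, as a minimal sketch (compile with -pthread); the kernel's wait_event() re-checks its condition exactly like the while loop here.

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    enum { RESET_NOT_PENDING, RESET_COMMITTED };

    static atomic_int reset_gpu = RESET_COMMITTED;
    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t reset_wait = PTHREAD_COND_INITIALIZER;

    /* Worker side: finish the reset, publish NOT_PENDING, wake all waiters
     * (the atomic_set() + wake_up() pair in kbasep_reset_timeout_worker). */
    static void *reset_worker(void *arg)
    {
        (void)arg;
        pthread_mutex_lock(&lock);
        atomic_store(&reset_gpu, RESET_NOT_PENDING);
        pthread_cond_broadcast(&reset_wait);
        pthread_mutex_unlock(&lock);
        return NULL;
    }

    int main(void)
    {
        pthread_t t;
        pthread_create(&t, NULL, reset_worker, NULL);

        /* Waiter side: the wait_event() equivalent. */
        pthread_mutex_lock(&lock);
        while (atomic_load(&reset_gpu) != RESET_NOT_PENDING)
            pthread_cond_wait(&reset_wait, &lock);
        pthread_mutex_unlock(&lock);

        puts("reset done; wait-for-zero-jobs can return");
        pthread_join(t, NULL);
        return 0;
    }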
mali_kbase_js_backend.c
    41   struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in timer_callback_should_run()
    103  kbdev = container_of(backend, struct kbase_device, hwaccess.backend);  in timer_callback()
    280  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_ctx_count_changed()
    314  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_init()
    327  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_term()
    334  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_suspend()
    343  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_resume()
    352  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timeouts_changed()
mali_kbase_jm_as.c
    70   if (kbdev->hwaccess.active_kctx == kctx) {  in kbase_backend_use_ctx_sched()
    216  if (kbdev->hwaccess.active_kctx == kctx) {  in kbase_backend_use_ctx()
mali_kbase_jm_rb.c
    56    struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[katom->slot_nr];  in kbase_gpu_enqueue_atom()
    84    struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_gpu_dequeue_atom()
    110   struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_gpu_inspect()
    129   struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_backend_inspect_tail()
    293   if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) !=  in kbase_backend_slot_free()
    707   struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_gpu_rmu_workaround()
    1456  kbdev->hwaccess.backend.slot_rb[js].last_context =  in kbase_gpu_complete_hw()
    1466  kbdev->hwaccess.backend.slot_rb[js].last_context = 0;  in kbase_gpu_complete_hw()
mali_kbase_gpu.c
    97   init_waitqueue_head(&kbdev->hwaccess.backend.reset_wait);  in kbase_backend_late_init()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/backend/gpu/
mali_kbase_jm_hw.c
    224  if (!kbdev->hwaccess.backend.slot_rb[js].job_chain_flag) {  in kbase_job_hw_submit()
    227  kbdev->hwaccess.backend.slot_rb[js].job_chain_flag = true;  in kbase_job_hw_submit()
    230  kbdev->hwaccess.backend.slot_rb[js].job_chain_flag = false;  in kbase_job_hw_submit()
    264  kbdev->hwaccess.backend.slot_rb[js].last_context = katom->kctx;  in kbase_job_hw_submit()
    484  if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) == KBASE_RESET_GPU_COMMITTED) {  in kbase_job_done()
    974  kbdev = container_of(data, struct kbase_device, hwaccess.backend.reset_work);  in kbasep_reset_timeout_worker()
    979  if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) == KBASE_RESET_GPU_SILENT) {  in kbasep_reset_timeout_worker()
    993  hrtimer_cancel(&kbdev->hwaccess.backend.reset_timer);  in kbasep_reset_timeout_worker()
    998  atomic_set(&kbdev->hwaccess.backend.reset_gpu, KBASE_RESET_GPU_NOT_PENDING);  in kbasep_reset_timeout_worker()
    1000 wake_up(&kbdev->hwaccess  in kbasep_reset_timeout_worker()
    [all...]
mali_kbase_js_backend.c
    39   struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in timer_callback_should_run()
    97   kbdev = container_of(backend, struct kbase_device, hwaccess.backend);  in timer_callback()
    257  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_ctx_count_changed()
    293  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_init()
    308  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_term()
    318  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_suspend()
    327  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_resume()
    336  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timeouts_changed()
mali_kbase_jm_as.c
    74   if (kbdev->hwaccess.active_kctx[js] == kctx) {  in kbase_backend_use_ctx_sched()
    209  if (kbdev->hwaccess.active_kctx[js] == kctx) {  in kbase_backend_use_ctx()
mali_kbase_jm_rb.c
    59    struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[katom->slot_nr];  in kbase_gpu_enqueue_atom()
    85    struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_gpu_dequeue_atom()
    108   struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_gpu_inspect()
    121   struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_backend_inspect_tail()
    253   if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) != KBASE_RESET_GPU_NOT_PENDING) {  in kbase_backend_slot_free()
    1125  kbdev->hwaccess.backend.slot_rb[js].last_context = next_katom->kctx;
    1131  kbdev->hwaccess.backend.slot_rb[js].last_context = 0;
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/backend/gpu/
mali_kbase_jm_hw.c
    202   struct slot_rb *ptr_slot_rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_job_hw_submit()
    583   if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) ==  in kbase_job_done()
    1097  hwaccess.backend.reset_work);  in kbasep_reset_timeout_worker()
    1102  if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) ==  in kbasep_reset_timeout_worker()
    1117  hrtimer_cancel(&kbdev->hwaccess.backend.reset_timer);  in kbasep_reset_timeout_worker()
    1124  atomic_set(&kbdev->hwaccess.backend.reset_gpu,  in kbasep_reset_timeout_worker()
    1127  wake_up(&kbdev->hwaccess.backend.reset_wait);  in kbasep_reset_timeout_worker()
    1241  atomic_set(&kbdev->hwaccess.backend.reset_gpu,  in kbasep_reset_timeout_worker()
    1244  wake_up(&kbdev->hwaccess.backend.reset_wait);  in kbasep_reset_timeout_worker()
    1270  hwaccess  in kbasep_reset_timer_callback()
    [all...]
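This copy also shows the reset watchdog: kbasep_reset_timer_callback() runs off reset_timer, and the worker calls hrtimer_cancel() once the reset has actually been performed. A runnable userspace analogue of the arm/cancel pattern, as a sketch (compile with -pthread); the 500 ms timeout and all names here are assumptions, only the "cancel the timer when the worker finishes first" shape comes from the hits.

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>
    #include <time.h>

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t cancel_cv = PTHREAD_COND_INITIALIZER;
    static bool cancelled;

    /* Watchdog thread: wait up to 500 ms for a cancel, else "force" a reset,
     * standing in for the hrtimer-driven kbasep_reset_timer_callback(). */
    static void *watchdog(void *arg)
    {
        (void)arg;
        struct timespec deadline;
        clock_gettime(CLOCK_REALTIME, &deadline);
        deadline.tv_nsec += 500 * 1000000L;
        deadline.tv_sec += deadline.tv_nsec / 1000000000L;
        deadline.tv_nsec %= 1000000000L;

        pthread_mutex_lock(&lock);
        while (!cancelled &&
               pthread_cond_timedwait(&cancel_cv, &lock, &deadline) == 0)
            ; /* woken without cancel: re-check */
        puts(cancelled ? "watchdog cancelled" : "timeout: forcing reset");
        pthread_mutex_unlock(&lock);
        return NULL;
    }

    int main(void)
    {
        pthread_t t;
        pthread_create(&t, NULL, watchdog, NULL);

        /* Reset worker finished in time: the hrtimer_cancel() analogue. */
        pthread_mutex_lock(&lock);
        cancelled = true;
        pthread_cond_signal(&cancel_cv);
        pthread_mutex_unlock(&lock);

        pthread_join(t, NULL);
        return 0;
    }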
mali_kbase_js_backend.c
    38   struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in timer_callback_should_run()
    101  kbdev = container_of(backend, struct kbase_device, hwaccess.backend);  in timer_callback()
    282  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_ctx_count_changed()
    320  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_init()
    336  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_term()
    346  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_suspend()
    355  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timer_resume()
    364  struct kbase_backend_data *backend = &kbdev->hwaccess.backend;  in kbase_backend_timeouts_changed()
mali_kbase_jm_rb.c
    63    struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[katom->slot_nr];  in kbase_gpu_enqueue_atom()
    91    struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_gpu_dequeue_atom()
    115   struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_gpu_inspect()
    128   struct slot_rb *rb = &kbdev->hwaccess.backend.slot_rb[js];  in kbase_backend_inspect_tail()
    259   if (atomic_read(&kbdev->hwaccess.backend.reset_gpu) !=  in kbase_backend_slot_free()
    1143  kbdev->hwaccess.backend.slot_rb[js].last_kctx_tagged =  in kbase_gpu_irq_evict()
    1443  kbdev->hwaccess.backend.slot_rb[js].last_kctx_tagged = SLOT_RB_NULL_TAG_VAL;  in kbase_backend_reset()
    1667  kbdev->hwaccess.backend.slot_rb[js]  in kbase_backend_soft_hard_stop_slot()
    1747  kbdev->hwaccess.backend.slot_rb[js].last_kctx_tagged =  in kbase_backend_soft_hard_stop_slot()
    1847  u64 tagged_kctx = kbdev->hwaccess  in kbase_backend_slot_kctx_purge_locked()
    [all...]
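Unlike the midgard copies, this jm_rb.c tags each slot with the last context that ran on it (last_kctx_tagged), clears every tag to SLOT_RB_NULL_TAG_VAL on reset, and kbase_backend_slot_kctx_purge_locked() drops tags matching a given context. A minimal sketch under that reading; NUM_SLOTS, the u64 tag values and the function names here are hypothetical.

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_SLOTS 3
    #define SLOT_RB_NULL_TAG_VAL ((uint64_t)0)

    struct slot { uint64_t last_kctx_tagged; };

    static struct slot slots[NUM_SLOTS];

    /* Reset path: forget every slot's last context. */
    static void backend_reset(void)
    {
        for (int js = 0; js < NUM_SLOTS; js++)
            slots[js].last_kctx_tagged = SLOT_RB_NULL_TAG_VAL;
    }

    /* Purge path: drop only the tags that match the context going away. */
    static void slot_kctx_purge(uint64_t kctx_tag)
    {
        for (int js = 0; js < NUM_SLOTS; js++)
            if (slots[js].last_kctx_tagged == kctx_tag)
                slots[js].last_kctx_tagged = SLOT_RB_NULL_TAG_VAL;
    }

    int main(void)
    {
        backend_reset();
        slots[0].last_kctx_tagged = 0xAA;
        slots[1].last_kctx_tagged = 0xBB;
        slots[2].last_kctx_tagged = 0xAA;

        slot_kctx_purge(0xAA); /* slots 0 and 2 forget, slot 1 keeps 0xBB */
        for (int js = 0; js < NUM_SLOTS; js++)
            printf("slot %d tag=%#llx\n", js,
                   (unsigned long long)slots[js].last_kctx_tagged);
        return 0;
    }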
mali_kbase_jm_as.c
    76   if (kbdev->hwaccess.active_kctx[js] == kctx) {  in kbase_backend_use_ctx_sched()
    221  if (kbdev->hwaccess.active_kctx[js] == kctx) {  in kbase_backend_use_ctx()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_jm.c
    38   kctx = kbdev->hwaccess.active_kctx;  in kbase_jm_next_job()
    104  if (kbdev->hwaccess.active_kctx == kctx) {  in kbase_jm_idle_ctx()
    105  kbdev->hwaccess.active_kctx = NULL;  in kbase_jm_idle_ctx()
mali_kbase_js.c
    1077  if (enqueue_required && kctx == kbdev->hwaccess.active_kctx) {  in kbasep_js_add_job()
    1365  if (kbdev->hwaccess.active_kctx == kctx) {  in kbasep_js_runpool_release_ctx_internal()
    1366  kbdev->hwaccess.active_kctx = NULL;  in kbasep_js_runpool_release_ctx_internal()
    1625  kbdev->hwaccess.active_kctx = kctx;  in kbasep_js_schedule_ctx()
    1689  kbdev->hwaccess.active_kctx = kctx;  in kbase_js_use_ctx()
    2370  last_active = kbdev->hwaccess.active_kctx;  in kbase_js_sched()
    2492  if (kbdev->hwaccess.active_kctx == last_active && ctx_waiting) {  in kbase_js_sched()
    2493  kbdev->hwaccess.active_kctx = NULL;  in kbase_js_sched()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_jm.c
    42   kctx = kbdev->hwaccess.active_kctx;  in kbase_jm_next_job()
    106  if (kbdev->hwaccess.active_kctx == kctx)  in kbase_jm_idle_ctx()
    107  kbdev->hwaccess.active_kctx = NULL;  in kbase_jm_idle_ctx()
mali_kbase_js.c
    1136  if (enqueue_required && kctx == kbdev->hwaccess.active_kctx)  in kbasep_js_add_job()
    1436  if (kbdev->hwaccess.active_kctx == kctx)  in kbasep_js_runpool_release_ctx_internal()
    1437  kbdev->hwaccess.active_kctx = NULL;  in kbasep_js_runpool_release_ctx_internal()
    1711  kbdev->hwaccess.active_kctx = kctx;  in kbasep_js_schedule_ctx()
    1777  kbdev->hwaccess.active_kctx = kctx;  in kbase_js_use_ctx()
    2484  last_active = kbdev->hwaccess.active_kctx;  in kbase_js_sched()
    2635  if (kbdev->hwaccess.active_kctx == last_active && ctx_waiting)  in kbase_js_sched()
    2636  kbdev->hwaccess.active_kctx = NULL;  in kbase_js_sched()
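Across kbasep_js_runpool_release_ctx_internal(), kbase_jm_idle_ctx() and the end of kbase_js_sched(), the pattern is the same: active_kctx is only cleared when it still points at the context being released, so a context scheduled in the meantime is not knocked out. A minimal runnable sketch of that compare-before-clear idiom; the struct shapes are cut down for illustration.

    #include <stdio.h>

    struct kbase_context { int id; };
    struct hwaccess { struct kbase_context *active_kctx; };

    static void schedule_ctx(struct hwaccess *hw, struct kbase_context *kctx)
    {
        hw->active_kctx = kctx; /* as in kbasep_js_schedule_ctx() */
    }

    /* Clear only if kctx is still the active one: a stale release must not
     * evict whoever was scheduled after it. */
    static void release_ctx(struct hwaccess *hw, struct kbase_context *kctx)
    {
        if (hw->active_kctx == kctx)
            hw->active_kctx = NULL;
    }

    int main(void)
    {
        struct kbase_context a = { 1 }, b = { 2 };
        struct hwaccess hw = { 0 };

        schedule_ctx(&hw, &a);
        schedule_ctx(&hw, &b); /* b replaces a */
        release_ctx(&hw, &a);  /* stale release: no effect */
        printf("active = %d\n", hw.active_kctx ? hw.active_kctx->id : 0);
        return 0;
    }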
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_jm.c
    46   kctx = kbdev->hwaccess.active_kctx[js];  in kbase_jm_next_job()
    119  if (kbdev->hwaccess.active_kctx[js] == kctx) {  in kbase_jm_idle_ctx()
    122  kbdev->hwaccess.active_kctx[js] = NULL;  in kbase_jm_idle_ctx()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_jm.c
    46   kctx = kbdev->hwaccess.active_kctx[js];  in kbase_jm_next_job()
    120  if (kbdev->hwaccess.active_kctx[js] == kctx) {  in kbase_jm_idle_ctx()
    122  kbdev->hwaccess.active_kctx[js] = NULL;  in kbase_jm_idle_ctx()
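Note the difference from the midgard copies above: bifrost keys the active context by job slot (active_kctx[js]) rather than keeping one device-wide pointer, so each slot is compared and cleared independently. A sketch extending the previous example to the per-slot shape; the slot count and the loop over all slots are illustrative (the hits show the driver checking the one slot it is working on).

    #include <stdio.h>

    #define NR_SLOTS 3 /* stand-in for the driver's slot count */

    struct kbase_context { int id; };
    struct hwaccess { struct kbase_context *active_kctx[NR_SLOTS]; };

    /* Per-slot variant of the compare-before-clear idiom. */
    static void idle_ctx(struct hwaccess *hw, struct kbase_context *kctx)
    {
        for (int js = 0; js < NR_SLOTS; js++)
            if (hw->active_kctx[js] == kctx)
                hw->active_kctx[js] = NULL;
    }

    int main(void)
    {
        struct kbase_context a = { 1 }, b = { 2 };
        struct hwaccess hw = { { &a, &b, &a } };

        idle_ctx(&hw, &a); /* clears slots 0 and 2, leaves slot 1 */
        for (int js = 0; js < NR_SLOTS; js++)
            printf("slot %d: %d\n", js,
                   hw.active_kctx[js] ? hw.active_kctx[js]->id : 0);
        return 0;
    }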
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/device/backend/
mali_kbase_device_jm.c
    114  init_waitqueue_head(&kbdev->hwaccess.backend.reset_wait);  in kbase_backend_late_init()