/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/backend/gpu/
mali_kbase_pm_backend.c
  55 kbdev->pm.backend.callback_power_on = in kbase_pm_runtime_init()
  57 kbdev->pm.backend.callback_power_off = in kbase_pm_runtime_init()
  59 kbdev->pm.backend.callback_power_suspend = in kbase_pm_runtime_init()
  61 kbdev->pm.backend.callback_power_resume = in kbase_pm_runtime_init()
  63 kbdev->pm.callback_power_runtime_init = in kbase_pm_runtime_init()
  65 kbdev->pm.callback_power_runtime_term = in kbase_pm_runtime_init()
  67 kbdev->pm.backend.callback_power_runtime_on = in kbase_pm_runtime_init()
  69 kbdev->pm.backend.callback_power_runtime_off = in kbase_pm_runtime_init()
  71 kbdev->pm.backend.callback_power_runtime_idle = in kbase_pm_runtime_init()
  73 kbdev->pm in kbase_pm_runtime_init()
  350 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_gpu_poweroff_wait_wq() local
  475 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_gpu_clock_control_worker() local
  512 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_hwcnt_disable_worker() local
  [all...]
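The kbase_pm_runtime_init() hits above are the point where a platform's power-management callbacks are copied into the driver's backend state (the vendor copy of this file, further down, shows the full assignments from a callbacks table). The standalone C sketch below models that registration pattern; the struct layouts and the demo callbacks are invented for illustration, and only the shape of the assignments mirrors the driver.

    /* Standalone model of the callback-registration pattern seen in
     * kbase_pm_runtime_init(): a platform supplies a table of PM callbacks
     * and the driver copies them into its backend state. Struct layouts here
     * are simplified stand-ins, not the real kbase definitions. */
    #include <stdio.h>

    struct gpu_device;                       /* opaque device handle (stand-in) */

    struct pm_callback_conf {                /* platform-provided callback table */
        void (*power_on_callback)(struct gpu_device *dev);
        void (*power_off_callback)(struct gpu_device *dev);
    };

    struct pm_backend {                      /* driver-side copies of the callbacks */
        void (*callback_power_on)(struct gpu_device *dev);
        void (*callback_power_off)(struct gpu_device *dev);
    };

    struct gpu_device {
        struct pm_backend backend;
    };

    static void demo_power_on(struct gpu_device *dev)  { (void)dev; puts("power on");  }
    static void demo_power_off(struct gpu_device *dev) { (void)dev; puts("power off"); }

    /* Copy the platform table into the device, mirroring the assignments
     * matched above (callback_power_on = callbacks->power_on_callback, ...). */
    static void pm_runtime_init(struct gpu_device *dev, const struct pm_callback_conf *callbacks)
    {
        dev->backend.callback_power_on  = callbacks->power_on_callback;
        dev->backend.callback_power_off = callbacks->power_off_callback;
    }

    int main(void)
    {
        static const struct pm_callback_conf callbacks = {
            .power_on_callback  = demo_power_on,
            .power_off_callback = demo_power_off,
        };
        struct gpu_device dev = { 0 };

        pm_runtime_init(&dev, &callbacks);
        dev.backend.callback_power_on(&dev);   /* invoked through the stored pointer */
        dev.backend.callback_power_off(&dev);
        return 0;
    }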
mali_kbase_pm_metrics.c
  66 HR_TIMER_DELAY_MSEC(metrics->kbdev->pm.dvfs_period), in dvfs_callback()
  85 kbdev->pm.backend.metrics.kbdev = kbdev; in kbasep_pm_metrics_init()
  86 kbdev->pm.backend.metrics.time_period_start = ktime_get(); in kbasep_pm_metrics_init()
  87 kbdev->pm.backend.metrics.values.time_busy = 0; in kbasep_pm_metrics_init()
  88 kbdev->pm.backend.metrics.values.time_idle = 0; in kbasep_pm_metrics_init()
  89 kbdev->pm.backend.metrics.values.time_in_protm = 0; in kbasep_pm_metrics_init()
  104 &kbdev->pm.backend.metrics.ipa_control_client); in kbasep_pm_metrics_init()
  113 kbdev->pm.backend.metrics.kbdev = kbdev; in kbasep_pm_metrics_init()
  114 kbdev->pm.backend.metrics.time_period_start = ktime_get(); in kbasep_pm_metrics_init()
  116 kbdev->pm in kbasep_pm_metrics_init()
  [all...]
mali_kbase_pm_policy.c
  76 kbdev->pm.backend.pm_current_policy = default_policy; in kbase_pm_policy_init()
  77 kbdev->pm.backend.csf_pm_sched_flags = default_policy->pm_sched_flags; in kbase_pm_policy_init()
  81 kbdev->pm.backend.pm_current_policy = default_policy; in kbase_pm_policy_init()
  87 kbdev->pm.backend.pm_current_policy->term(kbdev); in kbase_pm_policy_term()
  92 struct kbase_pm_device_data *pm = &kbdev->pm; in kbase_pm_update_active() local
  93 struct kbase_pm_backend_data *backend = &pm->backend; in kbase_pm_update_active()
  97 lockdep_assert_held(&pm->lock); in kbase_pm_update_active()
  99 /* pm_current_policy will never be NULL while pm.lock is held */ in kbase_pm_update_active()
  107 kbdev->pm in kbase_pm_update_active()
  [all...]
mali_kbase_pm_driver.c
  113 kbdev->pm.backend.mcu_desired) in kbase_pm_is_mcu_desired()
  117 if (kbdev->pm.backend.gpu_wakeup_override) in kbase_pm_is_mcu_desired()
  126 return (kbdev->pm.backend.mcu_desired && in kbase_pm_is_mcu_desired()
  128 !kbdev->pm.backend.policy_change_clamp_state_to_off); in kbase_pm_is_mcu_desired()
  135 if (kbdev->pm.backend.protected_entry_transition_override) in kbase_pm_is_l2_desired()
  138 if (kbdev->pm.backend.protected_transition_override && in kbase_pm_is_l2_desired()
  139 kbdev->pm.backend.protected_l2_override) in kbase_pm_is_l2_desired()
  142 if (kbdev->pm.backend.protected_transition_override && in kbase_pm_is_l2_desired()
  143 !kbdev->pm.backend.shaders_desired) in kbase_pm_is_l2_desired()
  146 if (unlikely(kbdev->pm in kbase_pm_is_l2_desired()
  [all...]
mali_kbase_pm_ca.c
  37 struct kbase_pm_backend_data *pm_backend = &kbdev->pm.backend; in kbase_pm_ca_init()
  56 struct kbase_pm_backend_data *pm_backend = &kbdev->pm.backend; in kbase_devfreq_set_core_mask()
  62 if (!(core_mask & kbdev->pm.debug_core_mask)) { in kbase_devfreq_set_core_mask()
  65 core_mask, kbdev->pm.debug_core_mask); in kbase_devfreq_set_core_mask()
  69 if (!(core_mask & kbdev->pm.debug_core_mask_all)) { in kbase_devfreq_set_core_mask()
  71 core_mask, kbdev->pm.debug_core_mask_all); in kbase_devfreq_set_core_mask()
  97 u64 debug_core_mask = kbdev->pm.debug_core_mask; in kbase_pm_ca_get_core_mask()
  99 u64 debug_core_mask = kbdev->pm.debug_core_mask_all; in kbase_pm_ca_get_core_mask()
  112 kbdev->pm.backend.ca_cores_enabled & in kbase_pm_ca_get_core_mask()
  131 return kbdev->pm in kbase_pm_ca_get_instr_core_mask()
  [all...]
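The kbase_devfreq_set_core_mask() matches show a requested shader-core mask being checked against pm.debug_core_mask before it is accepted, and kbase_pm_ca_get_core_mask() later combining the enabled cores with that debug mask. The sketch below is a minimal model of that validation idea; the function name, the acceptance rule and the error reporting are illustrative, not the kbase implementation.

    /* Minimal sketch of core-mask validation: a requested mask is rejected
     * unless it overlaps the mask currently allowed for debugging, and only
     * the intersection stays enabled. Names are stand-ins. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static bool set_core_mask(uint64_t *ca_cores_enabled, uint64_t core_mask,
                              uint64_t debug_core_mask)
    {
        /* Mirror of the "!(core_mask & kbdev->pm.debug_core_mask)" check above. */
        if (!(core_mask & debug_core_mask)) {
            fprintf(stderr,
                    "rejected mask 0x%llx: no overlap with debug mask 0x%llx\n",
                    (unsigned long long)core_mask,
                    (unsigned long long)debug_core_mask);
            return false;
        }
        /* Only cores that are both requested and allowed remain enabled. */
        *ca_cores_enabled = core_mask & debug_core_mask;
        return true;
    }

    int main(void)
    {
        uint64_t enabled = 0;

        set_core_mask(&enabled, 0x0F, 0xFF);   /* accepted: cores 0-3 */
        printf("enabled = 0x%llx\n", (unsigned long long)enabled);
        set_core_mask(&enabled, 0xF0, 0x0F);   /* rejected: no overlap */
        return 0;
    }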
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/backend/gpu/
mali_kbase_pm_backend.c
  53 kbdev->pm.backend.callback_power_on = callbacks->power_on_callback; in kbase_pm_runtime_init()
  54 kbdev->pm.backend.callback_power_off = callbacks->power_off_callback; in kbase_pm_runtime_init()
  55 kbdev->pm.backend.callback_power_suspend = callbacks->power_suspend_callback; in kbase_pm_runtime_init()
  56 kbdev->pm.backend.callback_power_resume = callbacks->power_resume_callback; in kbase_pm_runtime_init()
  57 kbdev->pm.callback_power_runtime_init = callbacks->power_runtime_init_callback; in kbase_pm_runtime_init()
  58 kbdev->pm.callback_power_runtime_term = callbacks->power_runtime_term_callback; in kbase_pm_runtime_init()
  59 kbdev->pm.backend.callback_power_runtime_on = callbacks->power_runtime_on_callback; in kbase_pm_runtime_init()
  60 kbdev->pm.backend.callback_power_runtime_off = callbacks->power_runtime_off_callback; in kbase_pm_runtime_init()
  61 kbdev->pm.backend.callback_power_runtime_idle = callbacks->power_runtime_idle_callback; in kbase_pm_runtime_init()
  62 kbdev->pm in kbase_pm_runtime_init()
  252 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_gpu_poweroff_wait_wq() local
  419 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_gpu_clock_control_worker() local
  454 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_hwcnt_disable_worker() local
  [all...]
mali_kbase_pm_metrics.c
  62 hrtimer_start(timer, HR_TIMER_DELAY_MSEC(metrics->kbdev->pm.dvfs_period), HRTIMER_MODE_REL); in dvfs_callback()
  75 kbdev->pm.backend.metrics.kbdev = kbdev; in kbasep_pm_metrics_init()
  77 kbdev->pm.backend.metrics.time_period_start = ktime_get(); in kbasep_pm_metrics_init()
  78 kbdev->pm.backend.metrics.gpu_active = false; in kbasep_pm_metrics_init()
  79 memset(kbdev->pm.backend.metrics.active_gl_ctx, 0, sizeof(u32) * BASE_JM_MAX_NR_SLOTS); in kbasep_pm_metrics_init()
  80 memset(kbdev->pm.backend.metrics.active_cl_ctx, 0, sizeof(u32) * BASE_MAX_NR_CLOCKS_REGULATORS); in kbasep_pm_metrics_init()
  82 kbdev->pm.backend.metrics.values.time_busy = 0; in kbasep_pm_metrics_init()
  83 kbdev->pm.backend.metrics.values.time_idle = 0; in kbasep_pm_metrics_init()
  84 kbdev->pm.backend.metrics.values.busy_cl[0] = 0; in kbasep_pm_metrics_init()
  85 kbdev->pm in kbasep_pm_metrics_init()
  [all...]
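kbasep_pm_metrics_init() resets the busy/idle counters, clears gpu_active and records time_period_start; the DVFS timer then periodically folds the time elapsed since the last sample into one counter or the other. The following is a rough userspace model of that accounting only: helper names are invented, clock_gettime stands in for ktime_get(), and the per-slot/per-clock arrays are omitted.

    /* Rough model of busy/idle time accounting: each update attributes the
     * interval since the previous sample to either the busy or the idle
     * counter, depending on whether the GPU was active during it. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <time.h>

    struct pm_metrics {
        bool     gpu_active;
        uint64_t time_busy_ns;
        uint64_t time_idle_ns;
        uint64_t period_start_ns;      /* counterpart of time_period_start */
    };

    static uint64_t now_ns(void)
    {
        struct timespec ts;
        clock_gettime(CLOCK_MONOTONIC, &ts);
        return (uint64_t)ts.tv_sec * 1000000000ull + (uint64_t)ts.tv_nsec;
    }

    static void metrics_init(struct pm_metrics *m)
    {
        m->gpu_active = false;          /* same reset-to-idle seen above */
        m->time_busy_ns = 0;
        m->time_idle_ns = 0;
        m->period_start_ns = now_ns();
    }

    /* Fold the elapsed interval into busy or idle, then start a new interval. */
    static void metrics_update(struct pm_metrics *m, bool gpu_now_active)
    {
        uint64_t now = now_ns();
        uint64_t delta = now - m->period_start_ns;

        if (m->gpu_active)
            m->time_busy_ns += delta;
        else
            m->time_idle_ns += delta;

        m->gpu_active = gpu_now_active;
        m->period_start_ns = now;
    }

    int main(void)
    {
        struct pm_metrics m;
        struct timespec pause = { 0, 10 * 1000 * 1000 };   /* 10 ms */

        metrics_init(&m);
        nanosleep(&pause, NULL);
        metrics_update(&m, true);       /* GPU becomes busy after an idle spell */
        nanosleep(&pause, NULL);
        metrics_update(&m, false);      /* back to idle after a busy spell */

        printf("busy %llu ns, idle %llu ns\n",
               (unsigned long long)m.time_busy_ns,
               (unsigned long long)m.time_idle_ns);
        return 0;
    }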
mali_kbase_pm_policy.c
  50 kbdev->pm.backend.pm_current_policy = all_policy_list[0]; in kbase_pm_policy_init()
  51 kbdev->pm.backend.pm_current_policy->init(kbdev); in kbase_pm_policy_init()
  56 kbdev->pm.backend.pm_current_policy->term(kbdev); in kbase_pm_policy_term()
  61 struct kbase_pm_device_data *pm = &kbdev->pm; in kbase_pm_update_active() local
  62 struct kbase_pm_backend_data *backend = &pm->backend; in kbase_pm_update_active()
  66 lockdep_assert_held(&pm->lock); in kbase_pm_update_active()
  68 /* pm_current_policy will never be NULL while pm.lock is held */ in kbase_pm_update_active()
  76 kbdev->pm.backend.pm_current_policy->name); in kbase_pm_update_active()
  80 if (!pm in kbase_pm_update_active()
  [all...]
mali_kbase_pm_driver.c
  109 return (kbdev->pm.backend.mcu_desired && (kbdev->pm.backend.pm_current_policy == &kbase_pm_always_on_policy_ops)); in kbase_pm_is_mcu_desired()
  115 if (kbdev->pm.backend.protected_entry_transition_override) { in kbase_pm_is_l2_desired()
  119 if (kbdev->pm.backend.protected_transition_override && kbdev->pm.backend.protected_l2_override) { in kbase_pm_is_l2_desired()
  123 if (kbdev->pm.backend.protected_transition_override && !kbdev->pm.backend.shaders_desired) { in kbase_pm_is_l2_desired()
  127 return kbdev->pm.backend.l2_desired; in kbase_pm_is_l2_desired()
  134 kbdev->pm.backend.protected_transition_override = true; in kbase_pm_protected_override_enable()
  140 kbdev->pm in kbase_pm_protected_override_disable()
  [all...]
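The kbase_pm_is_l2_desired() matches spell out how the protected-mode override flags gate the L2 desire bit before the generic l2_desired flag is consulted. The condensed restatement below keeps only that decision logic; the flag struct is a stripped-down stand-in for kbdev->pm.backend, not its real layout.

    /* Condensed restatement of the L2-desired decision visible above. */
    #include <stdbool.h>
    #include <stdio.h>

    struct pm_backend_flags {
        bool protected_entry_transition_override;
        bool protected_transition_override;
        bool protected_l2_override;
        bool shaders_desired;
        bool l2_desired;
    };

    static bool pm_is_l2_desired(const struct pm_backend_flags *b)
    {
        if (b->protected_entry_transition_override)
            return false;                   /* entering protected mode: keep L2 off */

        if (b->protected_transition_override && b->protected_l2_override)
            return true;                    /* explicit request to hold L2 on */

        if (b->protected_transition_override && !b->shaders_desired)
            return false;                   /* transition in progress, shaders not wanted */

        return b->l2_desired;               /* otherwise follow the normal desire flag */
    }

    int main(void)
    {
        struct pm_backend_flags b = { .l2_desired = true };

        printf("l2 desired: %d\n", pm_is_l2_desired(&b));                        /* 1 */
        b.protected_entry_transition_override = true;
        printf("l2 desired during protected entry: %d\n", pm_is_l2_desired(&b)); /* 0 */
        return 0;
    }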
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/backend/gpu/
mali_kbase_pm_backend.c
  46 kbdev->pm.backend.gpu_powered = true; in kbase_pm_register_access_enable()
  59 kbdev->pm.backend.gpu_powered = false; in kbase_pm_register_access_disable()
  69 mutex_init(&kbdev->pm.lock); in kbase_hwaccess_pm_init()
  71 kbdev->pm.backend.gpu_poweroff_wait_wq = alloc_workqueue("kbase_pm_poweroff_wait", WQ_HIGHPRI | WQ_UNBOUND, 1); in kbase_hwaccess_pm_init()
  72 if (!kbdev->pm.backend.gpu_poweroff_wait_wq) { in kbase_hwaccess_pm_init()
  76 INIT_WORK(&kbdev->pm.backend.gpu_poweroff_wait_work, kbase_pm_gpu_poweroff_wait_wq); in kbase_hwaccess_pm_init()
  78 kbdev->pm.backend.gpu_powered = false; in kbase_hwaccess_pm_init()
  79 kbdev->pm.suspending = false; in kbase_hwaccess_pm_init()
  81 kbdev->pm.backend.driver_ready_for_irqs = false; in kbase_hwaccess_pm_init()
  83 kbdev->pm in kbase_hwaccess_pm_init()
  166 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_gpu_poweroff_wait_wq() local
  [all...]
mali_kbase_pm_metrics.c
  56 hrtimer_start(timer, HR_TIMER_DELAY_MSEC(metrics->kbdev->pm.dvfs_period), HRTIMER_MODE_REL); in dvfs_callback()
  69 kbdev->pm.backend.metrics.kbdev = kbdev; in kbasep_pm_metrics_init()
  71 kbdev->pm.backend.metrics.time_period_start = ktime_get(); in kbasep_pm_metrics_init()
  72 kbdev->pm.backend.metrics.time_busy = 0; in kbasep_pm_metrics_init()
  73 kbdev->pm.backend.metrics.time_idle = 0; in kbasep_pm_metrics_init()
  74 kbdev->pm.backend.metrics.prev_busy = 0; in kbasep_pm_metrics_init()
  75 kbdev->pm.backend.metrics.prev_idle = 0; in kbasep_pm_metrics_init()
  76 kbdev->pm.backend.metrics.gpu_active = false; in kbasep_pm_metrics_init()
  77 kbdev->pm.backend.metrics.active_cl_ctx[0] = 0; in kbasep_pm_metrics_init()
  78 kbdev->pm in kbasep_pm_metrics_init()
  [all...]
mali_kbase_pm_policy.c
  146 u64 prev_shader_state = kbdev->pm.backend.desired_shader_state; in kbasep_pm_do_poweroff_cores()
  147 u64 prev_tiler_state = kbdev->pm.backend.desired_tiler_state; in kbasep_pm_do_poweroff_cores()
  151 kbdev->pm.backend.desired_shader_state &= ~kbdev->pm.backend.shader_poweroff_pending; in kbasep_pm_do_poweroff_cores()
  152 kbdev->pm.backend.desired_tiler_state &= ~kbdev->pm.backend.tiler_poweroff_pending; in kbasep_pm_do_poweroff_cores()
  154 kbdev->pm.backend.shader_poweroff_pending = 0; in kbasep_pm_do_poweroff_cores()
  155 kbdev->pm.backend.tiler_poweroff_pending = 0; in kbasep_pm_do_poweroff_cores()
  157 if (prev_shader_state != kbdev->pm.backend.desired_shader_state || in kbasep_pm_do_poweroff_cores()
  158 prev_tiler_state != kbdev->pm in kbasep_pm_do_poweroff_cores()
  313 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_update_active() local
  [all...]
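kbasep_pm_do_poweroff_cores() folds the pending power-off masks into the desired-state masks and only requests a transition when something actually changed. Below is a small self-contained model of that bitmask bookkeeping, reduced to the shader mask; the struct and function names are stand-ins for the fields matched above.

    /* Cores queued for power-off are cleared from the desired mask, the
     * pending mask is reset, and a transition is only signalled on change. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct core_state {
        uint64_t desired_shader_state;
        uint64_t shader_poweroff_pending;
    };

    /* Returns true when a state transition needs to be requested. */
    static bool do_poweroff_cores(struct core_state *s)
    {
        uint64_t prev_shader_state = s->desired_shader_state;

        s->desired_shader_state &= ~s->shader_poweroff_pending;   /* drop pending cores */
        s->shader_poweroff_pending = 0;                           /* pending list consumed */

        return prev_shader_state != s->desired_shader_state;      /* only act on a change */
    }

    int main(void)
    {
        struct core_state s = {
            .desired_shader_state = 0x0F,      /* cores 0-3 currently desired */
            .shader_poweroff_pending = 0x0C,   /* cores 2-3 queued for power-off */
        };

        if (do_poweroff_cores(&s))
            printf("new desired mask 0x%llx, transition needed\n",
                   (unsigned long long)s.desired_shader_state);    /* 0x03 */
        return 0;
    }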
mali_kbase_pm_ca.c
  44 kbdev->pm.backend.ca_current_policy = policy_list[0]; in kbase_pm_ca_init()
  46 kbdev->pm.backend.ca_current_policy->init(kbdev); in kbase_pm_ca_init()
  53 kbdev->pm.backend.ca_current_policy->term(kbdev); in kbase_pm_ca_term()
  73 return kbdev->pm.backend.ca_current_policy; in kbase_pm_ca_get_policy()
  93 mutex_lock(&kbdev->pm.lock); in kbase_pm_ca_set_policy()
  97 old_policy = kbdev->pm.backend.ca_current_policy; in kbase_pm_ca_set_policy()
  98 kbdev->pm.backend.ca_current_policy = NULL; in kbase_pm_ca_set_policy()
  110 kbdev->pm.backend.ca_current_policy = new_policy; in kbase_pm_ca_set_policy()
  117 kbdev->pm.backend.ca_current_policy->update_core_status(kbdev, kbdev->shader_ready_bitmap, in kbase_pm_ca_set_policy()
  122 mutex_unlock(&kbdev->pm in kbase_pm_ca_set_policy()
  [all...]
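The kbase_pm_ca_set_policy() matches outline the swap sequence: take pm.lock, clear the current-policy pointer, terminate the old policy, initialise and install the new one, then unlock. The following standalone sketch keeps only that ordering; pthreads replace the kernel mutex, the policy types are invented, and the core-status update is omitted.

    /* Model of the "term old policy, init and install new policy under the
     * PM lock" sequence; only the ordering mirrors the driver. */
    #include <pthread.h>
    #include <stdio.h>

    struct ca_policy {
        const char *name;
        void (*init)(void);
        void (*term)(void);
    };

    static void fast_init(void) { puts("fast: init"); }
    static void fast_term(void) { puts("fast: term"); }
    static void slow_init(void) { puts("slow: init"); }
    static void slow_term(void) { puts("slow: term"); }

    static const struct ca_policy fast_policy = { "fast", fast_init, fast_term };
    static const struct ca_policy slow_policy = { "slow", slow_init, slow_term };

    static pthread_mutex_t pm_lock = PTHREAD_MUTEX_INITIALIZER;
    static const struct ca_policy *ca_current_policy;

    static void ca_init(void)
    {
        ca_current_policy = &fast_policy;   /* mirror of the policy_list[0] default */
        ca_current_policy->init();
    }

    static void ca_set_policy(const struct ca_policy *new_policy)
    {
        const struct ca_policy *old_policy;

        pthread_mutex_lock(&pm_lock);

        old_policy = ca_current_policy;
        ca_current_policy = NULL;           /* nobody may use the old policy mid-swap */
        if (old_policy)
            old_policy->term();

        new_policy->init();
        ca_current_policy = new_policy;

        pthread_mutex_unlock(&pm_lock);
        printf("current policy: %s\n", new_policy->name);
    }

    int main(void)
    {
        ca_init();
        ca_set_policy(&slow_policy);
        return 0;
    }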
mali_kbase_pm_driver.c
  475 kbdev->pm.backend.l2_powered = 1; in kbase_pm_transition_core_type()
  485 wake_up(&kbdev->pm.backend.l2_powered_wait); in kbase_pm_transition_core_type()
  488 kbdev->pm.backend.l2_powered = 0; in kbase_pm_transition_core_type()
  637 spin_lock(&kbdev->pm.backend.gpu_powered_lock); in kbase_pm_check_transitions_nolock()
  638 if (kbdev->pm.backend.gpu_powered == false) { in kbase_pm_check_transitions_nolock()
  639 spin_unlock(&kbdev->pm.backend.gpu_powered_lock); in kbase_pm_check_transitions_nolock()
  640 if (kbdev->pm.backend.desired_shader_state == 0 && kbdev->pm.backend.desired_tiler_state == 0) { in kbase_pm_check_transitions_nolock()
  656 cores_powered |= kbdev->pm.backend.desired_shader_state; in kbase_pm_check_transitions_nolock()
  667 tilers_powered |= kbdev->pm in kbase_pm_check_transitions_nolock()
  [all...]
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/backend/gpu/
mali_kbase_pm_backend.c
  48 kbdev->pm.backend.gpu_powered = true; in kbase_pm_register_access_enable()
  60 kbdev->pm.backend.gpu_powered = false; in kbase_pm_register_access_disable()
  70 mutex_init(&kbdev->pm.lock); in kbase_hwaccess_pm_init()
  72 kbdev->pm.backend.gpu_poweroff_wait_wq = alloc_workqueue("kbase_pm_poweroff_wait", in kbase_hwaccess_pm_init()
  74 if (!kbdev->pm.backend.gpu_poweroff_wait_wq) in kbase_hwaccess_pm_init()
  77 INIT_WORK(&kbdev->pm.backend.gpu_poweroff_wait_work, in kbase_hwaccess_pm_init()
  80 kbdev->pm.backend.gpu_powered = false; in kbase_hwaccess_pm_init()
  81 kbdev->pm.suspending = false; in kbase_hwaccess_pm_init()
  83 kbdev->pm.backend.driver_ready_for_irqs = false; in kbase_hwaccess_pm_init()
  85 kbdev->pm in kbase_hwaccess_pm_init()
  177 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_gpu_poweroff_wait_wq() local
  [all...]
mali_kbase_pm_metrics.c
  60 HR_TIMER_DELAY_MSEC(metrics->kbdev->pm.dvfs_period), in dvfs_callback()
  73 kbdev->pm.backend.metrics.kbdev = kbdev; in kbasep_pm_metrics_init()
  75 kbdev->pm.backend.metrics.time_period_start = ktime_get(); in kbasep_pm_metrics_init()
  76 kbdev->pm.backend.metrics.time_busy = 0; in kbasep_pm_metrics_init()
  77 kbdev->pm.backend.metrics.time_idle = 0; in kbasep_pm_metrics_init()
  78 kbdev->pm.backend.metrics.prev_busy = 0; in kbasep_pm_metrics_init()
  79 kbdev->pm.backend.metrics.prev_idle = 0; in kbasep_pm_metrics_init()
  80 kbdev->pm.backend.metrics.gpu_active = false; in kbasep_pm_metrics_init()
  81 kbdev->pm.backend.metrics.active_cl_ctx[0] = 0; in kbasep_pm_metrics_init()
  82 kbdev->pm in kbasep_pm_metrics_init()
  [all...]
mali_kbase_pm_policy.c
  160 u64 prev_shader_state = kbdev->pm.backend.desired_shader_state; in kbasep_pm_do_poweroff_cores()
  161 u64 prev_tiler_state = kbdev->pm.backend.desired_tiler_state; in kbasep_pm_do_poweroff_cores()
  165 kbdev->pm.backend.desired_shader_state &= in kbasep_pm_do_poweroff_cores()
  166 ~kbdev->pm.backend.shader_poweroff_pending; in kbasep_pm_do_poweroff_cores()
  167 kbdev->pm.backend.desired_tiler_state &= in kbasep_pm_do_poweroff_cores()
  168 ~kbdev->pm.backend.tiler_poweroff_pending; in kbasep_pm_do_poweroff_cores()
  170 kbdev->pm.backend.shader_poweroff_pending = 0; in kbasep_pm_do_poweroff_cores()
  171 kbdev->pm.backend.tiler_poweroff_pending = 0; in kbasep_pm_do_poweroff_cores()
  173 if (prev_shader_state != kbdev->pm.backend.desired_shader_state || in kbasep_pm_do_poweroff_cores()
  175 kbdev->pm in kbasep_pm_do_poweroff_cores()
  341 struct kbase_pm_device_data *pm = &kbdev->pm; kbase_pm_update_active() local
  [all...]
mali_kbase_pm_ca.c
  47 kbdev->pm.backend.ca_current_policy = policy_list[0]; in kbase_pm_ca_init()
  49 kbdev->pm.backend.ca_current_policy->init(kbdev); in kbase_pm_ca_init()
  56 kbdev->pm.backend.ca_current_policy->term(kbdev); in kbase_pm_ca_term()
  76 return kbdev->pm.backend.ca_current_policy; in kbase_pm_ca_get_policy()
  98 mutex_lock(&kbdev->pm.lock); in kbase_pm_ca_set_policy()
  102 old_policy = kbdev->pm.backend.ca_current_policy; in kbase_pm_ca_set_policy()
  103 kbdev->pm.backend.ca_current_policy = NULL; in kbase_pm_ca_set_policy()
  113 kbdev->pm.backend.ca_current_policy = new_policy; in kbase_pm_ca_set_policy()
  120 kbdev->pm.backend.ca_current_policy->update_core_status(kbdev, in kbase_pm_ca_set_policy()
  126 mutex_unlock(&kbdev->pm in kbase_pm_ca_set_policy()
  [all...]
mali_kbase_pm_driver.c
  505 kbdev->pm.backend.l2_powered = 1; in kbase_pm_transition_core_type()
  515 wake_up(&kbdev->pm.backend.l2_powered_wait); in kbase_pm_transition_core_type()
  518 kbdev->pm.backend.l2_powered = 0; in kbase_pm_transition_core_type()
  665 spin_lock(&kbdev->pm.backend.gpu_powered_lock); in kbase_pm_check_transitions_nolock()
  666 if (kbdev->pm.backend.gpu_powered == false) { in kbase_pm_check_transitions_nolock()
  667 spin_unlock(&kbdev->pm.backend.gpu_powered_lock); in kbase_pm_check_transitions_nolock()
  668 if (kbdev->pm.backend.desired_shader_state == 0 && in kbase_pm_check_transitions_nolock()
  669 kbdev->pm.backend.desired_tiler_state == 0) in kbase_pm_check_transitions_nolock()
  687 cores_powered |= kbdev->pm.backend.desired_shader_state; in kbase_pm_check_transitions_nolock()
  700 tilers_powered |= kbdev->pm in kbase_pm_check_transitions_nolock()
  [all...]
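kbase_pm_check_transitions_nolock() samples gpu_powered under its lock and bails out early when the GPU is off and no shader or tiler cores are desired. The sketch below loosely models that early-out only; a pthread mutex stands in for the kernel spinlock, the state struct is invented, and the actual transition handling is elided.

    /* Early-out pattern: take the lock protecting the powered flag, and skip
     * all transition work if the GPU is off and nothing is desired. */
    #include <pthread.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct pm_state {
        pthread_mutex_t gpu_powered_lock;
        bool            gpu_powered;
        uint64_t        desired_shader_state;
        uint64_t        desired_tiler_state;
    };

    /* Returns false when the check can be skipped entirely. */
    static bool check_transitions(struct pm_state *s)
    {
        pthread_mutex_lock(&s->gpu_powered_lock);
        if (!s->gpu_powered) {
            pthread_mutex_unlock(&s->gpu_powered_lock);
            /* GPU is off: with nothing desired there is no work to do. */
            if (s->desired_shader_state == 0 && s->desired_tiler_state == 0)
                return false;
            /* Otherwise a power-up would have to be requested first (not modelled). */
            return true;
        }
        pthread_mutex_unlock(&s->gpu_powered_lock);

        /* ... actual core transition handling would continue here ... */
        return true;
    }

    int main(void)
    {
        struct pm_state s = { PTHREAD_MUTEX_INITIALIZER, false, 0, 0 };

        printf("work needed: %d\n", check_transitions(&s));   /* 0 */
        s.desired_shader_state = 0x1;
        printf("work needed: %d\n", check_transitions(&s));   /* 1 */
        return 0;
    }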
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_pm.c
  54 old_count = kbdev->pm.active_count; in kbase_pm_context_active_handle_suspend()
  60 mutex_lock(&kbdev->pm.lock); in kbase_pm_context_active_handle_suspend()
  64 if (kbdev->pm.active_count != 0) { in kbase_pm_context_active_handle_suspend()
  69 mutex_unlock(&kbdev->pm.lock); in kbase_pm_context_active_handle_suspend()
  83 c = ++kbdev->pm.active_count; in kbase_pm_context_active_handle_suspend()
  98 mutex_unlock(&kbdev->pm.lock); in kbase_pm_context_active_handle_suspend()
  118 old_count = kbdev->pm.active_count; in kbase_pm_context_idle()
  124 mutex_lock(&kbdev->pm.lock); in kbase_pm_context_idle()
  126 c = --kbdev->pm.active_count; in kbase_pm_context_idle()
  142 * waiters must synchronize with us by locking the pm in kbase_pm_context_idle()
  [all...]
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_pm.c
  58 old_count = kbdev->pm.active_count; in kbase_pm_context_active_handle_suspend()
  63 mutex_lock(&kbdev->pm.lock); in kbase_pm_context_active_handle_suspend()
  67 if (kbdev->pm.active_count != 0) in kbase_pm_context_active_handle_suspend()
  71 mutex_unlock(&kbdev->pm.lock); in kbase_pm_context_active_handle_suspend()
  84 c = ++kbdev->pm.active_count; in kbase_pm_context_active_handle_suspend()
  97 mutex_unlock(&kbdev->pm.lock); in kbase_pm_context_active_handle_suspend()
  117 old_count = kbdev->pm.active_count; in kbase_pm_context_idle()
  122 mutex_lock(&kbdev->pm.lock); in kbase_pm_context_idle()
  124 c = --kbdev->pm.active_count; in kbase_pm_context_idle()
  139 * waiters must synchronize with us by locking the pm in kbase_pm_context_idle()
  [all...]
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_pm.c
  78 if (kbdev->pm.active_count != 0) in kbase_pm_context_active_handle_suspend()
  92 c = ++kbdev->pm.active_count; in kbase_pm_context_active_handle_suspend()
  107 dev_dbg(kbdev->dev, "%s %d\n", __func__, kbdev->pm.active_count); in kbase_pm_context_active_handle_suspend()
  123 c = --kbdev->pm.active_count; in kbase_pm_context_idle()
  134 * The waiters must synchronize with us by locking the pm.lock in kbase_pm_context_idle()
  137 wake_up(&kbdev->pm.zero_active_count_wait); in kbase_pm_context_idle()
  142 kbdev->pm.active_count, current->pid); in kbase_pm_context_idle()
  162 mutex_lock(&kbdev->pm.lock); in kbase_pm_driver_suspend()
  164 mutex_unlock(&kbdev->pm.lock); in kbase_pm_driver_suspend()
  167 kbdev->pm in kbase_pm_driver_suspend()
  [all...]
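The kbase_pm_context_active_handle_suspend() / kbase_pm_context_idle() matches show a reference-counted active count guarded by pm.lock, with the release of the last reference waking zero_active_count_wait so the suspend path can proceed. Below is a userspace model of that handshake only: a condition variable stands in for the kernel wait queue and all names are illustrative.

    /* Reference-counted "GPU is needed" handshake: take/drop a reference
     * under the PM lock; the last drop wakes anyone waiting for zero. */
    #include <pthread.h>
    #include <stdio.h>

    static pthread_mutex_t pm_lock = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t  zero_active_count_wait = PTHREAD_COND_INITIALIZER;
    static int active_count;

    static void pm_context_active(void)
    {
        pthread_mutex_lock(&pm_lock);
        int c = ++active_count;
        pthread_mutex_unlock(&pm_lock);
        printf("active, count=%d\n", c);
    }

    static void pm_context_idle(void)
    {
        pthread_mutex_lock(&pm_lock);
        int c = --active_count;
        if (c == 0)
            pthread_cond_broadcast(&zero_active_count_wait);  /* wake suspend path */
        pthread_mutex_unlock(&pm_lock);
        printf("idle, count=%d\n", c);
    }

    /* Suspend path: block until no contexts are holding the GPU active. */
    static void pm_wait_for_zero_active(void)
    {
        pthread_mutex_lock(&pm_lock);
        while (active_count != 0)
            pthread_cond_wait(&zero_active_count_wait, &pm_lock);
        pthread_mutex_unlock(&pm_lock);
        puts("no active contexts, safe to suspend");
    }

    int main(void)
    {
        pm_context_active();
        pm_context_idle();
        pm_wait_for_zero_active();   /* returns immediately: count already zero */
        return 0;
    }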
/device/soc/hisilicon/hi3751v350/sdk_linux/source/common/drv/himedia/
himedia_base.c
  142 if (drv != NULL && drv->pm && drv->pm->prepare) { in himedia_pm_prepare()
  143 ret = drv->pm->prepare(dev); in himedia_pm_prepare()
  153 if (drv != NULL && drv->pm && drv->pm->complete) { in himedia_pm_complete()
  154 drv->pm->complete(dev); in himedia_pm_complete()
  169 if (drv->pm) { in himedia_pm_suspend()
  170 if (drv->pm->suspend) { in himedia_pm_suspend()
  171 ret = drv->pm->suspend(dev); in himedia_pm_suspend()
  189 if (drv->pm) { in himedia_pm_suspend_noirq()
  [all...]
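himedia_base.c forwards each PM phase to the driver's pm-ops table only after checking that the table and the specific hook exist, so drivers may implement just a subset of the callbacks. The following is a minimal standalone model of that null-checked dispatch; the types are simplified stand-ins for the kernel's struct dev_pm_ops and struct device_driver.

    /* Null-checked dispatch through an optional pm-ops table. */
    #include <stdio.h>

    struct device;                                   /* opaque for the sketch */

    struct dev_pm_ops_model {
        int  (*prepare)(struct device *dev);
        void (*complete)(struct device *dev);
        int  (*suspend)(struct device *dev);
    };

    struct driver_model {
        const struct dev_pm_ops_model *pm;
    };

    static int pm_suspend_phase(const struct driver_model *drv, struct device *dev)
    {
        int ret = 0;

        /* Same chain as "if (drv->pm) { if (drv->pm->suspend) ... }" above. */
        if (drv->pm && drv->pm->suspend)
            ret = drv->pm->suspend(dev);

        return ret;
    }

    static int demo_suspend(struct device *dev)
    {
        (void)dev;
        puts("driver suspend hook called");
        return 0;
    }

    int main(void)
    {
        static const struct dev_pm_ops_model ops = { .suspend = demo_suspend };
        struct driver_model with_ops = { &ops };
        struct driver_model without_ops = { NULL };

        pm_suspend_phase(&with_ops, NULL);      /* hook runs */
        pm_suspend_phase(&without_ops, NULL);   /* silently skipped */
        return 0;
    }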
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_pm.c
  75 if (kbdev->pm.active_count != 0) { in kbase_pm_context_active_handle_suspend()
  90 c = ++kbdev->pm.active_count; in kbase_pm_context_active_handle_suspend()
  105 dev_dbg(kbdev->dev, "%s %d\n", __func__, kbdev->pm.active_count); in kbase_pm_context_active_handle_suspend()
  120 c = --kbdev->pm.active_count; in kbase_pm_context_idle()
  131 * The waiters must synchronize with us by locking the pm.lock in kbase_pm_context_idle()
  134 wake_up(&kbdev->pm.zero_active_count_wait); in kbase_pm_context_idle()
  138 dev_dbg(kbdev->dev, "%s %d (pid = %d)\n", __func__, kbdev->pm.active_count, current->pid); in kbase_pm_context_idle()
  157 mutex_lock(&kbdev->pm.lock); in kbase_pm_driver_suspend()
  159 mutex_unlock(&kbdev->pm.lock); in kbase_pm_driver_suspend()
  162 kbdev->pm in kbase_pm_driver_suspend()
  [all...]
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/platform/devicetree/
mali_kbase_runtime_pm.c
  86 WARN_ON(kbdev->pm.backend.gpu_powered); in pm_callback_power_on()
  89 WARN_ON(!kbdev->pm.active_count); in pm_callback_power_on()
  90 WARN_ON(kbdev->pm.runtime_active); in pm_callback_power_on()
  123 WARN_ON(kbdev->pm.backend.gpu_powered); in pm_callback_power_off()
  127 WARN_ON(kbdev->pm.backend.mcu_state != KBASE_MCU_OFF); in pm_callback_power_off()
  152 lockdep_assert_held(&kbdev->pm.lock); in pm_callback_runtime_gpu_active()
  155 WARN_ON(!kbdev->pm.backend.gpu_powered); in pm_callback_runtime_gpu_active()
  156 WARN_ON(!kbdev->pm.active_count); in pm_callback_runtime_gpu_active()
  157 WARN_ON(kbdev->pm.runtime_active); in pm_callback_runtime_gpu_active()
  166 * Caller would have taken the kbdev->pm in pm_callback_runtime_gpu_active()
  [all...]
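mali_kbase_runtime_pm.c guards its platform callbacks with WARN_ON preconditions: power-on expects the GPU to be off but referenced by at least one active context, and power-off expects the backend to have already cleared its powered flag. The sketch below models those checks with assert(); the state struct and the call sequence in main are invented for illustration.

    /* Precondition checks on power callbacks, modelled with assert(). */
    #include <assert.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct gpu_pm_state {
        bool gpu_powered;     /* backend view of the rails */
        int  active_count;    /* contexts holding the GPU active */
        bool runtime_active;  /* runtime PM already ramped up? */
    };

    static void pm_callback_power_on(struct gpu_pm_state *s)
    {
        assert(!s->gpu_powered);      /* must not already be powered */
        assert(s->active_count > 0);  /* someone must be asking for the GPU */
        assert(!s->runtime_active);   /* runtime PM not yet marked active */

        s->gpu_powered = true;
        puts("rails up, clocks on");
    }

    static void pm_callback_power_off(struct gpu_pm_state *s)
    {
        assert(!s->gpu_powered);      /* backend clears its flag before calling us */
        puts("rails down");
    }

    int main(void)
    {
        struct gpu_pm_state s = { .gpu_powered = false, .active_count = 1 };

        pm_callback_power_on(&s);
        s.gpu_powered = false;        /* backend marks the GPU off, then requests power-off */
        pm_callback_power_off(&s);
        return 0;
    }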