/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/backend/gpu/ |
H A D | mali_kbase_pm_backend.c |
    kbase_pm_runtime_init (49), kbase_pm_runtime_term (102), kbase_pm_register_access_enable (109),
    kbase_pm_register_access_disable (126), kbase_hwaccess_pm_init (138), kbase_pm_do_poweron (248),
    pm_handle_power_off (276), kbase_pm_gpu_poweroff_wait_wq (348), kbase_pm_l2_clock_slow (393),
    kbase_pm_l2_clock_normalize (438), kbase_pm_gpu_clock_control_worker (473),
    kbase_pm_hwcnt_disable_worker (510), kbase_pm_do_poweroff_sync (574), kbase_pm_do_poweroff (630),
    is_poweroff_in_progress (667), kbase_pm_wait_for_poweroff_work_complete (679),
    is_gpu_powered_down (686), kbase_pm_wait_for_gpu_power_down (698),
    kbase_hwaccess_pm_powerup (705), kbase_hwaccess_pm_halt (795), kbase_hwaccess_pm_term (812),
    kbase_pm_power_changed (852), kbase_pm_set_debug_core_mask (867, 877),
    kbase_hwaccess_pm_gpu_active (899), kbase_hwaccess_pm_gpu_idle (904),
    kbase_hwaccess_pm_suspend (909), kbase_hwaccess_pm_resume (944), kbase_pm_handle_gpu_lost (967),
    kbase_pm_force_mcu_wakeup_after_sleep (1022), pm_handle_mcu_sleep_on_runtime_suspend (1037),
    kbase_pm_handle_runtime_suspend (1110) [all...] |
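The hits above show kbase_pm_runtime_init() copying the platform's power-management callbacks into kbdev->pm.backend. A minimal sketch of that wiring, reconstructed from the truncated lines (the POWER_MANAGEMENT_CALLBACKS lookup, the NULL check and the return value are assumptions, not shown in the listing):

    /* Sketch only: reconstructed from the search hits; not the verbatim driver code. */
    int kbase_pm_runtime_init(struct kbase_device *kbdev)
    {
        struct kbase_pm_callback_conf *callbacks = POWER_MANAGEMENT_CALLBACKS; /* assumed lookup */

        if (callbacks) {
            kbdev->pm.backend.callback_power_on = callbacks->power_on_callback;
            kbdev->pm.backend.callback_power_off = callbacks->power_off_callback;
            kbdev->pm.backend.callback_power_suspend = callbacks->power_suspend_callback;
            kbdev->pm.backend.callback_power_resume = callbacks->power_resume_callback;
            kbdev->pm.callback_power_runtime_init = callbacks->power_runtime_init_callback;
            kbdev->pm.callback_power_runtime_term = callbacks->power_runtime_term_callback;
            kbdev->pm.backend.callback_power_runtime_on = callbacks->power_runtime_on_callback;
            kbdev->pm.backend.callback_power_runtime_off = callbacks->power_runtime_off_callback;
        }
        return 0;
    }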
H A D | mali_kbase_pm_driver.c |
    kbase_pm_is_mcu_desired (105), kbase_pm_is_l2_desired (132),
    kbase_pm_protected_override_enable (158), kbase_pm_protected_override_disable (164),
    kbase_pm_protected_entry_override_enable (171), kbase_pm_protected_entry_override_disable (195),
    kbase_pm_protected_l2_override (209), mali_cci_flush_l2 (265), kbase_pm_invoke (307),
    kbase_pm_get_state (394), kbase_pm_get_present_cores (419), kbase_pm_get_active_cores (452),
    kbase_pm_get_trans_cores (469), kbase_pm_get_ready_cores (485),
    kbase_pm_trigger_hwcnt_disable (511), kbase_pm_l2_config_override (529),
    kbase_pm_control_gpu_clock (577), kbase_pm_handle_mcu_core_attr_update (600),
    kbase_pm_is_mcu_inactive (629), kbase_pm_enable_mcu_db_notification (647),
    kbase_pm_mcu_update_state (658), core_idle_worker (964),
    kbase_pm_l2_clear_backend_slot_submit_kctx (1008), kbase_pm_l2_update_state (1020),
    shader_poweroff_timer_queue_cancel (1410), kbase_pm_shaders_update_state (1440),
    kbase_pm_is_in_desired_state_nolock (1800), kbase_pm_is_in_desired_state (1833),
    kbase_pm_is_in_desired_state_with_l2_powered (1845), kbase_pm_trace_power_state (1860),
    kbase_pm_update_state (1890), kbase_pm_state_machine_init (1977),
    kbase_pm_state_machine_term (2007), kbase_pm_reset_start_locked (2016),
    kbase_pm_reset_complete (2057), kbase_pm_timed_out (2086), kbase_pm_wait_for_l2_powered (2147),
    kbase_pm_wait_for_desired_state (2186), kbase_pm_enable_interrupts (2226),
    kbase_pm_disable_interrupts_nolock (2254), kbase_pm_disable_interrupts (2272),
    update_user_reg_page_mapping (2284), kbase_pm_clock_on (2306), kbase_pm_clock_off (2441),
    kbase_pm_reset_done (2521), kbase_pm_wait_for_reset (2535), kbase_set_gpu_quirks (2561),
    kbase_set_sc_quirks (2617), kbase_set_tiler_quirks (2641), kbase_pm_hw_issues_detect (2658),
    kbase_pm_hw_issues_apply (2715), kbase_pm_cache_snoop_enable (2734),
    kbase_pm_cache_snoop_disable (2747), reenable_protected_mode_hwcnt (2762),
    kbase_pm_do_reset (2776), kbase_pm_protected_mode_enable (2879),
    kbase_pm_protected_mode_disable (2886), kbase_pm_init_hw (2893),
    kbase_pm_request_gpu_cycle_counter_do_request (3014), kbase_pm_request_gpu_cycle_counter (3043),
    kbase_pm_request_gpu_cycle_counter_l2_is_on (3059),
    kbase_pm_release_gpu_cycle_counter_nolock (3073),
    kbase_pm_release_gpu_cycle_counter (3097) [all...] |
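The kbase_pm_is_mcu_desired() hits outline when the CSF firmware MCU should be kept powered. A hedged sketch of that decision (the return values of the truncated middle branches are assumptions):

    bool kbase_pm_is_mcu_desired(struct kbase_device *kbdev)
    {
        lockdep_assert_held(&kbdev->hwaccess_lock);

        if (unlikely(!kbdev->csf.firmware_inited))
            return false;

        if (kbdev->csf.scheduler.pm_active_count && kbdev->pm.backend.mcu_desired)
            return true;

        /* Assumed: a wakeup override also keeps the MCU on. */
        if (kbdev->pm.backend.gpu_wakeup_override)
            return true;

        /* Otherwise only while shader-core power-off is not permitted. */
        return kbdev->pm.backend.mcu_desired && kbase_pm_no_mcu_core_pwroff(kbdev);
    }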
H A D | mali_kbase_pm_internal.h |
    kbase_pm_dev_idle (42), kbase_pm_dev_activate (51), kbase_pm_get_present_cores (67),
    kbase_pm_get_active_cores (83), kbase_pm_get_trans_cores (99),
    kbase_pm_idle_groups_sched_suspendable (771), kbase_pm_no_runnables_sched_suspendable (789),
    kbase_pm_no_mcu_core_pwroff (806), kbase_pm_lock (822), kbase_pm_unlock (835),
    kbase_pm_gpu_sleep_allowed (854), kbase_pm_enable_db_mirror_interrupt (883),
    kbase_pm_disable_db_mirror_interrupt (907) [all...] |
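Among the inline helpers listed here, kbase_pm_lock()/kbase_pm_unlock() are the ones most other PM entry points rely on. Their bodies are not shown in the hits; a plausible shape, assuming they simply wrap the kbdev->pm.lock mutex that the backend initialises:

    /* Assumed implementation; the real helpers may take additional scheduler locks. */
    static inline void kbase_pm_lock(struct kbase_device *kbdev)
    {
        mutex_lock(&kbdev->pm.lock);
    }

    static inline void kbase_pm_unlock(struct kbase_device *kbdev)
    {
        mutex_unlock(&kbdev->pm.lock);
    }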
H A D | mali_kbase_model_linux.c |
    serve_job_irq (44), serve_gpu_irq (72), serve_mmu_irq (92), gpu_device_raise_irq (112),
    kbase_reg_write (142), kbase_reg_read (153), kbase_is_gpu_removed (175),
    kbase_install_interrupts (180), kbase_release_interrupts (202), kbase_synchronize_irqs (209),
    kbase_set_custom_irq_handler (217), kbase_gpu_device_create (236),
    kbase_gpu_device_destroy (251) [all...] |
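serve_job_irq() in the model (no-hardware) backend keeps draining the job IRQ status and dispatching it under hwaccess_lock. A sketch of that loop from the hits; the containing data type, the register offset and the slab cache name are assumptions, and the real function is a workqueue callback rather than taking the data pointer directly:

    static void serve_job_irq(struct model_irq_data *data)  /* type name assumed */
    {
        struct kbase_device *kbdev = data->kbdev;
        unsigned long flags;
        u32 val;

        while (atomic_cmpxchg(&kbdev->serving_job_irq, 1, 0) == 1) {
            /* Drain every pending job IRQ reported by the model. */
            while ((val = kbase_reg_read(kbdev, JOB_CONTROL_REG(JOB_IRQ_STATUS)))) {  /* offset assumed */
                spin_lock_irqsave(&kbdev->hwaccess_lock, flags);
    #if MALI_USE_CSF
                kbase_csf_interrupt(kbdev, val);
    #else
                kbase_job_done(kbdev, val);
    #endif
                spin_unlock_irqrestore(&kbdev->hwaccess_lock, flags);
            }
        }

        kmem_cache_free(kbdev->irq_slab, data);  /* cache name assumed */
    }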
H A D | mali_kbase_pm_policy.c |
    kbase_pm_policy_init (48), kbase_pm_policy_term (85), kbase_pm_update_active (90),
    kbase_pm_update_dynamic_cores_onoff (154), kbase_pm_update_cores_state_nolock (189),
    kbase_pm_update_cores_state (218), kbase_pm_list_policies (229), kbase_pm_get_policy (240),
    policy_change_wait_for_L2_off (250), kbase_pm_set_policy (288) [all...] |
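The kbase_pm_policy_init() hits show the chosen default policy being initialised and then published; on CSF builds the policy pointer and its scheduling flags are written under hwaccess_lock so the scheduler sees them atomically. A sketch (how the default policy is chosen, e.g. from the kbdev->dev->of_node shown in the hits, is omitted, and the default policy symbol is an assumption):

    void kbase_pm_policy_init(struct kbase_device *kbdev)
    {
        const struct kbase_pm_policy *default_policy = &kbase_pm_coarse_demand_policy_ops; /* assumed */
        unsigned long flags;

        default_policy->init(kbdev);

    #if MALI_USE_CSF
        spin_lock_irqsave(&kbdev->hwaccess_lock, flags);
        kbdev->pm.backend.pm_current_policy = default_policy;
        kbdev->pm.backend.csf_pm_sched_flags = default_policy->pm_sched_flags;
        spin_unlock_irqrestore(&kbdev->hwaccess_lock, flags);
    #else
        kbdev->pm.backend.pm_current_policy = default_policy;
    #endif
    }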
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/backend/gpu/ |
H A D | mali_kbase_pm_backend.c |
    kbase_pm_runtime_init (47), kbase_pm_runtime_term (85), kbase_pm_register_access_enable (92),
    kbase_pm_register_access_disable (111), kbase_hwaccess_pm_init (124), kbase_pm_do_poweron (223),
    kbase_pm_gpu_poweroff_wait_wq (251), kbase_pm_l2_clock_slow (333),
    kbase_pm_l2_clock_normalize (381), kbase_pm_gpu_clock_control_worker (418),
    kbase_pm_hwcnt_disable_worker (453), kbase_pm_do_poweroff (493), is_poweroff_in_progress (532),
    kbase_pm_wait_for_poweroff_complete (544), kbase_hwaccess_pm_powerup (550),
    kbase_hwaccess_pm_halt (611), kbase_hwaccess_pm_term (622), kbase_pm_power_changed (649),
    kbase_pm_set_debug_core_mask (663), kbase_hwaccess_pm_gpu_active (683),
    kbase_hwaccess_pm_gpu_idle (688), kbase_hwaccess_pm_suspend (693),
    kbase_hwaccess_pm_resume (715), kbase_pm_handle_gpu_lost (737) [all...] |
H A D | mali_kbase_pm_driver.c |
    kbase_pm_is_mcu_desired (92), kbase_pm_is_l2_desired (113),
    kbase_pm_protected_override_enable (130), kbase_pm_protected_override_disable (136),
    kbase_pm_protected_entry_override_enable (143), kbase_pm_protected_entry_override_disable (167),
    kbase_pm_protected_l2_override (180), mali_cci_flush_l2 (234), kbase_pm_invoke (272),
    kbase_pm_get_state (358), kbase_pm_get_present_cores (382), kbase_pm_get_active_cores (414),
    kbase_pm_get_trans_cores (430), kbase_pm_get_ready_cores (445),
    kbase_pm_trigger_hwcnt_disable (470), kbase_pm_l2_config_override (490),
    kbase_pm_control_gpu_clock (527), kbase_pm_mcu_update_state (551),
    kbase_pm_l2_update_state (687), shader_poweroff_timer_queue_cancel (1023),
    kbase_pm_shaders_update_state (1052), kbase_pm_is_in_desired_state_nolock (1339),
    kbase_pm_is_in_desired_state (1369), kbase_pm_is_in_desired_state_with_l2_powered (1381),
    kbase_pm_trace_power_state (1395), kbase_pm_update_state (1408),
    kbase_pm_state_machine_init (1494), kbase_pm_state_machine_term (1514),
    kbase_pm_reset_start_locked (1520), kbase_pm_reset_complete (1557), kbase_pm_timed_out (1582),
    kbase_pm_wait_for_l2_powered (1621), kbase_pm_wait_for_desired_state (1641),
    kbase_pm_enable_interrupts (1677), kbase_pm_disable_interrupts_nolock (1705),
    kbase_pm_disable_interrupts (1723), kbase_pm_clock_on (1740), kbase_pm_clock_off (1826),
    kbase_pm_reset_done (1894), kbase_pm_wait_for_reset (1908), kbase_set_jm_quirks (1931),
    kbase_set_sc_quirks (1982), kbase_set_tiler_quirks (2009), kbase_pm_hw_issues_detect (2027),
    kbase_pm_hw_issues_apply (2075), kbase_pm_cache_snoop_enable (2089),
    kbase_pm_cache_snoop_disable (2102), reenable_protected_mode_hwcnt (2116),
    kbase_pm_do_reset (2129), kbase_pm_protected_mode_enable (2218),
    kbase_pm_protected_mode_disable (2224), kbase_pm_init_hw (2231),
    kbase_pm_request_gpu_cycle_counter_do_request (2335), kbase_pm_request_gpu_cycle_counter (2350),
    kbase_pm_request_gpu_cycle_counter_l2_is_on (2363),
    kbase_pm_release_gpu_cycle_counter_nolock (2376),
    kbase_pm_release_gpu_cycle_counter (2397) [all...] |
H A D | mali_kbase_devfreq.c |
    opp_translate (75), kbase_devfreq_target (108), kbase_devfreq_force_freq (244),
    kbase_devfreq_cur_freq (253), kbase_devfreq_status (262), kbase_devfreq_init_freq_table (275),
    kbase_devfreq_term_freq_table (340), kbase_devfreq_term_core_mask_table (347),
    kbase_devfreq_exit (354), kbasep_devfreq_read_suspend_clock (359),
    kbase_devfreq_init_core_mask_table (394), kbase_devfreq_suspend_resume_worker (550),
    kbase_devfreq_enqueue_work (582), kbase_devfreq_work_init (596), kbase_devfreq_work_term (612),
    kbase_devfreq_get_static_power (622), kbase_devfreq_init (631), kbase_devfreq_term (777) [all...] |
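opp_translate() maps a devfreq OPP frequency onto the per-clock frequencies/voltages and the shader core mask recorded in kbdev->devfreq_table. A sketch following the hits; the fallback taken when no table entry matches is an assumption:

    static void opp_translate(struct kbase_device *kbdev, unsigned long freq, unsigned long volt,
                              u64 *core_mask, unsigned long *freqs, unsigned long *volts)
    {
        unsigned int i, j;

        for (i = 0; i < kbdev->num_opps; i++) {
            if (kbdev->devfreq_table[i].opp_freq != freq)
                continue;

            *core_mask = kbdev->devfreq_table[i].core_mask;
            for (j = 0; j < kbdev->nr_clocks; j++) {
                freqs[j] = kbdev->devfreq_table[i].real_freqs[j];
                volts[j] = kbdev->devfreq_table[i].opp_volts[j];
            }
            return;
        }

        /* Assumed fallback: no entry matched, keep the request as-is with all present cores. */
        *core_mask = kbdev->gpu_props.props.raw_props.shader_present;
        for (j = 0; j < kbdev->nr_clocks; j++) {
            freqs[j] = freq;
            volts[j] = volt;
        }
    }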
H A D | mali_kbase_instr_backend.c |
    kbase_instr_hwcnt_enable_internal (33), kbase_instr_hwcnt_disable_internal (134),
    kbase_instr_hwcnt_request_dump (199), kbase_instr_hwcnt_dump_complete (258),
    kbasep_cache_clean_worker (279), kbasep_hwcnt_irq_poll_tasklet (325),
    kbase_instr_hwcnt_sample_done (350), kbase_instr_hwcnt_wait_for_dump (380),
    kbase_instr_hwcnt_clear (407), kbase_instr_backend_init (433), kbase_instr_backend_term (461),
    kbase_instr_backend_debugfs_init (470) [all...] |
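The start of kbase_instr_hwcnt_enable_internal() visible above refuses to enable instrumentation unless the state machine is DISABLED, then unmasks the PRFCNT_SAMPLE_COMPLETED interrupt and records the dump target. A condensed sketch, pulled out into a hypothetical helper for illustration (the helper name, error code and the rest of the register setup are assumptions):

    static int hwcnt_enable_prologue(struct kbase_device *kbdev, struct kbase_context *kctx,
                                     u64 dump_buffer)
    {
        unsigned long flags;
        u32 irq_mask;

        spin_lock_irqsave(&kbdev->hwcnt.lock, flags);
        if (kbdev->hwcnt.backend.state != KBASE_INSTR_STATE_DISABLED) {
            spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags);
            return -EBUSY; /* assumed error code */
        }

        /* Let the GPU raise an interrupt when a counter sample completes. */
        irq_mask = kbase_reg_read(kbdev, GPU_CONTROL_REG(GPU_IRQ_MASK));
        kbase_reg_write(kbdev, GPU_CONTROL_REG(GPU_IRQ_MASK), irq_mask | PRFCNT_SAMPLE_COMPLETED);

        kbdev->hwcnt.kctx = kctx;
        kbdev->hwcnt.addr = dump_buffer;
        spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags);
        return 0;
    }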
H A D | mali_kbase_pm_internal.h |
    kbase_pm_dev_idle (44), kbase_pm_dev_activate (53), kbase_pm_get_present_cores (69),
    kbase_pm_get_active_cores (84), kbase_pm_get_trans_cores (99), kbase_pm_lock (710),
    kbase_pm_unlock (723) [all...] |
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/backend/gpu/ |
H A D | mali_kbase_pm_driver.c |
    mali_cci_flush_l2 (133), kbase_pm_invoke (171), kbase_pm_get_state (279),
    kbasep_pm_init_core_use_bitmaps (295), kbase_pm_get_present_cores (317),
    kbase_pm_get_active_cores (351), kbase_pm_get_trans_cores (367), kbase_pm_get_ready_cores (382),
    kbase_pm_transition_core_type (432), kbase_pm_check_transitions_nolock (614),
    kbase_pm_check_transitions_sync (844), kbase_pm_enable_interrupts (898),
    kbase_pm_disable_interrupts_nolock (921), kbase_pm_disable_interrupts (939),
    kbase_pm_clock_on (956), kbase_pm_clock_off (1014), kbase_pm_reset_done (1072),
    kbase_pm_wait_for_reset (1086), kbase_pm_hw_issues_detect (1109), kbase_pm_hw_issues_apply (1225),
    kbase_pm_cache_snoop_enable (1236), kbase_pm_cache_snoop_disable (1249), kbase_pm_do_reset (1263),
    kbasep_protected_mode_enable (1338), kbasep_protected_mode_disable (1346), kbase_pm_init_hw (1356),
    kbase_pm_request_gpu_cycle_counter_do_request (1481), kbase_pm_request_gpu_cycle_counter (1496),
    kbase_pm_request_gpu_cycle_counter_l2_is_on (1511),
    kbase_pm_release_gpu_cycle_counter_nolock (1526),
    kbase_pm_release_gpu_cycle_counter (1549) [all...] |
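The midgard copy of mali_cci_flush_l2() issues a clean+invalidate of the GPU caches and then polls GPU_IRQ_RAWSTAT for completion (note the extra NULL context argument this older register API takes). A sketch; the completion bit tested and the absence of a timeout are assumptions based on the truncated loop:

    static void mali_cci_flush_l2(struct kbase_device *kbdev)
    {
        const u32 mask = CLEAN_CACHES_COMPLETED; /* assumed completion bit */
        u32 raw;

        kbase_reg_write(kbdev, GPU_CONTROL_REG(GPU_COMMAND), GPU_COMMAND_CLEAN_INV_CACHES, NULL);

        raw = kbase_reg_read(kbdev, GPU_CONTROL_REG(GPU_IRQ_RAWSTAT), NULL);
        while ((raw & mask) == 0)
            raw = kbase_reg_read(kbdev, GPU_CONTROL_REG(GPU_IRQ_RAWSTAT), NULL);
    }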
H A D | mali_kbase_pm_backend.c |
    kbase_pm_register_access_enable (36), kbase_pm_register_access_disable (49),
    kbase_hwaccess_pm_init (62), kbase_pm_do_poweron (146), kbase_pm_gpu_poweroff_wait_wq (165),
    kbase_pm_do_poweroff (244), is_poweroff_in_progress (275),
    kbase_pm_wait_for_poweroff_complete (287), kbase_hwaccess_pm_powerup (292),
    kbase_hwaccess_pm_halt (349), kbase_hwaccess_pm_term (361), kbase_pm_power_changed (377),
    kbase_pm_set_debug_core_mask (397), kbase_hwaccess_pm_gpu_active (408),
    kbase_hwaccess_pm_gpu_idle (413), kbase_hwaccess_pm_suspend (418),
    kbase_hwaccess_pm_resume (439) [all...] |
H A D | mali_kbase_pm_policy.c |
    kbase_timeline_pm_cores_func (119, 132), kbasep_pm_do_poweroff_cores (144),
    kbasep_pm_do_gpu_poweroff_callback (173), kbasep_pm_do_gpu_poweroff_wq (219),
    kbase_pm_policy_init (261), kbase_pm_policy_term (283), kbase_pm_cancel_deferred_poweroff (289),
    kbase_pm_update_active (311), kbase_pm_update_cores_state_nolock (392),
    kbase_pm_update_cores_state (502), kbase_pm_get_policy (526), kbase_pm_set_policy (535),
    kbase_pm_trace_check_and_finish_state_change (591), kbase_pm_request_cores (601),
    kbase_pm_unrequest_cores (652), kbase_pm_register_inuse_cores (703), kbase_pm_release_cores (782),
    kbase_pm_request_cores_sync (833), kbase_pm_request_l2_caches (848),
    kbase_pm_request_l2_caches_l2_is_on (875), kbase_pm_release_l2_caches (884) [all...] |
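kbasep_pm_do_poweroff_cores() snapshots the desired shader/tiler state and strips out the cores whose deferred power-off has fallen due. A hedged sketch; the *_poweroff_pending field names and the final transition check are assumptions inferred from the truncated lines:

    static void kbasep_pm_do_poweroff_cores(struct kbase_device *kbdev)
    {
        u64 prev_shader_state = kbdev->pm.backend.desired_shader_state;
        u64 prev_tiler_state = kbdev->pm.backend.desired_tiler_state;

        lockdep_assert_held(&kbdev->hwaccess_lock);

        kbdev->pm.backend.desired_shader_state &= ~kbdev->pm.backend.shader_poweroff_pending; /* assumed */
        kbdev->pm.backend.desired_tiler_state &= ~kbdev->pm.backend.tiler_poweroff_pending;   /* assumed */

        if (prev_shader_state != kbdev->pm.backend.desired_shader_state ||
            prev_tiler_state != kbdev->pm.backend.desired_tiler_state)
            kbase_pm_check_transitions_nolock(kbdev); /* assumed follow-up */
    }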
H A D | mali_kbase_instr_backend.c |
    kbasep_instr_hwcnt_cacheclean (33), kbase_instr_hwcnt_enable_internal (57),
    kbase_instr_hwcnt_disable_internal (179), kbase_instr_hwcnt_request_dump (241),
    kbase_instr_hwcnt_dump_complete (284), kbasep_cache_clean_worker (305),
    kbase_instr_hwcnt_sample_done (331), kbase_clean_caches_done (352),
    kbase_instr_hwcnt_wait_for_dump (380), kbase_instr_hwcnt_clear (407),
    kbase_instr_backend_init (429), kbase_instr_backend_term (448) [all...] |
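kbasep_instr_hwcnt_cacheclean() runs with the hwcnt state machine in REQUEST_CLEAN: it unmasks CLEAN_CACHES_COMPLETED and then issues the cache clean+invalidate so the dump buffer becomes coherent. A sketch from the hits; the final command write and the state update are partly assumed because of the truncation:

    static void kbasep_instr_hwcnt_cacheclean(struct kbase_device *kbdev)
    {
        unsigned long flags, pm_flags;
        u32 irq_mask;

        spin_lock_irqsave(&kbdev->hwcnt.lock, flags);
        KBASE_DEBUG_ASSERT(kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_REQUEST_CLEAN);

        /* Enable the interrupt that signals the clean has finished. */
        spin_lock_irqsave(&kbdev->hwaccess_lock, pm_flags);
        irq_mask = kbase_reg_read(kbdev, GPU_CONTROL_REG(GPU_IRQ_MASK), NULL);
        kbase_reg_write(kbdev, GPU_CONTROL_REG(GPU_IRQ_MASK), irq_mask | CLEAN_CACHES_COMPLETED, NULL);
        spin_unlock_irqrestore(&kbdev->hwaccess_lock, pm_flags);

        KBASE_TRACE_ADD(kbdev, CORE_GPU_CLEAN_INV_CACHES, NULL, NULL, 0u, 0);
        kbase_reg_write(kbdev, GPU_CONTROL_REG(GPU_COMMAND), GPU_COMMAND_CLEAN_INV_CACHES, NULL); /* assumed */

        kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_CLEANING; /* assumed */
        spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags);
    }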
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/backend/gpu/ |
H A D | mali_kbase_pm_driver.c |
    mali_cci_flush_l2 (140), kbase_pm_invoke (185), kbase_pm_get_state (298),
    kbasep_pm_init_core_use_bitmaps (315), kbase_pm_get_present_cores (337),
    kbase_pm_get_active_cores (372), kbase_pm_get_trans_cores (389), kbase_pm_get_ready_cores (405),
    kbase_pm_transition_core_type (459), kbase_pm_check_transitions_nolock (642),
    kbase_pm_check_transitions_sync (925), kbase_pm_enable_interrupts (1002),
    kbase_pm_disable_interrupts_nolock (1028), kbase_pm_disable_interrupts (1048),
    kbase_pm_clock_on (1066), kbase_pm_clock_off (1123), kbase_pm_reset_done (1180),
    kbase_pm_wait_for_reset (1194), kbase_pm_hw_issues_detect (1219), kbase_pm_hw_issues_apply (1352),
    kbase_pm_cache_snoop_enable (1368), kbase_pm_cache_snoop_disable (1381), kbase_pm_do_reset (1395),
    kbasep_protected_mode_enable (1477), kbasep_protected_mode_disable (1486), kbase_pm_init_hw (1498),
    kbase_pm_request_gpu_cycle_counter_do_request (1626), kbase_pm_request_gpu_cycle_counter (1644),
    kbase_pm_request_gpu_cycle_counter_l2_is_on (1660),
    kbase_pm_release_gpu_cycle_counter_nolock (1676),
    kbase_pm_release_gpu_cycle_counter (1702) [all...] |
H A D | mali_kbase_pm_backend.c |
    kbase_pm_register_access_enable (39), kbase_pm_register_access_disable (51),
    kbase_hwaccess_pm_init (63), kbase_pm_do_poweron (154), kbase_pm_gpu_poweroff_wait_wq (175),
    kbase_pm_do_poweroff (260), is_poweroff_in_progress (292),
    kbase_pm_wait_for_poweroff_complete (304), kbase_hwaccess_pm_powerup (310),
    kbase_hwaccess_pm_halt (373), kbase_hwaccess_pm_term (385), kbase_pm_power_changed (401),
    kbase_pm_set_debug_core_mask (424), kbase_hwaccess_pm_gpu_active (437),
    kbase_hwaccess_pm_gpu_idle (442), kbase_hwaccess_pm_suspend (447),
    kbase_hwaccess_pm_resume (468) [all...] |
H A D | mali_kbase_pm_policy.c |
    kbase_timeline_pm_cores_func (131, 146), kbasep_pm_do_poweroff_cores (158),
    kbasep_pm_do_gpu_poweroff_callback (194), kbasep_pm_do_gpu_poweroff_wq (243),
    kbase_pm_policy_init (285), kbase_pm_policy_term (311), kbase_pm_cancel_deferred_poweroff (317),
    kbase_pm_update_active (339), kbase_pm_update_cores_state_nolock (433),
    kbase_pm_update_cores_state (559), kbase_pm_get_policy (582), kbase_pm_set_policy (591),
    kbase_pm_trace_check_and_finish_state_change (649), kbase_pm_request_cores (661),
    kbase_pm_unrequest_cores (717), kbase_pm_register_inuse_cores (770), kbase_pm_release_cores (849),
    kbase_pm_request_cores_sync (905), kbase_pm_request_l2_caches (922),
    kbase_pm_request_l2_caches_l2_is_on (949), kbase_pm_release_l2_caches (958) [all...] |
H A D | mali_kbase_instr_backend.c |
    kbasep_instr_hwcnt_cacheclean (37), kbase_instr_hwcnt_enable_internal (64),
    kbase_instr_hwcnt_disable_internal (200), kbase_instr_hwcnt_request_dump (265),
    kbase_instr_hwcnt_dump_complete (313), kbasep_cache_clean_worker (334),
    kbase_instr_hwcnt_sample_done (364), kbase_clean_caches_done (386),
    kbase_instr_hwcnt_wait_for_dump (416), kbase_instr_hwcnt_clear (445),
    kbase_instr_backend_init (468), kbase_instr_backend_term (488) [all...] |
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/ |
H A D | mali_kbase_pm.c |
    kbase_pm_powerup (42), kbase_pm_halt (47), kbase_pm_context_active (52),
    kbase_pm_context_active_handle_suspend (57), kbase_pm_context_idle (112),
    kbase_pm_driver_suspend (143), kbase_pm_driver_resume (216), kbase_pm_suspend (262),
    kbase_pm_resume (275) [all...] |
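Only the assertion, the debug print and kbase_pm_lock() are visible in the kbase_pm_context_active_handle_suspend() hits; the suspend handling, reference counting and power-on call below are assumptions based on the function's name and the rest of this listing:

    int kbase_pm_context_active_handle_suspend(struct kbase_device *kbdev,
                                               enum kbase_pm_suspend_handler suspend_handler)
    {
        KBASE_DEBUG_ASSERT(kbdev != NULL);
        dev_dbg(kbdev->dev, "%s - reason = %d, pid = %d\n", __func__, suspend_handler, current->pid);
        kbase_pm_lock(kbdev);

        /* Assumed: bail out if a system suspend is in progress and the caller cannot wait. */
        if (kbase_pm_is_suspending(kbdev) && suspend_handler != KBASE_PM_SUSPEND_HANDLER_NOT_POSSIBLE) {
            kbase_pm_unlock(kbdev);
            return 1;
        }

        if (kbdev->pm.active_count++ == 0)
            kbase_pm_do_poweron(kbdev, false); /* first active reference powers the GPU up (assumed) */

        kbase_pm_unlock(kbdev);
        return 0;
    }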
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/csf/ |
H A D | mali_kbase_csf_reset_gpu.c |
    kbase_reset_gpu_prevent_and_wait (99), kbase_reset_gpu_try_prevent (118),
    kbase_reset_gpu_allow (137), kbase_reset_gpu_assert_prevented (143),
    kbase_reset_gpu_assert_failed_or_prevented (153), kbase_csf_reset_begin_hw_access_sync (169),
    kbase_csf_reset_end_hw_access (196), kbase_csf_debug_dump_registers (234),
    kbase_csf_dump_firmware_trace_buffer (260), kbase_csf_hwcnt_on_reset_error (315),
    kbase_csf_reset_gpu_now (335), kbase_csf_reset_gpu_worker (462), kbase_prepare_to_reset_gpu (513),
    kbase_prepare_to_reset_gpu_locked (530), kbase_reset_gpu (538), kbase_reset_gpu_locked (556),
    kbase_reset_gpu_silent (563), kbase_reset_gpu_is_active (581), kbase_reset_gpu_wait (593),
    kbase_reset_gpu_init (634), kbase_reset_gpu_term (648) [all...] |
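kbase_reset_gpu_prevent_and_wait() takes the CSF reset semaphore for reading so no reset can begin while the caller holds it, backing out if a previous reset already failed or one is unexpectedly still active. A sketch from the hits; the failed-state constant and the error codes are assumptions:

    int kbase_reset_gpu_prevent_and_wait(struct kbase_device *kbdev)
    {
        down_read(&kbdev->csf.reset.sem);

        if (atomic_read(&kbdev->csf.reset.state) == KBASE_CSF_RESET_GPU_FAILED) { /* assumed constant */
            up_read(&kbdev->csf.reset.sem);
            return -ENOMEM; /* assumed error code */
        }

        if (WARN_ON(kbase_reset_gpu_is_active(kbdev))) {
            up_read(&kbdev->csf.reset.sem);
            return -EFAULT; /* assumed error code */
        }

        return 0;
    }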
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/platform/devicetree/ |
H A D | mali_kbase_runtime_pm.c | 31 static void enable_gpu_power_control(struct kbase_device *kbdev) in enable_gpu_power_control() argument 36 for (i = 0; i < kbdev->nr_regulators; i++) { in enable_gpu_power_control() 37 if (WARN_ON(kbdev->regulators[i] == NULL)) in enable_gpu_power_control() 39 else if (!regulator_is_enabled(kbdev->regulators[i])) in enable_gpu_power_control() 40 WARN_ON(regulator_enable(kbdev->regulators[i])); in enable_gpu_power_control() 44 for (i = 0; i < kbdev->nr_clocks; i++) { in enable_gpu_power_control() 45 if (WARN_ON(kbdev->clocks[i] == NULL)) in enable_gpu_power_control() 47 else if (!__clk_is_enabled(kbdev->clocks[i])) in enable_gpu_power_control() 48 WARN_ON(clk_prepare_enable(kbdev->clocks[i])); in enable_gpu_power_control() 52 static void disable_gpu_power_control(struct kbase_device *kbdev) in disable_gpu_power_control() argument 76 pm_callback_power_on(struct kbase_device *kbdev) pm_callback_power_on() argument 116 pm_callback_power_off(struct kbase_device *kbdev) pm_callback_power_off() argument 147 pm_callback_runtime_gpu_active(struct kbase_device *kbdev) pm_callback_runtime_gpu_active() argument 177 pm_callback_runtime_gpu_idle(struct kbase_device *kbdev) pm_callback_runtime_gpu_idle() argument 199 kbase_device_runtime_init(struct kbase_device *kbdev) kbase_device_runtime_init() argument 224 kbase_device_runtime_disable(struct kbase_device *kbdev) kbase_device_runtime_disable() argument 237 pm_callback_runtime_on(struct kbase_device *kbdev) pm_callback_runtime_on() argument 245 pm_callback_runtime_off(struct kbase_device *kbdev) pm_callback_runtime_off() argument 252 pm_callback_resume(struct kbase_device *kbdev) pm_callback_resume() argument 259 pm_callback_suspend(struct kbase_device *kbdev) pm_callback_suspend() argument [all...] |
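The devicetree platform callbacks above drive the GPU rails and clocks directly. A condensed restatement of the enable_gpu_power_control() loop shown in the listing, using the kbdev fields visible there (nr_regulators/regulators[] and nr_clocks/clocks[]) and assuming the driver's struct kbase_device definition; error handling is reduced to WARN_ON for brevity:

#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/regulator/consumer.h>

static void gpu_power_control_enable(struct kbase_device *kbdev)
{
        unsigned int i;

        /* Regulators first, then clocks, matching the listed order. */
        for (i = 0; i < kbdev->nr_regulators; i++)
                if (kbdev->regulators[i] &&
                    !regulator_is_enabled(kbdev->regulators[i]))
                        WARN_ON(regulator_enable(kbdev->regulators[i]));

        for (i = 0; i < kbdev->nr_clocks; i++)
                if (kbdev->clocks[i] && !__clk_is_enabled(kbdev->clocks[i]))
                        WARN_ON(clk_prepare_enable(kbdev->clocks[i]));
}

disable_gpu_power_control() is presumably the mirror image (clocks off before regulators), and the pm_callback_power_on()/pm_callback_power_off() matches wrap this power switching around the runtime-PM state of the GPU device.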
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/ |
H A D | mali_kbase_pm.c | 41 int kbase_pm_powerup(struct kbase_device *kbdev, unsigned int flags) in kbase_pm_powerup() argument 43 return kbase_hwaccess_pm_powerup(kbdev, flags); in kbase_pm_powerup() 46 void kbase_pm_halt(struct kbase_device *kbdev) in kbase_pm_halt() argument 48 kbase_hwaccess_pm_halt(kbdev); in kbase_pm_halt() 51 void kbase_pm_context_active(struct kbase_device *kbdev) in kbase_pm_context_active() argument 53 (void)kbase_pm_context_active_handle_suspend(kbdev, in kbase_pm_context_active() 57 int kbase_pm_context_active_handle_suspend(struct kbase_device *kbdev, in kbase_pm_context_active_handle_suspend() argument 62 KBASE_DEBUG_ASSERT(kbdev != NULL); in kbase_pm_context_active_handle_suspend() 63 dev_dbg(kbdev->dev, "%s - reason = %d, pid = %d\n", __func__, in kbase_pm_context_active_handle_suspend() 65 kbase_pm_lock(kbdev); in kbase_pm_context_active_handle_suspend() 114 kbase_pm_context_idle(struct kbase_device *kbdev) kbase_pm_context_idle() argument 147 kbase_pm_driver_suspend(struct kbase_device *kbdev) kbase_pm_driver_suspend() argument 235 kbase_pm_driver_resume(struct kbase_device *kbdev, bool arb_gpu_start) kbase_pm_driver_resume() argument 286 kbase_pm_suspend(struct kbase_device *kbdev) kbase_pm_suspend() argument 301 kbase_pm_resume(struct kbase_device *kbdev) kbase_pm_resume() argument [all...] |
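This copy of mali_kbase_pm.c also matches the system-sleep entry points (kbase_pm_suspend(), kbase_pm_resume(), and the kbase_pm_driver_suspend()/kbase_pm_driver_resume() helpers behind them). Purely as a hypothetical illustration of where such entry points sit in the kernel's PM flow — the real driver registers its callbacks elsewhere, and the return type of kbase_pm_suspend() varies between driver versions:

#include <linux/device.h>
#include <linux/pm.h>

static int example_sleep_suspend(struct device *dev)
{
        struct kbase_device *kbdev = dev_get_drvdata(dev);

        kbase_pm_suspend(kbdev);        /* halt scheduling, power down */
        return 0;
}

static int example_sleep_resume(struct device *dev)
{
        struct kbase_device *kbdev = dev_get_drvdata(dev);

        kbase_pm_resume(kbdev);         /* re-enable scheduling */
        return 0;
}

/* Hypothetical dev_pm_ops hookup; not taken from the Rockchip platform glue. */
static const struct dev_pm_ops example_gpu_pm_ops = {
        SET_SYSTEM_SLEEP_PM_OPS(example_sleep_suspend, example_sleep_resume)
};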
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/device/ |
H A D | mali_kbase_device.c | 79 * @kbdev: Pointer to kbase device. 83 static int kbase_device_all_as_init(struct kbase_device *kbdev) in kbase_device_all_as_init() argument 87 for (i = 0; i < kbdev->nr_hw_address_spaces; i++) { in kbase_device_all_as_init() 88 err = kbase_mmu_as_init(kbdev, i); in kbase_device_all_as_init() 96 kbase_mmu_as_term(kbdev, i); in kbase_device_all_as_init() 103 static void kbase_device_all_as_term(struct kbase_device *kbdev) in kbase_device_all_as_term() argument 107 for (i = 0; i < kbdev->nr_hw_address_spaces; i++) { in kbase_device_all_as_term() 108 kbase_mmu_as_term(kbdev, i); in kbase_device_all_as_term() 112 int kbase_device_misc_init(struct kbase_device *const kbdev) in kbase_device_misc_init() argument 119 spin_lock_init(&kbdev in kbase_device_misc_init() 230 kbase_device_misc_term(struct kbase_device *kbdev) kbase_device_misc_term() argument 247 kbase_device_free(struct kbase_device *kbdev) kbase_device_free() argument 252 kbase_device_id_init(struct kbase_device *kbdev) kbase_device_id_init() argument 263 kbase_device_hwcnt_backend_jm_init(struct kbase_device *kbdev) kbase_device_hwcnt_backend_jm_init() argument 268 kbase_device_hwcnt_backend_jm_term(struct kbase_device *kbdev) kbase_device_hwcnt_backend_jm_term() argument 273 kbase_device_hwcnt_context_init(struct kbase_device *kbdev) kbase_device_hwcnt_context_init() argument 278 kbase_device_hwcnt_context_term(struct kbase_device *kbdev) kbase_device_hwcnt_context_term() argument 283 kbase_device_hwcnt_virtualizer_init(struct kbase_device *kbdev) kbase_device_hwcnt_virtualizer_init() argument 289 kbase_device_hwcnt_virtualizer_term(struct kbase_device *kbdev) kbase_device_hwcnt_virtualizer_term() argument 294 kbase_device_timeline_init(struct kbase_device *kbdev) kbase_device_timeline_init() argument 300 kbase_device_timeline_term(struct kbase_device *kbdev) kbase_device_timeline_term() argument 305 kbase_device_vinstr_init(struct kbase_device *kbdev) kbase_device_vinstr_init() argument 310 kbase_device_vinstr_term(struct kbase_device *kbdev) kbase_device_vinstr_term() argument 315 kbase_device_io_history_init(struct kbase_device *kbdev) kbase_device_io_history_init() argument 320 kbase_device_io_history_term(struct kbase_device *kbdev) kbase_device_io_history_term() argument 325 kbase_device_misc_register(struct kbase_device *kbdev) kbase_device_misc_register() argument 330 kbase_device_misc_deregister(struct kbase_device *kbdev) kbase_device_misc_deregister() argument 335 kbase_device_list_init(struct kbase_device *kbdev) kbase_device_list_init() argument 346 kbase_device_list_term(struct kbase_device *kbdev) kbase_device_list_term() argument 368 kbase_device_early_init(struct kbase_device *kbdev) kbase_device_early_init() argument 406 kbase_device_early_term(struct kbase_device *kbdev) kbase_device_early_term() argument [all...] |
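kbase_device_all_as_init() above is a standard init-or-unwind loop over the hardware address spaces: if any kbase_mmu_as_init() fails, the address spaces already set up are torn down in reverse. A sketch of that idiom using only the names visible in the listing:

static int example_all_as_init(struct kbase_device *kbdev)
{
        int i, err = 0;

        for (i = 0; i < kbdev->nr_hw_address_spaces; i++) {
                err = kbase_mmu_as_init(kbdev, i);
                if (err)
                        break;
        }

        if (err) {
                /* Unwind only the address spaces that were initialised. */
                while (i-- > 0)
                        kbase_mmu_as_term(kbdev, i);
        }

        return err;
}

The rest of the file follows the same pairing discipline: each kbase_device_*_init() match has a corresponding *_term() used on teardown or on an error path.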
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/arbiter/ |
H A D | mali_kbase_arbiter_pm.c | 49 static void kbase_arbiter_pm_vm_wait_gpu_assignment(struct kbase_device *kbdev); 51 struct kbase_device *kbdev); 129 * @kbdev: The kbase device structure for the device (must be a valid pointer) 134 static void kbase_arbiter_pm_vm_set_state(struct kbase_device *kbdev, in kbase_arbiter_pm_vm_set_state() argument 137 struct kbase_arbiter_vm_state *arb_vm_state = kbdev->pm.arb_vm_state; in kbase_arbiter_pm_vm_set_state() 139 dev_dbg(kbdev->dev, "VM set_state %s -> %s", in kbase_arbiter_pm_vm_set_state() 147 KBASE_KTRACE_ADD(kbdev, ARB_VM_STATE, NULL, new_state); in kbase_arbiter_pm_vm_set_state() 163 struct kbase_device *kbdev = arb_vm_state->kbdev; in kbase_arbiter_pm_suspend_wq() local 166 dev_dbg(kbdev in kbase_arbiter_pm_suspend_wq() 194 struct kbase_device *kbdev = arb_vm_state->kbdev; kbase_arbiter_pm_resume_wq() local 245 start_request_timer(struct kbase_device *kbdev) start_request_timer() argument 261 cancel_request_timer(struct kbase_device *kbdev) cancel_request_timer() argument 278 kbase_arbiter_pm_early_init(struct kbase_device *kbdev) kbase_arbiter_pm_early_init() argument 356 kbase_arbiter_pm_early_term(struct kbase_device *kbdev) kbase_arbiter_pm_early_term() argument 380 kbase_arbiter_pm_release_interrupts(struct kbase_device *kbdev) kbase_arbiter_pm_release_interrupts() argument 398 kbase_arbiter_pm_install_interrupts(struct kbase_device *kbdev) kbase_arbiter_pm_install_interrupts() argument 416 kbase_arbiter_pm_vm_stopped(struct kbase_device *kbdev) kbase_arbiter_pm_vm_stopped() argument 460 kbase_arbiter_set_max_config(struct kbase_device *kbdev, uint32_t max_l2_slices, uint32_t max_core_mask) kbase_arbiter_set_max_config() argument 486 kbase_arbiter_pm_gpu_assigned(struct kbase_device *kbdev) kbase_arbiter_pm_gpu_assigned() argument 529 kbase_arbiter_pm_vm_gpu_start(struct kbase_device *kbdev) kbase_arbiter_pm_vm_gpu_start() argument 587 kbase_arbiter_pm_vm_gpu_stop(struct kbase_device *kbdev) kbase_arbiter_pm_vm_gpu_stop() argument 647 kbase_gpu_lost(struct kbase_device *kbdev) kbase_gpu_lost() argument 713 kbase_arbiter_pm_vm_os_suspend_ready_state( struct kbase_device *kbdev) kbase_arbiter_pm_vm_os_suspend_ready_state() argument 735 kbase_arbiter_pm_vm_os_prepare_suspend(struct kbase_device *kbdev) kbase_arbiter_pm_vm_os_prepare_suspend() argument 808 kbase_arbiter_pm_vm_os_resume(struct kbase_device *kbdev) kbase_arbiter_pm_vm_os_resume() argument 840 kbase_arbiter_pm_vm_event(struct kbase_device *kbdev, enum kbase_arbif_evt evt) kbase_arbiter_pm_vm_event() argument 934 kbase_arbiter_pm_vm_wait_gpu_assignment(struct kbase_device *kbdev) kbase_arbiter_pm_vm_wait_gpu_assignment() argument 951 kbase_arbiter_pm_vm_gpu_assigned_lockheld( struct kbase_device *kbdev) kbase_arbiter_pm_vm_gpu_assigned_lockheld() argument 974 kbase_arbiter_pm_ctx_active_handle_suspend(struct kbase_device *kbdev, enum kbase_pm_suspend_handler suspend_handler) kbase_arbiter_pm_ctx_active_handle_suspend() argument 1077 enumerate_arb_gpu_clk(struct kbase_device *kbdev, unsigned int index) enumerate_arb_gpu_clk() argument 1092 get_arb_gpu_clk_rate(struct kbase_device *kbdev, void *gpu_clk_handle) get_arb_gpu_clk_rate() argument 1117 arb_gpu_clk_notifier_register(struct kbase_device *kbdev, void *gpu_clk_handle, struct notifier_block *nb) arb_gpu_clk_notifier_register() argument 1142 arb_gpu_clk_notifier_unregister(struct kbase_device *kbdev, void *gpu_clk_handle, struct notifier_block *nb) arb_gpu_clk_notifier_unregister() argument [all...] |
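The arbiter PM code above runs a per-VM state machine (kbase_arbiter_pm_vm_set_state(), kbase_arbiter_pm_vm_event()) so a guest only uses the GPU while the arbiter has assigned it. The hook most relevant to the generic PM path is kbase_arbiter_pm_ctx_active_handle_suspend(); a caller-side sketch, with the meaning of the return value paraphrased rather than quoted from the file:

static int example_pm_active_with_arbiter(struct kbase_device *kbdev,
                                          enum kbase_pm_suspend_handler handler)
{
        int err;

        /* Consult the arbiter first: non-zero means the GPU is not
         * currently assigned to this VM and the chosen suspend handler
         * does not allow waiting for it. */
        err = kbase_arbiter_pm_ctx_active_handle_suspend(kbdev, handler);
        if (err)
                return err;

        /* ... continue with the normal active-count / power-up path ... */
        return 0;
}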
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/tl/ |
H A D | mali_kbase_tracepoints.h | 59 #define MALI_KBASE_TL_DISPATCH_STREAM(kbdev, stype) \ 61 ((u8 *)kbdev->timeline + __ ## stype ## _stream_offset)) 463 * @kbdev: Kbase device 469 kbdev, \ 475 int enabled = atomic_read(&(kbdev)->timeline_flags); \ 478 MALI_KBASE_TL_DISPATCH_STREAM(kbdev, obj), \ 486 * @kbdev: Kbase device 492 kbdev, \ 498 int enabled = atomic_read(&(kbdev)->timeline_flags); \ 501 MALI_KBASE_TL_DISPATCH_STREAM(kbdev, ob [all...] |
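Every tracepoint macro in this header follows the same guard-then-dispatch shape: read the device's timeline_flags atomically, and only if tracing is enabled resolve the per-device stream with MALI_KBASE_TL_DISPATCH_STREAM() and call the emitter. A stripped-down restatement of that shape (__TL_EXAMPLE_FLAG and __kbase_tl_example() are placeholders, not real tracepoints from this header):

#define KBASE_TLSTREAM_EXAMPLE(kbdev, example_arg)                         \
        do {                                                               \
                int enabled = atomic_read(&(kbdev)->timeline_flags);       \
                if (enabled & __TL_EXAMPLE_FLAG)                           \
                        __kbase_tl_example(                                \
                                MALI_KBASE_TL_DISPATCH_STREAM(kbdev, obj), \
                                example_arg);                              \
        } while (0)

Keeping the flag check in the macro, outside the emitter, keeps a disabled tracepoint down to one atomic read and a branch instead of an unconditional call into the stream layer.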