
Searched refs:hwcnt (Results 1 - 24 of 24) sorted by relevance

/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/backend/gpu/
mali_kbase_instr_backend.c
43 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbasep_instr_hwcnt_cacheclean()
44 KBASE_DEBUG_ASSERT(kbdev->hwcnt.backend.state == in kbasep_instr_hwcnt_cacheclean()
59 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_CLEANING; in kbasep_instr_hwcnt_cacheclean()
61 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbasep_instr_hwcnt_cacheclean()
90 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
92 if (kbdev->hwcnt.backend.state != KBASE_INSTR_STATE_DISABLED) { in kbase_instr_hwcnt_enable_internal()
94 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
106 kbdev->hwcnt.kctx = kctx; in kbase_instr_hwcnt_enable_internal()
108 kbdev->hwcnt.addr = setup->dump_buffer; in kbase_instr_hwcnt_enable_internal()
111 kbdev->hwcnt in kbase_instr_hwcnt_enable_internal()
mali_kbase_jm_hw.c
1202 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbasep_reset_timeout_worker()
1214 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbasep_reset_timeout_worker()
mali_kbase_jm_rb.c
1840 KBASE_DEBUG_ASSERT_MSG(kbdev->hwcnt.backend.state != in kbase_gpu_cacheclean()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/backend/gpu/
mali_kbase_instr_backend.c
39 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbasep_instr_hwcnt_cacheclean()
40 KBASE_DEBUG_ASSERT(kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_REQUEST_CLEAN); in kbasep_instr_hwcnt_cacheclean()
52 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_CLEANING; in kbasep_instr_hwcnt_cacheclean()
54 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbasep_instr_hwcnt_cacheclean()
82 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
84 if (kbdev->hwcnt.backend.state != KBASE_INSTR_STATE_DISABLED) { in kbase_instr_hwcnt_enable_internal()
86 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
97 kbdev->hwcnt.kctx = kctx; in kbase_instr_hwcnt_enable_internal()
99 kbdev->hwcnt.addr = setup->dump_buffer; in kbase_instr_hwcnt_enable_internal()
102 kbdev->hwcnt in kbase_instr_hwcnt_enable_internal()
mali_kbase_jm_hw.c
1083 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbasep_reset_timeout_worker()
1095 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbasep_reset_timeout_worker()
mali_kbase_jm_rb.c
1606 KBASE_DEBUG_ASSERT_MSG(kbdev->hwcnt.backend.state != KBASE_INSTR_STATE_CLEANING, in kbase_gpu_cacheclean()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/backend/gpu/
mali_kbase_instr_backend.c
50 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
52 if (kbdev->hwcnt.backend.state != KBASE_INSTR_STATE_DISABLED) { in kbase_instr_hwcnt_enable_internal()
54 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
65 kbdev->hwcnt.kctx = kctx; in kbase_instr_hwcnt_enable_internal()
67 kbdev->hwcnt.addr = enable->dump_buffer; in kbase_instr_hwcnt_enable_internal()
68 kbdev->hwcnt.addr_bytes = enable->dump_buffer_bytes; in kbase_instr_hwcnt_enable_internal()
70 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
75 if (kbdev->hwcnt.backend.use_secondary_override) in kbase_instr_hwcnt_enable_internal()
111 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
113 kbdev->hwcnt in kbase_instr_hwcnt_enable_internal()
mali_kbase_pm_backend.c
770 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbase_pm_handle_gpu_lost()
771 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_IDLE; in kbase_pm_handle_gpu_lost()
772 kbdev->hwcnt.backend.triggered = 1; in kbase_pm_handle_gpu_lost()
773 wake_up(&kbdev->hwcnt.backend.wait); in kbase_pm_handle_gpu_lost()
774 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_pm_handle_gpu_lost()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/backend/gpu/
mali_kbase_instr_backend.c
48 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
50 if (kbdev->hwcnt.backend.state != KBASE_INSTR_STATE_DISABLED) { in kbase_instr_hwcnt_enable_internal()
52 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
58 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
68 kbdev->hwcnt.kctx = kctx; in kbase_instr_hwcnt_enable_internal()
70 kbdev->hwcnt.addr = enable->dump_buffer; in kbase_instr_hwcnt_enable_internal()
71 kbdev->hwcnt.addr_bytes = enable->dump_buffer_bytes; in kbase_instr_hwcnt_enable_internal()
73 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_instr_hwcnt_enable_internal()
78 prfcnt_config |= kbdev->hwcnt.backend.override_counter_set in kbase_instr_hwcnt_enable_internal()
106 spin_lock_irqsave(&kbdev->hwcnt in kbase_instr_hwcnt_enable_internal()
mali_kbase_pm_backend.c
1006 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbase_pm_handle_gpu_lost()
1007 if (kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_DUMPING || in kbase_pm_handle_gpu_lost()
1008 kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_FAULT) { in kbase_pm_handle_gpu_lost()
1009 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_FAULT; in kbase_pm_handle_gpu_lost()
1010 kbdev->hwcnt.backend.triggered = 1; in kbase_pm_handle_gpu_lost()
1011 wake_up(&kbdev->hwcnt.backend.wait); in kbase_pm_handle_gpu_lost()
1013 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_pm_handle_gpu_lost()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/mmu/backend/
mali_kbase_mmu_jm.c
146 spin_lock_irqsave(&kbdev->hwcnt.lock, flags); in kbase_mmu_report_fault_and_kill()
147 if ((kbdev->hwcnt.kctx) && (kbdev->hwcnt.kctx->as_nr == as_no) && in kbase_mmu_report_fault_and_kill()
148 (kbdev->hwcnt.backend.state == in kbase_mmu_report_fault_and_kill()
150 if ((fault->addr >= kbdev->hwcnt.addr) && in kbase_mmu_report_fault_and_kill()
151 (fault->addr < (kbdev->hwcnt.addr + in kbase_mmu_report_fault_and_kill()
152 kbdev->hwcnt.addr_bytes))) in kbase_mmu_report_fault_and_kill()
153 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_FAULT; in kbase_mmu_report_fault_and_kill()
155 spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags); in kbase_mmu_report_fault_and_kill()
244 spin_lock_irqsave(&kbdev->hwcnt in kbase_mmu_interrupt_process()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/mmu/backend/
mali_kbase_mmu_jm.c
135 if ((kbdev->hwcnt.kctx) && (kbdev->hwcnt.kctx->as_nr == as_no) && in kbase_mmu_report_fault_and_kill()
136 (kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_DUMPING)) { in kbase_mmu_report_fault_and_kill()
137 if ((fault->addr >= kbdev->hwcnt.addr) && (fault->addr < (kbdev->hwcnt.addr + kbdev->hwcnt.addr_bytes))) { in kbase_mmu_report_fault_and_kill()
138 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_FAULT; in kbase_mmu_report_fault_and_kill()
215 if ((kbdev->hwcnt.kctx == kctx) && (kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_DUMPING)) { in kbase_mmu_interrupt_process()
216 kbdev->hwcnt in kbase_mmu_interrupt_process()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/
mali_kbase_hwcnt_backend_csf_if_fw.c
571 kbdev->csf.hwcnt.enable_pending = true; in kbasep_hwcnt_backend_csf_if_fw_dump_enable()
619 kbdev->csf.hwcnt.enable_pending = true; in kbasep_hwcnt_backend_csf_if_fw_dump_disable()
638 kbdev->csf.hwcnt.request_pending = false; in kbasep_hwcnt_backend_csf_if_fw_dump_disable()
659 kbdev->csf.hwcnt.request_pending = true; in kbasep_hwcnt_backend_csf_if_fw_dump_request()
mali_kbase_defs.h
130 /* Minimum threshold period for hwcnt dumps between different hwcnt virtualizer
139 /* The buffer count of CSF hwcnt backend ring buffer, which is used when CSF
140 * hwcnt backend allocate the ring buffer to communicate with CSF firmware for
143 * CSF hwcnt backend creation will be failed.
741 * @hwcnt: Structure used for instrumentation and HW counters
743 * @hwcnt.lock: The lock should be used when accessing any of the
745 * @hwcnt.kctx: kbase context
746 * @hwcnt.addr: HW counter address
747 * @hwcnt
1036 } hwcnt; member
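The kernel-doc fragment above describes kbdev->hwcnt as a spinlock-protected structure (hwcnt.lock, hwcnt.kctx, hwcnt.addr) behind the instrumentation backend, and every match in these results follows the same access pattern: take hwcnt.lock with spin_lock_irqsave(), inspect or transition hwcnt.backend.state, then release the lock. Below is a minimal sketch of that pattern assembled from the matches; the function name is hypothetical, while the fields, enum values, and wake_up() call all appear in the results.

/*
 * Illustrative sketch only, not driver code: the state-transition pattern
 * used by kbase_pm_handle_gpu_lost() and kbase_mmu_report_fault_and_kill()
 * in the matches above. All hwcnt.backend accesses happen under hwcnt.lock.
 */
static void example_mark_hwcnt_fault(struct kbase_device *kbdev)
{
	unsigned long flags;

	spin_lock_irqsave(&kbdev->hwcnt.lock, flags);
	if (kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_DUMPING) {
		/* Record the fault and wake any waiter blocked on the dump. */
		kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_FAULT;
		kbdev->hwcnt.backend.triggered = 1;
		wake_up(&kbdev->hwcnt.backend.wait);
	}
	spin_unlock_irqrestore(&kbdev->hwcnt.lock, flags);
}

The spin_lock_init(&kbdev->hwcnt.lock) hits in the mali_kbase_device.c entries further down are where this lock is created at device initialization.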
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/
mali_kbase_mmu.c
1714 if ((kbdev->hwcnt.kctx) && (kbdev->hwcnt.kctx->as_nr == as_no) && in kbase_mmu_report_fault_and_kill()
1715 (kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_DUMPING)) { in kbase_mmu_report_fault_and_kill()
1718 if ((as->fault_addr >= kbdev->hwcnt.addr) && in kbase_mmu_report_fault_and_kill()
1719 (as->fault_addr < (kbdev->hwcnt.addr + (num_core_groups * MMU_OFFSET_SIZE)))) { in kbase_mmu_report_fault_and_kill()
1720 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_FAULT; in kbase_mmu_report_fault_and_kill()
1970 if ((kbdev->hwcnt.kctx == kctx) && (kbdev->hwcnt.backend.state == KBASE_INSTR_STATE_DUMPING)) { in kbase_mmu_interrupt_process()
1971 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_FAULT; in kbase_mmu_interrupt_process()
mali_kbase_defs.h
1058 } hwcnt; member
mali_kbase_device.c
219 spin_lock_init(&kbdev->hwcnt.lock); in kbase_device_init()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/
mali_kbase_mmu.c
1780 if ((kbdev->hwcnt.kctx) && (kbdev->hwcnt.kctx->as_nr == as_no) && in kbase_mmu_report_fault_and_kill()
1781 (kbdev->hwcnt.backend.state == in kbase_mmu_report_fault_and_kill()
1785 if ((as->fault_addr >= kbdev->hwcnt.addr) && in kbase_mmu_report_fault_and_kill()
1786 (as->fault_addr < (kbdev->hwcnt.addr + in kbase_mmu_report_fault_and_kill()
1788 kbdev->hwcnt.backend.state = KBASE_INSTR_STATE_FAULT; in kbase_mmu_report_fault_and_kill()
2039 if ((kbdev->hwcnt.kctx == kctx) && in kbase_mmu_interrupt_process()
2040 (kbdev->hwcnt.backend.state == in kbase_mmu_interrupt_process()
2042 kbdev->hwcnt.backend.state = in kbase_mmu_interrupt_process()
mali_kbase_defs.h
1069 } hwcnt; member
mali_kbase_device.c
216 spin_lock_init(&kbdev->hwcnt.lock); in kbase_device_init()
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/device/
mali_kbase_device.c
187 spin_lock_init(&kbdev->hwcnt.lock); in kbase_device_misc_init()
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/csf/
mali_kbase_csf_defs.h
1234 * @hwcnt: Contain members required for handling the dump of
1273 struct kbase_csf_hwcnt hwcnt; member
mali_kbase_csf.c
2667 if (kbdev->csf.hwcnt.request_pending && in process_prfcnt_interrupts()
2670 kbdev->csf.hwcnt.request_pending = false; in process_prfcnt_interrupts()
2679 if (kbdev->csf.hwcnt.enable_pending && in process_prfcnt_interrupts()
2682 kbdev->csf.hwcnt.enable_pending = false; in process_prfcnt_interrupts()
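The CSF hits (mali_kbase_hwcnt_backend_csf_if_fw.c above and process_prfcnt_interrupts() here) show a pending-flag handshake with firmware: enable/disable and dump-request paths set csf.hwcnt.enable_pending or csf.hwcnt.request_pending, and the PRFCNT interrupt handler clears each flag once the firmware acknowledges. A sketch of the acknowledge side follows; only the flag names come from the matches, and the fw_acked_*() helpers are hypothetical stand-ins for the acknowledge checks.

/*
 * Illustrative sketch only: the flag-clearing structure seen in
 * process_prfcnt_interrupts(). fw_acked_sample()/fw_acked_enable() are
 * hypothetical placeholders for the firmware ACK tests.
 */
static void example_process_prfcnt_irq(struct kbase_device *kbdev)
{
	/* A requested counter sample has been taken by firmware. */
	if (kbdev->csf.hwcnt.request_pending && fw_acked_sample(kbdev))
		kbdev->csf.hwcnt.request_pending = false;

	/* A pending enable/disable of counter collection has taken effect. */
	if (kbdev->csf.hwcnt.enable_pending && fw_acked_enable(kbdev))
		kbdev->csf.hwcnt.enable_pending = false;
}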
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/
mali_kbase_defs.h
129 /* Minimum threshold period for hwcnt dumps between different hwcnt virtualizer
700 * @hwcnt: Structure used for instrumentation and HW counters
948 } hwcnt; member
