/device/soc/rockchip/common/sdk_linux/drivers/gpu/drm/bridge/analogix/ |
| analogix_dp_reg.c | Matches the MMIO accessors analogix_dp_write() and analogix_dp_read(), which wrap writel()/readl() on dp->reg_base + reg, and a local "u32 reg" in most of the register helpers: video mute/stop, lane swap, analog parameter and power setup, reset, interrupt and HPD handling, PLL lock and power-down, AUX reset/init and transfer, plug-in status, SSC enable/disable, link bandwidth and lane count, enhanced mode, training pattern, macro reset, video init/colour-format/timing and M-N programming, scrambling, BIST, and I2S/SPDIF audio control.
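For orientation, the idiom these helpers repeat is a plain read-modify-write on a memory-mapped register: analogix_dp_enable_video_mute(), for instance, reads VIDEO_CTL_1, sets the mute bit and writes the word back. A minimal user-space sketch of that idiom (the offset and bit value below are illustrative placeholders, not the real Analogix DP register map):

    /* Read-modify-write sketch in the style of analogix_dp_reg.c.
     * Offsets/bits are made up; the array stands in for the ioremap()ed block. */
    #include <stdint.h>
    #include <stdio.h>

    #define VIDEO_CTL_1     0x2CU        /* hypothetical byte offset */
    #define HDCP_VIDEO_MUTE (1U << 6)    /* hypothetical bit */

    struct dp_device { uint32_t regs[64]; };

    static void dp_write(struct dp_device *dp, uint32_t reg, uint32_t val)
    {
        dp->regs[reg / 4] = val;         /* writel(val, dp->reg_base + reg) in the driver */
    }

    static uint32_t dp_read(struct dp_device *dp, uint32_t reg)
    {
        return dp->regs[reg / 4];        /* readl(dp->reg_base + reg) in the driver */
    }

    static void dp_enable_video_mute(struct dp_device *dp)
    {
        uint32_t reg = dp_read(dp, VIDEO_CTL_1);
        reg |= HDCP_VIDEO_MUTE;          /* set one control bit, keep the rest */
        dp_write(dp, VIDEO_CTL_1, reg);
    }

    int main(void)
    {
        struct dp_device dp = { { 0 } };
        dp_enable_video_mute(&dp);
        printf("VIDEO_CTL_1 = 0x%08x\n", (unsigned int)dp_read(&dp, VIDEO_CTL_1));
        return 0;
    }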
/device/soc/rockchip/common/kernel/drivers/gpu/arm/midgard/ |
| mali_kbase_mem_linux.c | Matches "@reg: The GPU region" kerneldoc comments, shrink-mapping prototypes that take a region pointer, a local "struct kbase_va_region *reg" in kbase_mem_alloc() (allocated via kbase_alloc_free_region(), then passed to kbase_update_region_flags() and kbase_reg_prepare_native()), and further locals/arguments in kbase_mem_query(), kbase_mem_flags_change(), kbase_mem_from_ump(), kbase_mem_from_umm(), kbase_mem_from_user_buffer(), kbase_mem_grow_gpu_mapping(), kbase_mem_shrink_cpu_mapping(), kbase_mem_shrink_gpu_mapping(), kbase_cpu_mmap(), kbase_trace_buffer_mmap(), kbase_mmu_dump_mmap() and the mmap/vmap paths.
| mali_kbase_mem.c | Matches kbase_reg_flags_to_rbtree(), which picks an rbtree from reg->flags & KBASE_REG_ZONE_MASK, the region-tracker lookups (enclosing range/address, base address, meeting reqs) that walk rb_entry() nodes comparing reg->start_pfn and reg->start_pfn + reg->nr_pages, and locals/arguments in kbase_remove_va_region(), kbase_add_va_region(), kbase_region_tracker_erase_rbtree(), kbase_free_alloced_region(), kbase_gpu_mmap()/kbase_gpu_munmap(), kbase_do_syncset(), kbase_mem_free_region()/kbase_mem_free(), kbase_update_region_flags(), kbase_alloc_phy_pages(), the JIT debugfs/allocate/free/evict paths, kbase_jd_user_buf_map(), kbase_jd_umm_map(), kbase_map_external_resource()/kbase_unmap_external_resource(), and kbase_sticky_resource_acquire()/release().
| mali_kbase_mem.h | Matches the "struct kbase_va_region *reg" member and the inline helpers kbase_get_cpu_phy_pages()/kbase_get_gpu_phy_pages() (which assert reg, reg->cpu_alloc, reg->gpu_alloc and matching nents before returning the page array), kbase_reg_current_backed_size(), and kbase_reg_prepare_native().
/device/soc/rockchip/common/vendor/drivers/gpu/arm/bifrost/ |
| mali_kbase_mem_linux.c | Matches region arguments in the static mapping prototypes, kbase_find_event_mem_region(), which walks kctx->csf.event_pages_head checking reg->start_pfn/nr_pages and the KBASE_REG_FREE/KBASE_REG_CSF_EVENT flags, and locals/arguments in kbase_phy_alloc_mapping_init()/get(), kbase_mem_alloc(), kbase_mem_query(), kbase_mem_flags_change(), kbase_mem_do_sync_imported(), kbase_mem_umm_map_attachment()/map()/unmap(), kbase_mem_from_umm(), kbase_mem_from_user_buffer(), the grow/shrink mapping helpers, kbase_mem_shrink(), get_aliased_alloc(), kbase_cpu_mmap(), kbase_mmu_dump_mmap(), and kbase_vmap_phy_pages().
| mali_kbase_mem.c | Matches the rbtree lookups (find_region_enclosing_range_rbtree(), kbase_find_region_enclosing_address(), kbase_find_region_base_address(), kbase_region_tracker_find_region_meeting_reqs()) comparing reg->start_pfn and reg->nr_pages, plus locals/arguments in kbase_remove_va_region(), kbase_add_va_region()/kbase_add_va_region_rbtree(), kbase_region_tracker_erase_rbtree(), kbase_reg_flags_to_kctx(), kbase_free_alloced_region(), kbase_gpu_mmap()/kbase_gpu_munmap(), kbase_do_syncset(), kbase_mem_free_region()/kbase_mem_free(), kbase_update_region_flags(), kbase_alloc_phy_pages(), the JIT debugfs/trim/grow/allocate/free/evict and pressure-reporting paths, kbase_jd_user_buf_pin_pages()/kbase_jd_user_buf_map(), kbase_map_external_resource()/kbase_unmap_external_resource(), kbase_sticky_resource_acquire(), and release_sticky_resource_meta().
| mali_kbase_mem.h | Matches the "@reg:" back-reference comment and struct member, the inline predicates kbase_is_region_free() (reg->flags & KBASE_REG_FREE), kbase_is_region_invalid() (KBASE_REG_VA_FREED) and kbase_is_region_invalid_or_free(), plus kbase_region_refcnt_free(), kbase_get_cpu_phy_pages()/kbase_get_gpu_phy_pages(), kbase_reg_current_backed_size(), kbase_reg_prepare_native(), and kbase_link_event_mem_page()/kbase_unlink_event_mem_page().
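The region predicates here are NULL-tolerant one-liners over reg->flags; a small sketch of the same shape (flag bit values are placeholders, not the driver's actual assignments):

    /* Flag-predicate sketch in the style of mali_kbase_mem.h; bit values are made up. */
    #include <stdbool.h>
    #include <stdio.h>

    #define KBASE_REG_FREE     (1u << 0)   /* hypothetical bit assignments */
    #define KBASE_REG_VA_FREED (1u << 1)

    struct va_region { unsigned long flags; };

    /* A NULL region is treated like a freed one, so lookup results can be
     * tested without a separate NULL check at every call site. */
    static inline bool region_is_free(const struct va_region *reg)
    {
        return !reg || (reg->flags & KBASE_REG_FREE);
    }

    static inline bool region_is_invalid(const struct va_region *reg)
    {
        return !reg || (reg->flags & KBASE_REG_VA_FREED);
    }

    static inline bool region_is_invalid_or_free(const struct va_region *reg)
    {
        return region_is_invalid(reg) || region_is_free(reg);
    }

    int main(void)
    {
        struct va_region live = { .flags = 0 };
        printf("NULL free? %d, live free? %d\n",
               region_is_free(NULL), region_is_free(&live));
        return 0;
    }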
/device/soc/rockchip/common/kernel/drivers/gpu/arm/bifrost/ |
| mali_kbase_mem_linux.c | Same family of matches as the vendor bifrost copy: kbase_find_event_mem_region() checking reg->start_pfn/nr_pages and the KBASE_REG_FREE/KBASE_REG_CSF_EVENT flags, plus locals/arguments in kbase_phy_alloc_mapping_init()/get(), kbase_mem_alloc(), kbase_mem_query(), kbase_mem_flags_change(), kbase_mem_do_sync_imported(), the UMM map/unmap helpers, kbase_mem_from_umm()/kbase_mem_from_user_buffer(), the grow/shrink mapping helpers (here taking an extra mmu_sync_info argument), kbase_mem_shrink(), get_aliased_alloc(), kbase_cpu_mmap(), kbase_mmu_dump_mmap(), and kbase_vmap_phy_pages().
| mali_kbase_mem.c | Same family of matches as the vendor bifrost copy: the rbtree lookups comparing reg->start_pfn and reg->nr_pages, kbase_remove_va_region() (here taking kbdev), kbase_add_va_region()/kbase_add_va_region_rbtree(), kbase_region_tracker_erase_rbtree(), kbase_region_tracker_has_allocs(), kbase_reg_flags_to_kctx(), kbase_free_alloced_region(), kbase_gpu_mmap() (with mmu_sync_info)/kbase_gpu_munmap(), kbase_do_syncset(), kbase_mem_free_region()/kbase_mem_free(), kbase_update_region_flags(), kbase_alloc_phy_pages(), the JIT debugfs/trim/grow/allocate/free/evict and pressure-reporting paths, kbase_jd_user_buf_pin_pages()/kbase_jd_user_buf_map(), kbase_map_external_resource()/kbase_unmap_external_resource(), kbase_sticky_resource_acquire(), and release_sticky_resource_meta().
| mali_kbase_mem.h | Same predicates and accessors as the vendor bifrost header (kbase_is_region_free(), kbase_is_region_invalid(), kbase_is_region_invalid_or_free(), kbase_get_cpu_phy_pages()/kbase_get_gpu_phy_pages(), kbase_reg_current_backed_size(), kbase_reg_prepare_native(), kbase_link_event_mem_page()/kbase_unlink_event_mem_page()), with kbase_region_refcnt_free() here also taking the kbdev pointer.
/device/soc/rockchip/common/vendor/drivers/gpu/arm/midgard/ |
| mali_kbase_mem_linux.c | Matches "@reg: The GPU region" kerneldoc comments, the kbase_mem_shrink_cpu_mapping()/kbase_mem_shrink_gpu_mapping() prototypes, the kbase_mem_alloc() local (kbase_alloc_free_region(), kbase_update_region_flags(), kbase_reg_prepare_native()), and further locals/arguments in kbase_mem_query(), kbase_mem_flags_change(), kbase_mem_from_ump(), kbase_mem_from_umm(), kbase_mem_from_user_buffer(), the grow/shrink mapping helpers, kbase_cpu_mmap(), kbase_trace_buffer_mmap(), kbase_mmu_dump_mmap() and the mmap/vmap paths.
| mali_kbase_mem.c | Matches kbase_reg_flags_to_rbtree() (reg->flags & KBASE_REG_ZONE_MASK), the region-tracker lookups comparing reg->start_pfn and reg->start_pfn + reg->nr_pages, and locals/arguments in kbase_remove_va_region(), kbase_add_va_region(), kbase_region_tracker_erase_rbtree(), kbase_free_alloced_region(), kbase_gpu_mmap()/kbase_gpu_munmap(), kbase_do_syncset(), kbase_mem_free_region()/kbase_mem_free(), kbase_update_region_flags(), kbase_alloc_phy_pages(), the JIT debugfs/allocate/free/evict paths, kbase_jd_user_buf_map(), kbase_jd_umm_map(), kbase_map_external_resource()/kbase_unmap_external_resource(), and kbase_sticky_resource_acquire()/release().
| mali_kbase_mem.h | Matches the "struct kbase_va_region *reg" member, kbase_get_cpu_phy_pages()/kbase_get_gpu_phy_pages() with their KBASE_DEBUG_ASSERT() checks, kbase_reg_current_backed_size(), and kbase_reg_prepare_native().
/device/soc/hisilicon/common/platform/wifi/hi3881v100/driver/oal/ |
| oal_sdio_comm.h | Matches the hisdio_short_pkt_set(), hisdio_large_pkt_set() and hisdio_reserve_pkt_set() macros, each of which rewrites one byte lane of a 32-bit status word (short-packet count in bits 0..7, large-packet count in bits 8..15, reserved-packet count in bits 16..23), plus the truncated hisdio_comm_reg_seq_set().
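The three packet-count macros are shown in full above, so the pattern can be exercised directly; the *_get counterparts in this sketch are illustrative additions, not part of the header:

    /* Byte-lane field macros from oal_sdio_comm.h plus hypothetical getters. */
    #include <assert.h>
    #include <stdint.h>

    #define hisdio_short_pkt_set(reg, num)   do { (reg) = (((reg) & 0xFFFFFF00) | ((num) & 0xFF)); } while (0)
    #define hisdio_large_pkt_set(reg, num)   do { (reg) = (((reg) & 0xFFFF00FF) | (((num) & 0xFF) << 8)); } while (0)
    #define hisdio_reserve_pkt_set(reg, num) do { (reg) = (((reg) & 0xFF00FFFF) | (((num) & 0xFF) << 16)); } while (0)

    #define hisdio_short_pkt_get(reg)   ((reg) & 0xFF)           /* hypothetical getters */
    #define hisdio_large_pkt_get(reg)   (((reg) >> 8) & 0xFF)
    #define hisdio_reserve_pkt_get(reg) (((reg) >> 16) & 0xFF)

    int main(void)
    {
        uint32_t reg = 0;
        hisdio_short_pkt_set(reg, 3);      /* bits 0..7  */
        hisdio_large_pkt_set(reg, 7);      /* bits 8..15 */
        hisdio_reserve_pkt_set(reg, 1);    /* bits 16..23 */
        assert(reg == 0x00010703);
        assert(hisdio_large_pkt_get(reg) == 7);
        return 0;
    }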
/device/soc/hisilicon/common/platform/mtd/hifmc100/spi_nor/ |
| w25qh.c | Matches the local "unsigned long reg" in HifmcCntlrSpinorEntry4AddrW25qh(), which builds HIFMC_CMD_CMD1() from MTD_SPI_CMD_EN4B or MTD_SPI_CMD_FIRST_RESET_4ADDR, programs HIFMC_OP_CFG_FM_CS(spi->cs) and then HIFMC_OP_CMD1_EN(1) | HIFMC_OP_REG_OP_START through HIFMC_REG_WRITE(), and the corresponding local in HifmcCntlrSpinorQeEnableW25qh().
| hifmc100_spi_nor_ids.c | Matches the local "unsigned long reg" in HifmcCntlrSpinorWriteEnableDefault(), which reads the status register via HifmcCntlrReadDevReg(..., MTD_SPI_CMD_RDSR), returns early if MTD_SPI_SR_WEL_MASK is already set, clears HIFMC_GLOBAL_CFG_WP_ENABLE in the global config register, then issues HIFMC_CMD_CMD1(MTD_SPI_CMD_WREN); plus the locals in HifmcCntlrSpinorQeEnableDefault() and HifmcCntlrSpinorEntry4AddrDefault().
| hifmc100_spi_nor.c | Matches the "read reg base fail" error logs in HifmcCntlrReadSpinorInfo(), the local "unsigned long reg" in HifmcCntlrReadSpinorReg() (chip-select via HIFMC_OP_CFG_FM_CS(), HIFMC_OP_READ_STATUS_EN(1) | HIFMC_OP_REG_OP_START, the command write and HIFMC_DATA_NUM_CNT(1)), and locals in HifmcCntlrReadIdSpiNor(), HifmcCntlrEraseOneBlock() and HifmcCntlrDmaTransfer().
/device/soc/hisilicon/common/platform/mtd/hifmc100/spi_nand/ |
| hifmc100_spi_nand.c | Matches the same pattern on the SPI NAND side: "read reg base fail" logs in HifmcCntlrReadSpinandInfo(), the "unsigned int reg" in HifmcCntlrReadSpinandReg(), and further locals in HifmcCntlrDevFeatureOp(), HifmcCntlrReadIdSpiNand(), HifmcCntlrEraseOneBlock(), HifmcCntlrInitOob(), HifmcCntlrPageProgram() and HifmcCntlrReadOnePageToBuf().
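The sequence these hifmc100 helpers share is: program the flash command word, select the chip, kick the operation and wait for it to complete. A sketch of that flow (register offsets, field encodings and the completion model are assumptions for illustration, not the real HIFMC map):

    /* Command-kick sketch modelled on the hifmc100 helpers; all offsets and
     * field layouts below are invented for the example. */
    #include <stdint.h>
    #include <stdio.h>

    #define FMC_CMD_REG      0x00U
    #define FMC_OP_CFG_REG   0x04U
    #define FMC_OP_REG       0x08U

    #define FMC_OP_CFG_CS(cs)   (((uint32_t)(cs) & 0x1U) << 11)
    #define FMC_OP_CMD_EN       (1U << 0)
    #define FMC_OP_START        (1U << 31)

    struct fmc_cntlr { uint32_t regs[16]; };   /* stands in for the mapped controller */

    static void fmc_write(struct fmc_cntlr *c, uint32_t val, uint32_t off)
    {
        c->regs[off / 4] = val;
        /* a real controller clears the start bit when the op finishes;
         * model that as immediate completion */
        if (off == FMC_OP_REG && (val & FMC_OP_START))
            c->regs[off / 4] &= ~FMC_OP_START;
    }

    static uint32_t fmc_read(const struct fmc_cntlr *c, uint32_t off)
    {
        return c->regs[off / 4];
    }

    /* Issue a single command (e.g. Write Enable) to the flash on chip-select cs. */
    static void fmc_issue_cmd(struct fmc_cntlr *c, uint8_t cmd, int cs)
    {
        fmc_write(c, cmd, FMC_CMD_REG);                       /* which flash command */
        fmc_write(c, FMC_OP_CFG_CS(cs), FMC_OP_CFG_REG);      /* which chip */
        fmc_write(c, FMC_OP_CMD_EN | FMC_OP_START, FMC_OP_REG); /* go */
        while (fmc_read(c, FMC_OP_REG) & FMC_OP_START)
            ;                                                 /* poll for completion */
    }

    int main(void)
    {
        struct fmc_cntlr c = { { 0 } };
        fmc_issue_cmd(&c, 0x06 /* WREN */, 0);
        printf("CMD reg = 0x%02x\n", (unsigned int)fmc_read(&c, FMC_CMD_REG));
        return 0;
    }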
/device/board/hisilicon/hispark_aries/uboot/secureboot_release/ddr_init/boot/ |
| init_regs.c | Matches reg_read(), which decodes a bit position and width from reg->attr (R_REG_BIT_MASK/R_REG_BIT_OFFSET and R_BIT_MASK/R_BIT_OFFSET), reads *(volatile unsigned *)reg->reg_addr and compares the masked value against reg->value; reg_write(), which does the masked update using the W_* fields plus reg->delay; and the read_write(reg, pm) dispatcher.
/device/board/hisilicon/hispark_taurus/uboot/secureboot_release/ddr_init/boot/ |
| init_regs.c | Same table-driven reg_read()/reg_write()/read_write() helpers as the hispark_aries copy, here taking "const struct regentry *reg" and casting reg->reg_addr through uintptr_t before dereferencing.
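Both copies implement the same table-driven walk: each entry names a register, a value, and an attribute word whose fields say which bits to compare (read entries) or update (write entries). A user-space sketch of that scheme (the attr layout, masks and the in-memory register file are assumptions for illustration, not the real DDR-training encoding):

    /* Table-driven register init sketch; attr layout is invented:
     * [4:0] start bit, [10:5] bit count - 1, [31] 1 = read/compare. */
    #include <stdint.h>
    #include <stdio.h>

    #define ATTR_BIT_START(a) ((a) & 0x1FU)
    #define ATTR_BIT_NUM(a)   ((((a) >> 5) & 0x3FU) + 1U)
    #define ATTR_IS_READ(a)   (((a) >> 31) & 0x1U)

    struct regentry {
        unsigned int reg_addr;   /* index into the fake register file below */
        unsigned int value;
        unsigned int attr;
    };

    static uint32_t regfile[8];  /* stands in for the real MMIO space */

    static uint32_t field_mask(unsigned int start, unsigned int num)
    {
        return (num >= 32 ? 0xFFFFFFFFU : ((1U << num) - 1U)) << start;
    }

    /* Returns 0 when a read entry's field already holds the expected value,
     * or after a write entry's field has been updated. */
    static int entry_apply(const struct regentry *e)
    {
        uint32_t mask = field_mask(ATTR_BIT_START(e->attr), ATTR_BIT_NUM(e->attr));
        if (ATTR_IS_READ(e->attr))
            return (regfile[e->reg_addr] & mask) == (e->value & mask) ? 0 : 1;
        regfile[e->reg_addr] = (regfile[e->reg_addr] & ~mask) | (e->value & mask);
        return 0;
    }

    int main(void)
    {
        /* write 0x5 into bits [3:0] of register 2, then check it */
        struct regentry wr = { .reg_addr = 2, .value = 0x5, .attr = (3U << 5) };
        struct regentry rd = { .reg_addr = 2, .value = 0x5, .attr = (1U << 31) | (3U << 5) };
        entry_apply(&wr);
        printf("reg2 = 0x%x, check = %d\n", (unsigned int)regfile[2], entry_apply(&rd));
        return 0;
    }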
/device/soc/hisilicon/common/platform/pwm/ |
| pwm_hi35xx.h | Matches the inline helpers that operate on struct HiPwmRegs: HiPwmDisable() clears the enable bit in ctrl, HiPwmAlwaysOutput() sets (1 << PWM_KEEP_OFFSET) | PWM_ENABLE, HiPwmOutputNumberSquareWaves() writes the wave count to cfg2 and re-enables the block, plus HiPwmSetPolarity(), HiPwmSetPeriod() and HiPwmSetDuty().
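A self-contained sketch of that register-struct style (field order, PWM_KEEP_OFFSET and the stand-in main() are illustrative; in the driver the struct is laid over an ioremap()ed window):

    /* Register-struct sketch in the style of pwm_hi35xx.h; layout is assumed. */
    #include <stdint.h>
    #include <stdio.h>

    #define PWM_ENABLE      1U
    #define PWM_KEEP_OFFSET 2          /* assumed position of the "keep output" flag */

    struct HiPwmRegs {
        volatile uint32_t cfg0;        /* period */
        volatile uint32_t cfg1;        /* duty */
        volatile uint32_t cfg2;        /* number of square waves to emit */
        volatile uint32_t ctrl;
    };

    static inline void HiPwmDisable(struct HiPwmRegs *reg)
    {
        reg->ctrl &= ~PWM_ENABLE;
    }

    static inline void HiPwmAlwaysOutput(struct HiPwmRegs *reg)
    {
        reg->ctrl |= (1U << PWM_KEEP_OFFSET) | PWM_ENABLE;
    }

    static inline void HiPwmOutputNumberSquareWaves(struct HiPwmRegs *reg, uint32_t number)
    {
        reg->cfg2 = number;                      /* how many pulses to emit */
        reg->ctrl &= ~(1U << PWM_KEEP_OFFSET);   /* leave "always output" mode */
        reg->ctrl |= PWM_ENABLE;
    }

    static inline void HiPwmSetPeriod(struct HiPwmRegs *reg, uint32_t period) { reg->cfg0 = period; }
    static inline void HiPwmSetDuty(struct HiPwmRegs *reg, uint32_t duty)     { reg->cfg1 = duty; }

    int main(void)
    {
        struct HiPwmRegs pwm = { 0 };  /* in the driver this is a mapped register window */
        HiPwmSetPeriod(&pwm, 1000);
        HiPwmSetDuty(&pwm, 250);
        HiPwmOutputNumberSquareWaves(&pwm, 8);
        printf("ctrl=0x%x cfg2=%u\n", (unsigned int)pwm.ctrl, (unsigned int)pwm.cfg2);
        return 0;
    }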
/device/soc/rockchip/common/sdk_linux/drivers/tty/serial/8250/ |
| 8250_dwlib.c | Matches dw8250_writel_ext(), which picks iowrite32be() or writel() on p->membase + offset depending on the port, and dw8250_setup_port(), which reads the DW_UART_UCV component-version register, bails out if it is zero, and logs "Designware UART version %c.%c%c" from the bytes at shifts 24, 16 and 8.
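The version print boils down to pulling three ASCII characters out of one packed 32-bit word; a minimal sketch with a made-up register value:

    /* Decode sketch of the DW_UART_UCV version bytes used by dw8250_setup_port(). */
    #include <stdint.h>
    #include <stdio.h>

    static void dw_print_version(uint32_t ucv)
    {
        if (!ucv) {
            printf("UART version register reads 0, skipping setup\n");
            return;
        }
        printf("Designware UART version %c.%c%c\n",
               (char)((ucv >> 24) & 0xff),
               (char)((ucv >> 16) & 0xff),
               (char)((ucv >> 8) & 0xff));
    }

    int main(void)
    {
        dw_print_version(0x3430312A);   /* example value: '4', '0', '1' -> "4.01" */
        return 0;
    }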
/device/soc/rockchip/rk3588/kernel/drivers/video/rockchip/rga3/ |
| rga3_reg_info.c | Matches the "u32 reg = 0" locals in RGA3_set_reg_win0_info(), RGA3_set_reg_win1_info(), RGA3_set_reg_wr_info() and RGA3_set_reg_overlap_info(), which build each control word by repeated mask-and-or updates (clearing fields such as m_RGA3_WIN0_RD_CTRL_SW_WIN0_R2Y_EN, ..._Y2R_EN, ..._PIC_FORMAT and ..._PIX_SWAP before OR-ing in the new values), plus the local in rga3_soft_reset().
/device/board/hisilicon/hispark_aries/uboot/secureboot_release/ddr_init/drv/ |
| ddr_training_custom.c | Matches the file-scope "static struct tr_relate_reg *reg = &relate_reg", used by ddr_cmd_site_save()/ddr_cmd_site_restore() to stash and restore SYSCTRL_MISC_CTRL4, the PERI_CRG_DDRT clock register and the PHY age-compensation enables through reg->custom.*, and the "void *reg" parameters of ddr_training_save_reg_custom()/ddr_training_restore_reg_custom().
/device/soc/hisilicon/common/platform/wifi/hi3881v100/adapter/ |
| hdf_wlan_sdio_adapt.c | Matches two helper macros that ioremap() a 32-bit SoC register, bail out with a warning if the mapping fails, log the old and new contents with HDF_LOGW(), then writel() either VALUE directly or readl(reg) | VALUE before iounmap()ing the mapping.