/kernel/linux/linux-6.6/arch/x86/kernel/cpu/resctrl/
core.c
  137  struct rdt_hw_resource *hw_res = &rdt_resources_all[RDT_RESOURCE_L3];  in cache_alloc_hsw_probe()  local
  138  struct rdt_resource *r = &hw_res->r_resctrl;  in cache_alloc_hsw_probe()
  150  hw_res->num_closid = 4;  in cache_alloc_hsw_probe()
  198  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in __get_mem_config_intel()  local
  204  hw_res->num_closid = edx.split.cos_max + 1;  in __get_mem_config_intel()
  232  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in __rdt_get_mem_config_amd()  local
  242  hw_res->num_closid = edx + 1;  in __rdt_get_mem_config_amd()
  266  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in rdt_get_cache_alloc_cfg()  local
  272  hw_res->num_closid = edx.split.cos_max + 1;  in rdt_get_cache_alloc_cfg()
  305  struct rdt_hw_resource *hw_res = ...  in mba_wrmsr_amd()  local
  331  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in mba_wrmsr_intel()  local
  343  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in cat_wrmsr()  local
  370  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(m->res);  in rdt_ctrl_update()  local
  419  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in setup_default_ctrlval()  local
  441  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in domain_setup_ctrlval()  local
  751  struct rdt_hw_resource *hw_res = &rdt_resources_all[RDT_RESOURCE_MBA];  in get_mem_config()  local
  766  struct rdt_hw_resource *hw_res = &rdt_resources_all[RDT_RESOURCE_SMBA];  in get_slow_mem_config()  local
  865  struct rdt_hw_resource *hw_res;  in rdt_init_res_defs_intel()  local
  885  struct rdt_hw_resource *hw_res;  in rdt_init_res_defs_amd()  local
  [all...]

monitor.c
  235  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in resctrl_arch_rmid_read()  local
  251  hw_res->mbm_width);  in resctrl_arch_rmid_read()
  258  *val = chunks * hw_res->mon_scale;  in resctrl_arch_rmid_read()
  759  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in rdt_get_mon_l3_config()  local
  764  hw_res->mon_scale = boot_cpu_data.x86_cache_occ_scale;  in rdt_get_mon_l3_config()
  766  hw_res->mbm_width = MBM_CNTR_WIDTH_BASE;  in rdt_get_mon_l3_config()
  769  hw_res->mbm_width += mbm_offset;  in rdt_get_mon_l3_config()
  784  * to the nearest multiple of hw_res->mon_scale so it matches a  in rdt_get_mon_l3_config()
  798  hw_res->mbm_cfg_mask = ecx & MAX_EVT_CONFIG_BITS;  in rdt_get_mon_l3_config()

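The resctrl_arch_rmid_read() hits above turn raw MBM chunk counts into bytes: the hardware counter is only hw_res->mbm_width bits wide, so the delta since the previous read is taken modulo 2^width and then scaled by hw_res->mon_scale. A small standalone C sketch of that arithmetic; the width and scale constants are illustrative stand-ins for the CPUID-derived values, not the kernel code:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for hw_res->mbm_width and hw_res->mon_scale. */
    #define MBM_CNTR_WIDTH  24     /* base counter width (cf. MBM_CNTR_WIDTH_BASE) */
    #define MON_SCALE       64     /* bytes per chunk (x86_cache_occ_scale-style)  */

    /* Delta of a free-running counter that wraps at 2^width. */
    static uint64_t mbm_chunks(uint64_t prev, uint64_t cur, unsigned int width)
    {
        uint64_t mask = (width >= 64) ? ~0ULL : ((1ULL << width) - 1);
        return (cur - prev) & mask;           /* unsigned wrap handles cur < prev */
    }

    int main(void)
    {
        uint64_t prev = 0xFFFFF0, cur = 0x10;              /* counter wrapped     */
        uint64_t bytes = mbm_chunks(prev, cur, MBM_CNTR_WIDTH) * MON_SCALE;

        printf("%llu bytes\n", (unsigned long long)bytes); /* 32 chunks * 64      */
        return 0;
    }
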
internal.h
  443  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(res);  in resctrl_inc()  local
  445  hw_res++;  in resctrl_inc()
  446  return &hw_res->r_resctrl;  in resctrl_inc()

rdtgroup.c
  1593  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in mon_config_write()  local
  1618  if ((val & hw_res->mbm_cfg_mask) != val) {  in mon_config_write()
  1620  hw_res->mbm_cfg_mask);  in mon_config_write()
  2164  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in rdt_domain_reconfigure_cdp()  local
  2170  l2_qos_cfg_update(&hw_res->cdp_enabled);  in rdt_domain_reconfigure_cdp()
  2173  l3_qos_cfg_update(&hw_res->cdp_enabled);  in rdt_domain_reconfigure_cdp()
  2263  struct rdt_hw_resource *hw_res = &rdt_resources_all[l];  in resctrl_arch_set_cdp_enabled()  local
  2265  if (!hw_res->r_resctrl.cdp_capable)  in resctrl_arch_set_cdp_enabled()
  2635  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in reset_all_ctrls()  local
  2647  msr_param.high = hw_res ...  in reset_all_ctrls()
  [all...]

ctrlmondata.c
  287  struct rdt_hw_resource *hw_res = resctrl_to_arch_res(r);  in resctrl_arch_update_one()  local
  300  hw_res->msr_update(d, &msr_param, r);  in resctrl_arch_update_one()

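Almost every core.c and rdtgroup.c hit reaches the architecture-private wrapper through resctrl_to_arch_res(). As the internal.h hits suggest (resctrl_inc() steps with hw_res++ and returns &hw_res->r_resctrl), the helper is a container_of-style cast from the embedded struct rdt_resource back to its enclosing struct rdt_hw_resource, and the wrappers sit in the rdt_resources_all[] array indexed by RDT_RESOURCE_*. A minimal standalone sketch of that cast, using simplified stand-in structs rather than the kernel definitions:

    #include <stddef.h>
    #include <stdio.h>

    /* Simplified stand-ins for the kernel structures. */
    struct rdt_resource    { const char *name; };
    struct rdt_hw_resource { unsigned int num_closid; struct rdt_resource r_resctrl; };

    /* container_of: recover the wrapper from a pointer to its embedded member. */
    static struct rdt_hw_resource *resctrl_to_arch_res(struct rdt_resource *r)
    {
        return (struct rdt_hw_resource *)((char *)r -
               offsetof(struct rdt_hw_resource, r_resctrl));
    }

    int main(void)
    {
        struct rdt_hw_resource hw = { .num_closid = 16, .r_resctrl = { .name = "L3" } };
        struct rdt_resource *r = &hw.r_resctrl;        /* what generic code sees */

        printf("%s: %u CLOSIDs\n", r->name, resctrl_to_arch_res(r)->num_closid);
        return 0;
    }

Because the wrappers are contiguous array elements, advancing hw_res++ (as resctrl_inc() does) lands on the next resource's wrapper.
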
/third_party/mesa3d/src/gallium/drivers/virgl/
virgl_staging_mgr.c
  42   vws->resource_reference(vws, &staging->hw_res, NULL);  in virgl_staging_alloc_buffer()
  48   staging->hw_res = vws->resource_create(vws,  in virgl_staging_alloc_buffer()
  61   if (staging->hw_res == NULL)  in virgl_staging_alloc_buffer()
  64   staging->map = vws->resource_map(vws, staging->hw_res);  in virgl_staging_alloc_buffer()
  66   vws->resource_reference(vws, &staging->hw_res, NULL);  in virgl_staging_alloc_buffer()
  90   vws->resource_reference(vws, &staging->hw_res, NULL);  in virgl_staging_destroy()
  124  assert(staging->hw_res);  in virgl_staging_alloc()
  131  vws->resource_reference(vws, outbuf, staging->hw_res);  in virgl_staging_alloc()

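The virgl_staging_mgr.c hits trace a simple lifecycle: release any previous hw_res (resource_reference to NULL), create and map a new one, drop it again if mapping fails, and hand callers their own reference from virgl_staging_alloc(). A self-contained toy version of that reference-counting pattern; the types and helpers below are stand-ins, not the real virgl_winsys API:

    #include <stdio.h>
    #include <stdlib.h>

    /* Toy stand-in for struct virgl_hw_res: a refcounted, mappable buffer. */
    struct hw_res { int refcount; size_t size; void *data; };

    static struct hw_res *res_create(size_t size)
    {
        struct hw_res *r = calloc(1, sizeof(*r));
        if (!r)
            return NULL;
        r->refcount = 1;
        r->size = size;
        r->data = calloc(size, 1);
        if (!r->data) { free(r); return NULL; }
        return r;
    }

    /* Mirrors vws->resource_reference(vws, &dst, src): retain src, release old *dst. */
    static void res_reference(struct hw_res **dst, struct hw_res *src)
    {
        if (src)
            src->refcount++;
        if (*dst && --(*dst)->refcount == 0) { free((*dst)->data); free(*dst); }
        *dst = src;
    }

    struct staging { struct hw_res *hw_res; void *map; };

    /* Pattern from virgl_staging_alloc_buffer(): drop old, create, map, drop on failure. */
    static int staging_alloc_buffer(struct staging *s, size_t size)
    {
        res_reference(&s->hw_res, NULL);       /* release previous buffer, if any  */
        s->hw_res = res_create(size);          /* create() already returns one ref */
        if (!s->hw_res)
            return 0;
        s->map = s->hw_res->data;              /* stands in for resource_map()     */
        if (!s->map) { res_reference(&s->hw_res, NULL); return 0; }
        return 1;
    }

    int main(void)
    {
        struct staging s = { 0 };

        if (staging_alloc_buffer(&s, 4096))
            printf("staging buffer of %zu bytes mapped\n", s.hw_res->size);
        res_reference(&s.hw_res, NULL);        /* virgl_staging_destroy() analogue */
        return 0;
    }
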
virgl_resource.c
  124  if (!vws->res_is_referenced(vws, vctx->cbuf, res->hw_res))  in virgl_res_needs_flush()
  226  wait = (flush || vws->resource_is_busy(vws, res->hw_res));  in virgl_resource_transfer_prepare()
  275  (readback || (wait && vws->resource_is_busy(vws, res->hw_res)))  in virgl_resource_transfer_prepare()
  283  vws->resource_wait(vws, res->hw_res);  in virgl_resource_transfer_prepare()
  284  vws->transfer_get(vws, res->hw_res, &xfer->base.box, xfer->base.stride,  in virgl_resource_transfer_prepare()
  292  vws->resource_wait(vws, res->hw_res);  in virgl_resource_transfer_prepare()
  441  struct virgl_hw_res *hw_res;  in virgl_resource_realloc()  local
  448  hw_res = vs->vws->resource_create(vs->vws,  in virgl_resource_realloc()
  461  if (!hw_res)  in virgl_resource_realloc()
  464  vs->vws->resource_reference(vs->vws, &res->hw_res, NULL);  in virgl_resource_realloc()
  [all...]

virgl_transfer_queue.c
  105  transfer_overlap(const struct virgl_transfer *xfer, const struct virgl_hw_res *hw_res, unsigned level, const struct pipe_box *box, bool include_touching)  in transfer_overlap()  argument
  106  const struct virgl_hw_res *hw_res,  in transfer_overlap()
  113  if (xfer->hw_res != hw_res || xfer->base.level != level)  in transfer_overlap()
  140  virgl_transfer_queue_find_overlap(const struct virgl_transfer_queue *queue, const struct virgl_hw_res *hw_res, unsigned level, const struct pipe_box *box, bool include_touching)  in virgl_transfer_queue_find_overlap()  argument
  141  const struct virgl_hw_res *hw_res,  in virgl_transfer_queue_find_overlap()
  148  if (transfer_overlap(xfer, hw_res, level, box, include_touching))  in virgl_transfer_queue_find_overlap()
  158  return transfer_overlap(queued, current->hw_res, current->base.level,  in transfers_intersect()
  187  queue->vs->vws->transfer_put(queue->vs->vws, queued->hw_res,  in transfer_put()
  352  transfer->hw_res,  in virgl_transfer_queue_is_queued()
  359  virgl_transfer_queue_extend_buffer(struct virgl_transfer_queue *queue, const struct virgl_hw_res *hw_res, unsigned offset, unsigned size, const void *data)  in virgl_transfer_queue_extend_buffer()  argument
  360  const struct virgl_hw_res *hw_res,  in virgl_transfer_queue_extend_buffer()
  368  queued = virgl_transfer_queue_find_overlap(queue, hw_res,  in virgl_transfer_queue_extend_buffer()
  [all...]

virgl_query.c
  158  host_state = vs->vws->resource_map(vs->vws, query->buf->hw_res);  in virgl_end_query()
  169  vs->vws->emit_res(vs->vws, vctx->cbuf, query->buf->hw_res, false);  in virgl_end_query()
  187  if (vs->vws->res_is_referenced(vs->vws, vctx->cbuf, query->buf->hw_res))  in virgl_get_query_result()
  191  vs->vws->resource_wait(vs->vws, query->buf->hw_res);  in virgl_get_query_result()
  192  else if (vs->vws->resource_is_busy(vs->vws, query->buf->hw_res))  in virgl_get_query_result()
  195  host_state = vs->vws->resource_map(vs->vws, query->buf->hw_res);  in virgl_get_query_result()

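virgl_query.c and virgl_resource.c gate readback on the same checks: flush if the command buffer still references the hw_res (res_is_referenced), then either block in resource_wait() or, on the non-blocking path, bail out while resource_is_busy() before mapping. A toy sketch of that decision flow; the booleans stand in for the real winsys queries:

    #include <stdbool.h>
    #include <stdio.h>

    /* Toy stand-ins for the winsys state queried in the hits above. */
    struct hw_res { bool referenced_by_cbuf; bool busy; };

    static void flush_cbuf(struct hw_res *r)    { r->referenced_by_cbuf = false; }
    static void resource_wait(struct hw_res *r) { r->busy = false; }

    /*
     * Shape of virgl_get_query_result() / virgl_resource_transfer_prepare():
     * flush if the command buffer still references the buffer, then either block
     * (resource_wait) or bail out early (resource_is_busy) before mapping it.
     */
    static bool result_ready(struct hw_res *r, bool allow_wait)
    {
        if (r->referenced_by_cbuf)
            flush_cbuf(r);          /* res_is_referenced() -> flush               */
        if (allow_wait)
            resource_wait(r);       /* blocking path                              */
        else if (r->busy)
            return false;           /* non-blocking path: try again later         */
        return true;                /* safe to resource_map() and read the result */
    }

    int main(void)
    {
        struct hw_res r = { .referenced_by_cbuf = true, .busy = true };

        printf("non-blocking: %s\n", result_ready(&r, false) ? "ready" : "not ready");
        printf("blocking:     %s\n", result_ready(&r, true)  ? "ready" : "not ready");
        return 0;
    }
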
virgl_resource.h
  55   struct virgl_hw_res *hw_res;  member
  83   struct virgl_hw_res *hw_res;  member

virgl_staging_mgr.h
  41   struct virgl_hw_res *hw_res;  /* Staging buffer hw_res. */  member

virgl_transfer_queue.h
  62   const struct virgl_hw_res *hw_res,

virgl_context.c
  183  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_framebuffer()
  192  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_framebuffer()
  209  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_sampler_views()
  223  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_vertex_buffers()
  235  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_index_buffer()
  247  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_so_targets()
  264  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_uniform_buffers()
  281  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_shader_buffers()
  298  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_shader_images()
  312  vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_attach_res_atomic_buffers()
  [all...]

virgl_texture.c
  184  ptr = vws->resource_map(vws, vtex->hw_res);  in texture_transfer_map_resolve()
  252  vws->transfer_put(vws, trans->hw_res, box,  in flush_data()

virgl_streamout.c
  86   vws->emit_res(vws, vctx->cbuf, res->hw_res, FALSE);  in virgl_set_so_targets()

/third_party/mesa3d/src/gallium/drivers/virgl/tests/
virgl_staging_mgr_test.cpp
  53   struct virgl_hw_res *hw_res = CALLOC_STRUCT(virgl_hw_res);  in fake_resource_create()  local
  55   pipe_reference_init(&hw_res->reference, 1);  in fake_resource_create()
  57   hw_res->target = target;  in fake_resource_create()
  58   hw_res->bind = bind;  in fake_resource_create()
  59   hw_res->size = size;  in fake_resource_create()
  60   hw_res->data = CALLOC(size, 1);  in fake_resource_create()
  62   return hw_res;  in fake_resource_create()
  81   fake_resource_map(struct virgl_winsys *vws, struct virgl_hw_res *hw_res)  in fake_resource_map()  argument
  83   return hw_res->data;  in fake_resource_map()
  115  resource_map(struct virgl_hw_res *hw_res)  in resource_map()  argument
  350  failing_resource_map(struct virgl_winsys *vws, struct virgl_hw_res *hw_res)  in failing_resource_map()  argument
  [all...]

/kernel/linux/linux-5.10/drivers/gpu/drm/msm/disp/dpu1/
dpu_rm.c
  26   * @hw_res: Hardware resources required as reported by the encoders
  30   struct dpu_encoder_hw_resources hw_res;  member
  460  _dpu_rm_reserve_intf_related_hw(struct dpu_rm *rm, struct dpu_global_state *global_state, uint32_t enc_id, struct dpu_encoder_hw_resources *hw_res)  in _dpu_rm_reserve_intf_related_hw()  argument
  464  struct dpu_encoder_hw_resources *hw_res)  in _dpu_rm_reserve_intf_related_hw()
  469  for (i = 0; i < ARRAY_SIZE(hw_res->intfs); i++) {  in _dpu_rm_reserve_intf_related_hw()
  470  if (hw_res->intfs[i] == INTF_MODE_NONE)  in _dpu_rm_reserve_intf_related_hw()
  503  &reqs->hw_res);  in _dpu_rm_make_reservation()
  515  dpu_encoder_get_hw_resources(enc, &reqs->hw_res);  in _dpu_rm_populate_requirements()

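In dpu_rm.c the encoder first reports what it needs via dpu_encoder_get_hw_resources(&reqs->hw_res), and _dpu_rm_reserve_intf_related_hw() then walks hw_res->intfs[], skipping INTF_MODE_NONE entries and reserving an INTF block for each remaining one. A simplified standalone sketch of that walk, with stand-in types rather than the DPU definitions:

    #include <stdio.h>

    #define MAX_INTF 4
    #define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

    /* Simplified stand-ins for the DPU types referenced by the hits above. */
    enum intf_mode { INTF_MODE_NONE = 0, INTF_MODE_CMD, INTF_MODE_VIDEO };
    struct dpu_encoder_hw_resources { enum intf_mode intfs[MAX_INTF]; };

    /* The encoder reports which hardware interfaces it needs... */
    static void encoder_get_hw_resources(struct dpu_encoder_hw_resources *hw_res)
    {
        hw_res->intfs[1] = INTF_MODE_VIDEO;    /* e.g. one video-mode interface */
    }

    /* ...and the resource manager reserves an INTF for every non-NONE entry. */
    static int reserve_intf_related_hw(const struct dpu_encoder_hw_resources *hw_res)
    {
        unsigned int i, reserved = 0;

        for (i = 0; i < ARRAY_SIZE(hw_res->intfs); i++) {
            if (hw_res->intfs[i] == INTF_MODE_NONE)
                continue;
            printf("reserving INTF_%u\n", i);
            reserved++;
        }
        return reserved;
    }

    int main(void)
    {
        struct dpu_encoder_hw_resources reqs = { 0 };

        encoder_get_hw_resources(&reqs);
        return reserve_intf_related_hw(&reqs) ? 0 : 1;
    }
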
dpu_encoder.h
  32   * @hw_res: resource table to populate with encoder required resources
  35   struct dpu_encoder_hw_resources *hw_res);

dpu_encoder_phys.h
  133  struct dpu_encoder_hw_resources *hw_res);

/third_party/mesa3d/src/gallium/winsys/virgl/drm/
virgl_drm_winsys.h
  108  struct virgl_hw_res *hw_res;  member

virgl_drm_winsys.c
  924   fence->hw_res = virgl_drm_winsys_resource_create(vws, PIPE_BUFFER,  in virgl_drm_fence_create_legacy()
  926   if (!fence->hw_res) {  in virgl_drm_fence_create_legacy()
  1055  return !virgl_drm_resource_is_busy(vws, fence->hw_res);  in virgl_fence_wait()
  1060  while (virgl_drm_resource_is_busy(vws, fence->hw_res)) {  in virgl_fence_wait()
  1067  virgl_drm_resource_wait(vws, fence->hw_res);  in virgl_fence_wait()
  1083  virgl_drm_resource_reference(vws, &dfence->hw_res, NULL);  in virgl_fence_reference()

/kernel/linux/linux-5.10/drivers/net/fjes/
fjes_hw.c
  37   if (!request_mem_region(hw->hw_res.start, hw->hw_res.size,  in fjes_hw_iomap()
  43   base = (u8 *)ioremap(hw->hw_res.start, hw->hw_res.size);  in fjes_hw_iomap()
  51   release_mem_region(hw->hw_res.start, hw->hw_res.size);  in fjes_hw_iounmap()

fjes_main.c
  230   result = request_irq(adapter->hw.hw_res.irq, fjes_intr,  in fjes_request_irq()
  251   free_irq(adapter->hw.hw_res.irq, adapter);  in fjes_free_irq()
  1269  hw->hw_res.start = res->start;  in fjes_probe()
  1270  hw->hw_res.size = resource_size(res);  in fjes_probe()
  1271  hw->hw_res.irq = platform_get_irq(plat_dev, 0);  in fjes_probe()
  1272  if (hw->hw_res.irq < 0) {  in fjes_probe()
  1273  err = hw->hw_res.irq;  in fjes_probe()
  1369  fjes_intr(adapter->hw.hw_res.irq, adapter);  in fjes_irq_watch_task()

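In both the 5.10 and 6.6 fjes trees (the 6.6 hits follow below), hw_res is a small record of the platform device's MMIO window and IRQ: fjes_probe() copies res->start, resource_size(res) and platform_get_irq() into it, and fjes_hw_iomap() later feeds those fields to request_mem_region()/ioremap(). A condensed sketch of that flow using the same kernel APIs; the struct names here are stand-ins for the fjes definitions:

    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/ioport.h>
    #include <linux/platform_device.h>
    #include <linux/types.h>

    /* Stand-in mirroring the fields the fjes hits use. */
    struct demo_hw_res { resource_size_t start; resource_size_t size; int irq; };
    struct demo_hw     { struct demo_hw_res hw_res; u8 __iomem *base; };

    /* fjes_probe()-style step: capture the MMIO window and IRQ into hw_res. */
    static int demo_probe_resources(struct platform_device *plat_dev, struct demo_hw *hw)
    {
        struct resource *res = platform_get_resource(plat_dev, IORESOURCE_MEM, 0);

        if (!res)
            return -EINVAL;

        hw->hw_res.start = res->start;
        hw->hw_res.size = resource_size(res);
        hw->hw_res.irq = platform_get_irq(plat_dev, 0);
        return (hw->hw_res.irq < 0) ? hw->hw_res.irq : 0;
    }

    /* fjes_hw_iomap()-style step: reserve and map the window described by hw_res. */
    static u8 __iomem *demo_hw_iomap(struct demo_hw *hw)
    {
        if (!request_mem_region(hw->hw_res.start, hw->hw_res.size, "demo"))
            return NULL;

        hw->base = ioremap(hw->hw_res.start, hw->hw_res.size);
        if (!hw->base)
            release_mem_region(hw->hw_res.start, hw->hw_res.size);

        return hw->base;
    }
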
/kernel/linux/linux-6.6/drivers/net/fjes/
fjes_hw.c
  37   if (!request_mem_region(hw->hw_res.start, hw->hw_res.size,  in fjes_hw_iomap()
  43   base = (u8 *)ioremap(hw->hw_res.start, hw->hw_res.size);  in fjes_hw_iomap()
  51   release_mem_region(hw->hw_res.start, hw->hw_res.size);  in fjes_hw_iounmap()

fjes_main.c
  401   result = request_irq(adapter->hw.hw_res.irq, fjes_intr,  in fjes_request_irq()
  422   free_irq(adapter->hw.hw_res.irq, adapter);  in fjes_free_irq()
  1320  fjes_intr(adapter->hw.hw_res.irq, adapter);  in fjes_irq_watch_task()
  1397  hw->hw_res.start = res->start;  in fjes_probe()
  1398  hw->hw_res.size = resource_size(res);  in fjes_probe()
  1399  hw->hw_res.irq = platform_get_irq(plat_dev, 0);  in fjes_probe()
  1400  if (hw->hw_res.irq < 0) {  in fjes_probe()
  1401  err = hw->hw_res.irq;  in fjes_probe()
