/kernel/linux/linux-5.10/drivers/net/ethernet/netronome/nfp/flower/
  metadata.c
    298  struct nfp_fl_stats_ctx_to_flow *ctx_entry;  [in nfp_compile_flow_metadata(), local]
    315  ctx_entry = kzalloc(sizeof(*ctx_entry), GFP_KERNEL);  [in nfp_compile_flow_metadata()]
    316  if (!ctx_entry) {  [in nfp_compile_flow_metadata()]
    321  ctx_entry->stats_cxt = stats_cxt;  [in nfp_compile_flow_metadata()]
    322  ctx_entry->flow = nfp_flow;  [in nfp_compile_flow_metadata()]
    324  if (rhashtable_insert_fast(&priv->stats_ctx_table, &ctx_entry->ht_node,  [in nfp_compile_flow_metadata()]
    390  &ctx_entry->ht_node,  [in nfp_compile_flow_metadata()]
    393  kfree(ctx_entry);  [in nfp_compile_flow_metadata()]
    411  struct nfp_fl_stats_ctx_to_flow *ctx_entry;  [in nfp_modify_flow_metadata(), local]
    445  struct nfp_fl_stats_ctx_to_flow *ctx_entry;  [in nfp_flower_get_fl_payload_from_ctx(), local]
    [all ...]
/kernel/linux/linux-6.6/drivers/net/ethernet/netronome/nfp/flower/
  metadata.c
    310  struct nfp_fl_stats_ctx_to_flow *ctx_entry;  [in nfp_compile_flow_metadata(), local]
    327  ctx_entry = kzalloc(sizeof(*ctx_entry), GFP_KERNEL);  [in nfp_compile_flow_metadata()]
    328  if (!ctx_entry) {  [in nfp_compile_flow_metadata()]
    333  ctx_entry->stats_cxt = stats_cxt;  [in nfp_compile_flow_metadata()]
    334  ctx_entry->flow = nfp_flow;  [in nfp_compile_flow_metadata()]
    336  if (rhashtable_insert_fast(&priv->stats_ctx_table, &ctx_entry->ht_node,  [in nfp_compile_flow_metadata()]
    382  &ctx_entry->ht_node,  [in nfp_compile_flow_metadata()]
    385  kfree(ctx_entry);  [in nfp_compile_flow_metadata()]
    403  struct nfp_fl_stats_ctx_to_flow *ctx_entry;  [in nfp_modify_flow_metadata(), local]
    437  struct nfp_fl_stats_ctx_to_flow *ctx_entry;  [in nfp_flower_get_fl_payload_from_ctx(), local]
    [all ...]
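The 5.10 and 6.6 hits above show the same pattern in nfp_compile_flow_metadata(): a small stats-context-to-flow record is allocated, filled with the stats context id and the flow payload pointer, and inserted into priv->stats_ctx_table; on the later failure path the entry is removed from the table again and freed. A minimal sketch of that pattern follows; the struct layout matches what the hits imply, but the helper name map_stats_ctx_to_flow() and the rhashtable_params definition are assumptions for illustration, not the driver's own.

/*
 * Sketch only: map a firmware stats context id to its flow payload via an
 * rhashtable, mirroring the kzalloc/insert/kfree lines listed above.  The
 * params block and the function name are illustrative assumptions.
 */
#include <linux/errno.h>
#include <linux/rhashtable.h>
#include <linux/slab.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct nfp_fl_payload;			/* opaque flow payload, defined elsewhere */

struct nfp_fl_stats_ctx_to_flow {
	struct rhash_head ht_node;	/* linkage into the stats_ctx_table */
	u32 stats_cxt;			/* hash key: firmware stats context id */
	struct nfp_fl_payload *flow;	/* flow owning that stats context */
};

/* Assumed hashtable parameters: key is the u32 stats context id. */
static const struct rhashtable_params stats_ctx_table_params = {
	.key_len	= sizeof(u32),
	.key_offset	= offsetof(struct nfp_fl_stats_ctx_to_flow, stats_cxt),
	.head_offset	= offsetof(struct nfp_fl_stats_ctx_to_flow, ht_node),
	.automatic_shrinking = true,
};

/* Allocate, fill and insert the mapping; drop it again if the insert fails. */
static int map_stats_ctx_to_flow(struct rhashtable *stats_ctx_table,
				 u32 stats_cxt, struct nfp_fl_payload *nfp_flow)
{
	struct nfp_fl_stats_ctx_to_flow *ctx_entry;
	int err;

	ctx_entry = kzalloc(sizeof(*ctx_entry), GFP_KERNEL);
	if (!ctx_entry)
		return -ENOMEM;

	ctx_entry->stats_cxt = stats_cxt;
	ctx_entry->flow = nfp_flow;

	err = rhashtable_insert_fast(stats_ctx_table, &ctx_entry->ht_node,
				     stats_ctx_table_params);
	if (err) {
		kfree(ctx_entry);
		return err;
	}

	return 0;
}

Lookup by stats context id (as nfp_flower_get_fl_payload_from_ctx() appears to do) would use rhashtable_lookup_fast() with the same params and return ctx_entry->flow.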
/kernel/linux/linux-5.10/drivers/gpu/drm/
  drm_context.c
    368  struct drm_ctx_list *ctx_entry;  [in drm_legacy_addctx(), local]
    390  ctx_entry = kmalloc(sizeof(*ctx_entry), GFP_KERNEL);  [in drm_legacy_addctx()]
    391  if (!ctx_entry) {  [in drm_legacy_addctx()]
    396  INIT_LIST_HEAD(&ctx_entry->head);  [in drm_legacy_addctx()]
    397  ctx_entry->handle = ctx->handle;  [in drm_legacy_addctx()]
    398  ctx_entry->tag = file_priv;  [in drm_legacy_addctx()]
    401  list_add(&ctx_entry->head, &dev->ctxlist);  [in drm_legacy_addctx()]
/kernel/linux/linux-6.6/drivers/gpu/drm/
  drm_context.c
    362  struct drm_ctx_list *ctx_entry;  [in drm_legacy_addctx(), local]
    383  ctx_entry = kmalloc(sizeof(*ctx_entry), GFP_KERNEL);  [in drm_legacy_addctx()]
    384  if (!ctx_entry) {  [in drm_legacy_addctx()]
    389  INIT_LIST_HEAD(&ctx_entry->head);  [in drm_legacy_addctx()]
    390  ctx_entry->handle = ctx->handle;  [in drm_legacy_addctx()]
    391  ctx_entry->tag = file_priv;  [in drm_legacy_addctx()]
    394  list_add(&ctx_entry->head, &dev->ctxlist);  [in drm_legacy_addctx()]
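Both drm_context.c listings show drm_legacy_addctx() doing the same bookkeeping: allocate a drm_ctx_list node, record the new context handle and the owning file_priv as a tag, and add the node to dev->ctxlist so the context can be reclaimed when that file closes. Below is a hedged sketch of that pattern; the member types, the lock passed in, and the helper name track_context() are assumptions for illustration.

/*
 * Sketch only: per-file tracking of a newly created legacy DRM context,
 * mirroring the kmalloc/INIT_LIST_HEAD/list_add lines above.
 */
#include <linux/errno.h>
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/slab.h>
#include <linux/types.h>

struct drm_ctx_list {
	struct list_head head;	/* linkage into dev->ctxlist */
	u32 handle;		/* context handle returned to userspace */
	void *tag;		/* file_priv that created the context */
};

/* Record handle/file_priv on the device's context list under its mutex. */
static int track_context(struct list_head *ctxlist, struct mutex *ctxlist_mutex,
			 u32 handle, void *file_priv)
{
	struct drm_ctx_list *ctx_entry;

	ctx_entry = kmalloc(sizeof(*ctx_entry), GFP_KERNEL);
	if (!ctx_entry)
		return -ENOMEM;

	INIT_LIST_HEAD(&ctx_entry->head);
	ctx_entry->handle = handle;
	ctx_entry->tag = file_priv;

	mutex_lock(ctxlist_mutex);
	list_add(&ctx_entry->head, ctxlist);
	mutex_unlock(ctxlist_mutex);

	return 0;
}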
/kernel/linux/linux-5.10/drivers/iommu/intel/
  debugfs.c
    25   struct context_entry *ctx_entry;  [member]
    163  tbl_wlk->rt_entry->lo, tbl_wlk->ctx_entry->hi,  [in print_tbl_walk()]
    164  tbl_wlk->ctx_entry->lo);  [in print_tbl_walk()]
    246  tbl_wlk.ctx_entry = context;  [in ctx_tbl_walk()]
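The debugfs.c hits (the 6.6 entry below lists the same lines at slightly different numbers) all have one shape: the table-walk state struct carries a pointer to the context_entry currently being visited, ctx_tbl_walk() fills that pointer in as it iterates the context table, and print_tbl_walk() dumps the entry's raw hi/lo words. A minimal sketch, assuming a simplified walk struct and output format:

/*
 * Sketch only: walk state carrying the context entry under inspection and a
 * dump of its raw 128 bits, in the spirit of print_tbl_walk().
 */
#include <linux/pci.h>
#include <linux/seq_file.h>
#include <linux/types.h>

struct root_entry;			/* opaque here */

struct context_entry {			/* VT-d context entry: two 64-bit words */
	u64 lo;
	u64 hi;
};

struct tbl_walk {
	u16 bus;
	u16 devfn;
	struct root_entry *rt_entry;	/* root table entry being walked */
	struct context_entry *ctx_entry;	/* context table entry being walked */
};

/* Dump the device address and the raw hi:lo words of its context entry. */
static void dump_ctx_entry(struct seq_file *m, const struct tbl_walk *tbl_wlk)
{
	seq_printf(m, "%02x:%02x.%x\t0x%016llx:0x%016llx\n",
		   tbl_wlk->bus, PCI_SLOT(tbl_wlk->devfn),
		   PCI_FUNC(tbl_wlk->devfn),
		   tbl_wlk->ctx_entry->hi, tbl_wlk->ctx_entry->lo);
}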
/kernel/linux/linux-6.6/drivers/iommu/intel/
  debugfs.c
    26   struct context_entry *ctx_entry;  [member]
    167  tbl_wlk->rt_entry->lo, tbl_wlk->ctx_entry->hi,  [in print_tbl_walk()]
    168  tbl_wlk->ctx_entry->lo);  [in print_tbl_walk()]
    250  tbl_wlk.ctx_entry = context;  [in ctx_tbl_walk()]
  iommu.c
    838  struct context_entry *ctx_entry;  [in dmar_fault_dump_ptes(), local]
    861  ctx_entry = iommu_context_addr(iommu, bus, devfn, 0);  [in dmar_fault_dump_ptes()]
    862  if (!ctx_entry) {  [in dmar_fault_dump_ptes()]
    868  ctx_entry->hi, ctx_entry->lo);  [in dmar_fault_dump_ptes()]
    872  level = agaw_to_level(ctx_entry->hi & 7);  [in dmar_fault_dump_ptes()]
    873  pgtable = phys_to_virt(ctx_entry->lo & VTD_PAGE_MASK);  [in dmar_fault_dump_ptes()]
    878  dir = phys_to_virt(ctx_entry->lo & VTD_PAGE_MASK);  [in dmar_fault_dump_ptes()]
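The dmar_fault_dump_ptes() hits show how a legacy-mode context entry is decoded: bits 2:0 of the high word give the AGAW, which agaw_to_level() turns into a page-table depth, and the low word (masked to a page boundary) is the physical address of the second-level page-table root; the line-878 hit reads the same field again, on what looks like the scalable-mode path. A sketch of that decode follows, with the constants and helper names renamed because they are assumptions here rather than the kernel's own definitions.

/*
 * Sketch only: decode a legacy-mode VT-d context entry into a walkable
 * page table, following the agaw_to_level()/phys_to_virt() lines above.
 */
#include <linux/io.h>
#include <linux/types.h>

#define SKETCH_VTD_PAGE_SHIFT	12
#define SKETCH_VTD_PAGE_MASK	(~(((u64)1 << SKETCH_VTD_PAGE_SHIFT) - 1))

struct context_entry {			/* two 64-bit words, as in the dumps above */
	u64 lo;				/* present bit + page-table root address */
	u64 hi;				/* AGAW in bits 2:0, other fields above */
};

/* AGAW values 1/2/3 correspond to 3-, 4- and 5-level second-stage tables. */
static inline int sketch_agaw_to_level(u64 agaw)
{
	return (int)agaw + 2;
}

/* Return the virtual address of the page-table root and report its depth. */
static void *context_entry_pgtable(const struct context_entry *ctx_entry,
				   int *level)
{
	*level = sketch_agaw_to_level(ctx_entry->hi & 7);
	return phys_to_virt(ctx_entry->lo & SKETCH_VTD_PAGE_MASK);
}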
/kernel/linux/linux-5.10/drivers/gpu/drm/vmwgfx/
  vmwgfx_execbuf.c
    1036  struct vmw_res_cache_entry *ctx_entry =  [in vmw_query_bo_switch_prepare(), local]
    1040  BUG_ON(!ctx_entry->valid);  [in vmw_query_bo_switch_prepare()]
    1041  sw_context->last_query_ctx = ctx_entry->res;  [in vmw_query_bo_switch_prepare()]
    1095  struct vmw_res_cache_entry *ctx_entry =  [in vmw_query_bo_switch_commit(), local]
    1100  BUG_ON(!ctx_entry->valid);  [in vmw_query_bo_switch_commit()]
    1101  ctx = ctx_entry->res;  [in vmw_query_bo_switch_commit()]
/kernel/linux/linux-6.6/drivers/gpu/drm/vmwgfx/
  vmwgfx_execbuf.c
    1027  struct vmw_res_cache_entry *ctx_entry =  [in vmw_query_bo_switch_prepare(), local]
    1031  BUG_ON(!ctx_entry->valid);  [in vmw_query_bo_switch_prepare()]
    1032  sw_context->last_query_ctx = ctx_entry->res;  [in vmw_query_bo_switch_prepare()]
    1086  struct vmw_res_cache_entry *ctx_entry =  [in vmw_query_bo_switch_commit(), local]
    1091  BUG_ON(!ctx_entry->valid);  [in vmw_query_bo_switch_commit()]
    1092  ctx = ctx_entry->res;  [in vmw_query_bo_switch_commit()]
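The vmwgfx hits in both kernels follow one pattern: vmw_query_bo_switch_prepare() and vmw_query_bo_switch_commit() take a cached context resource entry out of the software context's resource cache, assert with BUG_ON() that the slot was already validated, and then use ctx_entry->res as the context that owns the pending query. A sketch of the prepare side is below; because the actual initializer is truncated in the listing, the single cache slot and the struct names here are simplified stand-ins rather than the driver's real layout.

/*
 * Sketch only: the query-buffer switch consults the software context's
 * cached "last seen context" resource and insists it was validated first.
 */
#include <linux/bug.h>
#include <linux/types.h>

struct vmw_resource;				/* opaque context resource */

struct vmw_res_cache_entry {
	bool valid;				/* filled in during validation */
	struct vmw_resource *res;		/* most recent resource of this type */
};

struct sketch_sw_context {
	struct vmw_res_cache_entry ctx_cache;	/* stand-in for the per-type cache slot */
	struct vmw_resource *last_query_ctx;	/* context owning the pending query */
};

/* Mirror of the prepare-side hits: remember which context issued the query. */
static void query_bo_switch_prepare(struct sketch_sw_context *sw_context)
{
	struct vmw_res_cache_entry *ctx_entry = &sw_context->ctx_cache;

	BUG_ON(!ctx_entry->valid);	/* the context must already be validated */
	sw_context->last_query_ctx = ctx_entry->res;
}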