/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gt/
intel_ring_types.h
    20   #define CACHELINE_BYTES 64                                   (macro definition)
    21   #define CACHELINE_DWORDS (CACHELINE_BYTES / sizeof(u32))
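These two macros are the basis for every i915 hit in this listing: CACHELINE_BYTES fixes the GPU cacheline size at 64 bytes and CACHELINE_DWORDS derives the per-cacheline dword count from it. A minimal standalone sketch of the same pair (u32 is spelled uint32_t here only because this builds outside the kernel):

    #include <stdio.h>
    #include <stdint.h>

    /* Mirror of the i915 definitions above, rebuilt as a standalone program. */
    #define CACHELINE_BYTES  64
    #define CACHELINE_DWORDS (CACHELINE_BYTES / sizeof(uint32_t))

    int main(void)
    {
        /* 64 bytes per cacheline, 4 bytes per dword -> 16 dwords per cacheline. */
        printf("CACHELINE_BYTES  = %d\n", CACHELINE_BYTES);
        printf("CACHELINE_DWORDS = %zu\n", CACHELINE_DWORDS);
        return 0;
    }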
intel_timeline.c
    54   BUILD_BUG_ON(BITS_PER_TYPE(u64) * CACHELINE_BYTES > PAGE_SIZE);   in hwsp_alloc()
   248   timeline->hwsp_offset = cacheline * CACHELINE_BYTES;              in intel_timeline_init()
   259   memset(vaddr + timeline->hwsp_offset, 0, CACHELINE_BYTES);        in intel_timeline_init()
   520   tl->hwsp_offset = cacheline * CACHELINE_BYTES;                    in __intel_timeline_get_seqno()
   522   memset(vaddr + tl->hwsp_offset, 0, CACHELINE_BYTES);              in __intel_timeline_get_seqno()
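The intel_timeline.c hits show the per-timeline hardware status page (HWSP) being carved into cacheline-sized slots: the BUILD_BUG_ON() guarantees that a 64-bit bitmap, one bit per cacheline, covers a whole page, and each allocation becomes hwsp_offset = cacheline * CACHELINE_BYTES followed by a memset of that single cacheline. A toy sketch of the idea, with hypothetical names (hwsp_page, hwsp_cacheline_alloc) and GCC's __builtin_ctzll standing in for the kernel's bitmap helpers:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define PAGE_SIZE       4096
    #define CACHELINE_BYTES 64

    /* Toy model of the hwsp_alloc() idea: 64 cachelines of 64 bytes fill one
     * page exactly, so a single u64 bitmap tracks which slots are free.
     * Struct and function names here are made up for illustration. */
    struct hwsp_page {
        uint64_t free_bitmap;           /* one bit per cacheline */
        uint8_t  data[PAGE_SIZE];
    };

    static int hwsp_cacheline_alloc(struct hwsp_page *p)
    {
        /* Mirrors the BUILD_BUG_ON(): 64 bits * 64 bytes must cover a page. */
        _Static_assert(64 * CACHELINE_BYTES <= PAGE_SIZE, "bitmap too small");

        if (!p->free_bitmap)
            return -1;                  /* page exhausted */

        int cacheline = __builtin_ctzll(p->free_bitmap);
        p->free_bitmap &= ~(1ull << cacheline);

        /* hwsp_offset = cacheline * CACHELINE_BYTES, then the slot is zeroed,
         * as intel_timeline_init() does for the real seqno slot. */
        int offset = cacheline * CACHELINE_BYTES;
        memset(p->data + offset, 0, CACHELINE_BYTES);
        return offset;
    }

    int main(void)
    {
        struct hwsp_page page = { .free_bitmap = ~0ull };

        printf("first slot at %d\n", hwsp_cacheline_alloc(&page));   /* 0 */
        printf("second slot at %d\n", hwsp_cacheline_alloc(&page));  /* 64 */
        return 0;
    }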
intel_ring.h
   110   #define cacheline(a) round_down(a, CACHELINE_BYTES)    in assert_ring_tail_valid()
   138   return (head - tail - CACHELINE_BYTES) & (size - 1);   in __intel_ring_space()
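The __intel_ring_space() hit is the usual power-of-two ring-buffer free-space calculation, with a full cacheline of headroom subtracted so the tail never advances to within a cacheline of the head. A small sketch of the same arithmetic:

    #include <stdio.h>

    #define CACHELINE_BYTES 64

    /* Sketch of the __intel_ring_space() arithmetic above: free space in a
     * power-of-two ring buffer, minus one cacheline of slack. Unsigned
     * wraparound plus the size - 1 mask keeps the result correct when the
     * tail sits ahead of the head. */
    static unsigned int ring_space(unsigned int head, unsigned int tail,
                                   unsigned int size)
    {
        return (head - tail - CACHELINE_BYTES) & (size - 1);
    }

    int main(void)
    {
        /* 4 KiB ring with 64 bytes in flight: 4096 - 64 used - 64 slack. */
        printf("%u\n", ring_space(128, 192, 4096));   /* 3968 */
        return 0;
    }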
intel_ring.c
   161   ring->effective_size -= 2 * CACHELINE_BYTES;                            in intel_engine_create_ring()
   307   num_dwords = (rq->ring->emit & (CACHELINE_BYTES - 1)) / sizeof(u32);    in intel_ring_cacheline_align()
   321   GEM_BUG_ON(rq->ring->emit & (CACHELINE_BYTES - 1));                     in intel_ring_cacheline_align()
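intel_ring_cacheline_align() pads the ring's emit pointer out to the next cacheline boundary (the GEM_BUG_ON() on line 321 checks the result), while intel_engine_create_ring() reserves two cachelines of the ring as unusable slack. A sketch of just the padding arithmetic, with a hypothetical helper name:

    #include <stdio.h>
    #include <stdint.h>

    #define CACHELINE_BYTES  64
    #define CACHELINE_DWORDS (CACHELINE_BYTES / sizeof(uint32_t))

    /* Sketch of the intel_ring_cacheline_align() calculation above: given the
     * current emit offset in bytes, how many dword-sized NOOPs are needed so
     * the next command lands on a cacheline boundary. */
    static unsigned int noops_to_cacheline(unsigned int emit)
    {
        unsigned int used = (emit & (CACHELINE_BYTES - 1)) / sizeof(uint32_t);

        return used ? CACHELINE_DWORDS - used : 0;   /* already aligned -> 0 */
    }

    int main(void)
    {
        printf("%u\n", noops_to_cacheline(0));    /* 0  - already aligned */
        printf("%u\n", noops_to_cacheline(40));   /* 6  - 10 dwords in, 6 to go */
        printf("%u\n", noops_to_cacheline(124));  /* 1  - one dword short */
        return 0;
    }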
intel_engine.h
    29   #define CACHELINE_BYTES 64                                   (macro definition)
    30   #define CACHELINE_DWORDS (CACHELINE_BYTES / sizeof(u32))
selftest_timeline.c
    36   return (address + tl->hwsp_offset) / CACHELINE_BYTES;   in hwsp_cacheline()
    39   #define CACHELINES_PER_PAGE (PAGE_SIZE / CACHELINE_BYTES)
intel_lrc.c
   340   GEM_BUG_ON(!IS_ALIGNED(size, CACHELINE_BYTES));   in lrc_ring_setup_indirect_ctx()
   343   ctx_bb_ggtt_addr | (size / CACHELINE_BYTES);      in lrc_ring_setup_indirect_ctx()
  3444   while ((unsigned long)cs % CACHELINE_BYTES)       in setup_indirect_ctx_bb()
  3769   while ((unsigned long)batch % CACHELINE_BYTES)    in gen8_init_indirectctx_bb()
  3866   while ((unsigned long)batch % CACHELINE_BYTES)    in gen9_init_indirectctx_bb()
  3900   while ((unsigned long)batch % CACHELINE_BYTES)    in gen10_init_indirectctx_bb()
  3998   CACHELINE_BYTES))) {                              in intel_init_workaround_bb()
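The intel_lrc.c hits combine two patterns: the workaround batch buffers are padded out to a cacheline boundary with MI_NOOPs (the while loops), and the INDIRECT_CTX register value then encodes the buffer's GGTT address together with its size expressed in cachelines, which is why the size must be cacheline-aligned. A simplified sketch of both, with hypothetical helper names (pad_to_cacheline, indirect_ctx_reg):

    #include <stdint.h>
    #include <stdio.h>
    #include <assert.h>

    #define CACHELINE_BYTES 64
    #define MI_NOOP         0u   /* the i915 MI_NOOP opcode encodes to 0 */

    /* Pad a batch-buffer write pointer out to the next cacheline with
     * MI_NOOPs, as the gen8/9 indirectctx_bb helpers above do. */
    static uint32_t *pad_to_cacheline(uint32_t *batch)
    {
        while ((uintptr_t)batch % CACHELINE_BYTES)
            *batch++ = MI_NOOP;
        return batch;
    }

    /* Encode "GGTT address | size in cachelines", as in
     * lrc_ring_setup_indirect_ctx(); the low bits hold the cacheline count,
     * so the size must be cacheline aligned (mirrors the GEM_BUG_ON()). */
    static uint32_t indirect_ctx_reg(uint32_t ggtt_addr, uint32_t size)
    {
        assert(size % CACHELINE_BYTES == 0);
        return ggtt_addr | (size / CACHELINE_BYTES);
    }

    int main(void)
    {
        static uint32_t buf[64] __attribute__((aligned(CACHELINE_BYTES)));
        uint32_t *cs = buf + 3;              /* pretend 3 dwords were emitted */

        cs = pad_to_cacheline(cs);
        printf("padded %td noop dwords\n", cs - (buf + 3));       /* 13 */
        printf("reg = 0x%08x\n", indirect_ctx_reg(0x1000, 128));  /* 0x1002 */
        return 0;
    }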
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/
intel_ring_types.h
    19   #define CACHELINE_BYTES 64                                   (macro definition)
    20   #define CACHELINE_DWORDS (CACHELINE_BYTES / sizeof(u32))
intel_ring.h
   111   #define cacheline(a) round_down(a, CACHELINE_BYTES)    in assert_ring_tail_valid()
   139   return (head - tail - CACHELINE_BYTES) & (size - 1);   in __intel_ring_space()
intel_ring.c
   170   ring->effective_size -= 2 * CACHELINE_BYTES;                            in intel_engine_create_ring()
   317   num_dwords = (rq->ring->emit & (CACHELINE_BYTES - 1)) / sizeof(u32);    in intel_ring_cacheline_align()
   331   GEM_BUG_ON(rq->ring->emit & (CACHELINE_BYTES - 1));                     in intel_ring_cacheline_align()
intel_engine.h
    32   #define CACHELINE_BYTES 64                                   (macro definition)
    33   #define CACHELINE_DWORDS (CACHELINE_BYTES / sizeof(u32))
intel_lrc.c
   838   GEM_BUG_ON(!IS_ALIGNED(size, CACHELINE_BYTES));   in lrc_setup_indirect_ctx()
   841   ctx_bb_ggtt_addr | (size / CACHELINE_BYTES);      in lrc_setup_indirect_ctx()
  1414   while ((unsigned long)cs % CACHELINE_BYTES)       in setup_indirect_ctx_bb()
  1639   while ((unsigned long)batch % CACHELINE_BYTES)    in gen8_init_indirectctx_bb()
  1736   while ((unsigned long)batch % CACHELINE_BYTES)    in gen9_init_indirectctx_bb()
  1838   CACHELINE_BYTES))) {                              in lrc_init_wa_ctx()
/kernel/linux/linux-5.10/arch/powerpc/lib/
string_32.S
    16   CACHELINE_BYTES = L1_CACHE_BYTES   (assembler definition)
    51   addi r6, r6, CACHELINE_BYTES
checksum_32.S
   123   CACHELINE_BYTES = L1_CACHE_BYTES   (assembler definition)
   183   addi r3,r3,CACHELINE_BYTES
   187   addi r3,r3,CACHELINE_BYTES
copy_32.S
    64   CACHELINE_BYTES = L1_CACHE_BYTES   (assembler definition)
   127   addi r6,r6,CACHELINE_BYTES
   377   addi r3,r3,CACHELINE_BYTES
   381   addi r3,r3,CACHELINE_BYTES
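In the PowerPC string/checksum/copy routines, CACHELINE_BYTES is simply an alias for L1_CACHE_BYTES, and the addi instructions step the working pointer one cacheline per loop iteration so that each pass operates on a whole line (dcbz in the real assembly). A rough C rendering of that loop shape, assuming a 32-byte L1 line and using memset in place of dcbz; the names are made up:

    #include <stddef.h>
    #include <string.h>
    #include <stdio.h>

    #define L1_CACHE_BYTES  32          /* assumption: typical for these cores */
    #define CACHELINE_BYTES L1_CACHE_BYTES

    /* Rough C rendering of the assembly pattern above: advance the pointer
     * one cacheline per iteration ("addi rX,rX,CACHELINE_BYTES") and handle
     * a whole line per pass; memset stands in for dcbz here. */
    static void clear_by_cachelines(void *dst, size_t len)
    {
        char *p = dst;
        char *end = p + (len / CACHELINE_BYTES) * CACHELINE_BYTES;

        for (; p < end; p += CACHELINE_BYTES)
            memset(p, 0, CACHELINE_BYTES);

        memset(p, 0, len % CACHELINE_BYTES);   /* tail smaller than a line */
    }

    int main(void)
    {
        char buf[100];

        memset(buf, 0xff, sizeof(buf));
        clear_by_cachelines(buf, sizeof(buf));
        printf("buf[0]=%d buf[99]=%d\n", buf[0], buf[99]);   /* 0 0 */
        return 0;
    }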
/kernel/linux/linux-6.6/arch/powerpc/lib/
string_32.S
    16   CACHELINE_BYTES = L1_CACHE_BYTES   (assembler definition)
    51   addi r6, r6, CACHELINE_BYTES
checksum_32.S
   120   CACHELINE_BYTES = L1_CACHE_BYTES   (assembler definition)
   180   addi r3,r3,CACHELINE_BYTES
   184   addi r3,r3,CACHELINE_BYTES
copy_32.S
    61   CACHELINE_BYTES = L1_CACHE_BYTES   (assembler definition)
   124   addi r6,r6,CACHELINE_BYTES
   374   addi r3,r3,CACHELINE_BYTES
   378   addi r3,r3,CACHELINE_BYTES
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/display/
intel_dsb.c
   208   aligned_tail = ALIGN(tail, CACHELINE_BYTES);                             in intel_dsb_align_tail()
   237   if (drm_WARN_ON(&dev_priv->drm, !IS_ALIGNED(tail, CACHELINE_BYTES)))     in intel_dsb_commit()
   304   size = ALIGN(max_cmds * 8, CACHELINE_BYTES);                             in intel_dsb_prepare()
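The display DSB (Display State Buffer) tail pointer must be cacheline aligned (the drm_WARN_ON on line 237), so both the write tail and the overall command-buffer size are rounded up to whole cachelines, and intel_dsb_align_tail() fills the gap. A sketch of that rounding, using the kernel-style ALIGN() macro and a hypothetical dsb_noop_pad() helper:

    #include <stdio.h>
    #include <stdint.h>

    #define CACHELINE_BYTES 64

    /* The kernel's ALIGN(): round x up to the next multiple of a power-of-two a. */
    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    /* Illustrative stand-in for intel_dsb_align_tail(): round the byte tail
     * up to a cacheline and zero-fill the gap, as the real function does. */
    static unsigned int dsb_noop_pad(uint32_t *buf, unsigned int tail)
    {
        unsigned int aligned_tail = ALIGN(tail, CACHELINE_BYTES);
        unsigned int i;

        for (i = tail / 4; i < aligned_tail / 4; i++)
            buf[i] = 0;

        return aligned_tail;
    }

    int main(void)
    {
        uint32_t buf[256];
        unsigned int max_cmds = 37;

        printf("tail  %u -> %u\n", 72u, dsb_noop_pad(buf, 72));    /* 72 -> 128 */
        printf("size  %u -> %u\n", max_cmds * 8,
               ALIGN(max_cmds * 8, CACHELINE_BYTES));              /* 296 -> 320 */
        return 0;
    }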
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/display/
intel_dsb.c
   226   tail = ALIGN(dsb->free_pos * 4, CACHELINE_BYTES);   in intel_dsb_commit()
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gvt/
scheduler.c
   586   0, CACHELINE_BYTES, 0);                   in prepare_shadow_wa_ctx()
   598   memset(per_ctx_va, 0, CACHELINE_BYTES);   in prepare_shadow_wa_ctx()
  1669   CACHELINE_BYTES;                          in intel_vgpu_create_workload()
  1688   CACHELINE_BYTES)) {                       in intel_vgpu_create_workload()
cmd_parser.c
  2852   ring_size = round_up(wa_ctx->indirect_ctx.size + CACHELINE_BYTES,   in scan_wa_ctx()
  2971   roundup(ctx_size + CACHELINE_BYTES,                                 in shadow_indirect_ctx()
  3026   memcpy(bb_start_sva, per_ctx_start, CACHELINE_BYTES);               in combine_wa_ctx()
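The GVT-g hits show the shadow copy of a guest's workaround context: scan_wa_ctx()/shadow_indirect_ctx() size the shadow buffer with one extra cacheline so that the per-context workaround batch, handled as a single cacheline here (the memset/memcpy of CACHELINE_BYTES), can be appended behind the indirect context image in combine_wa_ctx(). A simplified sketch of that sizing and append step, with made-up names and plain arrays in place of GEM objects:

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define PAGE_SIZE       4096
    #define CACHELINE_BYTES 64
    #define round_up(x, y)  ((((x) + (y) - 1) / (y)) * (y))

    /* Shadow buffer gets one extra cacheline on top of the indirect context
     * size, rounded up to whole pages (mirrors scan_wa_ctx()). */
    static size_t shadow_size(size_t indirect_ctx_size)
    {
        return round_up(indirect_ctx_size + CACHELINE_BYTES, PAGE_SIZE);
    }

    /* The per-context workaround batch occupies exactly one cacheline and is
     * copied right after the indirect context image (mirrors combine_wa_ctx()). */
    static void append_per_ctx(uint8_t *shadow, size_t indirect_ctx_size,
                               const uint8_t *per_ctx)
    {
        memcpy(shadow + indirect_ctx_size, per_ctx, CACHELINE_BYTES);
    }

    int main(void)
    {
        uint8_t shadow[PAGE_SIZE] = { 0 };
        uint8_t per_ctx[CACHELINE_BYTES];

        memset(per_ctx, 0xab, sizeof(per_ctx));
        printf("shadow size for 1000 bytes: %zu\n", shadow_size(1000)); /* 4096 */
        append_per_ctx(shadow, 1000, per_ctx);
        printf("byte at 1000: 0x%02x\n", shadow[1000]);                 /* 0xab */
        return 0;
    }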
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gvt/
scheduler.c
   623   0, CACHELINE_BYTES, 0);                   in prepare_shadow_wa_ctx()
   644   memset(per_ctx_va, 0, CACHELINE_BYTES);   in prepare_shadow_wa_ctx()
  1713   CACHELINE_BYTES;                          in intel_vgpu_create_workload()
  1732   CACHELINE_BYTES)) {                       in intel_vgpu_create_workload()
cmd_parser.c
  2889   ring_size = round_up(wa_ctx->indirect_ctx.size + CACHELINE_BYTES,   in scan_wa_ctx()
  3007   roundup(ctx_size + CACHELINE_BYTES,                                 in shadow_indirect_ctx()
  3062   memcpy(bb_start_sva, per_ctx_start, CACHELINE_BYTES);               in combine_wa_ctx()
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/uc/
intel_guc_submission.c
   410   * + CACHELINE_BYTES child join[0]
   412   * + CACHELINE_BYTES child join[n - 1]
   423   u8 unused[CACHELINE_BYTES - sizeof(u32)];
   459   BUILD_BUG_ON(sizeof(struct sync_semaphore) != CACHELINE_BYTES);   in __get_parent_scratch()
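In the GuC submission code each child's sync_semaphore is padded out to exactly one cacheline (the u8 unused[CACHELINE_BYTES - sizeof(u32)] member) and the BUILD_BUG_ON() pins that size, so consecutive semaphores in the parent scratch page never share a cacheline. A standalone sketch of the same layout, with a _Static_assert in place of BUILD_BUG_ON():

    #include <stdint.h>
    #include <stdio.h>

    #define CACHELINE_BYTES 64

    /* Each semaphore occupies its own cacheline: one live dword plus padding,
     * exactly as in the struct shown above. */
    struct sync_semaphore {
        uint32_t semaphore;
        uint8_t  unused[CACHELINE_BYTES - sizeof(uint32_t)];
    };

    _Static_assert(sizeof(struct sync_semaphore) == CACHELINE_BYTES,
                   "sync_semaphore must be exactly one cacheline");

    int main(void)
    {
        struct sync_semaphore join[4];   /* one cacheline per child, as in the
                                            parent scratch layout sketched above */

        printf("sizeof = %zu, stride = %zu\n",
               sizeof(struct sync_semaphore),
               (size_t)((char *)&join[1] - (char *)&join[0]));   /* 64, 64 */
        return 0;
    }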