Searched refs:cur_seq (Results 1 - 25 of 31) sorted by relevance

/kernel/linux/linux-5.10/drivers/gpu/drm/via/
via_verifier.c 290 static __inline__ int finish_current_sequence(drm_via_state_t * cur_seq) in finish_current_sequence() argument
292 switch (cur_seq->unfinished) { in finish_current_sequence()
294 DRM_DEBUG("Z Buffer start address is 0x%x\n", cur_seq->z_addr); in finish_current_sequence()
298 cur_seq->d_addr); in finish_current_sequence()
301 if (cur_seq->agp_texture) { in finish_current_sequence()
303 cur_seq->tex_level_lo[cur_seq->texture]; in finish_current_sequence()
304 unsigned end = cur_seq->tex_level_hi[cur_seq->texture]; in finish_current_sequence()
316 &(cur_seq in finish_current_sequence()
348 investigate_hazard(uint32_t cmd, hazard_t hz, drm_via_state_t *cur_seq) investigate_hazard() argument
521 via_check_prim_list(uint32_t const **buffer, const uint32_t * buf_end, drm_via_state_t *cur_seq) via_check_prim_list() argument
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdgpu/
amdgpu_mn.c 58 * @cur_seq: Value to pass to mmu_interval_set_seq()
65 unsigned long cur_seq) in amdgpu_mn_invalidate_gfx()
76 mmu_interval_set_seq(mni, cur_seq); in amdgpu_mn_invalidate_gfx()
95 * @cur_seq: Value to pass to mmu_interval_set_seq()
102 unsigned long cur_seq) in amdgpu_mn_invalidate_hsa()
112 mmu_interval_set_seq(mni, cur_seq); in amdgpu_mn_invalidate_hsa()
63 amdgpu_mn_invalidate_gfx(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) amdgpu_mn_invalidate_gfx() argument
100 amdgpu_mn_invalidate_hsa(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) amdgpu_mn_invalidate_hsa() argument
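The amdgpu hits above show the common shape of an mmu_interval notifier invalidate() callback: check blockability, take the driver's lock, record cur_seq with mmu_interval_set_seq(), then tear down the affected mapping. A minimal sketch of that shape follows; the my_dev_* names and helpers are hypothetical placeholders, not taken from the indexed sources.

#include <linux/mmu_notifier.h>
#include <linux/mutex.h>

struct my_dev_range {				/* hypothetical driver state */
	struct mmu_interval_notifier notifier;
	struct mutex mapping_lock;
};

static void my_dev_unmap_range(struct my_dev_range *r)
{
	/* Stand-in: a real driver would invalidate its device page tables here. */
}

static bool my_dev_invalidate(struct mmu_interval_notifier *mni,
			      const struct mmu_notifier_range *range,
			      unsigned long cur_seq)
{
	struct my_dev_range *r = container_of(mni, struct my_dev_range, notifier);

	if (!mmu_notifier_range_blockable(range))
		return false;

	mutex_lock(&r->mapping_lock);
	/*
	 * Publish the new sequence before tearing down the device mapping,
	 * so a concurrent mmu_interval_read_retry() sees the change.
	 */
	mmu_interval_set_seq(mni, cur_seq);
	my_dev_unmap_range(r);
	mutex_unlock(&r->mapping_lock);
	return true;
}

static const struct mmu_interval_notifier_ops my_dev_notifier_ops = {
	.invalidate = my_dev_invalidate,
};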
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/
amdgpu_hmm.c 61 * @cur_seq: Value to pass to mmu_interval_set_seq()
68 unsigned long cur_seq) in amdgpu_hmm_invalidate_gfx()
79 mmu_interval_set_seq(mni, cur_seq); in amdgpu_hmm_invalidate_gfx()
98 * @cur_seq: Value to pass to mmu_interval_set_seq()
105 unsigned long cur_seq) in amdgpu_hmm_invalidate_hsa()
112 amdgpu_amdkfd_evict_userptr(mni, cur_seq, bo->kfd_bo); in amdgpu_hmm_invalidate_hsa()
66 amdgpu_hmm_invalidate_gfx(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) amdgpu_hmm_invalidate_gfx() argument
103 amdgpu_hmm_invalidate_hsa(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) amdgpu_hmm_invalidate_hsa() argument
amdgpu_amdkfd.h 193 unsigned long cur_seq, struct kgd_mem *mem);
215 unsigned long cur_seq, struct kgd_mem *mem) in amdgpu_amdkfd_evict_userptr()
214 amdgpu_amdkfd_evict_userptr(struct mmu_interval_notifier *mni, unsigned long cur_seq, struct kgd_mem *mem) amdgpu_amdkfd_evict_userptr() argument
/kernel/linux/linux-5.10/mm/
mmu_notifier.c 240 * mmu_interval_set_seq() using the provided cur_seq from in mmu_interval_read_begin()
272 unsigned long cur_seq; in mn_itree_release() local
276 mn_itree_inv_start_range(subscriptions, &range, &cur_seq); in mn_itree_release()
280 cur_seq); in mn_itree_release()
448 unsigned long cur_seq; in mn_itree_invalidate() local
451 mn_itree_inv_start_range(subscriptions, range, &cur_seq); in mn_itree_invalidate()
457 cur_seq); in mn_itree_invalidate()
/kernel/linux/linux-6.6/mm/
mmu_notifier.c 240 * mmu_interval_set_seq() using the provided cur_seq from in mmu_interval_read_begin()
272 unsigned long cur_seq; in mn_itree_release() local
276 mn_itree_inv_start_range(subscriptions, &range, &cur_seq); in mn_itree_release()
280 cur_seq); in mn_itree_release()
448 unsigned long cur_seq; in mn_itree_invalidate() local
451 mn_itree_inv_start_range(subscriptions, range, &cur_seq); in mn_itree_invalidate()
457 cur_seq); in mn_itree_invalidate()
/kernel/linux/linux-6.6/include/linux/
mmu_notifier.h 254 unsigned long cur_seq);
322 * @cur_seq - The cur_seq passed to the invalidate() callback
327 * mmu_interval_read_retry(). The provided cur_seq will always be odd.
334 unsigned long cur_seq) in mmu_interval_set_seq()
336 WRITE_ONCE(interval_sub->invalidate_seq, cur_seq); in mmu_interval_set_seq()
333 mmu_interval_set_seq(struct mmu_interval_notifier *interval_sub, unsigned long cur_seq) mmu_interval_set_seq() argument
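The header comment above ties cur_seq to collision-retry: mmu_interval_set_seq() must store the value before any unlock that would let mmu_interval_read_retry() observe it. The consumer side of that protocol is sketched below, reusing the same hypothetical my_dev_* names as the callback sketch earlier (my_dev_build_mapping and my_dev_commit_mapping are assumed helpers, not real kernel APIs).

static int my_dev_build_mapping(struct my_dev_range *r);	/* hypothetical */
static void my_dev_commit_mapping(struct my_dev_range *r);	/* hypothetical */

static int my_dev_populate_range(struct my_dev_range *r)
{
	unsigned long seq;
	int ret;

again:
	/* Sample the notifier sequence before building the mapping. */
	seq = mmu_interval_read_begin(&r->notifier);

	ret = my_dev_build_mapping(r);
	if (ret)
		return ret;

	mutex_lock(&r->mapping_lock);
	/*
	 * If an invalidate callback stored a newer cur_seq in the meantime,
	 * the mapping just built is stale and must be thrown away and redone.
	 */
	if (mmu_interval_read_retry(&r->notifier, seq)) {
		mutex_unlock(&r->mapping_lock);
		goto again;
	}
	my_dev_commit_mapping(r);
	mutex_unlock(&r->mapping_lock);
	return 0;
}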
kmsg_dump.h 34 * @cur_seq: Points to the oldest message to dump
38 u64 cur_seq; member
/kernel/linux/linux-5.10/drivers/gpu/drm/radeon/
radeon_mn.c 50 unsigned long cur_seq) in radeon_mn_invalidate()
48 radeon_mn_invalidate(struct mmu_interval_notifier *mn, const struct mmu_notifier_range *range, unsigned long cur_seq) radeon_mn_invalidate() argument
/kernel/linux/linux-6.6/drivers/gpu/drm/radeon/
radeon_mn.c 44 * @cur_seq: Value to pass to mmu_interval_set_seq()
51 unsigned long cur_seq) in radeon_mn_invalidate()
49 radeon_mn_invalidate(struct mmu_interval_notifier *mn, const struct mmu_notifier_range *range, unsigned long cur_seq) radeon_mn_invalidate() argument
/kernel/linux/linux-5.10/kernel/printk/
printk.c 3258 dumper->cur_seq = clear_seq; in kmsg_dump()
3306 if (!prb_read_valid(prb, dumper->cur_seq, &r)) in kmsg_dump_get_line_nolock()
3310 if (!prb_read_valid_info(prb, dumper->cur_seq, in kmsg_dump_get_line_nolock()
3319 dumper->cur_seq = r.info->seq + 1; in kmsg_dump_get_line_nolock()
3396 if (prb_read_valid_info(prb, dumper->cur_seq, &info, NULL)) { in kmsg_dump_get_buffer()
3397 if (info.seq != dumper->cur_seq) { in kmsg_dump_get_buffer()
3399 dumper->cur_seq = info.seq; in kmsg_dump_get_buffer()
3404 if (dumper->cur_seq >= dumper->next_seq) { in kmsg_dump_get_buffer()
3410 seq = dumper->cur_seq; in kmsg_dump_get_buffer()
3419 seq = dumper->cur_seq; in kmsg_dump_get_buffer()
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/
user_exp_rcv.c 67 unsigned long cur_seq);
70 unsigned long cur_seq);
943 unsigned long cur_seq) in tid_rb_invalidate()
994 unsigned long cur_seq) in tid_cover_invalidate()
1002 mmu_interval_set_seq(mni, cur_seq); in tid_cover_invalidate()
941 tid_rb_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) tid_rb_invalidate() argument
992 tid_cover_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) tid_cover_invalidate() argument
/kernel/linux/linux-6.6/drivers/infiniband/hw/hfi1/
user_exp_rcv.c 25 unsigned long cur_seq);
28 unsigned long cur_seq);
894 unsigned long cur_seq) in tid_rb_invalidate()
944 unsigned long cur_seq) in tid_cover_invalidate()
952 mmu_interval_set_seq(mni, cur_seq); in tid_cover_invalidate()
892 tid_rb_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) tid_rb_invalidate() argument
942 tid_cover_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) tid_cover_invalidate() argument
/kernel/linux/linux-5.10/include/linux/
kmsg_dump.h 50 u64 cur_seq; member
mmu_notifier.h 248 unsigned long cur_seq);
317 * @cur_seq - The cur_seq passed to the invalidate() callback
322 * mmu_interval_read_retry(). The provided cur_seq will always be odd.
329 unsigned long cur_seq) in mmu_interval_set_seq()
331 WRITE_ONCE(interval_sub->invalidate_seq, cur_seq); in mmu_interval_set_seq()
328 mmu_interval_set_seq(struct mmu_interval_notifier *interval_sub, unsigned long cur_seq) mmu_interval_set_seq() argument
/kernel/linux/linux-5.10/fs/nfs/
nfs4session.c 204 u32 cur_seq = 0; in nfs4_slot_seqid_in_use() local
208 if (nfs4_slot_get_seqid(tbl, slotid, &cur_seq) == 0 && in nfs4_slot_seqid_in_use()
209 cur_seq == seq_nr && test_bit(slotid, tbl->used_slots)) in nfs4_slot_seqid_in_use()
/kernel/linux/linux-6.6/fs/nfs/
nfs4session.c 204 u32 cur_seq = 0; in nfs4_slot_seqid_in_use() local
208 if (nfs4_slot_get_seqid(tbl, slotid, &cur_seq) == 0 && in nfs4_slot_seqid_in_use()
209 cur_seq == seq_nr && test_bit(slotid, tbl->used_slots)) in nfs4_slot_seqid_in_use()
/kernel/linux/linux-6.6/kernel/printk/
printk.c 4147 if (iter->cur_seq < min_seq) in kmsg_dump_get_line()
4148 iter->cur_seq = min_seq; in kmsg_dump_get_line()
4154 if (!prb_read_valid(prb, iter->cur_seq, &r)) in kmsg_dump_get_line()
4158 if (!prb_read_valid_info(prb, iter->cur_seq, in kmsg_dump_get_line()
4167 iter->cur_seq = r.info->seq + 1; in kmsg_dump_get_line()
4210 if (iter->cur_seq < min_seq) in kmsg_dump_get_buffer()
4211 iter->cur_seq = min_seq; in kmsg_dump_get_buffer()
4213 if (prb_read_valid_info(prb, iter->cur_seq, &info, NULL)) { in kmsg_dump_get_buffer()
4214 if (info.seq != iter->cur_seq) { in kmsg_dump_get_buffer()
4216 iter->cur_seq in kmsg_dump_get_buffer()
[all...]
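The printk hits show cur_seq acting as the read cursor of a kmsg dump: it is clamped to the oldest record still available and advanced past each record returned. Below is a minimal sketch of a dumper built on the 6.6-era iterator API (struct kmsg_dump_iter); the my_* names and the sink are hypothetical, not from the indexed sources.

#include <linux/kmsg_dump.h>
#include <linux/module.h>

static void my_sink_write(const char *line, size_t len)
{
	/* Stand-in: a real dumper would push @line to persistent storage. */
}

static void my_kmsg_dump(struct kmsg_dumper *dumper,
			 enum kmsg_dump_reason reason)
{
	struct kmsg_dump_iter iter;
	char line[256];
	size_t len;

	/* Start iter.cur_seq at the oldest record still in the ring buffer. */
	kmsg_dump_rewind(&iter);
	/* Each successful read advances iter.cur_seq past the returned record. */
	while (kmsg_dump_get_line(&iter, true, line, sizeof(line), &len))
		my_sink_write(line, len);
}

static struct kmsg_dumper my_dumper = {
	.dump = my_kmsg_dump,
	.max_reason = KMSG_DUMP_PANIC,
};

static int __init my_dumper_init(void)
{
	return kmsg_dump_register(&my_dumper);
}
module_init(my_dumper_init);

MODULE_LICENSE("GPL");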
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gem/
i915_gem_userptr.c 55 * @cur_seq: Value to pass to mmu_interval_set_seq()
62 unsigned long cur_seq) in i915_gem_userptr_invalidate()
73 mmu_interval_set_seq(mni, cur_seq); in i915_gem_userptr_invalidate()
60 i915_gem_userptr_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) i915_gem_userptr_invalidate() argument
/kernel/linux/linux-5.10/lib/
test_hmm.c 212 unsigned long cur_seq) in dmirror_interval_invalidate()
229 mmu_interval_set_seq(mni, cur_seq); in dmirror_interval_invalidate()
777 unsigned long cur_seq) in dmirror_snapshot_invalidate()
792 mmu_interval_set_seq(mni, cur_seq); in dmirror_snapshot_invalidate()
210 dmirror_interval_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) dmirror_interval_invalidate() argument
775 dmirror_snapshot_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) dmirror_snapshot_invalidate() argument
/kernel/linux/linux-6.6/lib/
test_hmm.c 259 unsigned long cur_seq) in dmirror_interval_invalidate()
276 mmu_interval_set_seq(mni, cur_seq); in dmirror_interval_invalidate()
1082 unsigned long cur_seq) in dmirror_snapshot_invalidate()
1097 mmu_interval_set_seq(mni, cur_seq); in dmirror_snapshot_invalidate()
257 dmirror_interval_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) dmirror_interval_invalidate() argument
1080 dmirror_snapshot_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) dmirror_snapshot_invalidate() argument
/kernel/linux/linux-6.6/net/mptcp/
protocol.h 757 u64 __mptcp_expand_seq(u64 old_seq, u64 cur_seq);
758 static inline u64 mptcp_expand_seq(u64 old_seq, u64 cur_seq, bool use_64bit) in mptcp_expand_seq() argument
761 return cur_seq; in mptcp_expand_seq()
763 return __mptcp_expand_seq(old_seq, cur_seq); in mptcp_expand_seq()
options.c 1015 u64 __mptcp_expand_seq(u64 old_seq, u64 cur_seq) in __mptcp_expand_seq() argument
1020 cur_seq32 = (u32)cur_seq; in __mptcp_expand_seq()
1021 cur_seq = (old_seq & GENMASK_ULL(63, 32)) + cur_seq32; in __mptcp_expand_seq()
1023 return cur_seq + (1LL << 32); in __mptcp_expand_seq()
1027 return cur_seq - (1LL << 32); in __mptcp_expand_seq()
1028 return cur_seq; in __mptcp_expand_seq()
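The options.c hit shows how MPTCP rebuilds a 64-bit data sequence number from a 32-bit value: keep the upper 32 bits of the last known sequence, splice in the received low bits, and correct by 2^32 if the 32-bit counter wrapped in either direction. A self-contained, user-space restatement of that logic (not the kernel function itself, whose wrap checks are elided in the hit above):

#include <stdint.h>
#include <stdio.h>

/* Expand a received 32-bit sequence against the last known 64-bit one. */
static uint64_t expand_seq(uint64_t old_seq, uint32_t cur_seq32)
{
	uint32_t old_seq32 = (uint32_t)old_seq;
	uint64_t cur_seq = (old_seq & 0xffffffff00000000ULL) | cur_seq32;

	/* Forward wrap: numerically smaller, but ahead in modular terms. */
	if (cur_seq32 < old_seq32 && (int32_t)(old_seq32 - cur_seq32) < 0)
		return cur_seq + (1ULL << 32);
	/* Reverse wrap: numerically larger, but behind in modular terms. */
	if (cur_seq32 > old_seq32 && (int32_t)(old_seq32 - cur_seq32) > 0)
		return cur_seq - (1ULL << 32);
	return cur_seq;
}

int main(void)
{
	/* The 32-bit counter wrapped: 0xfffffff0 -> 0x10 advances the epoch. */
	printf("0x%llx\n", (unsigned long long)expand_seq(0x1fffffff0ULL, 0x10));
	return 0;
}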
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/
nouveau_svm.c 489 unsigned long cur_seq) in nouveau_svm_range_invalidate()
505 mmu_interval_set_seq(mni, cur_seq); in nouveau_svm_range_invalidate()
487 nouveau_svm_range_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) nouveau_svm_range_invalidate() argument
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/
nouveau_svm.c 511 unsigned long cur_seq) in nouveau_svm_range_invalidate()
531 mmu_interval_set_seq(mni, cur_seq); in nouveau_svm_range_invalidate()
509 nouveau_svm_range_invalidate(struct mmu_interval_notifier *mni, const struct mmu_notifier_range *range, unsigned long cur_seq) nouveau_svm_range_invalidate() argument
