/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/ |
H A D | intel_timeline.c |
    40    struct intel_timeline *tl =    in __timeline_retire() local
    41    container_of(active, typeof(*tl), active);    in __timeline_retire()
    43    i915_vma_unpin(tl->hwsp_ggtt);    in __timeline_retire()
    44    intel_timeline_put(tl);    in __timeline_retire()
    49    struct intel_timeline *tl =    in __timeline_active() local
    50    container_of(active, typeof(*tl), active);    in __timeline_active()
    52    __i915_vma_pin(tl->hwsp_ggtt);    in __timeline_active()
    53    intel_timeline_get(tl);    in __timeline_active()
    171   struct intel_timeline *tl;    in intel_timeline_create_from_engine() local
    173   tl ...    in intel_timeline_create_from_engine()
    185   __intel_timeline_pin(struct intel_timeline *tl)    __intel_timeline_pin() argument
    191   intel_timeline_pin(struct intel_timeline *tl, struct i915_gem_ww_ctx *ww)    intel_timeline_pin() argument
    223   intel_timeline_reset_seqno(const struct intel_timeline *tl)    intel_timeline_reset_seqno() argument
    234   intel_timeline_enter(struct intel_timeline *tl)    intel_timeline_enter() argument
    275   intel_timeline_exit(struct intel_timeline *tl)    intel_timeline_exit() argument
    299   timeline_advance(struct intel_timeline *tl)    timeline_advance() argument
    308   __intel_timeline_get_seqno(struct intel_timeline *tl, u32 *seqno)    __intel_timeline_get_seqno() argument
    326   intel_timeline_get_seqno(struct intel_timeline *tl, struct i915_request *rq, u32 *seqno)    intel_timeline_get_seqno() argument
    343   struct intel_timeline *tl;    intel_timeline_read_hwsp() local
    381   intel_timeline_unpin(struct intel_timeline *tl)    intel_timeline_unpin() argument
    418   struct intel_timeline *tl, *tn;    intel_gt_show_timelines() local
    [all...] |
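The __timeline_retire()/__timeline_active() hits above recover the enclosing intel_timeline from a pointer to its embedded i915_active member with container_of(). A minimal, self-contained sketch of that pattern follows; the names here (struct timeline, struct active, timeline_retire) are illustrative stand-ins, not the i915 definitions.

#include <stddef.h>
#include <stdio.h>

/* Same idea as the kernel macro: recover the outer struct from a member pointer. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct active {                 /* stand-in for struct i915_active */
	int count;
};

struct timeline {               /* stand-in for struct intel_timeline */
	unsigned int seqno;
	struct active active;   /* embedded member, as in intel_timeline */
};

/* Callback that only receives the embedded member, like __timeline_retire(). */
static void timeline_retire(struct active *act)
{
	struct timeline *tl = container_of(act, struct timeline, active);

	printf("retiring timeline with seqno %u\n", tl->seqno);
}

int main(void)
{
	struct timeline tl = { .seqno = 42 };

	timeline_retire(&tl.active);
	return 0;
}

The callback only ever sees the embedded member; the offset subtraction is what hands it back the owning object.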
H A D | selftest_timeline.c |
    27    static struct page *hwsp_page(struct intel_timeline *tl)    in hwsp_page() argument
    29    struct drm_i915_gem_object *obj = tl->hwsp_ggtt->obj;    in hwsp_page()
    35    static unsigned long hwsp_cacheline(struct intel_timeline *tl)    in hwsp_cacheline() argument
    37    unsigned long address = (unsigned long)page_address(hwsp_page(tl));    in hwsp_cacheline()
    39    return (address + offset_in_page(tl->hwsp_offset)) / TIMELINE_SEQNO_BYTES;    in hwsp_cacheline()
    42    static int selftest_tl_pin(struct intel_timeline *tl)    in selftest_tl_pin() argument
    49    err = i915_gem_object_lock(tl->hwsp_ggtt->obj, &ww);    in selftest_tl_pin()
    51    err = intel_timeline_pin(tl, &ww);    in selftest_tl_pin()
    79    struct intel_timeline *tl)    in __mock_hwsp_record()
    81    tl ...    in __mock_hwsp_record()
    77    __mock_hwsp_record(struct mock_hwsp_freelist *state, unsigned int idx, struct intel_timeline *tl)    __mock_hwsp_record() argument
    93    struct intel_timeline *tl;    __mock_hwsp_timeline() local
    204   __igt_sync(struct intel_timeline *tl, u64 ctx, const struct __igt_sync *p, const char *name)    __igt_sync() argument
    246   struct intel_timeline tl;    igt_sync() local
    290   struct intel_timeline tl;    bench_sync() local
    485   checked_tl_write(struct intel_timeline *tl, struct intel_engine_cs *engine, u32 value)    checked_tl_write() argument
    553   struct intel_timeline *tl;    live_hwsp_engine() local
    582   struct intel_timeline *tl = timelines[n];    live_hwsp_engine() local
    623   struct intel_timeline *tl;    live_hwsp_alternate() local
    654   struct intel_timeline *tl = timelines[n];    live_hwsp_alternate() local
    674   struct intel_timeline *tl;    live_hwsp_wrap() local
    828   setup_watcher(struct hwsp_watcher *w, struct intel_gt *gt, struct intel_timeline *tl)    setup_watcher() argument
    943   retire_requests(struct intel_timeline *tl)    retire_requests() argument
    959   struct intel_timeline *tl = ce->timeline;    wrap_timeline() local
    988   struct intel_timeline *tl;    live_hwsp_read() local
    1193  struct intel_timeline *tl = ce->timeline;    live_hwsp_rollover_kernel() local
    1271  struct intel_timeline *tl;    live_hwsp_rollover_user() local
    1369  struct intel_timeline *tl;    live_hwsp_recycle() local
    [all...] |
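hwsp_cacheline() above maps a timeline's status-page address plus its in-page offset to a small slot index by dividing by the slot size (TIMELINE_SEQNO_BYTES here, CACHELINE_BYTES in the 5.10 copy below). A standalone sketch of that arithmetic; the slot size and page size are illustrative constants.

#include <stdint.h>
#include <stdio.h>

#define SLOT_BYTES	64u	/* stand-in for TIMELINE_SEQNO_BYTES / CACHELINE_BYTES */
#define PAGE_BYTES	4096u

/* Map a status-page base address plus an in-page offset to a slot index,
 * the way hwsp_cacheline() does. */
static unsigned long slot_index(uintptr_t page_base, unsigned int hwsp_offset)
{
	return (page_base + (hwsp_offset & (PAGE_BYTES - 1))) / SLOT_BYTES;
}

int main(void)
{
	static _Alignas(4096) unsigned char page[PAGE_BYTES];

	/* Offsets 0x0c0 and 0x8c0 land in different slots of the same page. */
	printf("%lu %lu\n", slot_index((uintptr_t)page, 0x0c0),
	       slot_index((uintptr_t)page, 0x8c0));
	return 0;
}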
H A D | intel_gt_requests.c |
    17    static bool retire_requests(struct intel_timeline *tl)    in retire_requests() argument
    21    list_for_each_entry_safe(rq, rn, &tl->requests, link)    in retire_requests()
    26    return !i915_active_fence_isset(&tl->last_request);    in retire_requests()
    64    struct intel_timeline *tl = xchg(&engine->retire, NULL);    in engine_retire() local
    67    struct intel_timeline *next = xchg(&tl->retire, NULL);    in engine_retire()
    77    if (mutex_trylock(&tl->mutex)) {    in engine_retire()
    78    retire_requests(tl);    in engine_retire()
    79    mutex_unlock(&tl->mutex);    in engine_retire()
    81    intel_timeline_put(tl);    in engine_retire()
    84    tl ...    in engine_retire()
    88    add_retire(struct intel_engine_cs *engine, struct intel_timeline *tl)    add_retire() argument
    112   intel_engine_add_retire(struct intel_engine_cs *engine, struct intel_timeline *tl)    intel_engine_add_retire() argument
    137   struct intel_timeline *tl, *tn;    intel_gt_retire_requests_timeout() local
    [all...] |
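retire_requests() above walks tl->requests with list_for_each_entry_safe() so that each completed request can be unlinked and freed without invalidating the iterator. A minimal sketch of the same "remember the successor before freeing the current node" idea on a plain singly linked list; struct request and its complete flag are illustrative, not the i915 types.

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

struct request {
	unsigned int seqno;
	bool complete;
	struct request *next;
};

/* Free completed requests in submission order; grab ->next before freeing,
 * as the _safe list iterators do. */
static void retire_requests(struct request **head)
{
	struct request *rq = *head, *rn;

	while (rq && rq->complete) {
		rn = rq->next;		/* grab the successor first */
		printf("retiring %u\n", rq->seqno);
		free(rq);
		rq = rn;
	}
	*head = rq;			/* first request that is still in flight */
}

static struct request *push(struct request *head, unsigned int seqno, bool complete)
{
	struct request *rq = malloc(sizeof(*rq));

	rq->seqno = seqno;
	rq->complete = complete;
	rq->next = head;
	return rq;
}

int main(void)
{
	struct request *head = NULL;

	head = push(head, 3, false);
	head = push(head, 2, true);
	head = push(head, 1, true);
	retire_requests(&head);		/* retires 1 and 2, stops at 3 */
	return 0;
}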
H A D | intel_timeline.h |
    45    static inline int __intel_timeline_sync_set(struct intel_timeline *tl,    in __intel_timeline_sync_set() argument
    48    return i915_syncmap_set(&tl->sync, context, seqno);    in __intel_timeline_sync_set()
    51    static inline int intel_timeline_sync_set(struct intel_timeline *tl,    in intel_timeline_sync_set() argument
    54    return __intel_timeline_sync_set(tl, fence->context, fence->seqno);    in intel_timeline_sync_set()
    57    static inline bool __intel_timeline_sync_is_later(struct intel_timeline *tl,    in __intel_timeline_sync_is_later() argument
    60    return i915_syncmap_is_later(&tl->sync, context, seqno);    in __intel_timeline_sync_is_later()
    63    static inline bool intel_timeline_sync_is_later(struct intel_timeline *tl,    in intel_timeline_sync_is_later() argument
    66    return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno);    in intel_timeline_sync_is_later()
    69    void __intel_timeline_pin(struct intel_timeline *tl);
    70    int intel_timeline_pin(struct intel_timeline *tl, struc ...
    95    intel_timeline_is_last(const struct intel_timeline *tl, const struct i915_request *rq)    intel_timeline_is_last() argument
    [all...] |
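The sync_set/sync_is_later helpers above record, per fence context, the latest seqno the timeline has already synchronized against, so redundant waits can be skipped. The real i915_syncmap is a compressed tree over many contexts; the sketch below is a deliberately simplified one-entry cache that only shows the wrap-safe u32 seqno comparison and the set/query shape.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Wrap-safe "a is at or after b" for 32-bit seqnos (the usual signed-difference trick). */
static bool seqno_passed(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) >= 0;
}

/* Toy single-context stand-in for the per-context syncmap: remember the last
 * seqno seen and report whether a new fence is already covered by it. */
struct sync_cache {
	uint64_t context;
	uint32_t seqno;
	bool valid;
};

static bool sync_is_later(const struct sync_cache *c, uint64_t context, uint32_t seqno)
{
	return c->valid && c->context == context && seqno_passed(c->seqno, seqno);
}

static void sync_set(struct sync_cache *c, uint64_t context, uint32_t seqno)
{
	c->context = context;
	c->seqno = seqno;
	c->valid = true;
}

int main(void)
{
	struct sync_cache c = { 0 };

	sync_set(&c, 1, 100);
	printf("%d %d\n", sync_is_later(&c, 1, 90), sync_is_later(&c, 1, 110));	/* 1 0 */
	return 0;
}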
H A D | selftest_context.c |
    17    struct intel_timeline *tl = i915_request_timeline(rq);    in request_sync() local
    21    intel_timeline_get(tl);    in request_sync()
    35    lockdep_unpin_lock(&tl->mutex, rq->cookie);    in request_sync()
    36    mutex_unlock(&tl->mutex);    in request_sync()
    39    intel_timeline_put(tl);    in request_sync()
    46    struct intel_timeline *tl = ce->timeline;    in context_sync() local
    49    mutex_lock(&tl->mutex);    in context_sync()
    54    if (list_empty(&tl->requests))    in context_sync()
    57    rq = list_last_entry(&tl->requests, typeof(*rq), link);    in context_sync()
    68    mutex_unlock(&tl ...    in context_sync()
    [all...] |
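request_sync()/context_sync() above take tl->mutex only long enough to pick the newest request off tl->requests and grab a reference, then drop the lock before waiting on it. A small single-threaded sketch of that lock/peek/reference/unlock/wait shape; the busy-wait stands in for i915_request_wait() and all types are illustrative.

#include <pthread.h>
#include <sched.h>
#include <stdio.h>

struct request {
	int refcount;
	int done;
};

struct timeline {
	pthread_mutex_t mutex;
	struct request *last;	/* newest request, NULL if the list is empty */
};

/* Peek the newest request under the lock, take a reference, then wait on it
 * with the lock dropped. */
static void context_sync(struct timeline *tl)
{
	struct request *rq;

	pthread_mutex_lock(&tl->mutex);
	rq = tl->last;
	if (rq)
		rq->refcount++;		/* keep it alive past the unlock */
	pthread_mutex_unlock(&tl->mutex);

	if (!rq)
		return;
	while (!rq->done)		/* stand-in for i915_request_wait() */
		sched_yield();
	rq->refcount--;
}

int main(void)
{
	struct request rq = { .refcount = 1, .done = 1 };
	struct timeline tl = { PTHREAD_MUTEX_INITIALIZER, &rq };

	context_sync(&tl);
	printf("synced, refcount back to %d\n", rq.refcount);
	return 0;
}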
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gt/ |
H A D | selftest_timeline.c |
    24    static struct page *hwsp_page(struct intel_timeline *tl)    in hwsp_page() argument
    26    struct drm_i915_gem_object *obj = tl->hwsp_ggtt->obj;    in hwsp_page()
    32    static unsigned long hwsp_cacheline(struct intel_timeline *tl)    in hwsp_cacheline() argument
    34    unsigned long address = (unsigned long)page_address(hwsp_page(tl));    in hwsp_cacheline()
    36    return (address + tl->hwsp_offset) / CACHELINE_BYTES;    in hwsp_cacheline()
    55    struct intel_timeline *tl)    in __mock_hwsp_record()
    57    tl = xchg(&state->history[idx], tl);    in __mock_hwsp_record()
    58    if (tl) {    in __mock_hwsp_record()
    59    radix_tree_delete(&state->cachelines, hwsp_cacheline(tl));    in __mock_hwsp_record()
    53    __mock_hwsp_record(struct mock_hwsp_freelist *state, unsigned int idx, struct intel_timeline *tl)    __mock_hwsp_record() argument
    68    struct intel_timeline *tl;    __mock_hwsp_timeline() local
    172   __igt_sync(struct intel_timeline *tl, u64 ctx, const struct __igt_sync *p, const char *name)    __igt_sync() argument
    214   struct intel_timeline tl;    igt_sync() local
    258   struct intel_timeline tl;    bench_sync() local
    453   tl_write(struct intel_timeline *tl, struct intel_engine_cs *engine, u32 value)    tl_write() argument
    488   struct intel_timeline *tl;    checked_intel_timeline_create() local
    533   struct intel_timeline *tl;    live_hwsp_engine() local
    562   struct intel_timeline *tl = timelines[n];    live_hwsp_engine() local
    603   struct intel_timeline *tl;    live_hwsp_alternate() local
    634   struct intel_timeline *tl = timelines[n];    live_hwsp_alternate() local
    654   struct intel_timeline *tl;    live_hwsp_wrap() local
    772   struct intel_timeline *tl = ce->timeline;    live_hwsp_rollover_kernel() local
    852   struct intel_timeline *tl;    live_hwsp_rollover_user() local
    946   struct intel_timeline *tl;    live_hwsp_recycle() local
    [all...] |
H A D | intel_timeline.c |
    330   void __intel_timeline_pin(struct intel_timeline *tl)    in __intel_timeline_pin() argument
    332   GEM_BUG_ON(!atomic_read(&tl->pin_count));    in __intel_timeline_pin()
    333   atomic_inc(&tl->pin_count);    in __intel_timeline_pin()
    336   int intel_timeline_pin(struct intel_timeline *tl, struct i915_gem_ww_ctx *ww)    in intel_timeline_pin() argument
    340   if (atomic_add_unless(&tl->pin_count, 1, 0))    in intel_timeline_pin()
    343   err = i915_ggtt_pin(tl->hwsp_ggtt, ww, 0, PIN_HIGH);    in intel_timeline_pin()
    347   tl->hwsp_offset =    in intel_timeline_pin()
    348   i915_ggtt_offset(tl->hwsp_ggtt) +    in intel_timeline_pin()
    349   offset_in_page(tl->hwsp_offset);    in intel_timeline_pin()
    350   GT_TRACE(tl ...    in intel_timeline_pin()
    362   intel_timeline_reset_seqno(const struct intel_timeline *tl)    intel_timeline_reset_seqno() argument
    369   intel_timeline_enter(struct intel_timeline *tl)    intel_timeline_enter() argument
    410   intel_timeline_exit(struct intel_timeline *tl)    intel_timeline_exit() argument
    434   timeline_advance(struct intel_timeline *tl)    timeline_advance() argument
    442   timeline_rollback(struct intel_timeline *tl)    timeline_rollback() argument
    448   __intel_timeline_get_seqno(struct intel_timeline *tl, struct i915_request *rq, u32 *seqno)    __intel_timeline_get_seqno() argument
    544   intel_timeline_get_seqno(struct intel_timeline *tl, struct i915_request *rq, u32 *seqno)    intel_timeline_get_seqno() argument
    598   intel_timeline_unpin(struct intel_timeline *tl)    intel_timeline_unpin() argument
    [all...] |
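intel_timeline_pin() above takes its fast path with atomic_add_unless(&tl->pin_count, 1, 0): the count is only bumped when it is already non-zero, so only the first pin does the expensive GGTT work. A userspace sketch of that "increment unless zero" primitive with C11 atomics; the timeline_pin/timeline_unpin wrappers are illustrative and, unlike the driver, do not serialize the slow path with a lock.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Increment *v unless it currently holds zero; returns true if incremented.
 * Mirrors the semantics of the kernel's atomic_add_unless(v, 1, 0). */
static bool atomic_inc_unless_zero(atomic_uint *v)
{
	unsigned int cur = atomic_load(v);

	while (cur != 0) {
		if (atomic_compare_exchange_weak(v, &cur, cur + 1))
			return true;
		/* cur was reloaded by the failed CAS; loop and retry */
	}
	return false;
}

struct timeline {
	atomic_uint pin_count;
};

static void timeline_pin(struct timeline *tl)
{
	if (atomic_inc_unless_zero(&tl->pin_count))
		return;				/* already pinned: fast path */
	/* slow path: the first pin would map the status page here;
	 * real code serializes this with a lock */
	atomic_store(&tl->pin_count, 1);
}

static void timeline_unpin(struct timeline *tl)
{
	if (atomic_fetch_sub(&tl->pin_count, 1) == 1) {
		/* last unpin: would unmap the status page here */
	}
}

int main(void)
{
	struct timeline tl = { .pin_count = 0 };

	timeline_pin(&tl);	/* slow path */
	timeline_pin(&tl);	/* fast path */
	timeline_unpin(&tl);
	timeline_unpin(&tl);
	printf("pin_count = %u\n", atomic_load(&tl.pin_count));
	return 0;
}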
H A D | intel_gt_requests.c |
    17    static bool retire_requests(struct intel_timeline *tl)    in retire_requests() argument
    21    list_for_each_entry_safe(rq, rn, &tl->requests, link)    in retire_requests()
    26    return !i915_active_fence_isset(&tl->last_request);    in retire_requests()
    64    struct intel_timeline *tl = xchg(&engine->retire, NULL);    in engine_retire() local
    67    struct intel_timeline *next = xchg(&tl->retire, NULL);    in engine_retire()
    77    if (mutex_trylock(&tl->mutex)) {    in engine_retire()
    78    retire_requests(tl);    in engine_retire()
    79    mutex_unlock(&tl->mutex);    in engine_retire()
    81    intel_timeline_put(tl);    in engine_retire()
    84    tl ...    in engine_retire()
    88    add_retire(struct intel_engine_cs *engine, struct intel_timeline *tl)    add_retire() argument
    112   intel_engine_add_retire(struct intel_engine_cs *engine, struct intel_timeline *tl)    intel_engine_add_retire() argument
    136   struct intel_timeline *tl, *tn;    intel_gt_retire_requests_timeout() local
    [all...] |
H A D | intel_timeline.h |
    67    static inline int __intel_timeline_sync_set(struct intel_timeline *tl,    in __intel_timeline_sync_set() argument
    70    return i915_syncmap_set(&tl->sync, context, seqno);    in __intel_timeline_sync_set()
    73    static inline int intel_timeline_sync_set(struct intel_timeline *tl,    in intel_timeline_sync_set() argument
    76    return __intel_timeline_sync_set(tl, fence->context, fence->seqno);    in intel_timeline_sync_set()
    79    static inline bool __intel_timeline_sync_is_later(struct intel_timeline *tl,    in __intel_timeline_sync_is_later() argument
    82    return i915_syncmap_is_later(&tl->sync, context, seqno);    in __intel_timeline_sync_is_later()
    85    static inline bool intel_timeline_sync_is_later(struct intel_timeline *tl,    in intel_timeline_sync_is_later() argument
    88    return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno);    in intel_timeline_sync_is_later()
    91    void __intel_timeline_pin(struct intel_timeline *tl);
    92    int intel_timeline_pin(struct intel_timeline *tl, struc ...
    [all...] |
H A D | selftest_context.c |
    18    struct intel_timeline *tl = i915_request_timeline(rq);    in request_sync() local
    22    intel_timeline_get(tl);    in request_sync()
    36    lockdep_unpin_lock(&tl->mutex, rq->cookie);    in request_sync()
    37    mutex_unlock(&tl->mutex);    in request_sync()
    40    intel_timeline_put(tl);    in request_sync()
    47    struct intel_timeline *tl = ce->timeline;    in context_sync() local
    50    mutex_lock(&tl->mutex);    in context_sync()
    55    if (list_empty(&tl->requests))    in context_sync()
    58    rq = list_last_entry(&tl->requests, typeof(*rq), link);    in context_sync()
    69    mutex_unlock(&tl ...    in context_sync()
    [all...] |
/kernel/linux/linux-6.6/fs/smb/client/ |
H A D | dfs_cache.h |
    55    dfs_cache_get_next_tgt(struct dfs_cache_tgt_list *tl,    in dfs_cache_get_next_tgt() argument
    58    if (!tl || !tl->tl_numtgts || list_empty(&tl->tl_list) ||    in dfs_cache_get_next_tgt()
    59    !it || list_is_last(&it->it_list, &tl->tl_list))    in dfs_cache_get_next_tgt()
    65    dfs_cache_get_tgt_iterator(struct dfs_cache_tgt_list *tl)    in dfs_cache_get_tgt_iterator() argument
    67    if (!tl)    in dfs_cache_get_tgt_iterator()
    69    return list_first_entry_or_null(&tl->tl_list,    in dfs_cache_get_tgt_iterator()
    74    static inline void dfs_cache_free_tgts(struct dfs_cache_tgt_list *tl)    in dfs_cache_free_tgts() argument
    78    if (!tl || !t ...    in dfs_cache_free_tgts()
    95    dfs_cache_get_nr_tgts(const struct dfs_cache_tgt_list *tl)    dfs_cache_get_nr_tgts() argument
    [all...] |
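dfs_cache_get_tgt_iterator()/dfs_cache_get_next_tgt() above are thin first/next accessors over the referral target list that return NULL at the end of the list instead of a sentinel. A small sketch of that iterator shape; struct tgt and struct tgt_list are illustrative stand-ins for the CIFS types, and the UNC paths are made up.

#include <stddef.h>
#include <stdio.h>

struct tgt {			/* stand-in for dfs_cache_tgt_iterator */
	const char *name;
	struct tgt *next;
};

struct tgt_list {		/* stand-in for dfs_cache_tgt_list */
	int numtgts;
	struct tgt *head;
};

/* First target, or NULL for an empty/missing list. */
static struct tgt *tgt_list_first(struct tgt_list *tl)
{
	return tl ? tl->head : NULL;
}

/* Next target after `it`, or NULL when `it` is the last one. */
static struct tgt *tgt_list_next(struct tgt_list *tl, struct tgt *it)
{
	if (!tl || !tl->numtgts || !it)
		return NULL;
	return it->next;
}

int main(void)
{
	struct tgt c = { "\\\\srv3\\share", NULL };
	struct tgt b = { "\\\\srv2\\share", &c };
	struct tgt a = { "\\\\srv1\\share", &b };
	struct tgt_list tl = { 3, &a };

	/* Typical failover loop: try each target in turn. */
	for (struct tgt *it = tgt_list_first(&tl); it; it = tgt_list_next(&tl, it))
		printf("trying %s\n", it->name);
	return 0;
}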
H A D | dfs.h |
    20    struct dfs_cache_tgt_list tl;    member
    38    #define ref_walk_tl(w) (&ref_walk_cur(w)->tl)
    60    dfs_cache_free_tgts(&ref->tl);    in __ref_walk_free()
    91    tit = dfs_cache_get_tgt_iterator(&ref->tl);    in ref_walk_next_tgt()
    93    tit = dfs_cache_get_next_tgt(&ref->tl, ref->tit);    in ref_walk_next_tgt()
    132   struct dfs_info3_param *ref, struct dfs_cache_tgt_list *tl)    in dfs_get_referral()
    138   cifs_remap(cifs_sb), path, ref, tl);    in dfs_get_referral()
    131   dfs_get_referral(struct cifs_mount_ctx *mnt_ctx, const char *path, struct dfs_info3_param *ref, struct dfs_cache_tgt_list *tl)    dfs_get_referral() argument |
/kernel/linux/linux-5.10/fs/cifs/ |
H A D | dfs_cache.h |
    56    dfs_cache_get_next_tgt(struct dfs_cache_tgt_list *tl,    in dfs_cache_get_next_tgt() argument
    59    if (!tl || list_empty(&tl->tl_list) || !it ||    in dfs_cache_get_next_tgt()
    60    list_is_last(&it->it_list, &tl->tl_list))    in dfs_cache_get_next_tgt()
    66    dfs_cache_get_tgt_iterator(struct dfs_cache_tgt_list *tl)    in dfs_cache_get_tgt_iterator() argument
    68    if (!tl)    in dfs_cache_get_tgt_iterator()
    70    return list_first_entry_or_null(&tl->tl_list,    in dfs_cache_get_tgt_iterator()
    75    static inline void dfs_cache_free_tgts(struct dfs_cache_tgt_list *tl)    in dfs_cache_free_tgts() argument
    79    if (!tl || list_empty(&tl ...    in dfs_cache_free_tgts()
    96    dfs_cache_get_nr_tgts(const struct dfs_cache_tgt_list *tl)    dfs_cache_get_nr_tgts() argument
    [all...] |
/kernel/linux/linux-5.10/drivers/net/ethernet/netronome/nfp/ |
H A D | nfp_net_debugdump.c |
    58    struct nfp_dump_tl tl;    member
    64    struct nfp_dump_tl tl;    member
    70    struct nfp_dump_tl tl;    member
    78    struct nfp_dump_tl tl;    member
    87    struct nfp_dump_tl tl;    member
    92    struct nfp_dump_tl tl;    member
    112   typedef int (*nfp_tlv_visit)(struct nfp_pf *pf, struct nfp_dump_tl *tl,
    120   struct nfp_dump_tl *tl;    in nfp_traverse_tlvs() local
    125   while (remaining >= sizeof(*tl)) {    in nfp_traverse_tlvs()
    126   tl ...    in nfp_traverse_tlvs()
    251   nfp_add_tlv_size(struct nfp_pf *pf, struct nfp_dump_tl *tl, void *param)    nfp_add_tlv_size() argument
    331   struct nfp_dump_tl *tl = dump->p;    nfp_add_tlv() local
    654   nfp_dump_for_tlv(struct nfp_pf *pf, struct nfp_dump_tl *tl, void *param)    nfp_dump_for_tlv() argument
    [all...] |
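nfp_traverse_tlvs() above walks a dump specification as a sequence of type/length headers, refusing to read a header unless remaining >= sizeof(*tl) and handing each entry to an nfp_tlv_visit callback. A generic sketch of that bounds-checked TLV walk; the header layout and visitor signature are illustrative, not the exact NFP structures.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct tlv_hdr {		/* illustrative type/length header */
	uint32_t type;
	uint32_t length;	/* payload bytes following the header */
};

typedef int (*tlv_visit)(const struct tlv_hdr *tl, const uint8_t *payload, void *param);

/* Walk a TLV-encoded buffer, calling `visit` on each entry; returns -1 on a
 * malformed buffer (truncated header or payload). */
static int traverse_tlvs(const uint8_t *buf, size_t remaining, tlv_visit visit, void *param)
{
	while (remaining >= sizeof(struct tlv_hdr)) {
		struct tlv_hdr tl;

		memcpy(&tl, buf, sizeof(tl));		/* avoid unaligned access */
		if (remaining - sizeof(tl) < tl.length)
			return -1;			/* payload runs past the buffer */

		if (visit(&tl, buf + sizeof(tl), param))
			return -1;

		buf += sizeof(tl) + tl.length;
		remaining -= sizeof(tl) + tl.length;
	}
	return remaining ? -1 : 0;			/* trailing garbage is an error */
}

static int print_tlv(const struct tlv_hdr *tl, const uint8_t *payload, void *param)
{
	(void)payload; (void)param;
	printf("type %u, %u byte(s)\n", tl->type, tl->length);
	return 0;
}

int main(void)
{
	/* Two entries: type 1 with 4 payload bytes, then type 2 with none. */
	struct { struct tlv_hdr h1; uint8_t p1[4]; struct tlv_hdr h2; } buf = {
		{ 1, 4 }, { 0xde, 0xad, 0xbe, 0xef }, { 2, 0 },
	};

	return traverse_tlvs((const uint8_t *)&buf, sizeof(buf), print_tlv, NULL);
}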
/kernel/linux/linux-6.6/drivers/net/ethernet/netronome/nfp/ |
H A D | nfp_net_debugdump.c |
    58    struct nfp_dump_tl tl;    member
    64    struct nfp_dump_tl tl;    member
    70    struct nfp_dump_tl tl;    member
    78    struct nfp_dump_tl tl;    member
    87    struct nfp_dump_tl tl;    member
    92    struct nfp_dump_tl tl;    member
    112   typedef int (*nfp_tlv_visit)(struct nfp_pf *pf, struct nfp_dump_tl *tl,
    120   struct nfp_dump_tl *tl;    in nfp_traverse_tlvs() local
    125   while (remaining >= sizeof(*tl)) {    in nfp_traverse_tlvs()
    126   tl ...    in nfp_traverse_tlvs()
    251   nfp_add_tlv_size(struct nfp_pf *pf, struct nfp_dump_tl *tl, void *param)    nfp_add_tlv_size() argument
    331   struct nfp_dump_tl *tl = dump->p;    nfp_add_tlv() local
    654   nfp_dump_for_tlv(struct nfp_pf *pf, struct nfp_dump_tl *tl, void *param)    nfp_dump_for_tlv() argument
    [all...] |
/kernel/linux/linux-5.10/fs/ext4/ |
H A D | fast_commit.c |
    602   struct ext4_fc_tl *tl;    in ext4_fc_reserve_space() local
    631   tl = (struct ext4_fc_tl *)(sbi->s_fc_bh->b_data + off);    in ext4_fc_reserve_space()
    632   tl->fc_tag = cpu_to_le16(EXT4_FC_TAG_PAD);    in ext4_fc_reserve_space()
    634   tl->fc_len = cpu_to_le16(pad_len);    in ext4_fc_reserve_space()
    636   *crc = ext4_chksum(sbi, *crc, tl, sizeof(*tl));    in ext4_fc_reserve_space()
    638   ext4_fc_memzero(sb, tl + 1, pad_len, crc);    in ext4_fc_reserve_space()
    640   *((u8 *)(tl + 1) + pad_len) = 0;    in ext4_fc_reserve_space()
    672   struct ext4_fc_tl tl;    in ext4_fc_write_tail() local
    681   dst = ext4_fc_reserve_space(sb, sizeof(tl) ...    in ext4_fc_write_tail()
    713   struct ext4_fc_tl tl;    ext4_fc_add_tlv() local
    736   struct ext4_fc_tl tl;    ext4_fc_add_dentry_tlv() local
    768   struct ext4_fc_tl tl;    ext4_fc_write_inode() local
    1258  tl_to_darg(struct dentry_info_args *darg, struct ext4_fc_tl *tl, u8 *val)    tl_to_darg() argument
    1273  ext4_fc_replay_unlink(struct super_block *sb, struct ext4_fc_tl *tl, u8 *val)    ext4_fc_replay_unlink() argument
    1371  ext4_fc_replay_link(struct super_block *sb, struct ext4_fc_tl *tl, u8 *val)    ext4_fc_replay_link() argument
    1426  ext4_fc_replay_inode(struct super_block *sb, struct ext4_fc_tl *tl, u8 *val)    ext4_fc_replay_inode() argument
    1527  ext4_fc_replay_create(struct super_block *sb, struct ext4_fc_tl *tl, u8 *val)    ext4_fc_replay_create() argument
    1626  ext4_fc_replay_add_range(struct super_block *sb, struct ext4_fc_tl *tl, u8 *val)    ext4_fc_replay_add_range() argument
    1748  ext4_fc_replay_del_range(struct super_block *sb, struct ext4_fc_tl *tl, u8 *val)    ext4_fc_replay_del_range() argument
    1941  struct ext4_fc_tl tl;    ext4_fc_replay_scan() local
    2053  struct ext4_fc_tl tl;    ext4_fc_replay() local
    [all...] |
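ext4_fc_reserve_space()/ext4_fc_add_tlv() above append little-endian {tag, length} headers into the fast-commit area and pad out the remainder with an EXT4_FC_TAG_PAD record when a value does not fit. The sketch below shows that append-with-padding shape against a flat in-memory buffer; the tag values, buffer size, and missing checksum are simplifications, not the ext4 on-disk format.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define TAG_PAD	0u			/* stand-in for EXT4_FC_TAG_PAD */

struct fc_tl {				/* {tag, length} header, like struct ext4_fc_tl */
	uint16_t tag;
	uint16_t len;
};

struct fc_buf {
	uint8_t data[256];
	size_t off;
};

/* Append one tag + payload; if the record does not fit, fill the rest of the
 * buffer with a PAD record instead and report failure, the way the fast-commit
 * code pads a block before moving on. */
static int fc_add_tlv(struct fc_buf *b, uint16_t tag, const void *val, uint16_t len)
{
	struct fc_tl tl;
	size_t need = sizeof(tl) + len;

	if (b->off + need > sizeof(b->data)) {
		if (b->off + sizeof(tl) <= sizeof(b->data)) {
			size_t pad = sizeof(b->data) - b->off - sizeof(tl);

			tl.tag = TAG_PAD;
			tl.len = (uint16_t)pad;
			memcpy(b->data + b->off, &tl, sizeof(tl));
			memset(b->data + b->off + sizeof(tl), 0, pad);
		}
		b->off = sizeof(b->data);	/* block is full */
		return -1;
	}

	tl.tag = tag;
	tl.len = len;
	memcpy(b->data + b->off, &tl, sizeof(tl));
	memcpy(b->data + b->off + sizeof(tl), val, len);
	b->off += need;
	return 0;
}

int main(void)
{
	struct fc_buf b = { .off = 0 };
	uint32_t ino = 12;

	fc_add_tlv(&b, 1 /* illustrative tag */, &ino, sizeof(ino));
	printf("used %zu bytes\n", b.off);
	return 0;
}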
/kernel/linux/linux-5.10/crypto/ |
H A D | vmac.c |
    150   int i; u64 th, tl; \
    153   MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \
    155   ADD128(rh, rl, th, tl); \
    161   int i; u64 th, tl; \
    164   MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \
    166   ADD128(rh, rl, th, tl); \
    167   MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i+2], \
    169   ADD128(rh1, rl1, th, tl); \
    176   int i; u64 th, tl; \
    179   MUL64(th, tl, pe64_to_cpu ...
    [all...] |
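The MUL64()/ADD128() hits above accumulate 64x64-bit products into a 128-bit value kept as a (high, low) pair of u64s. A sketch of those two primitives; it leans on the unsigned __int128 extension (GCC/Clang) for the multiply and detects the carry in the add by comparing low halves. The test values are arbitrary.

#include <stdint.h>
#include <stdio.h>

/* 64 x 64 -> 128 multiply into (hi, lo), like vmac's MUL64(). */
static void mul64(uint64_t *hi, uint64_t *lo, uint64_t a, uint64_t b)
{
	unsigned __int128 p = (unsigned __int128)a * b;	/* GCC/Clang extension */

	*hi = (uint64_t)(p >> 64);
	*lo = (uint64_t)p;
}

/* (rh, rl) += (th, tl) with carry propagation, like vmac's ADD128(). */
static void add128(uint64_t *rh, uint64_t *rl, uint64_t th, uint64_t tl)
{
	uint64_t lo = *rl + tl;

	*rh += th + (lo < *rl);		/* carry out of the low half */
	*rl = lo;
}

int main(void)
{
	uint64_t rh = 0, rl = 0, th, tl;

	/* Accumulate two products, the way the hash inner loop does. */
	mul64(&th, &tl, 0xfedcba9876543210ULL, 0x0123456789abcdefULL);
	add128(&rh, &rl, th, tl);
	mul64(&th, &tl, 0xffffffffffffffffULL, 2);
	add128(&rh, &rl, th, tl);
	printf("%016llx%016llx\n", (unsigned long long)rh, (unsigned long long)rl);
	return 0;
}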
/kernel/linux/linux-6.6/crypto/ |
H A D | vmac.c |
    151   int i; u64 th, tl; \
    154   MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \
    156   ADD128(rh, rl, th, tl); \
    162   int i; u64 th, tl; \
    165   MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i], \
    167   ADD128(rh, rl, th, tl); \
    168   MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i+2], \
    170   ADD128(rh1, rl1, th, tl); \
    177   int i; u64 th, tl; \
    180   MUL64(th, tl, pe64_to_cpu ...
    [all...] |
/kernel/linux/linux-6.6/fs/ext4/ |
H A D | fast_commit.c |
    691   struct ext4_fc_tl tl;    in ext4_fc_reserve_space() local
    729   tl.fc_tag = cpu_to_le16(EXT4_FC_TAG_PAD);    in ext4_fc_reserve_space()
    730   tl.fc_len = cpu_to_le16(remaining);    in ext4_fc_reserve_space()
    731   memcpy(dst, &tl, EXT4_FC_TAG_BASE_LEN);    in ext4_fc_reserve_space()
    756   struct ext4_fc_tl tl;    in ext4_fc_write_tail() local
    771   tl.fc_tag = cpu_to_le16(EXT4_FC_TAG_TAIL);    in ext4_fc_write_tail()
    772   tl.fc_len = cpu_to_le16(bsize - off + sizeof(struct ext4_fc_tail));    in ext4_fc_write_tail()
    775   memcpy(dst, &tl, EXT4_FC_TAG_BASE_LEN);    in ext4_fc_write_tail()
    799   struct ext4_fc_tl tl;    in ext4_fc_add_tlv() local
    806   tl ...    in ext4_fc_add_tlv()
    820   struct ext4_fc_tl tl;    ext4_fc_add_dentry_tlv() local
    852   struct ext4_fc_tl tl;    ext4_fc_write_inode() local
    1341  tl_to_darg(struct dentry_info_args *darg, struct ext4_fc_tl_mem *tl, u8 *val)    tl_to_darg() argument
    1354  ext4_fc_get_tl(struct ext4_fc_tl_mem *tl, u8 *val)    ext4_fc_get_tl() argument
    1364  ext4_fc_replay_unlink(struct super_block *sb, struct ext4_fc_tl_mem *tl, u8 *val)    ext4_fc_replay_unlink() argument
    1462  ext4_fc_replay_link(struct super_block *sb, struct ext4_fc_tl_mem *tl, u8 *val)    ext4_fc_replay_link() argument
    1517  ext4_fc_replay_inode(struct super_block *sb, struct ext4_fc_tl_mem *tl, u8 *val)    ext4_fc_replay_inode() argument
    1620  ext4_fc_replay_create(struct super_block *sb, struct ext4_fc_tl_mem *tl, u8 *val)    ext4_fc_replay_create() argument
    1718  ext4_fc_replay_add_range(struct super_block *sb, struct ext4_fc_tl_mem *tl, u8 *val)    ext4_fc_replay_add_range() argument
    1839  ext4_fc_replay_del_range(struct super_block *sb, struct ext4_fc_tl_mem *tl, u8 *val)    ext4_fc_replay_del_range() argument
    2036  struct ext4_fc_tl_mem tl;    ext4_fc_replay_scan() local
    2155  struct ext4_fc_tl_mem tl;    ext4_fc_replay() local
    [all...] |
/kernel/linux/linux-5.10/drivers/isdn/mISDN/ |
H A D | fsm.c |
    98    struct FsmTimer *ft = from_timer(ft, t, tl);    in FsmExpireTimer()
    114   timer_setup(&ft->tl, FsmExpireTimer, 0);    in mISDN_FsmInitTimer()
    126   del_timer(&ft->tl);    in mISDN_FsmDelTimer()
    141   if (timer_pending(&ft->tl)) {    in mISDN_FsmAddTimer()
    152   ft->tl.expires = jiffies + (millisec * HZ) / 1000;    in mISDN_FsmAddTimer()
    153   add_timer(&ft->tl);    in mISDN_FsmAddTimer()
    169   if (timer_pending(&ft->tl))    in mISDN_FsmRestartTimer()
    170   del_timer(&ft->tl);    in mISDN_FsmRestartTimer()
    173   ft->tl.expires = jiffies + (millisec * HZ) / 1000;    in mISDN_FsmRestartTimer()
    174   add_timer(&ft->tl);    in mISDN_FsmRestartTimer()
    [all...] |
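The FSM timer helpers above arm a timer_list for `millisec` milliseconds (expires = jiffies + millisec * HZ / 1000) and let the expiry callback recover its FsmTimer with from_timer(), i.e. container_of() on the embedded timer. A rough userspace analogue using POSIX timers, where the sigevent value plays the role of from_timer(); the struct layout and event number are illustrative.

/* build with: cc fsm_timer.c -lrt -lpthread */
#include <signal.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h>

struct fsm_timer {		/* stand-in for struct FsmTimer / fsm_timer */
	timer_t id;
	int event;		/* event to post when the timer fires */
};

/* Expiry callback: recover our fsm_timer from the sigevent value, the way the
 * kernel callback recovers it with from_timer(). */
static void fsm_expire(union sigval sv)
{
	struct fsm_timer *ft = sv.sival_ptr;

	printf("timer fired, posting event %d\n", ft->event);
}

static int fsm_timer_init(struct fsm_timer *ft, int event)
{
	struct sigevent sev = {
		.sigev_notify = SIGEV_THREAD,
		.sigev_notify_function = fsm_expire,
		.sigev_value.sival_ptr = ft,
	};

	ft->event = event;
	return timer_create(CLOCK_MONOTONIC, &sev, &ft->id);
}

/* (Re)arm the timer `millisec` ms from now, like mISDN_FsmAddTimer(). */
static int fsm_timer_add(struct fsm_timer *ft, long millisec)
{
	struct itimerspec its = {
		.it_value.tv_sec = millisec / 1000,
		.it_value.tv_nsec = (millisec % 1000) * 1000000L,
	};

	return timer_settime(ft->id, 0, &its, NULL);
}

int main(void)
{
	struct fsm_timer ft;

	fsm_timer_init(&ft, 7);
	fsm_timer_add(&ft, 100);
	sleep(1);			/* give the expiry thread time to run */
	return 0;
}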
/kernel/linux/linux-5.10/drivers/s390/net/ |
H A D | fsm.c |
    135   fsm_timer *this = from_timer(this, t, tl);    in fsm_expire_timer()
    151   timer_setup(&this->tl, fsm_expire_timer, 0);    in fsm_settimer()
    161   del_timer(&this->tl);    in fsm_deltimer()
    173   timer_setup(&this->tl, fsm_expire_timer, 0);    in fsm_addtimer()
    176   this->tl.expires = jiffies + (millisec * HZ) / 1000;    in fsm_addtimer()
    177   add_timer(&this->tl);    in fsm_addtimer()
    191   del_timer(&this->tl);    in fsm_modtimer()
    192   timer_setup(&this->tl, fsm_expire_timer, 0);    in fsm_modtimer()
    195   this->tl.expires = jiffies + (millisec * HZ) / 1000;    in fsm_modtimer()
    196   add_timer(&this->tl);    in fsm_modtimer()
    [all...] |
/kernel/linux/linux-6.6/drivers/s390/net/ |
H A D | fsm.c |
    135   fsm_timer *this = from_timer(this, t, tl);    in fsm_expire_timer()
    151   timer_setup(&this->tl, fsm_expire_timer, 0);    in fsm_settimer()
    161   del_timer(&this->tl);    in fsm_deltimer()
    173   timer_setup(&this->tl, fsm_expire_timer, 0);    in fsm_addtimer()
    176   this->tl.expires = jiffies + (millisec * HZ) / 1000;    in fsm_addtimer()
    177   add_timer(&this->tl);    in fsm_addtimer()
    191   del_timer(&this->tl);    in fsm_modtimer()
    192   timer_setup(&this->tl, fsm_expire_timer, 0);    in fsm_modtimer()
    195   this->tl.expires = jiffies + (millisec * HZ) / 1000;    in fsm_modtimer()
    196   add_timer(&this->tl);    in fsm_modtimer()
    [all...] |
/kernel/linux/linux-6.6/drivers/isdn/mISDN/ |
H A D | fsm.c |
    98    struct FsmTimer *ft = from_timer(ft, t, tl);    in FsmExpireTimer()
    114   timer_setup(&ft->tl, FsmExpireTimer, 0);    in mISDN_FsmInitTimer()
    126   del_timer(&ft->tl);    in mISDN_FsmDelTimer()
    141   if (timer_pending(&ft->tl)) {    in mISDN_FsmAddTimer()
    152   ft->tl.expires = jiffies + (millisec * HZ) / 1000;    in mISDN_FsmAddTimer()
    153   add_timer(&ft->tl);    in mISDN_FsmAddTimer()
    169   if (timer_pending(&ft->tl))    in mISDN_FsmRestartTimer()
    170   del_timer(&ft->tl);    in mISDN_FsmRestartTimer()
    173   ft->tl.expires = jiffies + (millisec * HZ) / 1000;    in mISDN_FsmRestartTimer()
    174   add_timer(&ft->tl);    in mISDN_FsmRestartTimer()
    [all...] |
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/ |
H A D | i915_request.c |
    341   struct intel_timeline * const tl = i915_request_timeline(rq);    in i915_request_retire_upto() local
    349   tmp = list_first_entry(&tl->requests, typeof(*tmp), link);    in i915_request_retire_upto()
    722   static void retire_requests(struct intel_timeline *tl)    in retire_requests() argument
    726   list_for_each_entry_safe(rq, rn, &tl->requests, link)    in retire_requests()
    732   request_alloc_slow(struct intel_timeline *tl,    in request_alloc_slow() argument
    747   if (list_empty(&tl->requests))    in request_alloc_slow()
    751   rq = list_first_entry(&tl->requests, typeof(*rq), link);    in request_alloc_slow()
    760   rq = list_last_entry(&tl->requests, typeof(*rq), link);    in request_alloc_slow()
    764   retire_requests(tl);    in request_alloc_slow()
    787   struct intel_timeline *tl ...    in __i915_request_create() local
    918   struct intel_timeline *tl;    i915_request_create() local
    1123  intel_timeline_sync_has_start(struct intel_timeline *tl, struct dma_fence *fence)    intel_timeline_sync_has_start() argument
    1131  intel_timeline_sync_set_start(struct intel_timeline *tl, const struct dma_fence *fence)    intel_timeline_sync_set_start() argument
    1598  struct intel_timeline * const tl = i915_request_timeline(rq);    i915_request_add() local
    [all...] |
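request_alloc_slow() above is the allocation slow path: before giving up it retires completed requests already sitting on tl->requests and then retries the allocation. A toy sketch of that "reclaim from the in-flight list, then retry" shape over a fixed pool; every type and function here is an illustrative stand-in, not the i915 request machinery.

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

#define POOL_SIZE 2

struct request {
	bool in_use;
	bool complete;
	unsigned int seqno;
};

static struct request pool[POOL_SIZE];

static struct request *request_alloc_fast(void)
{
	for (int i = 0; i < POOL_SIZE; i++)
		if (!pool[i].in_use) {
			pool[i].in_use = true;
			return &pool[i];
		}
	return NULL;		/* pool exhausted */
}

/* Give completed requests back to the pool, like retire_requests(tl). */
static void retire_completed(void)
{
	for (int i = 0; i < POOL_SIZE; i++)
		if (pool[i].in_use && pool[i].complete)
			memset(&pool[i], 0, sizeof(pool[i]));
}

/* Slow path: reclaim, then retry. */
static struct request *request_alloc(void)
{
	struct request *rq = request_alloc_fast();

	if (rq)
		return rq;
	retire_completed();
	return request_alloc_fast();
}

int main(void)
{
	struct request *a = request_alloc();
	struct request *b = request_alloc();

	a->complete = true;		/* pretend the GPU finished it */
	(void)b;
	printf("third alloc %s\n", request_alloc() ? "succeeded after retire" : "failed");
	return 0;
}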
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/ |
H A D | i915_request.c |
    421   struct intel_timeline * const tl = i915_request_timeline(rq);    in i915_request_retire_upto() local
    428   tmp = list_first_entry(&tl->requests, typeof(*tmp), link);    in i915_request_retire_upto()
    825   static void retire_requests(struct intel_timeline *tl)    in retire_requests() argument
    829   list_for_each_entry_safe(rq, rn, &tl->requests, link)    in retire_requests()
    835   request_alloc_slow(struct intel_timeline *tl,    in request_alloc_slow() argument
    850   if (list_empty(&tl->requests))    in request_alloc_slow()
    854   rq = list_first_entry(&tl->requests, typeof(*rq), link);    in request_alloc_slow()
    863   rq = list_last_entry(&tl->requests, typeof(*rq), link);    in request_alloc_slow()
    867   retire_requests(tl);    in request_alloc_slow()
    897   struct intel_timeline *tl ...    in __i915_request_create() local
    1032  struct intel_timeline *tl;    i915_request_create() local
    1249  intel_timeline_sync_has_start(struct intel_timeline *tl, struct dma_fence *fence)    intel_timeline_sync_has_start() argument
    1257  intel_timeline_sync_set_start(struct intel_timeline *tl, const struct dma_fence *fence)    intel_timeline_sync_set_start() argument
    1847  struct intel_timeline * const tl = i915_request_timeline(rq);    i915_request_add() local
    [all...] |