/kernel/linux/linux-5.10/include/soc/fsl/

  dpaa2-global.h
     20: struct dq {  (struct)
     33: } dq;  (member)
     66: * @dq: the dequeue result.
     68: static inline u32 dpaa2_dq_flags(const struct dpaa2_dq *dq)  (argument of dpaa2_dq_flags())
     70: return dq->dq.stat;  (in dpaa2_dq_flags())
     74: * dpaa2_dq_is_pull() - Check whether the dq response is from a pull
     76: * @dq: the dequeue result
     80: static inline int dpaa2_dq_is_pull(const struct dpaa2_dq *dq)  (argument of dpaa2_dq_is_pull())
     82: return (int)(dpaa2_dq_flags(dq) …  (in dpaa2_dq_is_pull())
     91: dpaa2_dq_is_pull_complete(const struct dpaa2_dq *dq)  (argument)
    104: dpaa2_dq_seqnum(const struct dpaa2_dq *dq)  (argument)
    117: dpaa2_dq_odpid(const struct dpaa2_dq *dq)  (argument)
    128: dpaa2_dq_fqid(const struct dpaa2_dq *dq)  (argument)
    139: dpaa2_dq_byte_count(const struct dpaa2_dq *dq)  (argument)
    150: dpaa2_dq_frame_count(const struct dpaa2_dq *dq)  (argument)
    161: dpaa2_dq_fqd_ctx(const struct dpaa2_dq *dq)  (argument)
    172: dpaa2_dq_fd(const struct dpaa2_dq *dq)  (argument)
    [all...]
/kernel/linux/linux-6.6/include/soc/fsl/

  dpaa2-global.h
    (identical matches to the linux-5.10 copy above, lines 20-172)  [all...]
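Note: all of the dpaa2-global.h helpers follow one pattern: struct dpaa2_dq wraps the raw dequeue descriptor in a nested struct that is itself named dq, and each accessor reads a single field of it (dpaa2_dq_flags() returns dq->dq.stat; the pull and pull-complete predicates test individual bits of that status byte). A minimal sketch of the pattern; the field layout and the STAT_* bit values below are illustrative placeholders, not the real DPAA2 definitions:

    /* Illustrative sketch only: layout and bit values are placeholders. */
    #include <stdint.h>

    struct my_dq {                          /* stand-in for struct dpaa2_dq */
            struct {
                    uint8_t  verb;
                    uint8_t  stat;          /* status/flags byte            */
                    uint16_t seqnum;
                    uint16_t oprid;
                    uint32_t fqid;
                    /* ... frame count, byte count, FQD context, FD ...     */
            } dq;
    };

    #define MY_DQ_STAT_VOLATILE     0x02    /* placeholder: response to a pull   */
    #define MY_DQ_STAT_EXPIRED      0x01    /* placeholder: pull command retired */

    static inline uint32_t my_dq_flags(const struct my_dq *dq)
    {
            return dq->dq.stat;
    }

    static inline int my_dq_is_pull(const struct my_dq *dq)
    {
            return (int)(my_dq_flags(dq) & MY_DQ_STAT_VOLATILE);
    }

    static inline int my_dq_is_pull_complete(const struct my_dq *dq)
    {
            return (int)(my_dq_flags(dq) & MY_DQ_STAT_EXPIRED);
    }

The remaining accessors (seqnum, odpid, fqid, byte/frame count, fqd_ctx, fd) have the same shape: a single field read, in the real header typically behind a le*_to_cpu endianness conversion.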
/kernel/linux/linux-5.10/lib/raid6/

  recov_loongarch_simd.c
     32: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_lsx())
     47: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_lsx())
     49: ptrs[disks - 1] = dq;  (in raid6_2data_recov_lsx())
     55: ptrs[failb] = dq;  (in raid6_2data_recov_lsx())
     81: asm volatile("vld $vr8, %0" : : "m" (dq[0]));  (in raid6_2data_recov_lsx())
     82: asm volatile("vld $vr9, %0" : : "m" (dq[16]));  (in raid6_2data_recov_lsx())
     83: asm volatile("vld $vr10, %0" : : "m" (dq[32]));  (in raid6_2data_recov_lsx())
     84: asm volatile("vld $vr11, %0" : : "m" (dq[48]));  (in raid6_2data_recov_lsx())
    161: asm volatile("vst $vr4, %0" : "=m" (dq[0]));  (in raid6_2data_recov_lsx())
    162: asm volatile("vst $vr5, %0" : "=m" (dq[1…  (in raid6_2data_recov_lsx())
    189: u8 *p, *q, *dq;  (local in raid6_datap_recov_lsx())
    306: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_lasx())
    428: u8 *p, *q, *dq;  (local in raid6_datap_recov_lasx())
    [all...]

  recov_avx512.c
     27: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_avx512())
     44: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_avx512())
     46: ptrs[disks-1] = dq;  (in raid6_2data_recov_avx512())
     52: ptrs[failb] = dq;  (in raid6_2data_recov_avx512())
     78: "m" (p[64]), "m" (dq[0]), "m" (dq[64]),  (in raid6_2data_recov_avx512())
     82: * 1 = dq[0] ^ q[0]  (in raid6_2data_recov_avx512())
     83: * 9 = dq[64] ^ q[64]  (in raid6_2data_recov_avx512())
    148: : "m" (dq[0]), "m" (dq[6…  (in raid6_2data_recov_avx512())
    230: u8 *p, *q, *dq;  (local in raid6_datap_recov_avx512())
    [all...]

  recov_avx2.c
     19: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_avx2())
     33: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_avx2())
     35: ptrs[disks-1] = dq;  (in raid6_2data_recov_avx2())
     41: ptrs[failb] = dq;  (in raid6_2data_recov_avx2())
     61: asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0]));  (in raid6_2data_recov_avx2())
     62: asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32]));  (in raid6_2data_recov_avx2())
     67: * 1 = dq[0] ^ q[0]  (in raid6_2data_recov_avx2())
     68: * 9 = dq[32] ^ q[32]  (in raid6_2data_recov_avx2())
    120: asm volatile("vmovdqa %%ymm1, %0" : "=m" (dq[0]));  (in raid6_2data_recov_avx2())
    121: asm volatile("vmovdqa %%ymm13,%0" : "=m" (dq[3…  (in raid6_2data_recov_avx2())
    189: u8 *p, *q, *dq;  (local in raid6_datap_recov_avx2())
    [all...]

  recov_ssse3.c
     19: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_ssse3())
     35: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_ssse3())
     37: ptrs[disks-1] = dq;  (in raid6_2data_recov_ssse3())
     43: ptrs[failb] = dq;  (in raid6_2data_recov_ssse3())
     71: asm volatile("pxor %0,%%xmm1" : : "m" (dq[0]));  (in raid6_2data_recov_ssse3())
     72: asm volatile("pxor %0,%%xmm9" : : "m" (dq[16]));  (in raid6_2data_recov_ssse3())
    124: asm volatile("movdqa %%xmm1,%0" : "=m" (dq[0]));  (in raid6_2data_recov_ssse3())
    125: asm volatile("movdqa %%xmm9,%0" : "=m" (dq[16]));  (in raid6_2data_recov_ssse3())
    136: dq += 32;  (in raid6_2data_recov_ssse3())
    140: asm volatile("pxor %0,%%xmm1" : : "m" (*dq));  (in raid6_2data_recov_ssse3())
    194: u8 *p, *q, *dq;  (local in raid6_datap_recov_ssse3())
    [all...]

  recov_s390xc.c
     26: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_s390xc())
     40: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_s390xc())
     42: ptrs[disks-1] = dq;  (in raid6_2data_recov_s390xc())
     48: ptrs[failb] = dq;  (in raid6_2data_recov_s390xc())
     59: xor_block(dq, q);  (in raid6_2data_recov_s390xc())
     61: dq[i] = pbmul[dp[i]] ^ qmul[dq[i]];  (in raid6_2data_recov_s390xc())
     62: xor_block(dp, dq);  (in raid6_2data_recov_s390xc())
     66: dq += 256;  (in raid6_2data_recov_s390xc())
     75: u8 *p, *q, *dq;  (local in raid6_datap_recov_s390xc())
    [all...]

  recov.c
     23: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_intx1())
     37: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_intx1())
     39: ptrs[disks-1] = dq;  (in raid6_2data_recov_intx1())
     45: ptrs[failb] = dq;  (in raid6_2data_recov_intx1())
     56: qx = qmul[*q ^ *dq];  (in raid6_2data_recov_intx1())
     57: *dq++ = db = pbmul[px] ^ qx; /* Reconstructed B */  (in raid6_2data_recov_intx1())
     67: u8 *p, *q, *dq;  (local in raid6_datap_recov_intx1())
     75: dq = (u8 *)ptrs[faila];  (in raid6_datap_recov_intx1())
     77: ptrs[disks-1] = dq;  (in raid6_datap_recov_intx1())
     82: ptrs[faila] = dq;  (in raid6_datap_recov_intx1())
    [all...]
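Note: the scalar loop in recov.c above (hits 56-57) is the reference form of RAID6 two-data-disk recovery over GF(2^8). The pointer swaps at hits 37-45 repoint the two failed slots at a zero page and reuse their buffers to capture recomputed partial syndromes P' and Q', so that with failed data disks a < b and generator g = {02}:

$$P_{xy} = P \oplus P' = D_a \oplus D_b, \qquad Q_{xy} = Q \oplus Q' = g^{a} D_a \oplus g^{b} D_b .$$

Solving this pair in GF(2^8) gives

$$D_b = (g^{\,b-a} \oplus 1)^{-1}\, P_{xy} \;\oplus\; (g^{a} \oplus g^{b})^{-1}\, Q_{xy}, \qquad D_a = D_b \oplus P_{xy} .$$

The pbmul and qmul arrays hold precomputed GF(2^8) multiplication tables for those two constants, so the inner loop is only table lookups and XORs per byte: *p ^ *dp is P_xy, qmul[*q ^ *dq] is the Q_xy term, the reconstructed D_b is pbmul applied to the former XORed with the latter, and D_a is that value XORed with P_xy again. The SIMD files listed here (SSSE3, AVX2, AVX-512, NEON, LSX/LASX) vectorize exactly this loop.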
  recov_neon.c
     23: uint8_t *dq, const uint8_t *pbmul,
     26: void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq,
     32: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_neon())
     47: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_neon())
     49: ptrs[disks - 1] = dq;  (in raid6_2data_recov_neon())
     55: ptrs[failb] = dq;  (in raid6_2data_recov_neon())
     65: __raid6_2data_recov_neon(bytes, p, q, dp, dq, pbmul, qmul);  (in raid6_2data_recov_neon())
     72: u8 *p, *q, *dq;  (local in raid6_datap_recov_neon())
     82: dq = (u8 *)ptrs[faila];  (in raid6_datap_recov_neon())
     84: ptrs[disks - 1] = dq;  (in raid6_datap_recov_neon())
    [all...]

  recov_neon_inner.c
     28: uint8_t *dq, const uint8_t *pbmul,  (in __raid6_2data_recov_neon())
     42: * qx = qmul[*q ^ *dq];  (in __raid6_2data_recov_neon())
     43: * *dq++ = db = pbmul[px] ^ qx;  (in __raid6_2data_recov_neon())
     53: vx = veorq_u8(vld1q_u8(q), vld1q_u8(dq));  (in __raid6_2data_recov_neon())
     66: vst1q_u8(dq, db);  (in __raid6_2data_recov_neon())
     73: dq += 16;  (in __raid6_2data_recov_neon())
     77: void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq,  (argument of __raid6_datap_recov_neon())
     86: * *p++ ^= *dq = qmul[*q ^ *dq];  (in __raid6_datap_recov_neon())
     87: * q++; dq…  (in __raid6_datap_recov_neon())
     27: __raid6_2data_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dp, uint8_t *dq, const uint8_t *pbmul, const uint8_t *qmul)  (argument of __raid6_2data_recov_neon())
    [all...]
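Note: recov_neon_inner.c keeps the scalar reference as comments (hits 42-43 and 86-87). The second routine in each of these files, raid6_datap_recov_*(), covers the other failure mode: one data disk D_a plus the P disk. Q alone is then sufficient and P is rebuilt on the fly, which is what the commented line *p++ ^= *dq = qmul[*q ^ *dq] expresses:

$$D_a = g^{-a}\,\bigl(Q \oplus Q'\bigr), \qquad P = P' \oplus D_a ,$$

where P' and Q' are the syndromes recomputed with the failed slot pointed at the zero page (the ptrs[faila]/ptrs[disks-1] swaps visible in the entries above), and qmul here is the multiply-by-g^{-a} table.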
/kernel/linux/linux-6.6/lib/raid6/

  recov_loongarch_simd.c
    (identical matches to the linux-5.10 copy above, lines 32-428)  [all...]

  recov_avx512.c
    (identical matches to the linux-5.10 copy above, lines 27-230)  [all...]

  recov_avx2.c
    (identical matches to the linux-5.10 copy above, lines 19-189)  [all...]

  recov_ssse3.c
    (identical matches to the linux-5.10 copy above, lines 19-194)  [all...]

  recov_s390xc.c
    (identical matches to the linux-5.10 copy above, lines 26-75)  [all...]

  recov.c
     22: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_intx1())
     36: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_intx1())
     38: ptrs[disks-1] = dq;  (in raid6_2data_recov_intx1())
     44: ptrs[failb] = dq;  (in raid6_2data_recov_intx1())
     55: qx = qmul[*q ^ *dq];  (in raid6_2data_recov_intx1())
     56: *dq++ = db = pbmul[px] ^ qx; /* Reconstructed B */  (in raid6_2data_recov_intx1())
     66: u8 *p, *q, *dq;  (local in raid6_datap_recov_intx1())
     74: dq = (u8 *)ptrs[faila];  (in raid6_datap_recov_intx1())
     76: ptrs[disks-1] = dq;  (in raid6_datap_recov_intx1())
     81: ptrs[faila] = dq;  (in raid6_datap_recov_intx1())
    [all...]

  recov_neon.c
     26: u8 *p, *q, *dp, *dq;  (local in raid6_2data_recov_neon())
     41: dq = (u8 *)ptrs[failb];  (in raid6_2data_recov_neon())
     43: ptrs[disks - 1] = dq;  (in raid6_2data_recov_neon())
     49: ptrs[failb] = dq;  (in raid6_2data_recov_neon())
     59: __raid6_2data_recov_neon(bytes, p, q, dp, dq, pbmul, qmul);  (in raid6_2data_recov_neon())
     66: u8 *p, *q, *dq;  (local in raid6_datap_recov_neon())
     76: dq = (u8 *)ptrs[faila];  (in raid6_datap_recov_neon())
     78: ptrs[disks - 1] = dq;  (in raid6_datap_recov_neon())
     83: ptrs[faila] = dq;  (in raid6_datap_recov_neon())
     90: __raid6_datap_recov_neon(bytes, p, q, dq, qmu…  (in raid6_datap_recov_neon())
    [all...]

  recov_neon_inner.c
     29: uint8_t *dq, const uint8_t *pbmul,  (in __raid6_2data_recov_neon())
     43: * qx = qmul[*q ^ *dq];  (in __raid6_2data_recov_neon())
     44: * *dq++ = db = pbmul[px] ^ qx;  (in __raid6_2data_recov_neon())
     54: vx = veorq_u8(vld1q_u8(q), vld1q_u8(dq));  (in __raid6_2data_recov_neon())
     67: vst1q_u8(dq, db);  (in __raid6_2data_recov_neon())
     74: dq += 16;  (in __raid6_2data_recov_neon())
     78: void __raid6_datap_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dq,  (argument of __raid6_datap_recov_neon())
     87: * *p++ ^= *dq = qmul[*q ^ *dq];  (in __raid6_datap_recov_neon())
     88: * q++; dq…  (in __raid6_datap_recov_neon())
     28: __raid6_2data_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dp, uint8_t *dq, const uint8_t *pbmul, const uint8_t *qmul)  (argument of __raid6_2data_recov_neon())
    [all...]
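Note: to check this algebra outside the kernel, here is a small self-contained user-space demo (not kernel code). It rebuilds the GF(2^8) tables for the 0x11d polynomial that lib/raid6 uses, computes P/Q for one byte column of a five-data-disk stripe, "fails" two of the data bytes, and recovers them with the P_xy/Q_xy formulas above; the names only mirror the kernel's.

    /* Standalone illustration of RAID6 two-data-disk recovery over GF(2^8)
     * with the x^8+x^4+x^3+x^2+1 polynomial (0x11d).  Not kernel code. */
    #include <stdint.h>
    #include <stdio.h>

    static uint8_t gfmul(uint8_t a, uint8_t b)
    {
            uint8_t r = 0;

            while (b) {
                    if (b & 1)
                            r ^= a;
                    a = (a << 1) ^ ((a & 0x80) ? 0x1d : 0);
                    b >>= 1;
            }
            return r;
    }

    static uint8_t gfexp[256], gflog[256];

    static void build_tables(void)
    {
            uint8_t x = 1;

            for (int i = 0; i < 255; i++) {
                    gfexp[i] = x;
                    gflog[x] = i;
                    x = gfmul(x, 2);        /* generator g = {02} */
            }
    }

    static uint8_t gfinv(uint8_t a)
    {
            return gfexp[(255 - gflog[a]) % 255];
    }

    int main(void)
    {
            /* One byte column of a five-data-disk stripe. */
            uint8_t d[5] = { 0x11, 0x22, 0x33, 0x44, 0x55 };
            int faila = 1, failb = 3;
            uint8_t p = 0, q = 0, dp = 0, dq = 0;

            build_tables();

            /* P and Q as normal syndrome generation would write them. */
            for (int i = 0; i < 5; i++) {
                    p ^= d[i];
                    q ^= gfmul(gfexp[i], d[i]);
            }

            /* P' and Q' recomputed with the failed disks treated as zero,
             * which is what the ptrs[] swapping in the kernel arranges. */
            for (int i = 0; i < 5; i++) {
                    if (i == faila || i == failb)
                            continue;
                    dp ^= d[i];
                    dq ^= gfmul(gfexp[i], d[i]);
            }

            uint8_t pxy = p ^ dp;   /* D_a ^ D_b         */
            uint8_t qxy = q ^ dq;   /* g^a*D_a ^ g^b*D_b */

            /* The constants the kernel keeps as the pbmul/qmul tables. */
            uint8_t pbmul = gfinv(gfexp[failb - faila] ^ 1);
            uint8_t qmul  = gfinv(gfexp[faila] ^ gfexp[failb]);

            uint8_t db = gfmul(pbmul, pxy) ^ gfmul(qmul, qxy);
            uint8_t da = db ^ pxy;

            printf("D%d=0x%02x (expect 0x%02x)  D%d=0x%02x (expect 0x%02x)\n",
                   faila, da, d[faila], failb, db, d[failb]);
            return 0;
    }

Compiled with any C99 compiler, it prints D1=0x22 and D3=0x44, matching the bytes that were "failed".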
/kernel/linux/linux-5.10/fs/xfs/scrub/

  quota.c
     75: struct xfs_dquot *dq,  (in xchk_quota_item())
     94: offset = dq->q_id / qi->qi_dqperchunk;  (in xchk_quota_item())
     95: if (dq->q_id && dq->q_id <= sqi->last_id)  (in xchk_quota_item())
     98: sqi->last_id = dq->q_id;  (in xchk_quota_item())
    108: if (dq->q_blk.hardlimit > mp->m_sb.sb_dblocks)  (in xchk_quota_item())
    110: if (dq->q_blk.softlimit > dq->q_blk.hardlimit)  (in xchk_quota_item())
    113: if (dq->q_ino.hardlimit > M_IGEO(mp)->maxicount)  (in xchk_quota_item())
    115: if (dq…  (in xchk_quota_item())
     74: xchk_quota_item(struct xfs_dquot *dq, xfs_dqtype_t dqtype, void *priv)  (argument of xchk_quota_item())
    [all...]
/kernel/linux/linux-6.6/fs/xfs/scrub/

  quota.c
     81: struct xfs_dquot *dq,  (in xchk_quota_item())
    100: offset = dq->q_id / qi->qi_dqperchunk;  (in xchk_quota_item())
    101: if (dq->q_id && dq->q_id <= sqi->last_id)  (in xchk_quota_item())
    104: sqi->last_id = dq->q_id;  (in xchk_quota_item())
    114: if (dq->q_blk.hardlimit > mp->m_sb.sb_dblocks)  (in xchk_quota_item())
    116: if (dq->q_blk.softlimit > dq->q_blk.hardlimit)  (in xchk_quota_item())
    119: if (dq->q_ino.hardlimit > M_IGEO(mp)->maxicount)  (in xchk_quota_item())
    121: if (dq…  (in xchk_quota_item())
     80: xchk_quota_item(struct xfs_dquot *dq, xfs_dqtype_t dqtype, void *priv)  (argument of xchk_quota_item())
    [all...]
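Note: xchk_quota_item() walks every on-disk dquot in ID order and flags the quota metadata as corrupt when the invariants visible above fail: IDs must strictly increase from one item to the next, hard limits cannot exceed what the filesystem can hold (sb_dblocks for blocks, the maximum inode count for inodes), and a soft limit cannot exceed its hard limit. A condensed sketch of just those ordering checks, using stand-in types rather than the xfs_dquot/xchk structures:

    /* Stand-in types and limits; only the check logic mirrors the hits
     * shown above. */
    #include <stdbool.h>
    #include <stdint.h>

    struct dq_limits {
            uint64_t blk_soft, blk_hard;    /* block limits */
            uint64_t ino_soft, ino_hard;    /* inode limits */
    };

    struct scrub_state {
            uint64_t last_id;       /* highest dquot ID seen so far        */
            uint64_t max_blocks;    /* filesystem data blocks (sb_dblocks) */
            uint64_t max_inodes;    /* maximum possible inode count        */
    };

    bool dquot_item_ok(struct scrub_state *s, uint64_t id,
                       const struct dq_limits *l)
    {
            /* Dquots must be visited in strictly increasing ID order. */
            if (id && id <= s->last_id)
                    return false;
            s->last_id = id;

            /* Hard limits cannot exceed what the filesystem can hold,
             * and soft limits cannot exceed their hard limits. */
            if (l->blk_hard > s->max_blocks || l->blk_soft > l->blk_hard)
                    return false;
            if (l->ino_hard > s->max_inodes || l->ino_soft > l->ino_hard)
                    return false;
            return true;
    }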
/kernel/linux/linux-5.10/drivers/soc/fsl/dpio/

  qbman-portal.h
    212: void qbman_swp_dqrr_consume(struct qbman_swp *s, const struct dpaa2_dq *dq);
    214: int qbman_result_has_new_result(struct qbman_swp *p, const struct dpaa2_dq *dq);
    297: * @dq: the dequeue result to be checked
    301: static inline int qbman_result_is_DQ(const struct dpaa2_dq *dq)  (argument of qbman_result_is_DQ())
    303: return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_DQ);  (in qbman_result_is_DQ())
    308: * @dq: the dequeue result to be checked
    311: static inline int qbman_result_is_SCN(const struct dpaa2_dq *dq)  (argument of qbman_result_is_SCN())
    313: return !qbman_result_is_DQ(dq);  (in qbman_result_is_SCN())
    317: static inline int qbman_result_is_FQDAN(const struct dpaa2_dq *dq)  (argument)
    323: qbman_result_is_CDAN(const struct dpaa2_dq *dq)  (argument)
    329: qbman_result_is_CSCN(const struct dpaa2_dq *dq)  (argument)
    335: qbman_result_is_BPSCN(const struct dpaa2_dq *dq)  (argument)
    341: qbman_result_is_CGCU(const struct dpaa2_dq *dq)  (argument)
    347: qbman_result_is_FQRN(const struct dpaa2_dq *dq)  (argument)
    353: qbman_result_is_FQRNI(const struct dpaa2_dq *dq)  (argument)
    359: qbman_result_is_FQPN(const struct dpaa2_dq *dq)  (argument)
    [all...]
/kernel/linux/linux-6.6/drivers/soc/fsl/dpio/

  qbman-portal.h
    219: void qbman_swp_dqrr_consume(struct qbman_swp *s, const struct dpaa2_dq *dq);
    221: int qbman_result_has_new_result(struct qbman_swp *p, const struct dpaa2_dq *dq);
    304: * @dq: the dequeue result to be checked
    308: static inline int qbman_result_is_DQ(const struct dpaa2_dq *dq)  (argument of qbman_result_is_DQ())
    310: return ((dq->dq.verb & QBMAN_RESULT_MASK) == QBMAN_RESULT_DQ);  (in qbman_result_is_DQ())
    315: * @dq: the dequeue result to be checked
    318: static inline int qbman_result_is_SCN(const struct dpaa2_dq *dq)  (argument of qbman_result_is_SCN())
    320: return !qbman_result_is_DQ(dq);  (in qbman_result_is_SCN())
    324: static inline int qbman_result_is_FQDAN(const struct dpaa2_dq *dq)  (argument)
    330: qbman_result_is_CDAN(const struct dpaa2_dq *dq)  (argument)
    336: qbman_result_is_CSCN(const struct dpaa2_dq *dq)  (argument)
    342: qbman_result_is_BPSCN(const struct dpaa2_dq *dq)  (argument)
    348: qbman_result_is_CGCU(const struct dpaa2_dq *dq)  (argument)
    354: qbman_result_is_FQRN(const struct dpaa2_dq *dq)  (argument)
    360: qbman_result_is_FQRNI(const struct dpaa2_dq *dq)  (argument)
    366: qbman_result_is_FQPN(const struct dpaa2_dq *dq)  (argument)
    [all...]
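Note: the qbman_result_is_*() helpers all classify a hardware result the same way: mask the verb byte at the start of the descriptor with QBMAN_RESULT_MASK and compare it against a per-type code; anything that is not a frame dequeue is, by elimination, some state-change notification (qbman_result_is_SCN). A sketch of that dispatch pattern; the RESULT_* values below are made-up placeholders, not the real QBMAN codes:

    /* Illustrative only: these RESULT_* codes are placeholders. */
    #include <stdint.h>

    #define RESULT_MASK     0x7f    /* type field of the verb byte   (placeholder) */
    #define RESULT_DQ       0x60    /* frame dequeue                 (placeholder) */
    #define RESULT_FQDAN    0x67    /* FQ data-available notif.      (placeholder) */
    #define RESULT_CDAN     0x68    /* channel data-available notif. (placeholder) */

    struct hw_result {
            struct {
                    uint8_t verb;   /* first byte identifies the result type */
                    uint8_t rest[63];
            } dq;
    };

    static inline int result_type(const struct hw_result *r)
    {
            return r->dq.verb & RESULT_MASK;
    }

    static inline int result_is_dq(const struct hw_result *r)
    {
            return result_type(r) == RESULT_DQ;
    }

    /* Everything that is not a dequeue is some state-change notification. */
    static inline int result_is_scn(const struct hw_result *r)
    {
            return !result_is_dq(r);
    }

    static inline int result_is_fqdan(const struct hw_result *r)
    {
            return result_type(r) == RESULT_FQDAN;
    }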
/kernel/linux/linux-5.10/crypto/async_tx/

  async_raid6_recov.c
    208: struct page *p, *q, *g, *dp, *dq;  (local in __2data_recov_5())
    244: dq = blocks[failb];  (in __2data_recov_5())
    250: tx = async_mult(dq, dq_off, g, g_off,  (in __2data_recov_5())
    263: srcs[0] = dq;  (in __2data_recov_5())
    269: tx = async_xor_offs(dq, dq_off, srcs, src_offs, 2, bytes, submit);  (in __2data_recov_5())
    274: srcs[1] = dq;  (in __2data_recov_5())
    279: tx = async_sum_product(dq, dq_off, srcs, src_offs, coef, bytes, submit);  (in __2data_recov_5())
    284: srcs[1] = dq;  (in __2data_recov_5())
    299: struct page *p, *q, *dp, *dq;  (local in __2data_recov_n())
    323: dq…  (in __2data_recov_n())
    476: struct page *p, *q, *dq;  (local in async_raid6_datap_recov())
    [all...]
/kernel/linux/linux-6.6/crypto/async_tx/

  async_raid6_recov.c
    (identical matches to the linux-5.10 copy above, lines 208-476)  [all...]
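Note: __2data_recov_5() handles the layout with exactly one surviving data block besides P and Q (disks == 5: three data blocks, two of them failed), so the partial syndromes collapse to that single survivor D_g, and the chain of offload-friendly primitives above (a field multiplication, XORs, and a sum-product) evaluates the same equations as the synchronous lib/raid6 path:

$$P_{xy} = P \oplus D_g, \qquad Q_{xy} = Q \oplus g^{\,g} D_g ,$$

$$D_b = (g^{\,b-a} \oplus 1)^{-1} P_{xy} \;\oplus\; (g^{a} \oplus g^{b})^{-1} Q_{xy}, \qquad D_a = D_b \oplus P_{xy} ,$$

with the coef pair handed to async_sum_product() holding those two inverse constants. The general __2data_recov_n() path instead recomputes full partial syndromes over the surviving blocks, analogous to the ptrs[] swap in lib/raid6, and async_raid6_datap_recov() is the offloaded counterpart of the data-plus-P case.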
/kernel/linux/linux-5.10/fs/xfs/

  xfs_dquot.c
     70: struct xfs_dquot *dq)  (in xfs_qm_adjust_dqlimits())
     72: struct xfs_mount *mp = dq->q_mount;  (in xfs_qm_adjust_dqlimits())
     77: ASSERT(dq->q_id);  (in xfs_qm_adjust_dqlimits())
     78: defq = xfs_get_defquota(q, xfs_dquot_type(dq));  (in xfs_qm_adjust_dqlimits())
     80: if (!dq->q_blk.softlimit) {  (in xfs_qm_adjust_dqlimits())
     81: dq->q_blk.softlimit = defq->blk.soft;  (in xfs_qm_adjust_dqlimits())
     84: if (!dq->q_blk.hardlimit) {  (in xfs_qm_adjust_dqlimits())
     85: dq->q_blk.hardlimit = defq->blk.hard;  (in xfs_qm_adjust_dqlimits())
     88: if (!dq->q_ino.softlimit)  (in xfs_qm_adjust_dqlimits())
     89: dq…  (in xfs_qm_adjust_dqlimits())
     69: xfs_qm_adjust_dqlimits(struct xfs_dquot *dq)  (argument of xfs_qm_adjust_dqlimits())
    160: xfs_qm_adjust_dqtimers(struct xfs_dquot *dq)  (argument of xfs_qm_adjust_dqtimers())
   1404: struct xfs_dquot *dq;  (local in xfs_qm_dqiterate())
    [all...]
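Note: xfs_qm_adjust_dqlimits() backfills any per-dquot limit that is still zero from the mount's default quota limits, so later enforcement never has to special-case "no limit set". The shape of that fallback, with stand-in types rather than the XFS structures:

    /* Stand-in types; only the "zero means inherit the default" fallback
     * mirrors xfs_qm_adjust_dqlimits(). */
    #include <stdint.h>
    #include <stdio.h>

    struct res_limits {
            uint64_t soft;
            uint64_t hard;
    };

    /* One resource (blocks or inodes) of a hypothetical dquot. */
    struct dquot_res {
            uint64_t softlimit;
            uint64_t hardlimit;
    };

    static void adjust_res_limits(struct dquot_res *res,
                                  const struct res_limits *def)
    {
            /* A zero limit means "not set": inherit the mount-wide default. */
            if (!res->softlimit)
                    res->softlimit = def->soft;
            if (!res->hardlimit)
                    res->hardlimit = def->hard;
    }

    int main(void)
    {
            struct res_limits blk_def = { .soft = 1000, .hard = 2000 };
            struct dquot_res  blk     = { .softlimit = 0, .hardlimit = 1500 };

            adjust_res_limits(&blk, &blk_def);
            printf("soft=%llu hard=%llu\n",
                   (unsigned long long)blk.softlimit,
                   (unsigned long long)blk.hardlimit);  /* soft=1000 hard=1500 */
            return 0;
    }

Called once for the block limits and once for the inode limits, this mirrors the paired if (!dq->q_blk.*) / if (!dq->q_ino.*) blocks in the hits above.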