/kernel/linux/linux-6.6/net/netfilter/

nft_set_pipapo_avx2.c
    45: #define NFT_PIPAPO_AVX2_BUCKET_LOAD4(reg, lt, group, v, bsize) \
    48:         (v)) * (bsize)])
    49: #define NFT_PIPAPO_AVX2_BUCKET_LOAD8(reg, lt, group, v, bsize) \
    52:         (v)) * (bsize)])
   218: int i, ret = -1, m256_size = f->bsize / NFT_PIPAPO_LONGS_PER_M256, b;   (in nft_pipapo_avx2_lookup_4b_2)
   220: unsigned long *lt = f->lt, bsize = f->bsize;   (local in nft_pipapo_avx2_lookup_4b_2)
   227: NFT_PIPAPO_AVX2_BUCKET_LOAD4(0, lt, 0, pg[0], bsize);   (in nft_pipapo_avx2_lookup_4b_2)
   228: NFT_PIPAPO_AVX2_BUCKET_LOAD4(1, lt, 1, pg[1], bsize);   (in nft_pipapo_avx2_lookup_4b_2)
   231: NFT_PIPAPO_AVX2_BUCKET_LOAD4(0, lt, 0, pg[0], bsize);   (in nft_pipapo_avx2_lookup_4b_2)
   282, 360, 456, 550: unsigned long *lt = f->lt, bsize = f->bsize;   (local in nft_pipapo_avx2_lookup_4b_4, _4b_8, _4b_12, _4b_32)
   676, 733, 797, 872, 957: unsigned long *lt = f->lt, bsize = f->bsize;   (local in nft_pipapo_avx2_lookup_8b_1, _8b_2, _8b_4, _8b_6, _8b_16)
  1048: unsigned long bsize = f->bsize;   (local in nft_pipapo_avx2_lookup_slow)
   ...
nft_set_pipapo.h
   115:  * @bsize: Size of each bucket in lookup table, in longs
   124: size_t bsize;   (struct member)
   207: __bitmap_and(dst, dst, lt + v * f->bsize,   (in pipapo_and_field_buckets_4bit)
   208:              f->bsize * BITS_PER_LONG);   (in pipapo_and_field_buckets_4bit)
   209: lt += f->bsize * NFT_PIPAPO_BUCKETS(4);   (in pipapo_and_field_buckets_4bit)
   212: __bitmap_and(dst, dst, lt + v * f->bsize,   (in pipapo_and_field_buckets_4bit)
   213:              f->bsize * BITS_PER_LONG);   (in pipapo_and_field_buckets_4bit)
   214: lt += f->bsize * NFT_PIPAPO_BUCKETS(4);   (in pipapo_and_field_buckets_4bit)
   232: __bitmap_and(dst, dst, lt + *data * f->bsize,   (in pipapo_and_field_buckets_8bit)
   233:              f->bsize * BITS_PER_LONG);   (in pipapo_and_field_buckets_8bit)
   ...
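The nft_set_pipapo.h excerpt shows the heart of a pipapo field match: for each group of the packet key, the bucket selected by that group's value, bsize longs wide, is ANDed into a running result bitmap, and lt then advances past the whole group. Below is a standalone sketch of that intersection for 4-bit groups; and_field_buckets_4bit and BUCKETS_4BIT are illustrative stand-ins, not the kernel's helpers, and the bitmap AND is spelled out as a plain loop instead of __bitmap_and().

    /*
     * Standalone sketch, not the kernel code: AND the bucket selected by each
     * 4-bit group of the key into a result bitmap.  "bsize" is the bucket
     * width in longs, as documented for the struct member above.
     */
    #include <stddef.h>

    #define BUCKETS_4BIT 16 /* one bucket per possible 4-bit group value */

    static void and_field_buckets_4bit(unsigned long *dst, const unsigned long *lt,
                                       const unsigned char *data, size_t groups,
                                       size_t bsize)
    {
            size_t i, b;

            /* assumes an even number of groups, i.e. whole bytes of key data */
            for (i = 0; i < groups; i += 2, data++) {
                    /* high nibble selects a bucket in the current group ... */
                    const unsigned long *bucket = lt + (*data >> 4) * bsize;

                    for (b = 0; b < bsize; b++)
                            dst[b] &= bucket[b];
                    lt += bsize * BUCKETS_4BIT;

                    /* ... low nibble selects a bucket in the next group */
                    bucket = lt + (*data & 0x0f) * bsize;
                    for (b = 0; b < bsize; b++)
                            dst[b] &= bucket[b];
                    lt += bsize * BUCKETS_4BIT;
            }
    }

Matching a whole key is then just initialising dst to all ones and repeating this per field, which is why bsize, the bucket width in longs, appears in every AVX2 lookup variant listed above.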
/kernel/linux/linux-5.10/net/netfilter/

nft_set_pipapo_avx2.c
    45: #define NFT_PIPAPO_AVX2_BUCKET_LOAD4(reg, lt, group, v, bsize) \
    48:         (v)) * (bsize)])
    49: #define NFT_PIPAPO_AVX2_BUCKET_LOAD8(reg, lt, group, v, bsize) \
    52:         (v)) * (bsize)])
   219: int i, ret = -1, m256_size = f->bsize / NFT_PIPAPO_LONGS_PER_M256, b;   (in nft_pipapo_avx2_lookup_4b_2)
   221: unsigned long *lt = f->lt, bsize = f->bsize;   (local in nft_pipapo_avx2_lookup_4b_2)
   228: NFT_PIPAPO_AVX2_BUCKET_LOAD4(0, lt, 0, pg[0], bsize);   (in nft_pipapo_avx2_lookup_4b_2)
   229: NFT_PIPAPO_AVX2_BUCKET_LOAD4(1, lt, 1, pg[1], bsize);   (in nft_pipapo_avx2_lookup_4b_2)
   232: NFT_PIPAPO_AVX2_BUCKET_LOAD4(0, lt, 0, pg[0], bsize);   (in nft_pipapo_avx2_lookup_4b_2)
   283, 361, 457, 551: unsigned long *lt = f->lt, bsize = f->bsize;   (local in nft_pipapo_avx2_lookup_4b_4, _4b_8, _4b_12, _4b_32)
   677, 734, 798, 873, 958: unsigned long *lt = f->lt, bsize = f->bsize;   (local in nft_pipapo_avx2_lookup_8b_1, _8b_2, _8b_4, _8b_6, _8b_16)
  1049: unsigned long *lt = f->lt, bsize = f->bsize;   (local in nft_pipapo_avx2_lookup_slow)
   ...
nft_set_pipapo.h
   115:  * @bsize: Size of each bucket in lookup table, in longs
   124: size_t bsize;   (struct member)
   209: __bitmap_and(dst, dst, lt + v * f->bsize,   (in pipapo_and_field_buckets_4bit)
   210:              f->bsize * BITS_PER_LONG);   (in pipapo_and_field_buckets_4bit)
   211: lt += f->bsize * NFT_PIPAPO_BUCKETS(4);   (in pipapo_and_field_buckets_4bit)
   214: __bitmap_and(dst, dst, lt + v * f->bsize,   (in pipapo_and_field_buckets_4bit)
   215:              f->bsize * BITS_PER_LONG);   (in pipapo_and_field_buckets_4bit)
   216: lt += f->bsize * NFT_PIPAPO_BUCKETS(4);   (in pipapo_and_field_buckets_4bit)
   234: __bitmap_and(dst, dst, lt + *data * f->bsize,   (in pipapo_and_field_buckets_8bit)
   235:              f->bsize * BITS_PER_LONG);   (in pipapo_and_field_buckets_8bit)
   ...
/kernel/linux/linux-5.10/crypto/

cbc.c
    19: unsigned int bsize = crypto_skcipher_blocksize(skcipher);   (local in crypto_cbc_encrypt_segment)
    33: crypto_xor(iv, src, bsize);   (in crypto_cbc_encrypt_segment)
    35: memcpy(iv, dst, bsize);   (in crypto_cbc_encrypt_segment)
    37: src += bsize;   (in crypto_cbc_encrypt_segment)
    38: dst += bsize;   (in crypto_cbc_encrypt_segment)
    39: } while ((nbytes -= bsize) >= bsize);   (in crypto_cbc_encrypt_segment)
    47: unsigned int bsize = crypto_skcipher_blocksize(skcipher);   (local in crypto_cbc_encrypt_inplace)
    60: crypto_xor(src, iv, bsize);   (in crypto_cbc_encrypt_inplace)
    64: src += bsize;   (in crypto_cbc_encrypt_inplace)
    94: unsigned int bsize = crypto_skcipher_blocksize(skcipher);   (local in crypto_cbc_decrypt_segment)
   124: unsigned int bsize = crypto_skcipher_blocksize(skcipher);   (local in crypto_cbc_decrypt_inplace)
   ...
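The cbc.c hits trace the classic CBC walk: XOR the IV into the plaintext block, encrypt, remember the ciphertext as the next IV, then advance src and dst by bsize until fewer than bsize bytes remain. A minimal sketch of that loop over a contiguous buffer follows, assuming a caller-supplied single-block cipher callback (block_encrypt) and nbytes being a multiple of bsize; it mirrors the shape of crypto_cbc_encrypt_segment(), not the kernel's skcipher walk API.

    /*
     * Minimal CBC-encrypt walk over a contiguous buffer.  block_encrypt is a
     * caller-supplied single-block cipher; nbytes is a multiple of bsize.
     */
    #include <stddef.h>
    #include <string.h>

    typedef void (*block_encrypt_fn)(void *ctx, unsigned char *dst,
                                     const unsigned char *src);

    static void cbc_encrypt_segment(void *ctx, block_encrypt_fn block_encrypt,
                                    unsigned char *dst, const unsigned char *src,
                                    size_t nbytes, unsigned char *iv, size_t bsize)
    {
            size_t i;

            do {
                    for (i = 0; i < bsize; i++)     /* iv ^= plaintext block */
                            iv[i] ^= src[i];
                    block_encrypt(ctx, dst, iv);    /* C_i = E(P_i ^ C_{i-1}) */
                    memcpy(iv, dst, bsize);         /* chain the ciphertext */

                    src += bsize;
                    dst += bsize;
            } while ((nbytes -= bsize) >= bsize);
    }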
cfb.c
    60: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_encrypt_segment)
    68: crypto_xor(dst, src, bsize);   (in crypto_cfb_encrypt_segment)
    71: src += bsize;   (in crypto_cfb_encrypt_segment)
    72: dst += bsize;   (in crypto_cfb_encrypt_segment)
    73: } while ((nbytes -= bsize) >= bsize);   (in crypto_cfb_encrypt_segment)
    75: memcpy(walk->iv, iv, bsize);   (in crypto_cfb_encrypt_segment)
    83: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_encrypt_inplace)
    91: crypto_xor(src, tmp, bsize);   (in crypto_cfb_encrypt_inplace)
    94: src += bsize;   (in crypto_cfb_encrypt_inplace)
   106: unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_encrypt)
   130: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_decrypt_segment)
   153: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_decrypt_inplace)
   182: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_decrypt)
   ...
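cfb.c uses the same walk, but the block cipher is applied to the IV to produce a keystream and the ciphertext itself becomes the next IV, which is why crypto_cfb_encrypt_segment() ends by copying the final IV back into walk->iv. A hedged sketch with the same illustrative block_encrypt callback as above:

    /*
     * CFB sketch: encrypt the IV to get a keystream block, XOR it with the
     * plaintext, and feed the resulting ciphertext back as the next IV.
     * Contiguous buffers; dst and src must not overlap here.
     */
    #include <stddef.h>

    typedef void (*block_encrypt_fn)(void *ctx, unsigned char *dst,
                                     const unsigned char *src);

    static void cfb_encrypt_segment(void *ctx, block_encrypt_fn block_encrypt,
                                    unsigned char *dst, const unsigned char *src,
                                    size_t nbytes, const unsigned char *iv,
                                    size_t bsize)
    {
            size_t i;

            do {
                    block_encrypt(ctx, dst, iv);    /* keystream = E(iv) */
                    for (i = 0; i < bsize; i++)     /* C_i = keystream ^ P_i */
                            dst[i] ^= src[i];
                    iv = dst;                       /* ciphertext is the next iv */

                    src += bsize;
                    dst += bsize;
            } while ((nbytes -= bsize) >= bsize);
            /* a full implementation copies the final iv back, as line 75 does */
    }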
pcbc.c
    23: int bsize = crypto_cipher_blocksize(tfm);   (local in crypto_pcbc_encrypt_segment)
    30: crypto_xor(iv, src, bsize);   (in crypto_pcbc_encrypt_segment)
    32: crypto_xor_cpy(iv, dst, src, bsize);   (in crypto_pcbc_encrypt_segment)
    34: src += bsize;   (in crypto_pcbc_encrypt_segment)
    35: dst += bsize;   (in crypto_pcbc_encrypt_segment)
    36: } while ((nbytes -= bsize) >= bsize);   (in crypto_pcbc_encrypt_segment)
    45: int bsize = crypto_cipher_blocksize(tfm);   (local in crypto_pcbc_encrypt_inplace)
    52: memcpy(tmpbuf, src, bsize);   (in crypto_pcbc_encrypt_inplace)
    53: crypto_xor(iv, src, bsize);   (in crypto_pcbc_encrypt_inplace)
    90: int bsize = crypto_cipher_blocksize(tfm);   (local in crypto_pcbc_decrypt_segment)
   112: int bsize = crypto_cipher_blocksize(tfm);   (local in crypto_pcbc_decrypt_inplace)
   ...
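PCBC differs from CBC only in how the chaining value is formed: the next IV is plaintext XOR ciphertext of the block just processed, which is what the crypto_xor()/crypto_xor_cpy() pair at lines 30-32 implements. A sketch under the same assumptions as the CBC example:

    /*
     * PCBC sketch: the chaining value for the next block is
     * plaintext XOR ciphertext of the block just processed.
     */
    #include <stddef.h>

    typedef void (*block_encrypt_fn)(void *ctx, unsigned char *dst,
                                     const unsigned char *src);

    static void pcbc_encrypt_segment(void *ctx, block_encrypt_fn block_encrypt,
                                     unsigned char *dst, const unsigned char *src,
                                     size_t nbytes, unsigned char *iv, size_t bsize)
    {
            size_t i;

            do {
                    for (i = 0; i < bsize; i++)     /* iv ^= P_i */
                            iv[i] ^= src[i];
                    block_encrypt(ctx, dst, iv);    /* C_i = E(P_i ^ iv) */
                    for (i = 0; i < bsize; i++)     /* next iv = P_i ^ C_i */
                            iv[i] = dst[i] ^ src[i];

                    src += bsize;
                    dst += bsize;
            } while ((nbytes -= bsize) >= bsize);
    }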
cts.c
   103: int bsize = crypto_skcipher_blocksize(tfm);   (local in cts_cbc_encrypt)
   112: sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);   (in cts_cbc_encrypt)
   113: scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);   (in cts_cbc_encrypt)
   115: memset(d, 0, bsize);   (in cts_cbc_encrypt)
   118: scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);   (in cts_cbc_encrypt)
   124: skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv);   (in cts_cbc_encrypt)
   149: int bsize = crypto_skcipher_blocksize(tfm);   (local in crypto_cts_encrypt)
   155: if (nbytes < bsize)   (in crypto_cts_encrypt)
   158: if (nbytes == bsize) {   (in crypto_cts_encrypt)
   184: int bsize = crypto_skcipher_blocksize(tfm);   (local in cts_cbc_decrypt)
   243: int bsize = crypto_skcipher_blocksize(tfm);   (local in crypto_cts_decrypt)
   290: unsigned bsize;   (local in crypto_cts_init_tfm)
   ...
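CTS (ciphertext stealing) lets CBC handle messages that are not a multiple of the block size, provided they are at least one block long (the nbytes < bsize check at line 155). The part that is easy to get wrong is the length bookkeeping: where the final, possibly short, block starts and how many bytes it holds. A small sketch of just that arithmetic follows; the actual stealing and swapping of the last two blocks is deliberately omitted.

    /*
     * Length bookkeeping only: offset marks where the final, possibly short,
     * block of a CTS message starts and lastn is its length in bytes.
     */
    #include <assert.h>
    #include <stdio.h>

    static void cts_split(unsigned int nbytes, unsigned int bsize,
                          unsigned int *offset, unsigned int *lastn)
    {
            assert(nbytes >= bsize);        /* CTS needs at least one block */
            /* round (nbytes - 1) down to a multiple of bsize */
            *offset = ((nbytes - 1) / bsize) * bsize;
            *lastn = nbytes - *offset;      /* 1..bsize bytes */
    }

    int main(void)
    {
            unsigned int offset, lastn;

            cts_split(37, 16, &offset, &lastn);     /* 37 bytes, 16-byte blocks */
            printf("final block starts at %u and holds %u byte(s)\n",
                   offset, lastn);                  /* prints 32 and 5 */
            return 0;
    }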
ofb.c
    21: const unsigned int bsize = crypto_cipher_blocksize(cipher);   (local in crypto_ofb_crypt)
    27: while (walk.nbytes >= bsize) {   (in crypto_ofb_crypt)
    35: crypto_xor_cpy(dst, src, iv, bsize);   (in crypto_ofb_crypt)
    36: dst += bsize;   (in crypto_ofb_crypt)
    37: src += bsize;   (in crypto_ofb_crypt)
    38: } while ((nbytes -= bsize) >= bsize);   (in crypto_ofb_crypt)
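OFB turns the block cipher into a pure keystream generator: the IV is repeatedly encrypted and XORed into the data, so a trailing partial block simply consumes part of the last keystream block (crypto_ofb_crypt() handles that tail after its main loop). A sketch with an illustrative block-cipher callback that is assumed to tolerate in-place use:

    /*
     * OFB sketch: the block cipher only ever encrypts the IV, producing a
     * keystream that is XORed into the data.
     */
    #include <stddef.h>

    typedef void (*block_encrypt_fn)(void *ctx, unsigned char *dst,
                                     const unsigned char *src);

    static void ofb_crypt(void *ctx, block_encrypt_fn block_encrypt,
                          unsigned char *dst, const unsigned char *src,
                          size_t nbytes, unsigned char *iv, size_t bsize)
    {
            size_t i;

            while (nbytes >= bsize) {
                    block_encrypt(ctx, iv, iv);     /* iv = E(iv), in place */
                    for (i = 0; i < bsize; i++)
                            dst[i] = src[i] ^ iv[i];
                    dst += bsize;
                    src += bsize;
                    nbytes -= bsize;
            }

            if (nbytes) {                           /* short final block */
                    block_encrypt(ctx, iv, iv);
                    for (i = 0; i < nbytes; i++)
                            dst[i] = src[i] ^ iv[i];
            }
    }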
/kernel/linux/linux-6.6/crypto/

cbc.c
    20: unsigned int bsize = crypto_skcipher_blocksize(skcipher);   (local in crypto_cbc_encrypt_segment)
    34: crypto_xor(iv, src, bsize);   (in crypto_cbc_encrypt_segment)
    36: memcpy(iv, dst, bsize);   (in crypto_cbc_encrypt_segment)
    38: src += bsize;   (in crypto_cbc_encrypt_segment)
    39: dst += bsize;   (in crypto_cbc_encrypt_segment)
    40: } while ((nbytes -= bsize) >= bsize);   (in crypto_cbc_encrypt_segment)
    48: unsigned int bsize = crypto_skcipher_blocksize(skcipher);   (local in crypto_cbc_encrypt_inplace)
    61: crypto_xor(src, iv, bsize);   (in crypto_cbc_encrypt_inplace)
    65: src += bsize;   (in crypto_cbc_encrypt_inplace)
    95: unsigned int bsize = crypto_skcipher_blocksize(skcipher);   (local in crypto_cbc_decrypt_segment)
   125: unsigned int bsize = crypto_skcipher_blocksize(skcipher);   (local in crypto_cbc_decrypt_inplace)
   ...
cfb.c
    61: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_encrypt_segment)
    69: crypto_xor(dst, src, bsize);   (in crypto_cfb_encrypt_segment)
    72: src += bsize;   (in crypto_cfb_encrypt_segment)
    73: dst += bsize;   (in crypto_cfb_encrypt_segment)
    74: } while ((nbytes -= bsize) >= bsize);   (in crypto_cfb_encrypt_segment)
    76: memcpy(walk->iv, iv, bsize);   (in crypto_cfb_encrypt_segment)
    84: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_encrypt_inplace)
    92: crypto_xor(src, tmp, bsize);   (in crypto_cfb_encrypt_inplace)
    95: src += bsize;   (in crypto_cfb_encrypt_inplace)
   107: unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_encrypt)
   131: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_decrypt_segment)
   154: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_decrypt_inplace)
   183: const unsigned int bsize = crypto_cfb_bsize(tfm);   (local in crypto_cfb_decrypt)
   ...
pcbc.c
    24: int bsize = crypto_cipher_blocksize(tfm);   (local in crypto_pcbc_encrypt_segment)
    31: crypto_xor(iv, src, bsize);   (in crypto_pcbc_encrypt_segment)
    33: crypto_xor_cpy(iv, dst, src, bsize);   (in crypto_pcbc_encrypt_segment)
    35: src += bsize;   (in crypto_pcbc_encrypt_segment)
    36: dst += bsize;   (in crypto_pcbc_encrypt_segment)
    37: } while ((nbytes -= bsize) >= bsize);   (in crypto_pcbc_encrypt_segment)
    46: int bsize = crypto_cipher_blocksize(tfm);   (local in crypto_pcbc_encrypt_inplace)
    53: memcpy(tmpbuf, src, bsize);   (in crypto_pcbc_encrypt_inplace)
    54: crypto_xor(iv, src, bsize);   (in crypto_pcbc_encrypt_inplace)
    91: int bsize = crypto_cipher_blocksize(tfm);   (local in crypto_pcbc_decrypt_segment)
   113: int bsize = crypto_cipher_blocksize(tfm);   (local in crypto_pcbc_decrypt_inplace)
   ...
cts.c
    Identical matches to the linux-5.10 cts.c entry above: lines 103, 112, 113,
    115, 118, 124, 149, 155, 158, 184, 243 and 290.
   ...
ofb.c
    22: const unsigned int bsize = crypto_cipher_blocksize(cipher);   (local in crypto_ofb_crypt)
    28: while (walk.nbytes >= bsize) {   (in crypto_ofb_crypt)
    36: crypto_xor_cpy(dst, src, iv, bsize);   (in crypto_ofb_crypt)
    37: dst += bsize;   (in crypto_ofb_crypt)
    38: src += bsize;   (in crypto_ofb_crypt)
    39: } while ((nbytes -= bsize) >= bsize);   (in crypto_ofb_crypt)
/kernel/linux/linux-6.6/arch/x86/crypto/

des3_ede_glue.c
    68: const unsigned int bsize = DES3_EDE_BLOCK_SIZE;   (local in ecb_crypt)
    80: if (nbytes >= bsize * 3) {   (in ecb_crypt)
    85: wsrc += bsize * 3;   (in ecb_crypt)
    86: wdst += bsize * 3;   (in ecb_crypt)
    87: nbytes -= bsize * 3;   (in ecb_crypt)
    88: } while (nbytes >= bsize * 3);   (in ecb_crypt)
    90: if (nbytes < bsize)   (in ecb_crypt)
    98: wsrc += bsize;   (in ecb_crypt)
    99: wdst += bsize;   (in ecb_crypt)
   100: nbytes -= bsize;   (in ecb_crypt)
   129: unsigned int bsize = DES3_EDE_BLOCK_SIZE;   (local in __cbc_encrypt)
   170: unsigned int bsize = DES3_EDE_BLOCK_SIZE;   (local in __cbc_decrypt)
   ...
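The des3_ede_glue.c ecb_crypt() above, and the blowfish/cast5 glue below, all follow the same pattern: burn through the bulk of the data with an N-blocks-at-a-time routine (bsize * 3 here, bsize * 4 for Blowfish, CAST5_PARALLEL_BLOCKS for CAST5), then finish one block at a time. A generic sketch of that stride; crypt_3way and crypt_1way are hypothetical stand-ins for the cipher-specific assembly routines.

    /*
     * ECB glue pattern: a wide pass that handles several blocks per call,
     * then a narrow single-block pass for what is left.
     */
    #include <stddef.h>

    typedef void (*crypt_fn)(const void *ctx, unsigned char *dst,
                             const unsigned char *src);

    static size_t ecb_crypt_segment(const void *ctx, crypt_fn crypt_3way,
                                    crypt_fn crypt_1way, unsigned char *dst,
                                    const unsigned char *src, size_t nbytes,
                                    size_t bsize)
    {
            /* wide pass: three blocks per call while enough data remains */
            while (nbytes >= bsize * 3) {
                    crypt_3way(ctx, dst, src);
                    src += bsize * 3;
                    dst += bsize * 3;
                    nbytes -= bsize * 3;
            }

            /* narrow pass: one block at a time */
            while (nbytes >= bsize) {
                    crypt_1way(ctx, dst, src);
                    src += bsize;
                    dst += bsize;
                    nbytes -= bsize;
            }

            return nbytes;  /* any remainder < bsize goes back to the walk */
    }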
/kernel/linux/linux-5.10/arch/x86/crypto/

blowfish_glue.c
    75: unsigned int bsize = BF_BLOCK_SIZE;   (local in ecb_crypt)
    89: if (nbytes >= bsize * 4) {   (in ecb_crypt)
    93: wsrc += bsize * 4;   (in ecb_crypt)
    94: wdst += bsize * 4;   (in ecb_crypt)
    95: nbytes -= bsize * 4;   (in ecb_crypt)
    96: } while (nbytes >= bsize * 4);   (in ecb_crypt)
    98: if (nbytes < bsize)   (in ecb_crypt)
   106: wsrc += bsize;   (in ecb_crypt)
   107: wdst += bsize;   (in ecb_crypt)
   108: nbytes -= bsize;   (in ecb_crypt)
   131: unsigned int bsize = BF_BLOCK_SIZE;   (local in __cbc_encrypt)
   172: unsigned int bsize = BF_BLOCK_SIZE;   (local in __cbc_decrypt)
   266: unsigned int bsize = BF_BLOCK_SIZE;   (local in __ctr_crypt)
   ...
des3_ede_glue.c
    78: const unsigned int bsize = DES3_EDE_BLOCK_SIZE;   (local in ecb_crypt)
    90: if (nbytes >= bsize * 3) {   (in ecb_crypt)
    95: wsrc += bsize * 3;   (in ecb_crypt)
    96: wdst += bsize * 3;   (in ecb_crypt)
    97: nbytes -= bsize * 3;   (in ecb_crypt)
    98: } while (nbytes >= bsize * 3);   (in ecb_crypt)
   100: if (nbytes < bsize)   (in ecb_crypt)
   108: wsrc += bsize;   (in ecb_crypt)
   109: wdst += bsize;   (in ecb_crypt)
   110: nbytes -= bsize;   (in ecb_crypt)
   139: unsigned int bsize = DES3_EDE_BLOCK_SIZE;   (local in __cbc_encrypt)
   180: unsigned int bsize = DES3_EDE_BLOCK_SIZE;   (local in __cbc_decrypt)
   274: unsigned int bsize = DES3_EDE_BLOCK_SIZE;   (local in __ctr_crypt)
   ...
cast5_avx_glue.c
    53: const unsigned int bsize = CAST5_BLOCK_SIZE;   (local in ecb_crypt)
    67: if (nbytes >= bsize * CAST5_PARALLEL_BLOCKS) {   (in ecb_crypt)
    72: wsrc += bsize * CAST5_PARALLEL_BLOCKS;   (in ecb_crypt)
    73: wdst += bsize * CAST5_PARALLEL_BLOCKS;   (in ecb_crypt)
    74: nbytes -= bsize * CAST5_PARALLEL_BLOCKS;   (in ecb_crypt)
    75: } while (nbytes >= bsize * CAST5_PARALLEL_BLOCKS);   (in ecb_crypt)
    77: if (nbytes < bsize)   (in ecb_crypt)
    87: wsrc += bsize;   (in ecb_crypt)
    88: wdst += bsize;   (in ecb_crypt)
    89: nbytes -= bsize;   (in ecb_crypt)
   112: const unsigned int bsize = CAST5_BLOCK_SIZE;   (local in cbc_encrypt)
   145: const unsigned int bsize = CAST5_BLOCK_SIZE;   (local in __cbc_decrypt)
   234: const unsigned int bsize = CAST5_BLOCK_SIZE;   (local in __ctr_crypt)
   ...
glue_helper.c
    25: const unsigned int bsize = 128 / 8;   (local in glue_ecb_req_128bit)
    39: fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,   (in glue_ecb_req_128bit)
    42: func_bytes = bsize * gctx->funcs[i].num_blocks;   (in glue_ecb_req_128bit)
    55: if (nbytes < bsize)   (in glue_ecb_req_128bit)
    70: const unsigned int bsize = 128 / 8;   (local in glue_cbc_encrypt_req_128bit)
    88: nbytes -= bsize;   (in glue_cbc_encrypt_req_128bit)
    89: } while (nbytes >= bsize);   (in glue_cbc_encrypt_req_128bit)
   102: const unsigned int bsize = 128 / 8;   (local in glue_cbc_decrypt_req_128bit)
   117: fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,   (in glue_cbc_decrypt_req_128bit)
   120: src += nbytes / bsize ...   (in glue_cbc_decrypt_req_128bit)
   163: const unsigned int bsize = 128 / 8;   (local in glue_ctr_req_128bit)
   233: const unsigned int bsize = 128 / 8;   (local in __glue_xts_req_128bit)
   271: const unsigned int bsize = 128 / 8;   (local in glue_xts_req_128bit)
   ...
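glue_helper.c fixes bsize at 128 / 8 = 16 bytes and builds ECB, CBC, CTR and XTS walks on top of per-cipher callbacks. For CTR, the essential idea is a big-endian counter that is encrypted to produce keystream blocks, with a tail shorter than bsize consuming only part of the final keystream block. A self-contained sketch; the callback and the ctr_crypt name are illustrative, not the kernel's glue_ctr_req_128bit() machinery.

    /*
     * CTR sketch: a big-endian 16-byte counter is encrypted to produce
     * keystream blocks that are XORed into the data.
     */
    #include <stddef.h>

    typedef void (*block_encrypt_fn)(const void *ctx, unsigned char *dst,
                                     const unsigned char *src);

    static void ctr_inc_be(unsigned char ctr[16])
    {
            int i;

            for (i = 15; i >= 0; i--)       /* big-endian increment with carry */
                    if (++ctr[i])
                            break;
    }

    static void ctr_crypt(const void *ctx, block_encrypt_fn block_encrypt,
                          unsigned char *dst, const unsigned char *src,
                          size_t nbytes, unsigned char ctr[16])
    {
            const size_t bsize = 128 / 8;   /* same constant as glue_helper.c */
            unsigned char ks[16];
            size_t i, n;

            while (nbytes) {
                    block_encrypt(ctx, ks, ctr);    /* keystream = E(counter) */
                    ctr_inc_be(ctr);

                    n = nbytes < bsize ? nbytes : bsize;
                    for (i = 0; i < n; i++)
                            dst[i] = src[i] ^ ks[i];

                    dst += n;
                    src += n;
                    nbytes -= n;
            }
    }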
/kernel/linux/linux-6.6/arch/m68k/emu/

nfblock.c
    55: u32 blocks, bsize;   (struct member)
    97: static int __init nfhd_init_one(int id, u32 blocks, u32 bsize)   (argument in nfhd_init_one)
   104: blocks, bsize);   (in nfhd_init_one)
   106: if (bsize < 512 || (bsize & (bsize - 1))) {   (in nfhd_init_one)
   117: dev->bsize = bsize;   (in nfhd_init_one)
   118: dev->bshift = ffs(bsize) - 10;   (in nfhd_init_one)
   130: set_capacity(dev->disk, (sector_t)blocks * (bsize / 512));   (in nfhd_init_one)
   150: u32 blocks, bsize;   (local in nfhd_init)
   ...
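nfhd_init_one() refuses any bsize that is not a power of two of at least 512 bytes, then derives bshift = ffs(bsize) - 10, i.e. log2(bsize / 512), so device blocks can be converted to 512-byte sectors by shifting. A userspace sketch of that check and conversion:

    /*
     * Block-size check and shift: bsize must be a power of two >= 512, and
     * the shift converts device blocks to 512-byte sectors
     * (ffs(512) - 10 == 0, ffs(1024) - 10 == 1, ...).
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <strings.h>

    static int blocksize_to_shift(uint32_t bsize, int *bshift)
    {
            if (bsize < 512 || (bsize & (bsize - 1)))
                    return -1;              /* not a power of two >= 512 */

            *bshift = ffs(bsize) - 10;      /* log2(bsize) - log2(512) */
            return 0;
    }

    int main(void)
    {
            int shift;

            if (!blocksize_to_shift(4096, &shift))
                    printf("4096-byte blocks: shift %d, %u sectors per block\n",
                           shift, 1u << shift);     /* shift 3, 8 sectors */
            return 0;
    }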
/kernel/linux/linux-5.10/arch/s390/crypto/

sha_common.c
    19: unsigned int bsize = crypto_shash_blocksize(desc->tfm);   (local in s390_sha_update)
    23: index = ctx->count % bsize;   (in s390_sha_update)
    26: if ((index + len) < bsize)   (in s390_sha_update)
    31: memcpy(ctx->buf + index, data, bsize - index);   (in s390_sha_update)
    32: cpacf_kimd(ctx->func, ctx->state, ctx->buf, bsize);   (in s390_sha_update)
    33: data += bsize - index;   (in s390_sha_update)
    34: len -= bsize - index;   (in s390_sha_update)
    39: if (len >= bsize) {   (in s390_sha_update)
    40: n = (len / bsize) * bsize;   (in s390_sha_update)
    75: unsigned int bsize = crypto_shash_blocksize(desc->tfm);   (local in s390_sha_final)
   ...
/kernel/linux/linux-6.6/arch/s390/crypto/

sha_common.c
    Identical matches to the linux-5.10 sha_common.c entry above: lines 19, 23,
    26, 31, 32, 33, 34, 39 and 40 in s390_sha_update() and line 75 in
    s390_sha_final().
   ...
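s390_sha_update(), identical in both trees, is a textbook block-buffered hash update: finish any partial block left in ctx->buf, feed as many whole bsize blocks as possible straight from the input, and stash the remainder. A standalone sketch with process_blocks() standing in for cpacf_kimd():

    /*
     * Block-buffered hash update with the same bookkeeping as the excerpt
     * above; the 128-byte buffer is assumed to be at least bsize.
     */
    #include <stddef.h>
    #include <string.h>

    struct hash_ctx {
            unsigned char buf[128];         /* >= bsize of the chosen algorithm */
            unsigned long long count;       /* total bytes fed in so far */
    };

    typedef void (*process_fn)(struct hash_ctx *ctx, const unsigned char *data,
                               size_t len); /* len is a multiple of bsize */

    static void sha_update(struct hash_ctx *ctx, process_fn process_blocks,
                           const unsigned char *data, size_t len, size_t bsize)
    {
            size_t index = ctx->count % bsize;      /* bytes already buffered */
            size_t n;

            ctx->count += len;

            /* complete a previously buffered partial block first */
            if (index) {
                    if (index + len < bsize)
                            goto store;
                    memcpy(ctx->buf + index, data, bsize - index);
                    process_blocks(ctx, ctx->buf, bsize);
                    data += bsize - index;
                    len -= bsize - index;
                    index = 0;
            }

            /* stream as many whole blocks as possible straight from the input */
            if (len >= bsize) {
                    n = (len / bsize) * bsize;
                    process_blocks(ctx, data, n);
                    data += n;
                    len -= n;
            }

    store:
            if (len)        /* keep the tail for the next update or final */
                    memcpy(ctx->buf + index, data, len);
    }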
/kernel/linux/linux-5.10/arch/m68k/emu/

nfblock.c
    56: u32 blocks, bsize;   (struct member)
   100: static int __init nfhd_init_one(int id, u32 blocks, u32 bsize)   (argument in nfhd_init_one)
   106: blocks, bsize);   (in nfhd_init_one)
   108: if (bsize < 512 || (bsize & (bsize - 1))) {   (in nfhd_init_one)
   119: dev->bsize = bsize;   (in nfhd_init_one)
   120: dev->bshift = ffs(bsize) - 10;   (in nfhd_init_one)
   126: blk_queue_logical_block_size(dev->queue, bsize);   (in nfhd_init_one)
   156: u32 blocks, bsize;   (local in nfhd_init)
   ...
/kernel/linux/linux-5.10/lib/mpi/

mpi-pow.c
    31: mpi_size_t esize, msize, bsize, rsize;   (local in mpi_powm)
    79: bsize = base->nlimbs;   (in mpi_powm)
    81: if (bsize > msize) { /* The base is larger than the module. Reduce it. */   (in mpi_powm)
    83:  * (The quotient is (bsize - msize + 1) limbs.) */   (in mpi_powm)
    84: bp = bp_marker = mpi_alloc_limb_space(bsize + 1);   (in mpi_powm)
    87: MPN_COPY(bp, base->d, bsize);   (in mpi_powm)
    90: mpihelp_divrem(bp + msize, 0, bp, bsize, mp, msize);   (in mpi_powm)
    91: bsize = msize;   (in mpi_powm)
    94: MPN_NORMALIZE(bp, bsize);   (in mpi_powm)
    98: if (!bsize) {   (in mpi_powm)
   ...
/kernel/linux/linux-6.6/lib/crypto/mpi/

mpi-pow.c
    Identical matches to the linux-5.10 mpi-pow.c entry above: lines 31, 79, 81,
    83, 84, 87, 90, 91, 94 and 98 of mpi_powm().
   ...
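The mpi-pow.c matches (the same in both trees) are the base-reduction step of modular exponentiation: when the base has more limbs than the modulus (bsize > msize), it is divided by the modulus once up front, because (b mod m)^e mod m equals b^e mod m and a smaller base keeps every intermediate product small. The same idea on machine words instead of MPI limbs, assuming a compiler with unsigned __int128 and mod > 0:

    /*
     * Reduce-then-exponentiate on machine words: the base is reduced modulo
     * the modulus once, then square-and-multiply does the rest.
     */
    #include <stdint.h>
    #include <stdio.h>

    static uint64_t powm_u64(uint64_t base, uint64_t exp, uint64_t mod)
    {
            unsigned __int128 result = 1;
            unsigned __int128 b = base % mod;       /* the "bsize > msize" step */

            while (exp) {
                    if (exp & 1)
                            result = (result * b) % mod;
                    b = (b * b) % mod;
                    exp >>= 1;
            }
            return (uint64_t)result;
    }

    int main(void)
    {
            /* 1002 is reduced to 2 first; 2^10 mod 1000 == 24 */
            printf("%llu\n", (unsigned long long)powm_u64(1002, 10, 1000));
            return 0;
    }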