/kernel/linux/linux-5.10/crypto/
  rsa-pkcs1pad.c
    186  len = req_ctx->child_req.dst_len;              in pkcs1pad_encrypt_sign_complete()
    206  req->dst_len = ctx->key_size;                  in pkcs1pad_encrypt_sign_complete()
    241  if (req->dst_len < ctx->key_size) {            in pkcs1pad_encrypt()
    242  req->dst_len = ctx->key_size;                  in pkcs1pad_encrypt()
    266  req->dst, ctx->key_size - 1, req->dst_len);    in pkcs1pad_encrypt()
    280  unsigned int dst_len;                          in pkcs1pad_decrypt_complete() (local)
    288  dst_len = req_ctx->child_req.dst_len;          in pkcs1pad_decrypt_complete()
    289  if (dst_len < ctx->key_size - 1)               in pkcs1pad_decrypt_complete()
    293  if (dst_len ...                                in pkcs1pad_decrypt_complete()
    441  unsigned int dst_len;                          in pkcs1pad_verify_complete() (local)
    [all...]
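The pkcs1pad hits above show the driver rejecting output buffers smaller than the RSA modulus: the PKCS#1 v1.5 block always occupies a full key_size bytes, so req->dst_len must be at least that large (the padded block fed to the RSA primitive is key_size - 1 bytes because the leading 0x00 is implicit). A minimal standalone sketch of that length bookkeeping; it is not the kernel code, and key_size, msg_len and dst_len here are illustrative parameters:

    #include <stddef.h>
    #include <stdbool.h>

    /* EME-PKCS1-v1_5: EM = 0x00 || 0x02 || PS (>= 8 bytes) || 0x00 || M,
     * so a key_size-byte modulus carries at most key_size - 11 message bytes,
     * and the encrypted result always fills key_size bytes. */
    static bool pkcs1_v15_lengths_ok(size_t key_size, size_t msg_len, size_t dst_len)
    {
            if (key_size < 11)
                    return false;           /* modulus too small for any padding */
            if (msg_len > key_size - 11)
                    return false;           /* message does not fit after padding */
            if (dst_len < key_size)
                    return false;           /* output buffer must hold a full block */
            return true;
    }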
  ecrdsa.c
    77   unsigned int ndigits = req->dst_len / sizeof(u64);       in ecrdsa_verify()
    96   req->dst_len != ctx->digest_len ||                       in ecrdsa_verify()
    97   req->dst_len != ctx->curve->g.ndigits * sizeof(u64) ||   in ecrdsa_verify()
    99   req->dst_len * 2 != req->src_len ||                      in ecrdsa_verify()
    101  WARN_ON(req->dst_len > sizeof(digest)))                  in ecrdsa_verify()
    108  req->src_len + req->dst_len),                            in ecrdsa_verify()
    109  digest, req->dst_len, req->src_len);                     in ecrdsa_verify()
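In ecrdsa_verify() req->dst_len is the digest length and req->src_len the signature length, and the checks above require the signature to be exactly twice the digest size (r || s, each one curve-coordinate wide). A small sketch of that validation; the function and parameter names below are invented for illustration and are not the kernel's:

    #include <stddef.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* For ECRDSA (GOST R 34.10) with a 256-bit curve the digest is 32 bytes
     * and the signature is r || s = 64 bytes, i.e. sig_len == 2 * digest_len. */
    static bool ecrdsa_lengths_ok(size_t digest_len, size_t sig_len,
                                  size_t curve_point_bytes, size_t *ndigits)
    {
            if (digest_len == 0 || digest_len % sizeof(uint64_t))
                    return false;
            if (digest_len != curve_point_bytes)    /* digest must match the curve size */
                    return false;
            if (sig_len != 2 * digest_len)          /* signature is r || s */
                    return false;
            *ndigits = digest_len / sizeof(uint64_t);   /* 64-bit limbs per coordinate */
            return true;
    }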
  ecdsa.c
    154  buffer = kmalloc(req->src_len + req->dst_len, GFP_KERNEL);     in ecdsa_verify()
    159  sg_nents_for_len(req->src, req->src_len + req->dst_len),       in ecdsa_verify()
    160  buffer, req->src_len + req->dst_len, 0);                       in ecdsa_verify()
    168  diff = keylen - req->dst_len;                                  in ecdsa_verify()
    172  memcpy(&rawhash[diff], buffer + req->src_len, req->dst_len);   in ecdsa_verify()
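ecdsa_verify() receives the signature and digest back to back in one scatterlist (src_len signature bytes followed by dst_len digest bytes) and right-aligns a digest shorter than the curve size into a zero-filled buffer (diff = keylen - dst_len). A standalone sketch of that right-alignment step; rawhash_from_digest and its parameters are names made up for this example:

    #include <string.h>
    #include <stddef.h>

    /* Right-align a hash into a keylen-byte big-endian buffer: shorter hashes
     * are zero-extended on the left. Assumes digest_len <= keylen. */
    static void rawhash_from_digest(unsigned char *rawhash, size_t keylen,
                                    const unsigned char *digest, size_t digest_len)
    {
            size_t diff = keylen - digest_len;

            memset(rawhash, 0, diff);
            memcpy(rawhash + diff, digest, digest_len);
    }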
/kernel/linux/linux-5.10/drivers/base/regmap/
  regcache-lzo.c
    22   size_t dst_len;                                              (struct member)
    49   if (ret != LZO_E_OK || compress_size > lzo_ctx->dst_len)     in regcache_lzo_compress()
    51   lzo_ctx->dst_len = compress_size;                            in regcache_lzo_compress()
    57   size_t dst_len;                                              in regcache_lzo_decompress() (local)
    60   dst_len = lzo_ctx->dst_len;                                  in regcache_lzo_decompress()
    62   lzo_ctx->dst, &dst_len);                                     in regcache_lzo_decompress()
    63   if (ret != LZO_E_OK || dst_len != lzo_ctx->dst_len)          in regcache_lzo_decompress()
    73   lzo_ctx->dst_len ...                                         in regcache_lzo_compress_cache_block()
    [all...]
/kernel/linux/linux-6.6/crypto/
  rsa-pkcs1pad.c
    186  len = req_ctx->child_req.dst_len;              in pkcs1pad_encrypt_sign_complete()
    206  req->dst_len = ctx->key_size;                  in pkcs1pad_encrypt_sign_complete()
    240  if (req->dst_len < ctx->key_size) {            in pkcs1pad_encrypt()
    241  req->dst_len = ctx->key_size;                  in pkcs1pad_encrypt()
    265  req->dst, ctx->key_size - 1, req->dst_len);    in pkcs1pad_encrypt()
    279  unsigned int dst_len;                          in pkcs1pad_decrypt_complete() (local)
    287  dst_len = req_ctx->child_req.dst_len;          in pkcs1pad_decrypt_complete()
    288  if (dst_len < ctx->key_size - 1)               in pkcs1pad_decrypt_complete()
    292  if (dst_len ...                                in pkcs1pad_decrypt_complete()
    441  unsigned int dst_len;                          in pkcs1pad_verify_complete() (local)
    [all...]
  ecrdsa.c
    77   unsigned int ndigits = req->dst_len / sizeof(u64);       in ecrdsa_verify()
    96   req->dst_len != ctx->digest_len ||                       in ecrdsa_verify()
    97   req->dst_len != ctx->curve->g.ndigits * sizeof(u64) ||   in ecrdsa_verify()
    99   req->dst_len * 2 != req->src_len ||                      in ecrdsa_verify()
    101  WARN_ON(req->dst_len > sizeof(digest)))                  in ecrdsa_verify()
    108  req->src_len + req->dst_len),                            in ecrdsa_verify()
    109  digest, req->dst_len, req->src_len);                     in ecrdsa_verify()
  ecdsa.c
    154  buffer = kmalloc(req->src_len + req->dst_len, GFP_KERNEL);     in ecdsa_verify()
    159  sg_nents_for_len(req->src, req->src_len + req->dst_len),       in ecdsa_verify()
    160  buffer, req->src_len + req->dst_len, 0);                       in ecdsa_verify()
    168  diff = keylen - req->dst_len;                                  in ecdsa_verify()
    172  memcpy(&rawhash[diff], buffer + req->src_len, req->dst_len);   in ecdsa_verify()
/kernel/linux/linux-5.10/lib/
  decompress_unlzo.c
    106  u32 src_len, dst_len;                                in unlzo() (local)
    182  dst_len = get_unaligned_be32(in_buf);                in unlzo()
    187  if (dst_len == 0) {                                  in unlzo()
    193  if (dst_len > LZO_BLOCK_SIZE) {                      in unlzo()
    212  if (src_len <= 0 || src_len > dst_len) {             in unlzo()
    227  tmp = dst_len;                                       in unlzo()
    232  if (unlikely(dst_len == src_len))                    in unlzo()
    238  if (r != LZO_E_OK || dst_len != tmp) {               in unlzo()
    244  if (flush && flush(out_buf, dst_len) != dst_len)     in unlzo()
    [all...]
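The unlzo() hits outline the lzop block framing: each block starts with a big-endian 32-bit uncompressed length (dst_len; zero terminates the stream, and it is bounded by LZO_BLOCK_SIZE), followed by a big-endian compressed length (src_len, which must not exceed dst_len); when the two are equal the block is stored uncompressed. A simplified userspace sketch of that header parsing, under assumed buffer bounds and without the kernel's checksum handling:

    #include <stdint.h>
    #include <stddef.h>

    #define LZO_BLOCK_SIZE (256 * 1024)     /* block limit used by the kernel decompressor */

    static uint32_t get_be32(const unsigned char *p)
    {
            return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
                   ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
    }

    /* Parse one lzop block header. Returns 0 on success, -1 on error, 1 on
     * end-of-stream. *stored is set when the block is kept uncompressed. */
    static int parse_lzo_block_header(const unsigned char *in, size_t in_len,
                                      uint32_t *dst_len, uint32_t *src_len, int *stored)
    {
            if (in_len < 4)
                    return -1;
            *dst_len = get_be32(in);
            if (*dst_len == 0)
                    return 1;                       /* end-of-stream marker */
            if (*dst_len > LZO_BLOCK_SIZE)
                    return -1;                      /* corrupt or hostile input */
            if (in_len < 8)
                    return -1;
            *src_len = get_be32(in + 4);
            if (*src_len == 0 || *src_len > *dst_len)
                    return -1;
            *stored = (*src_len == *dst_len);       /* equal lengths: raw copy */
            return 0;
    }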
/kernel/linux/linux-6.6/lib/
  decompress_unlzo.c
    105  u32 src_len, dst_len;                                in unlzo() (local)
    181  dst_len = get_unaligned_be32(in_buf);                in unlzo()
    186  if (dst_len == 0) {                                  in unlzo()
    192  if (dst_len > LZO_BLOCK_SIZE) {                      in unlzo()
    211  if (src_len <= 0 || src_len > dst_len) {             in unlzo()
    226  tmp = dst_len;                                       in unlzo()
    231  if (unlikely(dst_len == src_len))                    in unlzo()
    237  if (r != LZO_E_OK || dst_len != tmp) {               in unlzo()
    243  if (flush && flush(out_buf, dst_len) != dst_len)     in unlzo()
    [all...]
/kernel/linux/linux-6.6/net/smc/
  smc_tx.c
    358  size_t dst_off, size_t dst_len,                      in smcr_tx_rdma_writes()
    366  int src_len_sum = src_len, dst_len_sum = dst_len;    in smcr_tx_rdma_writes()
    377  if (dst_len < link->qp_attr.cap.max_inline_data) {   in smcr_tx_rdma_writes()
    398  if (src_len_sum == dst_len)                          in smcr_tx_rdma_writes()
    401  src_len = dst_len - src_len; /* remainder */         in smcr_tx_rdma_writes()
    411  dst_len = len - dst_len; /* remainder */             in smcr_tx_rdma_writes()
    412  dst_len_sum += dst_len;                              in smcr_tx_rdma_writes()
    413  src_len = min_t(int, dst_len, conn->sndbuf_desc->len -   in smcr_tx_rdma_writes()
    423  size_t dst_off, size_t dst_len)                      in smcd_tx_rdma_writes()
    356  smcr_tx_rdma_writes(struct smc_connection *conn, size_t len, size_t src_off, size_t src_len, size_t dst_off, size_t dst_len, struct smc_rdma_wr *wr_rdma_buf)   (argument)
    421  smcd_tx_rdma_writes(struct smc_connection *conn, size_t len, size_t src_off, size_t src_len, size_t dst_off, size_t dst_len)   (argument)
    466  size_t len, src_len, dst_off, dst_len; /* current chunk values */   in smc_tx_rdma_writes() (local)
    [all...]
/kernel/linux/linux-5.10/net/smc/
  smc_tx.c
    309  size_t dst_off, size_t dst_len,                      in smcr_tx_rdma_writes()
    316  int src_len_sum = src_len, dst_len_sum = dst_len;    in smcr_tx_rdma_writes()
    336  if (src_len_sum == dst_len)                          in smcr_tx_rdma_writes()
    339  src_len = dst_len - src_len; /* remainder */         in smcr_tx_rdma_writes()
    350  dst_len = len - dst_len; /* remainder */             in smcr_tx_rdma_writes()
    351  dst_len_sum += dst_len;                              in smcr_tx_rdma_writes()
    352  src_len = min_t(int, dst_len, conn->sndbuf_desc->len -   in smcr_tx_rdma_writes()
    362  size_t dst_off, size_t dst_len)                      in smcd_tx_rdma_writes()
    364  int src_len_sum = src_len, dst_len_sum = dst_len;    in smcd_tx_rdma_writes()
    307  smcr_tx_rdma_writes(struct smc_connection *conn, size_t len, size_t src_off, size_t src_len, size_t dst_off, size_t dst_len, struct smc_rdma_wr *wr_rdma_buf)   (argument)
    360  smcd_tx_rdma_writes(struct smc_connection *conn, size_t len, size_t src_off, size_t src_len, size_t dst_off, size_t dst_len)   (argument)
    405  size_t len, src_len, dst_off, dst_len; /* current chunk values */   in smc_tx_rdma_writes() (local)
    [all...]
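In both smcr_tx_rdma_writes() and smcd_tx_rdma_writes() the send buffer and the remote buffer are ring buffers, so the "/* remainder */" hits above split a transfer into at most two contiguous chunks: the part up to the end of the buffer and a wrapped remainder, with dst_len_sum accumulating what has been written. A hedged standalone sketch of that chunking rule; ring_split and its types are invented names, not the SMC code:

    #include <stddef.h>

    struct ring_chunk {
            size_t off;
            size_t len;
    };

    /* Split a transfer of len bytes at offset off in a ring buffer of size
     * buf_len into at most two contiguous chunks. Assumes off < buf_len and
     * len <= buf_len. Returns the chunk count. */
    static int ring_split(size_t off, size_t len, size_t buf_len,
                          struct ring_chunk chunk[2])
    {
            size_t first = len;

            if (off + len > buf_len)
                    first = buf_len - off;          /* up to the wrap point */

            chunk[0].off = off;
            chunk[0].len = first;
            if (first == len)
                    return 1;

            chunk[1].off = 0;                       /* remainder wraps to the start */
            chunk[1].len = len - first;
            return 2;
    }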
/kernel/linux/linux-5.10/drivers/gpu/drm/
  drm_format_helper.c
    175  size_t dst_len = linepixels * sizeof(u16);   in drm_fb_xrgb8888_to_rgb565() (local)
    192  dst += dst_len;                              in drm_fb_xrgb8888_to_rgb565()
    219  size_t dst_len = linepixels * sizeof(u16);   in drm_fb_xrgb8888_to_rgb565_dstclip() (local)
    223  dbuf = kmalloc(dst_len, GFP_KERNEL);         in drm_fb_xrgb8888_to_rgb565_dstclip()
    231  memcpy_toio(dst, dbuf, dst_len);             in drm_fb_xrgb8888_to_rgb565_dstclip()
    233  dst += dst_len;                              in drm_fb_xrgb8888_to_rgb565_dstclip()
    271  size_t dst_len = linepixels * 3;             in drm_fb_xrgb8888_to_rgb888_dstclip() (local)
    275  dbuf = kmalloc(dst_len, GFP_KERNEL);         in drm_fb_xrgb8888_to_rgb888_dstclip()
    283  memcpy_toio(dst, dbuf, dst_len);             in drm_fb_xrgb8888_to_rgb888_dstclip()
    285  dst += dst_len;                              in drm_fb_xrgb8888_to_rgb888_dstclip()
    [all...]
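These helpers convert one scanline at a time: dst_len is the destination line size (2 bytes per pixel for RGB565, 3 for RGB888), a kmalloc'd staging buffer of that size is filled from the XRGB8888 source and then pushed to I/O memory with memcpy_toio(). A standalone sketch of the per-pixel XRGB8888 to RGB565 conversion such a line buffer would hold; it is illustrative only, not the kernel helper:

    #include <stdint.h>
    #include <stddef.h>

    /* Convert one line of XRGB8888 pixels to RGB565 by keeping the top
     * 5/6/5 bits of each channel. The destination line occupies
     * linepixels * sizeof(uint16_t) bytes. */
    static void line_xrgb8888_to_rgb565(uint16_t *dst, const uint32_t *src,
                                        size_t linepixels)
    {
            for (size_t i = 0; i < linepixels; i++) {
                    uint32_t x = src[i];

                    dst[i] = (uint16_t)(((x & 0x00f80000) >> 8) |   /* R: bits 23..19 -> 15..11 */
                                        ((x & 0x0000fc00) >> 5) |   /* G: bits 15..10 -> 10..5  */
                                        ((x & 0x000000f8) >> 3));   /* B: bits  7..3  ->  4..0  */
            }
    }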
/kernel/linux/linux-5.10/net/ipv4/
  fib_rules.c
    36   u8 dst_len;                                             (struct member)
    52   if (r->dst_len || r->src_len || r->tos)                 in fib4_rule_matchall()
    260  if (frh->dst_len)                                       in fib4_rule_configure()
    276  rule4->dst_len = frh->dst_len;                          in fib4_rule_configure()
    277  rule4->dstmask = inet_make_mask(rule4->dst_len);        in fib4_rule_configure()
    318  if (frh->dst_len && (rule4->dst_len != frh->dst_len))   in fib4_rule_compare()
    332  if (frh->dst_len ...                                    in fib4_rule_compare()
    [all...]
/kernel/linux/linux-6.6/net/ipv4/
  fib_rules.c
    37   u8 dst_len;                                             (struct member)
    53   if (r->dst_len || r->src_len || r->dscp)                in fib4_rule_matchall()
    263  if (frh->dst_len)                                       in fib4_rule_configure()
    279  rule4->dst_len = frh->dst_len;                          in fib4_rule_configure()
    280  rule4->dstmask = inet_make_mask(rule4->dst_len);        in fib4_rule_configure()
    320  if (frh->dst_len && (rule4->dst_len != frh->dst_len))   in fib4_rule_compare()
    334  if (frh->dst_len ...                                    in fib4_rule_compare()
    [all...]
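In fib4 rules, dst_len is the destination prefix length (0 to 32) and dstmask is the corresponding netmask built with inet_make_mask(), so rule matching reduces to comparing the masked address with the rule's destination. A small userspace sketch of that prefix-to-mask conversion and match; make_mask and dst_matches are hypothetical helper names:

    #include <stdint.h>
    #include <stdbool.h>
    #include <arpa/inet.h>   /* htonl() */

    /* Build a network-byte-order IPv4 netmask from a prefix length (0..32). */
    static uint32_t make_mask(int prefix_len)
    {
            if (prefix_len == 0)
                    return 0;
            return htonl(~0u << (32 - prefix_len));
    }

    /* A destination address matches a rule when it agrees with the rule's
     * destination on the first dst_len bits. */
    static bool dst_matches(uint32_t daddr, uint32_t rule_dst, int dst_len)
    {
            uint32_t mask = make_mask(dst_len);

            return (daddr & mask) == (rule_dst & mask);
    }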
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/lag/
  mp.c
    103  static void mlx5_lag_fib_set(struct lag_mp *mp, struct fib_info *fi, u32 dst, int dst_len)   in mlx5_lag_fib_set() (argument)
    108  mp->fib.dst_len = dst_len;                                                    in mlx5_lag_fib_set()
    167  (mp->fib.dst != fen_info->dst || mp->fib.dst_len != fen_info->dst_len) &&    in mlx5_lag_fib_route_event()
    176  if (mp->fib.dst == fen_info->dst && mp->fib.dst_len == fen_info->dst_len)    in mlx5_lag_fib_route_event()
    193  mlx5_lag_fib_set(mp, fi, fen_info->dst, fen_info->dst_len);                  in mlx5_lag_fib_route_event()
    208  mlx5_lag_fib_set(mp, fi, fen_info->dst, fen_info->dst_len);                  in mlx5_lag_fib_route_event()
/kernel/linux/linux-5.10/include/linux/
  lzo.h
    25   unsigned char *dst, size_t *dst_len, void *wrkmem);
    29   unsigned char *dst, size_t *dst_len, void *wrkmem);
    33   unsigned char *dst, size_t *dst_len);
/kernel/linux/linux-6.6/include/linux/
  lzo.h
    25   unsigned char *dst, size_t *dst_len, void *wrkmem);
    29   unsigned char *dst, size_t *dst_len, void *wrkmem);
    33   unsigned char *dst, size_t *dst_len);
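The lzo.h prototypes pass dst_len by pointer: the safe decompressor treats *dst_len as the capacity of dst on input and the produced size on output, while the compressor simply reports how many bytes it wrote, so the caller must size dst with the worst-case bound from the same header and supply an LZO1X_1_MEM_COMPRESS-byte workspace. A hedged kernel-style usage sketch against that API (error handling simplified; example_lzo_compress is an invented wrapper name):

    #include <linux/lzo.h>
    #include <linux/slab.h>
    #include <linux/errno.h>

    /* Compress src_len bytes with LZO1X-1 into dst, which the caller has
     * sized to at least lzo1x_worst_compress(src_len) bytes. Returns the
     * compressed size, or a negative errno. */
    static int example_lzo_compress(const unsigned char *src, size_t src_len,
                                    unsigned char *dst)
    {
            size_t dst_len;         /* set by the compressor to the bytes written */
            void *wrkmem;
            int ret;

            wrkmem = kmalloc(LZO1X_1_MEM_COMPRESS, GFP_KERNEL);
            if (!wrkmem)
                    return -ENOMEM;

            ret = lzo1x_1_compress(src, src_len, dst, &dst_len, wrkmem);
            kfree(wrkmem);

            return ret == LZO_E_OK ? (int)dst_len : -EINVAL;
    }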
/kernel/linux/linux-5.10/drivers/block/zram/
  zcomp.c
    116  const void *src, unsigned int *dst_len)       in zcomp_compress()
    132  *dst_len = PAGE_SIZE * 2;                     in zcomp_compress()
    136  zstrm->buffer, dst_len);                      in zcomp_compress()
    142  unsigned int dst_len = PAGE_SIZE;             in zcomp_decompress() (local)
    146  dst, &dst_len);                               in zcomp_decompress()
    115  zcomp_compress(struct zcomp_strm *zstrm, const void *src, unsigned int *dst_len)   (argument)
/kernel/linux/linux-6.6/drivers/block/zram/
  zcomp.c
    118  const void *src, unsigned int *dst_len)       in zcomp_compress()
    134  *dst_len = PAGE_SIZE * 2;                     in zcomp_compress()
    138  zstrm->buffer, dst_len);                      in zcomp_compress()
    144  unsigned int dst_len = PAGE_SIZE;             in zcomp_decompress() (local)
    148  dst, &dst_len);                               in zcomp_decompress()
    117  zcomp_compress(struct zcomp_strm *zstrm, const void *src, unsigned int *dst_len)   (argument)
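zcomp_compress() primes *dst_len with the available output space (two pages, the incompressible worst case) before calling into the crypto API, and zcomp_decompress() fixes dst_len at PAGE_SIZE because zram always decompresses back into a whole page; the backend then updates the value to the actual size. A hedged sketch of that in/out convention using the legacy crypto_comp interface these kernels use; the wrapper names are invented, and the <linux/crypto.h> include location is assumed for these versions:

    #include <linux/crypto.h>
    #include <linux/mm.h>          /* PAGE_SIZE */

    /* Compress one page. *dst_len carries the available space in dst on input
     * and the produced size on output, mirroring the zcomp_compress() pattern. */
    static int example_page_compress(struct crypto_comp *tfm, const u8 *page_in,
                                     u8 *dst, unsigned int *dst_len)
    {
            *dst_len = PAGE_SIZE * 2;       /* worst case: incompressible data */
            return crypto_comp_compress(tfm, page_in, PAGE_SIZE, dst, dst_len);
    }

    /* Decompress back into exactly one page, as zram stores whole pages. */
    static int example_page_decompress(struct crypto_comp *tfm, const u8 *src,
                                       unsigned int src_len, u8 *page_out)
    {
            unsigned int dst_len = PAGE_SIZE;

            return crypto_comp_decompress(tfm, src, src_len, page_out, &dst_len);
    }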
/kernel/linux/linux-5.10/lib/crypto/
  chacha20poly1305.c
    130  size_t dst_len;                                                    in __chacha20poly1305_decrypt() (local)
    148  dst_len = src_len - POLY1305_DIGEST_SIZE;                          in __chacha20poly1305_decrypt()
    149  poly1305_update(&poly1305_state, src, dst_len);                    in __chacha20poly1305_decrypt()
    150  if (dst_len & 0xf)                                                 in __chacha20poly1305_decrypt()
    151  poly1305_update(&poly1305_state, pad0, 0x10 - (dst_len & 0xf));    in __chacha20poly1305_decrypt()
    154  b.lens[1] = cpu_to_le64(dst_len);                                  in __chacha20poly1305_decrypt()
    159  ret = crypto_memneq(b.mac, src + dst_len, POLY1305_DIGEST_SIZE);   in __chacha20poly1305_decrypt()
    161  chacha20_crypt(chacha_state, dst, src, dst_len);                   in __chacha20poly1305_decrypt()
/kernel/linux/linux-6.6/lib/crypto/
  chacha20poly1305.c
    130  size_t dst_len;                                                    in __chacha20poly1305_decrypt() (local)
    148  dst_len = src_len - POLY1305_DIGEST_SIZE;                          in __chacha20poly1305_decrypt()
    149  poly1305_update(&poly1305_state, src, dst_len);                    in __chacha20poly1305_decrypt()
    150  if (dst_len & 0xf)                                                 in __chacha20poly1305_decrypt()
    151  poly1305_update(&poly1305_state, pad0, 0x10 - (dst_len & 0xf));    in __chacha20poly1305_decrypt()
    154  b.lens[1] = cpu_to_le64(dst_len);                                  in __chacha20poly1305_decrypt()
    159  ret = crypto_memneq(b.mac, src + dst_len, POLY1305_DIGEST_SIZE);   in __chacha20poly1305_decrypt()
    161  chacha20_crypt(chacha_state, dst, src, dst_len);                   in __chacha20poly1305_decrypt()
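In __chacha20poly1305_decrypt() the plaintext length is simply the ciphertext length minus the 16-byte Poly1305 tag (dst_len = src_len - POLY1305_DIGEST_SIZE); the MAC input is the AAD and ciphertext each zero-padded to a 16-byte boundary, followed by both lengths as little-endian 64-bit integers, and the tag is compared in constant time before decrypting. A small standalone sketch of just the length bookkeeping per RFC 8439; the MAC and cipher calls themselves are the library's and are omitted, and aead_tail_lengths is an invented name:

    #include <stdint.h>
    #include <stddef.h>

    #define POLY1305_TAG_SIZE 16

    /* Given an RFC 8439 AEAD ciphertext of src_len bytes (payload || 16-byte
     * tag), recover the payload length and the final 16-byte length block fed
     * to Poly1305 after the zero-padded AAD and ciphertext. */
    static int aead_tail_lengths(size_t src_len, size_t ad_len,
                                 size_t *dst_len, unsigned char len_block[16])
    {
            uint64_t le_ad, le_ct;

            if (src_len < POLY1305_TAG_SIZE)
                    return -1;                      /* too short to hold a tag */

            *dst_len = src_len - POLY1305_TAG_SIZE; /* payload = ciphertext - tag */

            /* Poly1305 input ends with the AAD and ciphertext lengths, each as
             * a 64-bit little-endian integer. */
            le_ad = (uint64_t)ad_len;
            le_ct = (uint64_t)*dst_len;
            for (int i = 0; i < 8; i++) {
                    len_block[i]     = (unsigned char)(le_ad >> (8 * i));
                    len_block[8 + i] = (unsigned char)(le_ct >> (8 * i));
            }
            return 0;
    }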
/kernel/linux/linux-5.10/include/crypto/
  akcipher.h
    18   * total size of @src is @src_len + @dst_len.
    23   * @dst_len: Size of @dst buffer (for all ops except verify).
    38   unsigned int dst_len;                                            (struct member)
    56   * algorithm. In case of error, where the dst_len was insufficient,
    57   * the req->dst_len will be updated to the size required for the
    63   * algorithm. In case of error, where the dst_len was insufficient,
    64   * the req->dst_len will be updated to the size required for the
    67   * algorithm. In case of error, where the dst_len was insufficient,
    68   * the req->dst_len will be updated to the size required for the
    245  * @dst_len ...
    248  akcipher_request_set_crypt(struct akcipher_request *req, struct scatterlist *src, struct scatterlist *dst, unsigned int src_len, unsigned int dst_len)   (argument)
    [all...]
/kernel/linux/linux-6.6/include/crypto/
  akcipher.h
    20   * total size of @src is @src_len + @dst_len.
    25   * @dst_len: Size of @dst buffer (for all ops except verify).
    40   unsigned int dst_len;                                            (struct member)
    81   * algorithm. In case of error, where the dst_len was insufficient,
    82   * the req->dst_len will be updated to the size required for the
    88   * algorithm. In case of error, where the dst_len was insufficient,
    89   * the req->dst_len will be updated to the size required for the
    92   * algorithm. In case of error, where the dst_len was insufficient,
    93   * the req->dst_len will be updated to the size required for the
    273  * @dst_len ...
    276  akcipher_request_set_crypt(struct akcipher_request *req, struct scatterlist *src, struct scatterlist *dst, unsigned int src_len, unsigned int dst_len)   (argument)
    [all...]
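akcipher.h documents dst_len as the caller-supplied output capacity: akcipher_request_set_crypt() records the src/dst scatterlists along with src_len and dst_len, and if the buffer turns out to be too small the operation fails and req->dst_len is rewritten to the size actually required, so the caller can reallocate and retry. A hedged kernel-style sketch of that pattern; only the setter's signature comes from the header above, the wrapper name is invented, and asynchronous completion handling is deliberately omitted:

    #include <crypto/akcipher.h>
    #include <linux/scatterlist.h>

    /* Issue an encryption with the caller's output capacity in dst_len. If the
     * buffer was too small, the API updates req->dst_len to the required size,
     * which the caller can use to reallocate and retry. (Synchronous use only;
     * a real caller would also handle -EINPROGRESS/-EBUSY via a completion.) */
    static int example_akcipher_encrypt(struct akcipher_request *req,
                                        struct scatterlist *src, unsigned int src_len,
                                        struct scatterlist *dst, unsigned int dst_len,
                                        unsigned int *needed)
    {
            int ret;

            akcipher_request_set_crypt(req, src, dst, src_len, dst_len);
            ret = crypto_akcipher_encrypt(req);
            if (ret)
                    *needed = req->dst_len;   /* size required if dst was too small */
            return ret;
    }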
/kernel/linux/linux-5.10/security/keys/
  dh.c
    380  if (copy_from_user(outbuf + req->dst_len, kdfcopy->otherinfo,    in __keyctl_dh_compute()
    387  req->dst_len + kdfcopy->otherinfolen,                            in __keyctl_dh_compute()
    388  outlen - req->dst_len);                                          in __keyctl_dh_compute()
    389  } else if (copy_to_user(buffer, outbuf, req->dst_len) == 0) {    in __keyctl_dh_compute()
    390  ret = req->dst_len;                                              in __keyctl_dh_compute()
/kernel/linux/linux-5.10/drivers/crypto/virtio/
  virtio_crypto_algs.c
    340  u64 dst_len;                                                 in __virtio_crypto_skcipher_do_req() (local)
    390  dst_len = virtio_crypto_alg_sg_nents_length(req->dst);       in __virtio_crypto_skcipher_do_req()
    391  if (unlikely(dst_len > U32_MAX)) {                           in __virtio_crypto_skcipher_do_req()
    392  pr_err("virtio_crypto: The dst_len is beyond U32_MAX\n");    in __virtio_crypto_skcipher_do_req()
    397  dst_len = min_t(unsigned int, req->cryptlen, dst_len);       in __virtio_crypto_skcipher_do_req()
    398  pr_debug("virtio_crypto: src_len: %u, dst_len: %llu\n",      in __virtio_crypto_skcipher_do_req()
    399  req->cryptlen, dst_len);                                     in __virtio_crypto_skcipher_do_req()
    401  if (unlikely(req->cryptlen + dst_len + ivsize +              in __virtio_crypto_skcipher_do_req()
    409  cpu_to_le32((uint32_t)dst_len);                              in __virtio_crypto_skcipher_do_req()
    [all...]