/kernel/linux/linux-5.10/crypto/ |
H A D | rsa-pkcs1pad.c | 178 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); in pkcs1pad_encrypt_sign_complete() local 186 len = req_ctx->child_req.dst_len; in pkcs1pad_encrypt_sign_complete() 208 kfree(req_ctx->in_buf); in pkcs1pad_encrypt_sign_complete() 231 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); in pkcs1pad_encrypt() local 246 req_ctx->in_buf = kmalloc(ctx->key_size - 1 - req->src_len, in pkcs1pad_encrypt() 248 if (!req_ctx->in_buf) in pkcs1pad_encrypt() 252 req_ctx->in_buf[0] = 0x02; in pkcs1pad_encrypt() 254 req_ctx->in_buf[i] = 1 + prandom_u32_max(255); in pkcs1pad_encrypt() 255 req_ctx->in_buf[ps_end] = 0x00; in pkcs1pad_encrypt() 257 pkcs1pad_sg_set_buf(req_ctx in pkcs1pad_encrypt() 279 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_decrypt_complete() local 347 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_decrypt() local 380 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_sign() local 437 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_verify_complete() local 534 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_verify() local [all...] |
/kernel/linux/linux-6.6/crypto/ |
H A D | rsa-pkcs1pad.c | 178 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); in pkcs1pad_encrypt_sign_complete() local 186 len = req_ctx->child_req.dst_len; in pkcs1pad_encrypt_sign_complete() 208 kfree(req_ctx->in_buf); in pkcs1pad_encrypt_sign_complete() 230 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); in pkcs1pad_encrypt() local 245 req_ctx->in_buf = kmalloc(ctx->key_size - 1 - req->src_len, in pkcs1pad_encrypt() 247 if (!req_ctx->in_buf) in pkcs1pad_encrypt() 251 req_ctx->in_buf[0] = 0x02; in pkcs1pad_encrypt() 253 req_ctx->in_buf[i] = get_random_u32_inclusive(1, 255); in pkcs1pad_encrypt() 254 req_ctx->in_buf[ps_end] = 0x00; in pkcs1pad_encrypt() 256 pkcs1pad_sg_set_buf(req_ctx in pkcs1pad_encrypt() 278 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_decrypt_complete() local 345 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_decrypt() local 378 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_sign() local 435 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_verify_complete() local 532 struct pkcs1pad_request *req_ctx = akcipher_request_ctx(req); pkcs1pad_verify() local [all...] |
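The rsa-pkcs1pad.c hits in both trees show pkcs1pad_encrypt() building the EME-PKCS1-v1_5 type-2 padding prefix in req_ctx->in_buf: a 0x02 marker, a run of nonzero random octets, and a 0x00 separator (the buffer is kmalloc'd as key_size - 1 - src_len bytes, so the leading 0x00 of the encoded block is implicit and the message itself is chained in via the scatterlist rather than copied). The only functional difference between the 5.10 and 6.6 snippets is the RNG helper: 1 + prandom_u32_max(255) versus get_random_u32_inclusive(1, 255), both producing a uniform octet in 1..255. Below is a minimal userspace sketch of the same encoding with a contiguous buffer and rand() standing in for the kernel RNG; it is an illustration of the layout, not the kernel implementation.

/*
 * EME-PKCS1-v1_5 type-2 encoding: EM = 0x00 || 0x02 || PS || 0x00 || M,
 * where PS is at least 8 nonzero random octets (RFC 8017).  Sketch only.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static int pkcs1_v15_pad_type2(unsigned char *em, size_t k,
                               const unsigned char *msg, size_t mlen)
{
        size_t ps_len = k - mlen - 3;   /* padding string length */
        size_t i;

        if (mlen > k - 11)              /* need at least 8 bytes of PS */
                return -1;

        em[0] = 0x00;
        em[1] = 0x02;
        for (i = 2; i < 2 + ps_len; i++)
                em[i] = 1 + rand() % 255;       /* nonzero random octet */
        em[2 + ps_len] = 0x00;
        memcpy(&em[3 + ps_len], msg, mlen);
        return 0;
}

int main(void)
{
        unsigned char em[256];          /* k for a 2048-bit modulus */
        const unsigned char msg[] = "hello";

        if (pkcs1_v15_pad_type2(em, sizeof(em), msg, sizeof(msg) - 1))
                return 1;
        printf("em[0..2] = %02x %02x %02x ...\n", em[0], em[1], em[2]);
        return 0;
}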
/kernel/linux/linux-5.10/drivers/crypto/axis/ |
H A D | artpec6_crypto.c | 1089 struct artpec6_crypto_request_context *req_ctx = NULL; in artpec6_crypto_encrypt() local 1093 req_ctx = skcipher_request_ctx(req); in artpec6_crypto_encrypt() 1099 req_ctx->decrypt = 0; in artpec6_crypto_encrypt() 1114 ret = artpec6_crypto_common_init(&req_ctx->common, in artpec6_crypto_encrypt() 1123 artpec6_crypto_common_destroy(&req_ctx->common); in artpec6_crypto_encrypt() 1127 return artpec6_crypto_submit(&req_ctx->common); in artpec6_crypto_encrypt() 1135 struct artpec6_crypto_request_context *req_ctx = NULL; in artpec6_crypto_decrypt() local 1138 req_ctx = skcipher_request_ctx(req); in artpec6_crypto_decrypt() 1144 req_ctx->decrypt = 1; in artpec6_crypto_decrypt() 1160 ret = artpec6_crypto_common_init(&req_ctx in artpec6_crypto_decrypt() 1264 struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(req); artpec6_crypto_aead_encrypt() local 1285 struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(req); artpec6_crypto_aead_decrypt() local 1310 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(areq); artpec6_crypto_prepare_hash() local 1665 struct artpec6_crypto_request_context *req_ctx = NULL; artpec6_crypto_prepare_crypto() local 1832 struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq); artpec6_crypto_prepare_aead() local 2186 struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq); artpec6_crypto_complete_aead() local 2261 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_init_hash() local 2292 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_prepare_submit_hash() local 2325 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_hash_final() local 2334 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_hash_update() local 2348 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_sha1_digest() local 2364 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_sha256_digest() local 2379 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_hmac_sha256_digest() local [all...] |
/kernel/linux/linux-6.6/drivers/crypto/axis/ |
H A D | artpec6_crypto.c | 1090 struct artpec6_crypto_request_context *req_ctx = NULL; in artpec6_crypto_encrypt() local 1094 req_ctx = skcipher_request_ctx(req); in artpec6_crypto_encrypt() 1100 req_ctx->decrypt = 0; in artpec6_crypto_encrypt() 1115 ret = artpec6_crypto_common_init(&req_ctx->common, in artpec6_crypto_encrypt() 1124 artpec6_crypto_common_destroy(&req_ctx->common); in artpec6_crypto_encrypt() 1128 return artpec6_crypto_submit(&req_ctx->common); in artpec6_crypto_encrypt() 1136 struct artpec6_crypto_request_context *req_ctx = NULL; in artpec6_crypto_decrypt() local 1139 req_ctx = skcipher_request_ctx(req); in artpec6_crypto_decrypt() 1145 req_ctx->decrypt = 1; in artpec6_crypto_decrypt() 1161 ret = artpec6_crypto_common_init(&req_ctx in artpec6_crypto_decrypt() 1265 struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(req); artpec6_crypto_aead_encrypt() local 1286 struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(req); artpec6_crypto_aead_decrypt() local 1311 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(areq); artpec6_crypto_prepare_hash() local 1666 struct artpec6_crypto_request_context *req_ctx = NULL; artpec6_crypto_prepare_crypto() local 1833 struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq); artpec6_crypto_prepare_aead() local 2187 struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq); artpec6_crypto_complete_aead() local 2262 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_init_hash() local 2293 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_prepare_submit_hash() local 2326 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_hash_final() local 2335 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_hash_update() local 2349 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_sha1_digest() local 2365 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_sha256_digest() local 2380 struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req); artpec6_crypto_hmac_sha256_digest() local [all...] |
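The artpec6_crypto.c hits follow the standard skcipher driver pattern: per-request state (here a direction flag and the common submission context) lives in the memory that skcipher_request_ctx() returns, which the driver sized earlier with crypto_skcipher_set_reqsize(). A hedged, generic sketch of that pattern is below; the my_ names are hypothetical and the hardware submission is elided, so this is not the artpec6 code itself.

#include <crypto/internal/skcipher.h>
#include <linux/errno.h>

struct my_request_ctx {
	bool decrypt;		/* direction, consumed by the completion path */
};

static int my_init_tfm(struct crypto_skcipher *tfm)
{
	/* reserve sizeof(struct my_request_ctx) behind every request */
	crypto_skcipher_set_reqsize(tfm, sizeof(struct my_request_ctx));
	return 0;
}

static int my_encrypt(struct skcipher_request *req)
{
	struct my_request_ctx *req_ctx = skcipher_request_ctx(req);

	req_ctx->decrypt = false;
	/* ...build and submit the hardware descriptor here... */
	return -EINPROGRESS;	/* completion is reported asynchronously */
}

static int my_decrypt(struct skcipher_request *req)
{
	struct my_request_ctx *req_ctx = skcipher_request_ctx(req);

	req_ctx->decrypt = true;
	/* ...build and submit the hardware descriptor here... */
	return -EINPROGRESS;
}

Keeping the flag in the request context rather than in the tfm context is what makes concurrent requests on one tfm safe, which is why the same idiom appears in nearly every driver listed here.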
/kernel/linux/linux-5.10/drivers/crypto/ |
H A D | talitos.c | 1703 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); in common_nonsnoop_hash_unmap() local 1715 if (req_ctx->last) in common_nonsnoop_hash_unmap() 1716 memcpy(areq->result, req_ctx->hw_context, in common_nonsnoop_hash_unmap() 1719 if (req_ctx->psrc) in common_nonsnoop_hash_unmap() 1720 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0); in common_nonsnoop_hash_unmap() 1730 if (is_sec1 && req_ctx->nbuf) in common_nonsnoop_hash_unmap() 1750 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); in ahash_done() local 1752 if (!req_ctx->last && req_ctx->to_hash_later) { in ahash_done() 1754 req_ctx in ahash_done() 1793 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); common_nonsnoop_hash() local 1916 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_edesc_alloc() local 1932 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_init() local 1959 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_init_sha224_swinit() local 1984 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_process_req() local 2094 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_update() local 2103 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_final() local 2112 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_finup() local 2121 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_digest() local 2132 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_export() local 2157 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_import() local [all...] |
H A D | ixp4xx_crypto.c | 347 struct aead_ctx *req_ctx = aead_request_ctx(req); in finish_scattered_hmac() local 352 if (req_ctx->encrypt) { in finish_scattered_hmac() 353 scatterwalk_map_and_copy(req_ctx->hmac_virt, in finish_scattered_hmac() 356 dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes); in finish_scattered_hmac() 373 struct aead_ctx *req_ctx = aead_request_ctx(req); in one_packet() local 375 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in one_packet() 376 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in one_packet() 377 if (req_ctx->hmac_virt) { in one_packet() 385 struct ablk_ctx *req_ctx = skcipher_request_ctx(req); in one_packet() local 392 if (req_ctx in one_packet() 892 struct ablk_ctx *req_ctx = skcipher_request_ctx(req); ablk_perform() local 1005 struct aead_ctx *req_ctx = aead_request_ctx(req); aead_perform() local [all...] |
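The ixp4xx finish_scattered_hmac() hits show the completion side of an AEAD whose hardware writes the authentication tag into a DMA-pool bounce buffer (hmac_virt): on encryption the tag is copied back into the destination scatterlist and the bounce buffer is freed. The hedged sketch below assumes the usual AEAD destination layout (assoc data || ciphertext || ICV) for the copy offset; the my_ names are hypothetical and this is a simplified illustration, not the driver's exact code.

#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>
#include <linux/dmapool.h>
#include <linux/types.h>

struct my_aead_req_ctx {
	u8 *hmac_virt;			/* bounce buffer holding the ICV */
	dma_addr_t hmac_phys;		/* its DMA address, for dma_pool_free() */
	bool encrypt;
};

static void my_finish_scattered_hmac(struct dma_pool *pool,
				     struct aead_request *req)
{
	struct my_aead_req_ctx *req_ctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(tfm);

	if (req_ctx->encrypt)
		scatterwalk_map_and_copy(req_ctx->hmac_virt, req->dst,
					 req->assoclen + req->cryptlen,
					 authsize, 1 /* buffer -> scatterlist */);

	dma_pool_free(pool, req_ctx->hmac_virt, req_ctx->hmac_phys);
}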
/kernel/linux/linux-6.6/drivers/crypto/ |
H A D | talitos.c | 1704 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); in common_nonsnoop_hash_unmap() local 1716 if (req_ctx->last) in common_nonsnoop_hash_unmap() 1717 memcpy(areq->result, req_ctx->hw_context, in common_nonsnoop_hash_unmap() 1720 if (req_ctx->psrc) in common_nonsnoop_hash_unmap() 1721 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0); in common_nonsnoop_hash_unmap() 1731 if (is_sec1 && req_ctx->nbuf) in common_nonsnoop_hash_unmap() 1751 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); in ahash_done() local 1753 if (!req_ctx->last && req_ctx->to_hash_later) { in ahash_done() 1755 req_ctx in ahash_done() 1794 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); common_nonsnoop_hash() local 1917 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_edesc_alloc() local 1933 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_init() local 1960 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_init_sha224_swinit() local 1985 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_process_req() local 2095 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_update() local 2104 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_final() local 2113 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_finup() local 2122 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_digest() local 2133 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_export() local 2158 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); ahash_import() local [all...] |
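The talitos ahash hits (ahash_process_req(), ahash_done(), the to_hash_later field) revolve around one idea: a hardware hash engine can only consume whole blocks until the final request, so each update() hashes the largest block-aligned prefix of buffered-plus-new data and carries the remainder in the request context for the next call. The standalone sketch below shows that split calculation; the keep-one-block-back rule is a common convention for such drivers, not a claim about talitos's exact policy.

/* Userspace illustration of the update() split; compile and run as-is. */
#include <stdio.h>

struct split {
	unsigned int to_hash_now;	/* bytes submitted to the engine */
	unsigned int to_hash_later;	/* bytes kept in the request ctx */
};

static struct split plan_update(unsigned int nbuf, unsigned int nbytes,
				unsigned int blocksize, int last)
{
	unsigned int total = nbuf + nbytes;
	struct split s;

	/* everything goes out on the final request, otherwise keep the tail */
	s.to_hash_later = last ? 0 : total % blocksize;
	/* keep one full block back so the final request is never empty */
	if (!last && !s.to_hash_later && total)
		s.to_hash_later = blocksize;
	s.to_hash_now = total - s.to_hash_later;
	return s;
}

int main(void)
{
	struct split s = plan_update(10, 150, 64, 0);

	printf("hash now: %u, buffer for later: %u\n",
	       s.to_hash_now, s.to_hash_later);	/* prints 128 and 32 */
	return 0;
}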
/kernel/linux/linux-5.10/drivers/crypto/ccree/ |
H A D | cc_cipher.c | 508 struct cipher_req_ctx *req_ctx, in cc_setup_readiv_desc() 516 int direction = req_ctx->gen_ctx.op_type; in cc_setup_readiv_desc() 517 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; in cc_setup_readiv_desc() 564 struct cipher_req_ctx *req_ctx, in cc_setup_state_desc() 573 int direction = req_ctx->gen_ctx.op_type; in cc_setup_state_desc() 574 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; in cc_setup_state_desc() 608 struct cipher_req_ctx *req_ctx, in cc_setup_xex_state_desc() 617 int direction = req_ctx->gen_ctx.op_type; in cc_setup_xex_state_desc() 620 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; in cc_setup_xex_state_desc() 686 struct cipher_req_ctx *req_ctx, in cc_setup_key_desc() 507 cc_setup_readiv_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, unsigned int ivsize, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_readiv_desc() argument 563 cc_setup_state_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, unsigned int ivsize, unsigned int nbytes, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_state_desc() argument 607 cc_setup_xex_state_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, unsigned int ivsize, unsigned int nbytes, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_xex_state_desc() argument 685 cc_setup_key_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, unsigned int nbytes, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_key_desc() argument 767 cc_setup_mlli_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, struct scatterlist *dst, struct scatterlist *src, unsigned int nbytes, void *areq, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_mlli_desc() argument 794 cc_setup_flow_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, struct scatterlist *dst, struct scatterlist *src, unsigned int nbytes, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_flow_desc() argument 860 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); cc_cipher_complete() local 879 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); cc_cipher_process() local 989 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); cc_cipher_encrypt() local 998 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); cc_cipher_decrypt() local [all...] |
H A D | cc_aead.c | 876 struct aead_req_ctx *req_ctx = aead_request_ctx(req); in cc_proc_digest_desc() local 880 int direct = req_ctx->gen_ctx.op_type; in cc_proc_digest_desc() 887 set_dout_dlli(&desc[idx], req_ctx->icv_dma_addr, ctx->authsize, in cc_proc_digest_desc() 903 set_dout_dlli(&desc[idx], req_ctx->mac_buf_dma_addr, in cc_proc_digest_desc() 926 struct aead_req_ctx *req_ctx = aead_request_ctx(req); in cc_set_cipher_desc() local 927 unsigned int hw_iv_size = req_ctx->hw_iv_size; in cc_set_cipher_desc() 929 int direct = req_ctx->gen_ctx.op_type; in cc_set_cipher_desc() 935 set_din_type(&desc[idx], DMA_DLLI, req_ctx->gen_ctx.iv_dma_addr, in cc_set_cipher_desc() 968 struct aead_req_ctx *req_ctx = aead_request_ctx(req); in cc_proc_cipher() local 969 int direct = req_ctx in cc_proc_cipher() 1161 struct aead_req_ctx *req_ctx = aead_request_ctx(req); cc_mlli_to_sram() local 1215 struct aead_req_ctx *req_ctx = aead_request_ctx(req); cc_hmac_authenc() local 1268 struct aead_req_ctx *req_ctx = aead_request_ctx(req); cc_xcbc_authenc() local 1414 struct aead_req_ctx *req_ctx = aead_request_ctx(req); cc_ccm() local 1536 struct aead_req_ctx *req_ctx = aead_request_ctx(req); config_ccm_adata() local 1618 struct aead_req_ctx *req_ctx = aead_request_ctx(req); cc_set_ghash_desc() local 1696 struct aead_req_ctx *req_ctx = aead_request_ctx(req); cc_set_gctr_desc() local 1733 struct aead_req_ctx *req_ctx = aead_request_ctx(req); cc_proc_gcm_result() local 1795 struct aead_req_ctx *req_ctx = aead_request_ctx(req); cc_gcm() local 1833 struct aead_req_ctx *req_ctx = aead_request_ctx(req); config_gcm_context() local [all...] |
H A D | cc_buffer_mgr.c | 343 struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx; in cc_unmap_cipher_request() local 345 if (req_ctx->gen_ctx.iv_dma_addr) { in cc_unmap_cipher_request() 347 &req_ctx->gen_ctx.iv_dma_addr, ivsize); in cc_unmap_cipher_request() 348 dma_unmap_single(dev, req_ctx->gen_ctx.iv_dma_addr, in cc_unmap_cipher_request() 352 if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI && in cc_unmap_cipher_request() 353 req_ctx->mlli_params.mlli_virt_addr) { in cc_unmap_cipher_request() 354 dma_pool_free(req_ctx->mlli_params.curr_pool, in cc_unmap_cipher_request() 355 req_ctx->mlli_params.mlli_virt_addr, in cc_unmap_cipher_request() 356 req_ctx->mlli_params.mlli_dma_addr); in cc_unmap_cipher_request() 360 dma_unmap_sg(dev, src, req_ctx in cc_unmap_cipher_request() 375 struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx; cc_map_cipher_request() local [all...] |
/kernel/linux/linux-6.6/drivers/crypto/ccree/ |
H A D | cc_cipher.c | 511 struct cipher_req_ctx *req_ctx, in cc_setup_readiv_desc() 519 int direction = req_ctx->gen_ctx.op_type; in cc_setup_readiv_desc() 520 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; in cc_setup_readiv_desc() 567 struct cipher_req_ctx *req_ctx, in cc_setup_state_desc() 576 int direction = req_ctx->gen_ctx.op_type; in cc_setup_state_desc() 577 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; in cc_setup_state_desc() 611 struct cipher_req_ctx *req_ctx, in cc_setup_xex_state_desc() 620 int direction = req_ctx->gen_ctx.op_type; in cc_setup_xex_state_desc() 623 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; in cc_setup_xex_state_desc() 689 struct cipher_req_ctx *req_ctx, in cc_setup_key_desc() 510 cc_setup_readiv_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, unsigned int ivsize, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_readiv_desc() argument 566 cc_setup_state_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, unsigned int ivsize, unsigned int nbytes, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_state_desc() argument 610 cc_setup_xex_state_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, unsigned int ivsize, unsigned int nbytes, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_xex_state_desc() argument 688 cc_setup_key_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, unsigned int nbytes, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_key_desc() argument 770 cc_setup_mlli_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, struct scatterlist *dst, struct scatterlist *src, unsigned int nbytes, void *areq, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_mlli_desc() argument 797 cc_setup_flow_desc(struct crypto_tfm *tfm, struct cipher_req_ctx *req_ctx, struct scatterlist *dst, struct scatterlist *src, unsigned int nbytes, struct cc_hw_desc desc[], unsigned int *seq_size) cc_setup_flow_desc() argument 863 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); cc_cipher_complete() local 882 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); cc_cipher_process() local 992 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); cc_cipher_encrypt() local 1001 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); cc_cipher_decrypt() local [all...] |
H A D | cc_aead.c | 876 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); in cc_proc_digest_desc() local 880 int direct = req_ctx->gen_ctx.op_type; in cc_proc_digest_desc() 887 set_dout_dlli(&desc[idx], req_ctx->icv_dma_addr, ctx->authsize, in cc_proc_digest_desc() 903 set_dout_dlli(&desc[idx], req_ctx->mac_buf_dma_addr, in cc_proc_digest_desc() 926 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); in cc_set_cipher_desc() local 927 unsigned int hw_iv_size = req_ctx->hw_iv_size; in cc_set_cipher_desc() 929 int direct = req_ctx->gen_ctx.op_type; in cc_set_cipher_desc() 935 set_din_type(&desc[idx], DMA_DLLI, req_ctx->gen_ctx.iv_dma_addr, in cc_set_cipher_desc() 968 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); in cc_proc_cipher() local 969 int direct = req_ctx in cc_proc_cipher() 1161 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); cc_mlli_to_sram() local 1215 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); cc_hmac_authenc() local 1268 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); cc_xcbc_authenc() local 1414 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); cc_ccm() local 1536 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); config_ccm_adata() local 1618 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); cc_set_ghash_desc() local 1696 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); cc_set_gctr_desc() local 1733 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); cc_proc_gcm_result() local 1795 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); cc_gcm() local 1833 struct aead_req_ctx *req_ctx = aead_request_ctx_dma(req); config_gcm_context() local [all...] |
H A D | cc_buffer_mgr.c | 343 struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx; in cc_unmap_cipher_request() local 345 if (req_ctx->gen_ctx.iv_dma_addr) { in cc_unmap_cipher_request() 347 &req_ctx->gen_ctx.iv_dma_addr, ivsize); in cc_unmap_cipher_request() 348 dma_unmap_single(dev, req_ctx->gen_ctx.iv_dma_addr, in cc_unmap_cipher_request() 352 if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI && in cc_unmap_cipher_request() 353 req_ctx->mlli_params.mlli_virt_addr) { in cc_unmap_cipher_request() 354 dma_pool_free(req_ctx->mlli_params.curr_pool, in cc_unmap_cipher_request() 355 req_ctx->mlli_params.mlli_virt_addr, in cc_unmap_cipher_request() 356 req_ctx->mlli_params.mlli_dma_addr); in cc_unmap_cipher_request() 360 dma_unmap_sg(dev, src, req_ctx in cc_unmap_cipher_request() 375 struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx; cc_map_cipher_request() local [all...] |
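The cc_buffer_mgr.c hits show the teardown half of the ccree per-request DMA bookkeeping: cc_unmap_cipher_request() releases the IV mapping and the MLLI pool buffer recorded in the request context. The hedged sketch below shows the matching map/unmap pairing for just the IV, with a driver-owned copy so the caller's buffer never has to be DMA-safe; the my_ names are hypothetical and error handling is reduced to the essentials.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>

struct my_cipher_req_ctx {
	u8 *iv;				/* driver-owned copy of the IV */
	dma_addr_t iv_dma_addr;		/* 0 when nothing is mapped */
};

static int my_map_request_iv(struct device *dev,
			     struct my_cipher_req_ctx *req_ctx,
			     const u8 *iv, unsigned int ivsize)
{
	req_ctx->iv = kmemdup(iv, ivsize, GFP_KERNEL);
	if (!req_ctx->iv)
		return -ENOMEM;

	req_ctx->iv_dma_addr = dma_map_single(dev, req_ctx->iv, ivsize,
					      DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, req_ctx->iv_dma_addr)) {
		kfree(req_ctx->iv);
		req_ctx->iv = NULL;
		req_ctx->iv_dma_addr = 0;
		return -ENOMEM;
	}
	return 0;
}

static void my_unmap_request_iv(struct device *dev,
				struct my_cipher_req_ctx *req_ctx,
				unsigned int ivsize)
{
	if (req_ctx->iv_dma_addr)
		dma_unmap_single(dev, req_ctx->iv_dma_addr, ivsize,
				 DMA_BIDIRECTIONAL);
	req_ctx->iv_dma_addr = 0;
	kfree(req_ctx->iv);
	req_ctx->iv = NULL;
}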
/kernel/linux/linux-6.6/arch/x86/crypto/ |
H A D | aria_gfni_avx512_glue.c | 77 struct aria_avx512_request_ctx *req_ctx = skcipher_request_ctx(req); in aria_avx512_ctr_encrypt() local 93 &req_ctx->keystream[0], in aria_avx512_ctr_encrypt() 104 &req_ctx->keystream[0], in aria_avx512_ctr_encrypt() 115 &req_ctx->keystream[0], in aria_avx512_ctr_encrypt() 124 memcpy(&req_ctx->keystream[0], walk.iv, in aria_avx512_ctr_encrypt() 128 aria_encrypt(ctx, &req_ctx->keystream[0], in aria_avx512_ctr_encrypt() 129 &req_ctx->keystream[0]); in aria_avx512_ctr_encrypt() 131 crypto_xor_cpy(dst, src, &req_ctx->keystream[0], in aria_avx512_ctr_encrypt() 139 memcpy(&req_ctx->keystream[0], walk.iv, in aria_avx512_ctr_encrypt() 143 aria_encrypt(ctx, &req_ctx in aria_avx512_ctr_encrypt() [all...] |
H A D | aria_aesni_avx_glue.c | 88 struct aria_avx_request_ctx *req_ctx = skcipher_request_ctx(req); in aria_avx_ctr_encrypt() local 104 &req_ctx->keystream[0], in aria_avx_ctr_encrypt() 113 memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE); in aria_avx_ctr_encrypt() 116 aria_encrypt(ctx, &req_ctx->keystream[0], in aria_avx_ctr_encrypt() 117 &req_ctx->keystream[0]); in aria_avx_ctr_encrypt() 119 crypto_xor_cpy(dst, src, &req_ctx->keystream[0], in aria_avx_ctr_encrypt() 127 memcpy(&req_ctx->keystream[0], walk.iv, in aria_avx_ctr_encrypt() 131 aria_encrypt(ctx, &req_ctx->keystream[0], in aria_avx_ctr_encrypt() 132 &req_ctx->keystream[0]); in aria_avx_ctr_encrypt() 134 crypto_xor_cpy(dst, src, &req_ctx in aria_avx_ctr_encrypt() [all...] |
H A D | aria_aesni_avx2_glue.c | 90 struct aria_avx2_request_ctx *req_ctx = skcipher_request_ctx(req); in aria_avx2_ctr_encrypt() local 106 &req_ctx->keystream[0], in aria_avx2_ctr_encrypt() 117 &req_ctx->keystream[0], in aria_avx2_ctr_encrypt() 126 memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE); in aria_avx2_ctr_encrypt() 129 aria_encrypt(ctx, &req_ctx->keystream[0], in aria_avx2_ctr_encrypt() 130 &req_ctx->keystream[0]); in aria_avx2_ctr_encrypt() 132 crypto_xor_cpy(dst, src, &req_ctx->keystream[0], in aria_avx2_ctr_encrypt() 140 memcpy(&req_ctx->keystream[0], walk.iv, in aria_avx2_ctr_encrypt() 144 aria_encrypt(ctx, &req_ctx->keystream[0], in aria_avx2_ctr_encrypt() 145 &req_ctx in aria_avx2_ctr_encrypt() [all...] |
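All three aria_*_glue.c entries use the request context for one thing: a keystream buffer for the CTR-mode tail. Once fewer than a full block of data remains, the counter block is encrypted into req_ctx->keystream, XORed onto the data with crypto_xor_cpy(), and the IV is incremented. A hedged kernel-style sketch of that tail is below; the block-cipher callback and my_ names stand in for the driver's own primitives, so this is the pattern rather than the exact upstream code.

#include <crypto/algapi.h>	/* crypto_xor_cpy(), crypto_inc() */
#include <linux/string.h>

#define MY_BLOCK_SIZE 16

struct my_ctr_req_ctx {
	u8 keystream[MY_BLOCK_SIZE];
};

static void my_ctr_tail(struct my_ctr_req_ctx *req_ctx,
			void (*encrypt_block)(const void *key, u8 *out,
					      const u8 *in),
			const void *key, u8 *dst, const u8 *src,
			unsigned int nbytes, u8 *iv)
{
	if (!nbytes)
		return;

	/* E_K(counter) -> keystream, then dst = src ^ keystream */
	memcpy(req_ctx->keystream, iv, MY_BLOCK_SIZE);
	encrypt_block(key, req_ctx->keystream, req_ctx->keystream);
	crypto_xor_cpy(dst, src, req_ctx->keystream, nbytes);

	/* advance the big-endian counter for any follow-up request */
	crypto_inc(iv, MY_BLOCK_SIZE);
}

Keeping the keystream in the request context (rather than on the stack) matters because the buffer may be handed to DMA-incapable but cache-sensitive SIMD paths and because requests can be processed asynchronously.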
/kernel/linux/linux-5.10/drivers/crypto/caam/ |
H A D | caampkc.c | 46 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); in rsa_io_unmap() local 49 dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE); in rsa_io_unmap() 120 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); in rsa_pub_done() local 129 edesc = req_ctx->edesc; in rsa_pub_done() 154 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); in rsa_priv_f_done() local 162 edesc = req_ctx->edesc; in rsa_priv_f_done() 248 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); in rsa_edesc_alloc() local 270 req_ctx->fixup_src = scatterwalk_ffwd(req_ctx->src, req->src, in rsa_edesc_alloc() 272 req_ctx in rsa_edesc_alloc() 374 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); akcipher_do_one_req() local 399 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); set_rsa_pub_pdb() local 470 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); set_rsa_priv_f1_pdb() local 535 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); set_rsa_priv_f2_pdb() local 624 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); set_rsa_priv_f3_pdb() local 667 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); akcipher_enqueue_req() local [all...] |
H A D | caamalg_qi2.c | 350 struct caam_request *req_ctx = aead_request_ctx(req); in aead_edesc_alloc() local 351 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1]; in aead_edesc_alloc() 352 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0]; in aead_edesc_alloc() 547 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt)); in aead_edesc_alloc() 1110 struct caam_request *req_ctx = skcipher_request_ctx(req); in skcipher_edesc_alloc() local 1111 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1]; in skcipher_edesc_alloc() 1112 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0]; in skcipher_edesc_alloc() 1236 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx in skcipher_edesc_alloc() 1284 struct caam_request *req_ctx = to_caam_req(areq); aead_encrypt_done() local 1305 struct caam_request *req_ctx = to_caam_req(areq); aead_decrypt_done() local 1391 struct caam_request *req_ctx = to_caam_req(areq); skcipher_encrypt_done() local 1429 struct caam_request *req_ctx = to_caam_req(areq); skcipher_decrypt_done() local 3210 struct caam_request *req_ctx; hash_digest_key() local 3521 struct caam_request *req_ctx = &state->caam_req; ahash_update_ctx() local 3640 struct caam_request *req_ctx = &state->caam_req; ahash_final_ctx() local 3711 struct caam_request *req_ctx = &state->caam_req; ahash_finup_ctx() local 3805 struct caam_request *req_ctx = &state->caam_req; ahash_digest() local 3900 struct caam_request *req_ctx = &state->caam_req; ahash_final_no_ctx() local 3973 struct caam_request *req_ctx = &state->caam_req; ahash_update_no_ctx() local 4094 struct caam_request *req_ctx = &state->caam_req; ahash_finup_no_ctx() local 4190 struct caam_request *req_ctx = &state->caam_req; ahash_update_first() local [all...] |
/kernel/linux/linux-6.6/drivers/crypto/caam/ |
H A D | caampkc.c | 52 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); in rsa_io_unmap() local 55 dma_unmap_sg(dev, req_ctx->fixup_src, edesc->src_nents, DMA_TO_DEVICE); in rsa_io_unmap() 126 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); in rsa_pub_done() local 135 edesc = req_ctx->edesc; in rsa_pub_done() 160 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); in rsa_priv_f_done() local 168 edesc = req_ctx->edesc; in rsa_priv_f_done() 255 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); in rsa_edesc_alloc() local 277 req_ctx->fixup_src = scatterwalk_ffwd(req_ctx->src, req->src, in rsa_edesc_alloc() 279 req_ctx in rsa_edesc_alloc() 380 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); akcipher_do_one_req() local 408 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); set_rsa_pub_pdb() local 479 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); set_rsa_priv_f1_pdb() local 544 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); set_rsa_priv_f2_pdb() local 633 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); set_rsa_priv_f3_pdb() local 676 struct caam_rsa_req_ctx *req_ctx = akcipher_request_ctx(req); akcipher_enqueue_req() local [all...] |
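The caampkc.c hits show the "fixup_src" idea: RSA input may start with leading zero bytes that the accelerator should not see, so the driver fast-forwards the source scatterlist past them with scatterwalk_ffwd() and keeps the resulting head entries in the request context so the completion path (rsa_io_unmap()) can unmap the very same list. The hedged sketch below assumes the number of bytes to skip has already been counted elsewhere; the my_ names are hypothetical.

#include <crypto/scatterwalk.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

struct my_rsa_req_ctx {
	struct scatterlist src[2];	/* backing storage for the ffwd head */
	struct scatterlist *fixup_src;	/* what actually gets DMA-mapped */
	int src_nents;
};

static int my_map_rsa_src(struct device *dev, struct my_rsa_req_ctx *req_ctx,
			  struct scatterlist *req_src, unsigned int src_len,
			  unsigned int nskip)
{
	req_ctx->fixup_src = scatterwalk_ffwd(req_ctx->src, req_src, nskip);
	req_ctx->src_nents = sg_nents_for_len(req_ctx->fixup_src,
					      src_len - nskip);
	if (req_ctx->src_nents < 0)
		return req_ctx->src_nents;

	if (!dma_map_sg(dev, req_ctx->fixup_src, req_ctx->src_nents,
			DMA_TO_DEVICE))
		return -ENOMEM;
	return 0;
}

static void my_unmap_rsa_src(struct device *dev,
			     struct my_rsa_req_ctx *req_ctx)
{
	dma_unmap_sg(dev, req_ctx->fixup_src, req_ctx->src_nents,
		     DMA_TO_DEVICE);
}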
H A D | caamalg_qi2.c | 355 struct caam_request *req_ctx = aead_request_ctx_dma(req); in aead_edesc_alloc() local 356 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1]; in aead_edesc_alloc() 357 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0]; in aead_edesc_alloc() 552 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt)); in aead_edesc_alloc() 1115 struct caam_request *req_ctx = skcipher_request_ctx_dma(req); in skcipher_edesc_alloc() local 1116 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1]; in skcipher_edesc_alloc() 1117 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0]; in skcipher_edesc_alloc() 1241 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx in skcipher_edesc_alloc() 1289 struct caam_request *req_ctx = to_caam_req(areq); aead_encrypt_done() local 1310 struct caam_request *req_ctx = to_caam_req(areq); aead_decrypt_done() local 1396 struct caam_request *req_ctx = to_caam_req(areq); skcipher_encrypt_done() local 1434 struct caam_request *req_ctx = to_caam_req(areq); skcipher_decrypt_done() local 3217 struct caam_request *req_ctx; hash_digest_key() local 3534 struct caam_request *req_ctx = &state->caam_req; ahash_update_ctx() local 3653 struct caam_request *req_ctx = &state->caam_req; ahash_final_ctx() local 3724 struct caam_request *req_ctx = &state->caam_req; ahash_finup_ctx() local 3818 struct caam_request *req_ctx = &state->caam_req; ahash_digest() local 3913 struct caam_request *req_ctx = &state->caam_req; ahash_final_no_ctx() local 3986 struct caam_request *req_ctx = &state->caam_req; ahash_update_no_ctx() local 4107 struct caam_request *req_ctx = &state->caam_req; ahash_finup_no_ctx() local 4203 struct caam_request *req_ctx = &state->caam_req; ahash_update_first() local [all...] |
/kernel/linux/linux-5.10/drivers/crypto/chelsio/ |
H A D | chcr_algo.c | 1569 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); in create_hash_wr() local 1583 unsigned int rx_channel_id = req_ctx->rxqidx / ctx->rxq_perchan; in create_hash_wr() 1587 req_ctx->hctx_wr.imm = (transhdr_len + param->bfr_len + in create_hash_wr() 1589 nents = sg_nents_xlen(req_ctx->hctx_wr.srcsg, param->sg_len, in create_hash_wr() 1590 CHCR_SRC_SG_SIZE, req_ctx->hctx_wr.src_ofst); in create_hash_wr() 1592 transhdr_len += req_ctx->hctx_wr.imm ? roundup(param->bfr_len + in create_hash_wr() 1617 memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash, in create_hash_wr() 1635 req_ctx->hctx_wr.dma_addr = in create_hash_wr() 1636 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr, in create_hash_wr() 1639 req_ctx in create_hash_wr() 1664 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_ahash_update() local 1766 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_ahash_final() local 1836 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_ahash_finup() local 1931 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_ahash_digest() local 2172 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); chcr_ahash_export() local 2186 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); chcr_ahash_import() local 2299 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); chcr_sha_init() local 2321 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); chcr_hmac_init() local 2809 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_hash_dma_map() local 2825 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_hash_dma_unmap() local [all...] |
/kernel/linux/linux-6.6/drivers/crypto/chelsio/ |
H A D | chcr_algo.c | 1563 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); in create_hash_wr() local 1577 unsigned int rx_channel_id = req_ctx->rxqidx / ctx->rxq_perchan; in create_hash_wr() 1581 req_ctx->hctx_wr.imm = (transhdr_len + param->bfr_len + in create_hash_wr() 1583 nents = sg_nents_xlen(req_ctx->hctx_wr.srcsg, param->sg_len, in create_hash_wr() 1584 CHCR_SRC_SG_SIZE, req_ctx->hctx_wr.src_ofst); in create_hash_wr() 1586 transhdr_len += req_ctx->hctx_wr.imm ? roundup(param->bfr_len + in create_hash_wr() 1611 memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash, in create_hash_wr() 1629 req_ctx->hctx_wr.dma_addr = in create_hash_wr() 1630 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr, in create_hash_wr() 1633 req_ctx in create_hash_wr() 1658 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_ahash_update() local 1760 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_ahash_final() local 1830 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_ahash_finup() local 1925 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_ahash_digest() local 2166 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); chcr_ahash_export() local 2180 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); chcr_ahash_import() local 2294 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); chcr_sha_init() local 2316 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); chcr_hmac_init() local 2804 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_hash_dma_map() local 2820 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req); chcr_hash_dma_unmap() local [all...] |
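Among the chcr_algo.c hits are chcr_ahash_export()/chcr_ahash_import(), which exist because the ahash API lets a partially hashed stream be suspended and resumed: export() serializes the resumable part of the request context (byte count, partial digest, buffered tail) into the caller's buffer, and import() restores it. The hedged sketch below uses a hypothetical my_ state layout; real drivers copy only the resumable subset of their request context, not DMA addresses or other transient fields.

#include <crypto/internal/hash.h>
#include <linux/string.h>

struct my_hash_export_state {
	u64 byte_count;
	u8 partial_hash[64];	/* large enough for SHA-512 */
	u8 buffer[128];		/* unprocessed tail, less than one block */
	u32 buffer_len;
};

static int my_ahash_export(struct ahash_request *areq, void *out)
{
	struct my_hash_export_state *state = ahash_request_ctx(areq);

	memcpy(out, state, sizeof(*state));
	return 0;
}

static int my_ahash_import(struct ahash_request *areq, const void *in)
{
	struct my_hash_export_state *state = ahash_request_ctx(areq);

	memcpy(state, in, sizeof(*state));
	return 0;
}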
/kernel/linux/linux-5.10/drivers/crypto/ux500/hash/ |
H A D | hash_core.c | 553 struct hash_req_ctx *req_ctx = ahash_request_ctx(req); in ux500_hash_init() local 558 memset(&req_ctx->state, 0, sizeof(struct hash_state)); in ux500_hash_init() 559 req_ctx->updated = 0; in ux500_hash_init() 562 req_ctx->dma_mode = false; /* Don't use DMA */ in ux500_hash_init() 569 req_ctx->dma_mode = true; in ux500_hash_init() 571 req_ctx->dma_mode = false; in ux500_hash_init() 764 struct hash_ctx *ctx, struct hash_req_ctx *req_ctx, in hash_process_data() 780 if (req_ctx->updated) { in hash_process_data() 783 memmove(req_ctx->state.buffer, in hash_process_data() 800 req_ctx in hash_process_data() 763 hash_process_data(struct hash_device_data *device_data, struct hash_ctx *ctx, struct hash_req_ctx *req_ctx, int msg_length, u8 *data_buffer, u8 *buffer, u8 *index) hash_process_data() argument 858 struct hash_req_ctx *req_ctx = ahash_request_ctx(req); hash_dma_final() local 965 struct hash_req_ctx *req_ctx = ahash_request_ctx(req); hash_hw_final() local 1074 struct hash_req_ctx *req_ctx = ahash_request_ctx(req); hash_hw_update() local 1304 struct hash_req_ctx *req_ctx = ahash_request_ctx(req); ahash_update() local 1324 struct hash_req_ctx *req_ctx = ahash_request_ctx(req); ahash_final() local [all...] |
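The ux500 hash_core.c hits show two init-time decisions kept in the request context: the "updated" flag (cleared so the first update programs fresh hardware state) and the dma_mode flag (DMA is only worth using for sufficiently large requests). A generic, hedged sketch of that init step follows; MY_DMA_MIN_SIZE and the my_ names are hypothetical, and the real driver applies its own platform-specific criteria.

#include <crypto/internal/hash.h>
#include <linux/string.h>

#define MY_DMA_MIN_SIZE 1024	/* assumed cut-off, not the driver's value */

struct my_hash_req_ctx {
	bool updated;		/* has hardware state been initialised yet? */
	bool dma_mode;		/* feed data by DMA rather than by CPU */
	u8 state[128];		/* saved hardware digest state */
};

static int my_hash_init(struct ahash_request *req)
{
	struct my_hash_req_ctx *req_ctx = ahash_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));
	req_ctx->updated = false;

	/* small requests are cheaper to push through the CPU data register */
	req_ctx->dma_mode = req->nbytes >= MY_DMA_MIN_SIZE;
	return 0;
}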
/kernel/linux/linux-6.6/drivers/crypto/intel/ixp4xx/ |
H A D | ixp4xx_crypto.c | 354 struct aead_ctx *req_ctx = aead_request_ctx(req); in finish_scattered_hmac() local 359 if (req_ctx->encrypt) { in finish_scattered_hmac() 360 scatterwalk_map_and_copy(req_ctx->hmac_virt, req->dst, in finish_scattered_hmac() 363 dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes); in finish_scattered_hmac() 380 struct aead_ctx *req_ctx = aead_request_ctx(req); in one_packet() local 382 free_buf_chain(dev, req_ctx->src, crypt->src_buf); in one_packet() 383 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf); in one_packet() 384 if (req_ctx->hmac_virt) in one_packet() 392 struct ablk_ctx *req_ctx = skcipher_request_ctx(req); in one_packet() local 399 if (req_ctx in one_packet() 982 struct ablk_ctx *req_ctx = skcipher_request_ctx(req); ablk_perform() local 1099 struct aead_ctx *req_ctx = aead_request_ctx(req); aead_perform() local [all...] |
/kernel/linux/linux-6.6/drivers/mtd/nand/ |
H A D | ecc-mxic.c | 107 struct nand_ecc_req_tweak_ctx req_ctx; member 309 ctx->req_ctx.oob_buffer_size = nanddev_per_page_oobsize(nand) + in mxic_ecc_init_ctx() 311 ret = nand_ecc_init_req_tweaking(&ctx->req_ctx, nand); in mxic_ecc_init_ctx() 355 nand_ecc_cleanup_req_tweaking(&ctx->req_ctx); in mxic_ecc_init_ctx() 417 nand_ecc_cleanup_req_tweaking(&ctx->req_ctx); in mxic_ecc_cleanup_ctx() 563 nand_ecc_tweak_req(&ctx->req_ctx, req); in mxic_ecc_prepare_io_req_external() 623 nand_ecc_restore_req(&ctx->req_ctx, req); in mxic_ecc_finish_io_req_external() 654 nand_ecc_restore_req(&ctx->req_ctx, req); in mxic_ecc_finish_io_req_external() 662 nand_ecc_restore_req(&ctx->req_ctx, req); in mxic_ecc_finish_io_req_external() 678 nand_ecc_tweak_req(&ctx->req_ctx, re in mxic_ecc_prepare_io_req_pipelined() [all...] |
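The ecc-mxic.c hits trace the lifecycle of the nand_ecc_req_tweak_ctx embedded in the engine's private context: it is sized and initialised once, each page I/O request is rewritten on the way in (so the ECC engine always sees full page and OOB bounce buffers) and restored on the way out, and the bounce buffers are released at teardown. The hedged sketch below shows that pairing with hypothetical my_ wrappers; it assumes, as the hit suggests, that any buffer size left at zero is filled in from the device geometry by the init helper.

#include <linux/mtd/nand.h>

struct my_ecc_engine_ctx {
	struct nand_ecc_req_tweak_ctx req_ctx;
};

static int my_ecc_init(struct nand_device *nand,
		       struct my_ecc_engine_ctx *ctx)
{
	/* size the OOB bounce buffer explicitly; unset sizes use defaults */
	ctx->req_ctx.oob_buffer_size = nanddev_per_page_oobsize(nand);
	return nand_ecc_init_req_tweaking(&ctx->req_ctx, nand);
}

static void my_ecc_prepare_io_req(struct my_ecc_engine_ctx *ctx,
				  struct nand_page_io_req *req)
{
	nand_ecc_tweak_req(&ctx->req_ctx, req);	/* swap in bounce buffers */
}

static void my_ecc_finish_io_req(struct my_ecc_engine_ctx *ctx,
				 struct nand_page_io_req *req)
{
	nand_ecc_restore_req(&ctx->req_ctx, req); /* restore caller's request */
}

static void my_ecc_cleanup(struct my_ecc_engine_ctx *ctx)
{
	nand_ecc_cleanup_req_tweaking(&ctx->req_ctx);
}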