Lines Matching refs:crypt
35 /* hash: cfgword + 2 * digestlen; crypt: keylen + cfgword */
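(The identifiers throughout this listing match the mainline Intel IXP4xx NPE crypto driver, drivers/crypto/ixp4xx_crypto.c; the annotations and sketches below assume that context and are reconstructions, not part of the matched source.)

The comment at line 35 records how the per-direction NPE context buffer is sized: a hash context holds one 32-bit configuration word followed by the two precomputed HMAC chain variables, while a cipher context holds the key followed by its cfgword. A stand-alone sanity check of that arithmetic; every size below is an assumption based on the algorithms the driver supports (SHA-1 as the widest digest, AES-256 as the longest key), not a value taken from this listing:

#include <assert.h>

#define CFGWORD_LEN  4   /* assumed: one 32-bit NPE config word */
#define DIGEST_LEN   20  /* assumed: SHA-1 digest */
#define KEYLEN_MAX   32  /* assumed: AES-256 key */
#define NPE_CTX_LEN  80  /* assumed value from the mainline driver */

int main(void)
{
        assert(CFGWORD_LEN + 2 * DIGEST_LEN <= NPE_CTX_LEN); /* hash ctx */
        assert(KEYLEN_MAX + CFGWORD_LEN <= NPE_CTX_LEN);     /* crypt ctx */
        return 0;
}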
344 static void finish_scattered_hmac(struct crypt_ctl *crypt)
346 struct aead_request *req = crypt->data.aead_req;
356 dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
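Lines 344-356 fall inside finish_scattered_hmac(). When the authentication tag of an AEAD request cannot be written contiguously into the destination scatterlist, the driver lets the NPE deposit it in a small bounce buffer (req_ctx->hmac_virt) and copies it out on completion; note that crypt->icv_rev_aes doubles here as the DMA handle of that bounce buffer. A reconstruction of the whole function, hedged: only the lines matched above are verbatim, the rest follows the mainline driver:

static void finish_scattered_hmac(struct crypt_ctl *crypt)
{
        struct aead_request *req = crypt->data.aead_req;
        struct aead_ctx *req_ctx = aead_request_ctx(req);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        int authsize = crypto_aead_authsize(tfm);
        int decryptlen = req->assoclen + req->cryptlen - authsize;

        /* on encrypt, the freshly computed ICV must be copied out to
         * the scatterlist position where the tag belongs */
        if (req_ctx->encrypt)
                scatterwalk_map_and_copy(req_ctx->hmac_virt, req->dst,
                                         decryptlen, authsize, 1);
        /* icv_rev_aes holds the dma_addr_t of the bounce buffer here */
        dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
}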
362 struct crypt_ctl *crypt;
368 crypt = crypt_phys2virt(phys);
370 switch (crypt->ctl_flags & CTL_FLAG_MASK) {
372 struct aead_request *req = crypt->data.aead_req;
375 free_buf_chain(dev, req_ctx->src, crypt->src_buf);
376 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
378 finish_scattered_hmac(crypt);
384 struct skcipher_request *req = crypt->data.ablk_req;
402 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
404 free_buf_chain(dev, req_ctx->src, crypt->src_buf);
409 ctx = crypto_tfm_ctx(crypt->data.tfm);
410 dma_pool_free(ctx_pool, crypt->regist_ptr,
411 crypt->regist_buf->phys_addr);
412 dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf);
417 ctx = crypto_tfm_ctx(crypt->data.tfm);
425 crypt->ctl_flags = CTL_FLAG_UNUSED;
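Lines 362-425 come from the completion handler that drains the receive queue. The physical address popped from the queue carries NPE status in its low bits and is mapped back to the descriptor with crypt_phys2virt(); crypt->ctl_flags then selects the teardown path, and line 425 finally returns the descriptor to the pool. A trimmed skeleton, hedged against the mainline driver (matched lines are verbatim; the error-bit handling and completion calls are reconstructed):

static void one_packet(dma_addr_t phys)
{
        struct device *dev = &pdev->dev;        /* assumed driver-global pdev */
        struct crypt_ctl *crypt;
        struct ixp_ctx *ctx;
        int failed;

        failed = phys & 0x1 ? -EBADMSG : 0;     /* assumed: NPE error bit */
        phys &= ~0x3;                           /* strip status bits */
        crypt = crypt_phys2virt(phys);

        switch (crypt->ctl_flags & CTL_FLAG_MASK) {
        case CTL_FLAG_PERFORM_AEAD: {
                struct aead_request *req = crypt->data.aead_req;
                struct aead_ctx *req_ctx = aead_request_ctx(req);

                free_buf_chain(dev, req_ctx->src, crypt->src_buf);
                free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
                if (req_ctx->hmac_virt)         /* scattered ICV? */
                        finish_scattered_hmac(crypt);
                aead_request_complete(req, failed);
                break;
        }
        case CTL_FLAG_PERFORM_ABLK: {
                struct skcipher_request *req = crypt->data.ablk_req;
                struct ablk_ctx *req_ctx = skcipher_request_ctx(req);

                if (req_ctx->dst)       /* only set for out-of-place ops */
                        free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
                free_buf_chain(dev, req_ctx->src, crypt->src_buf);
                skcipher_request_complete(req, failed);
                break;
        }
        case CTL_FLAG_GEN_ICV:          /* HMAC pad hashed, see line 663 */
                ctx = crypto_tfm_ctx(crypt->data.tfm);
                dma_pool_free(ctx_pool, crypt->regist_ptr,
                              crypt->regist_buf->phys_addr);
                dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf);
                if (atomic_dec_and_test(&ctx->configuring))
                        complete(&ctx->completion);
                break;
        case CTL_FLAG_GEN_REVAES:       /* reverse AES key ready, line 743 */
                ctx = crypto_tfm_ctx(crypt->data.tfm);
                /* assumed: drop the encrypt bit set by gen_rev_aes_key() */
                *(u32 *)ctx->decrypt.npe_ctx &= ~cpu_to_be32(CIPH_ENCR);
                if (atomic_dec_and_test(&ctx->configuring))
                        complete(&ctx->completion);
                break;
        default:
                BUG();
        }
        crypt->ctl_flags = CTL_FLAG_UNUSED;     /* descriptor back to the pool */
}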
624 struct crypt_ctl *crypt;
639 crypt = get_crypt_desc_emerg();
640 if (!crypt) {
652 crypt->data.tfm = tfm;
653 crypt->regist_ptr = pad;
654 crypt->regist_buf = buf;
656 crypt->auth_offs = 0;
657 crypt->auth_len = HMAC_PAD_BLOCKLEN;
658 crypt->crypto_ctx = ctx_addr;
659 crypt->src_buf = buf_phys;
660 crypt->icv_rev_aes = target;
661 crypt->mode = NPE_OP_HASH_GEN_ICV;
662 crypt->init_len = init_len;
663 crypt->ctl_flags |= CTL_FLAG_GEN_ICV;
671 qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
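Lines 624-671 are from register_chain_var(), which offloads the HMAC chain-variable precomputation to the NPE: the key is padded to the hash block length, XORed with the ipad or opad byte, and hashed with NPE_OP_HASH_GEN_ICV so the intermediate digest lands at target inside the NPE context (icv_rev_aes is overloaded once more, this time as a write target, line 660). The pad preparation that precedes the matched lines, reconstructed per RFC 2104 and hedged accordingly:

        /* pad and buf were allocated from ctx_pool and buffer_pool
         * earlier in the function (allocations not shown in the listing) */
        memcpy(pad, key, key_len);
        memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len);
        for (i = 0; i < HMAC_PAD_BLOCKLEN; i++)
                pad[i] ^= xpad; /* 0x36 for the inner pad, 0x5c for the outer */

Line 663 then tags the descriptor CTL_FLAG_GEN_ICV, so the completion handler (lines 409-412) knows to free pad and buf rather than complete a crypto request.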
725 struct crypt_ctl *crypt;
729 crypt = get_crypt_desc_emerg();
730 if (!crypt) {
735 crypt->data.tfm = tfm;
736 crypt->crypt_offs = 0;
737 crypt->crypt_len = AES_BLOCK128;
738 crypt->src_buf = 0;
739 crypt->crypto_ctx = dir->npe_ctx_phys;
740 crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32);
741 crypt->mode = NPE_OP_ENC_GEN_KEY;
742 crypt->init_len = dir->npe_ctx_idx;
743 crypt->ctl_flags |= CTL_FLAG_GEN_REVAES;
746 qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
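Lines 725-746 are gen_rev_aes_key(). AES decryption needs the reverse key schedule, so the NPE derives it by running one dummy 16-byte encryption (crypt_len = AES_BLOCK128, src_buf = 0, i.e. no payload is fetched) with NPE_OP_ENC_GEN_KEY, writing the result immediately after the 32-bit config word of the decrypt context (npe_ctx_phys + sizeof(u32), line 740). Before queueing, the decrypt context has to masquerade as an encrypt context; a hedged reconstruction of that step, assuming the decrypt direction from the mainline driver:

        struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
        struct ix_sa_dir *dir = &ctx->decrypt;

        /* assumed: run the key-generation pass in encrypt mode; the
         * CTL_FLAG_GEN_REVAES completion path clears the bit again */
        *(u32 *)dir->npe_ctx |= cpu_to_be32(CIPH_ENCR);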
889 struct crypt_ctl *crypt;
907 crypt = get_crypt_desc();
908 if (!crypt)
911 crypt->data.ablk_req = req;
912 crypt->crypto_ctx = dir->npe_ctx_phys;
913 crypt->mode = dir->npe_mode;
914 crypt->init_len = dir->npe_ctx_idx;
916 crypt->crypt_offs = 0;
917 crypt->crypt_len = nbytes;
920 memcpy(crypt->iv, req->iv, ivsize);
927 crypt->mode |= NPE_OP_NOT_IN_PLACE;
936 crypt->dst_buf = dst_hook.phys_next;
946 crypt->src_buf = src_hook.phys_next;
947 crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK;
948 qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
953 free_buf_chain(dev, req_ctx->src, crypt->src_buf);
956 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
958 crypt->ctl_flags = CTL_FLAG_UNUSED;
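Lines 889-958 cover ablk_perform(), the skcipher hot path: fill a descriptor with the per-direction NPE context and inline IV (line 920), translate the scatterlists into NPE buffer chains, and push the descriptor's physical address onto SEND_QID (line 948); lines 953-958 are the out-of-memory unwind. A hedged sketch of the buffer-chaining step (matched lines 927, 936, and 946 appear within it; chainup_buffers() and the hook bookkeeping are assumptions carried over from the mainline driver):

        enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
        struct buffer_desc src_hook;
        gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
                      GFP_KERNEL : GFP_ATOMIC;

        req_ctx->dst = NULL;
        if (req->src != req->dst) {
                struct buffer_desc dst_hook;

                /* out of place: separate dst chain, src becomes read-only */
                crypt->mode |= NPE_OP_NOT_IN_PLACE;
                if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook,
                                     flags, DMA_FROM_DEVICE))
                        goto free_buf_dest;     /* the line-956 unwind */
                src_direction = DMA_TO_DEVICE;
                req_ctx->dst = dst_hook.next;
                crypt->dst_buf = dst_hook.phys_next;
        }
        req_ctx->src = NULL;
        if (!chainup_buffers(dev, req->src, nbytes, &src_hook,
                             flags, src_direction))
                goto free_buf_src;              /* the line-953 unwind */
        req_ctx->src = src_hook.next;
        crypt->src_buf = src_hook.phys_next;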
1002 struct crypt_ctl *crypt;
1026 crypt = get_crypt_desc();
1027 if (!crypt)
1030 crypt->data.aead_req = req;
1031 crypt->crypto_ctx = dir->npe_ctx_phys;
1032 crypt->mode = dir->npe_mode;
1033 crypt->init_len = dir->npe_ctx_idx;
1035 crypt->crypt_offs = cryptoffset;
1036 crypt->crypt_len = eff_cryptlen;
1038 crypt->auth_offs = 0;
1039 crypt->auth_len = req->assoclen + cryptlen;
1041 memcpy(crypt->iv, req->iv, ivsize);
1043 buf = chainup_buffers(dev, req->src, crypt->auth_len,
1046 crypt->src_buf = src_hook.phys_next;
1052 crypt->icv_rev_aes = buf->phys_addr +
1060 crypt->mode |= NPE_OP_NOT_IN_PLACE;
1063 buf = chainup_buffers(dev, req->dst, crypt->auth_len,
1066 crypt->dst_buf = dst_hook.phys_next;
1074 crypt->icv_rev_aes = buf->phys_addr +
1083 &crypt->icv_rev_aes);
1095 crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD;
1096 qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
1101 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
1103 free_buf_chain(dev, req_ctx->src, crypt->src_buf);
1104 crypt->ctl_flags = CTL_FLAG_UNUSED;
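Lines 1002-1104 are aead_perform(), which programs both halves of an authenticated-encryption operation into one descriptor: auth_offs/auth_len span the associated data plus payload (line 1039), while crypt_offs/crypt_len skip the associated data and, on decrypt, exclude the trailing ICV. icv_rev_aes again does double duty as the ICV address: it points into the last chained buffer when the tag is contiguous (lines 1052 and 1074), otherwise at a bounce buffer allocated at line 1083 that finish_scattered_hmac() drains on completion. A hedged sketch of that tag-placement decision (variable names such as lastlen, encrypt, and buf are reconstructed from the mainline driver):

        if (unlikely(lastlen < authsize)) {
                /* ICV straddles sg entries: bounce it through a pool
                 * buffer; crypt->icv_rev_aes receives its DMA address */
                req_ctx->hmac_virt = dma_pool_alloc(buffer_pool, flags,
                                                    &crypt->icv_rev_aes);
                if (unlikely(!req_ctx->hmac_virt))
                        goto free_buf_dst;      /* the line-1101 unwind */
                if (!encrypt)   /* stash the received ICV for verification */
                        scatterwalk_map_and_copy(req_ctx->hmac_virt,
                                                 req->src, cryptlen,
                                                 authsize, 0);
                req_ctx->encrypt = encrypt;
        } else {
                /* contiguous: the NPE reads/writes the tag in place */
                crypt->icv_rev_aes = buf->phys_addr +
                                     buf->buf_len - authsize;
                req_ctx->hmac_virt = NULL;
        }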