Lines matching refs:crypt (source line numbers at left)

39 /* hash: cfgword + 2 * digestlen; crypt: keylen + cfgword */
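
This size comment covers both uses of the per-direction NPE context buffer: for hashing it must hold a 32-bit config word plus two digest-sized chain variables (the precomputed inner and outer HMAC states); for ciphering, the key followed by a config word. A minimal sketch of the arithmetic, with SHA-1 and AES-256 assumed as the widest digest and longest key (neither value appears in the fragments here):

    /* Worst-case context sizes implied by the comment (lengths assumed): */
    #define MAX_DIGEST_LEN  20                        /* e.g. SHA-1 */
    #define MAX_KEYLEN      32                        /* e.g. AES-256 */
    #define HASH_CTX_SIZE   (4 + 2 * MAX_DIGEST_LEN)  /* cfgword + 2 * digestlen = 44 */
    #define CRYPT_CTX_SIZE  (MAX_KEYLEN + 4)          /* keylen + cfgword = 36 */
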
351 static void finish_scattered_hmac(struct crypt_ctl *crypt)
353 struct aead_request *req = crypt->data.aead_req;
363 dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
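
finish_scattered_hmac() cleans up after a request whose authentication tag could not be written straight into the destination scatterlist: the NPE hashed it into a bounce buffer (req_ctx->hmac_virt) instead, and that buffer's DMA handle travels in crypt->icv_rev_aes so it can be freed here. A sketch of the whole body, assuming the standard scatterwalk helper for the copy-back and an aead_ctx request context (type name assumed):

    static void finish_scattered_hmac(struct crypt_ctl *crypt)
    {
        struct aead_request *req = crypt->data.aead_req;
        struct aead_ctx *req_ctx = aead_request_ctx(req);  /* ctx type assumed */
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        int authsize = crypto_aead_authsize(tfm);
        int decryptlen = req->assoclen + req->cryptlen - authsize;

        /* On encryption, copy the tag from the bounce buffer into the
         * tail of the destination scatterlist (assumed step): */
        if (req_ctx->encrypt)
            scatterwalk_map_and_copy(req_ctx->hmac_virt, req->dst,
                                     decryptlen, authsize, 1);
        dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
    }
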
369 struct crypt_ctl *crypt;
375 crypt = crypt_phys2virt(phys);
377 switch (crypt->ctl_flags & CTL_FLAG_MASK) {
379 struct aead_request *req = crypt->data.aead_req;
382 free_buf_chain(dev, req_ctx->src, crypt->src_buf);
383 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
385 finish_scattered_hmac(crypt);
391 struct skcipher_request *req = crypt->data.ablk_req;
409 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
411 free_buf_chain(dev, req_ctx->src, crypt->src_buf);
416 ctx = crypto_tfm_ctx(crypt->data.tfm);
417 dma_pool_free(ctx_pool, crypt->regist_ptr,
418 crypt->regist_buf->phys_addr);
419 dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf);
424 ctx = crypto_tfm_ctx(crypt->data.tfm);
432 crypt->ctl_flags = CTL_FLAG_UNUSED;
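
Lines 369-432 are the completion handler: a finished descriptor's physical address is popped from the receive queue, mapped back to a virtual pointer with crypt_phys2virt(), and dispatched on its ctl_flags; every path ends by resetting ctl_flags to CTL_FLAG_UNUSED so the descriptor can be reused. A condensed sketch of the dispatch (per-case bodies and request completion elided):

    static void one_packet(dma_addr_t phys)
    {
        struct crypt_ctl *crypt = crypt_phys2virt(phys);

        switch (crypt->ctl_flags & CTL_FLAG_MASK) {
        case CTL_FLAG_PERFORM_AEAD:  /* free src/dst chains, fix up a scattered ICV */
            break;
        case CTL_FLAG_PERFORM_ABLK:  /* free chains, complete the skcipher request */
            break;
        case CTL_FLAG_GEN_ICV:       /* free the HMAC pad and its buffer descriptor */
            break;
        case CTL_FLAG_GEN_REVAES:    /* reverse AES key now sits in the context */
            break;
        }
        crypt->ctl_flags = CTL_FLAG_UNUSED;  /* descriptor back to the ring */
    }
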
685 struct crypt_ctl *crypt;
700 crypt = get_crypt_desc_emerg();
701 if (!crypt) {
712 crypt->data.tfm = tfm;
713 crypt->regist_ptr = pad;
714 crypt->regist_buf = buf;
716 crypt->auth_offs = 0;
717 crypt->auth_len = HMAC_PAD_BLOCKLEN;
718 crypt->crypto_ctx = ctx_addr;
719 crypt->src_buf = buf_phys;
720 crypt->icv_rev_aes = target;
721 crypt->mode = NPE_OP_HASH_GEN_ICV;
722 crypt->init_len = init_len;
723 crypt->ctl_flags |= CTL_FLAG_GEN_ICV;
731 qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
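
This block precomputes one half of the HMAC state: a single key-derived pad block is pushed through the NPE in NPE_OP_HASH_GEN_ICV mode, and the resulting chain variable is written back to `target` inside the crypto context. The pad itself is filled before the descriptor is queued; a sketch assuming the standard HMAC construction (key zero-padded to one block, then XORed with the ipad/opad byte):

    /* 'pad' is one block-sized buffer from ctx_pool (freed again at
     * lines 417-418); 'xpad' is 0x36 for the inner pad, 0x5c for the
     * outer pad, per HMAC (RFC 2104): */
    memcpy(pad, key, key_len);
    memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len);
    for (i = 0; i < HMAC_PAD_BLOCKLEN; i++)
        pad[i] ^= xpad;
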
785 struct crypt_ctl *crypt;
789 crypt = get_crypt_desc_emerg();
790 if (!crypt)
795 crypt->data.tfm = tfm;
796 crypt->crypt_offs = 0;
797 crypt->crypt_len = AES_BLOCK128;
798 crypt->src_buf = 0;
799 crypt->crypto_ctx = dir->npe_ctx_phys;
800 crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32);
801 crypt->mode = NPE_OP_ENC_GEN_KEY;
802 crypt->init_len = dir->npe_ctx_idx;
803 crypt->ctl_flags |= CTL_FLAG_GEN_REVAES;
806 qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
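
This block derives the AES decryption (reverse) key schedule in hardware: one AES block is processed in NPE_OP_ENC_GEN_KEY mode against the decrypt direction's context, and the engine deposits the reverse key just past the 32-bit config word, which is why icv_rev_aes points at npe_ctx_phys + sizeof(u32). A sketch of the glue elided between the fragments above (the encrypt-mode flip and the final queuing are assumptions based on the driver's pattern elsewhere):

    struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);  /* context type name assumed */
    struct ix_sa_dir *dir = &ctx->decrypt;      /* per-direction state, assumed */

    /* The key-generation pass runs the cipher in encrypt mode: */
    *(u32 *)dir->npe_ctx |= cpu_to_be32(CIPH_ENCR);

    /* ...descriptor fields filled in as listed above, then: */
    atomic_inc(&ctx->configuring);
    qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
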
979 struct crypt_ctl *crypt;
1000 crypt = get_crypt_desc();
1001 if (!crypt)
1004 crypt->data.ablk_req = req;
1005 crypt->crypto_ctx = dir->npe_ctx_phys;
1006 crypt->mode = dir->npe_mode;
1007 crypt->init_len = dir->npe_ctx_idx;
1009 crypt->crypt_offs = 0;
1010 crypt->crypt_len = nbytes;
1013 memcpy(crypt->iv, req->iv, ivsize);
1021 crypt->mode |= NPE_OP_NOT_IN_PLACE;
1030 crypt->dst_buf = dst_hook.phys_next;
1040 crypt->src_buf = src_hook.phys_next;
1041 crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK;
1042 qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
1047 free_buf_chain(dev, req_ctx->src, crypt->src_buf);
1050 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
1052 crypt->ctl_flags = CTL_FLAG_UNUSED;
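
Lines 979-1052 are the skcipher path: the descriptor gets the direction's context, mode, and IV; the source (and, when operating out of place, the destination) scatterlist is flattened into a chain of buffer descriptors whose first physical link lands in src_buf/dst_buf; lines 1047-1052 unwind when chaining fails. A sketch of the chaining step that feeds crypt->src_buf, with the buffer_desc `next` field and the DMA direction assumed:

    struct buffer_desc src_hook, *buf;

    /* Flatten req->src into a buffer_desc chain; src_hook is a stack
     * head whose links the NPE itself never sees: */
    buf = chainup_buffers(dev, req->src, nbytes, &src_hook,
                          flags, DMA_TO_DEVICE);
    req_ctx->src = src_hook.next;        /* field name assumed */
    crypt->src_buf = src_hook.phys_next;
    if (!buf)
        goto free_buf_src;               /* the unwind at lines 1047-1052 */
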
1096 struct crypt_ctl *crypt;
1120 crypt = get_crypt_desc();
1121 if (!crypt)
1124 crypt->data.aead_req = req;
1125 crypt->crypto_ctx = dir->npe_ctx_phys;
1126 crypt->mode = dir->npe_mode;
1127 crypt->init_len = dir->npe_ctx_idx;
1129 crypt->crypt_offs = cryptoffset;
1130 crypt->crypt_len = eff_cryptlen;
1132 crypt->auth_offs = 0;
1133 crypt->auth_len = req->assoclen + cryptlen;
1135 memcpy(crypt->iv, req->iv, ivsize);
1137 buf = chainup_buffers(dev, req->src, crypt->auth_len,
1140 crypt->src_buf = src_hook.phys_next;
1146 crypt->icv_rev_aes = buf->phys_addr +
1154 crypt->mode |= NPE_OP_NOT_IN_PLACE;
1157 buf = chainup_buffers(dev, req->dst, crypt->auth_len,
1160 crypt->dst_buf = dst_hook.phys_next;
1168 crypt->icv_rev_aes = buf->phys_addr +
1180 crypt->icv_rev_aes = dma;
1190 crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD;
1191 qmgr_put_entry(send_qid, crypt_virt2phys(crypt));
1196 free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
1198 free_buf_chain(dev, req_ctx->src, crypt->src_buf);
1199 crypt->ctl_flags = CTL_FLAG_UNUSED;
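
The AEAD path combines both operations in one descriptor: crypt_offs/crypt_len cover the payload while auth_offs/auth_len span req->assoclen + cryptlen, so the NPE authenticates the associated data and the ciphertext in a single pass. icv_rev_aes is pointed wherever the tag must land: directly into the last chained buffer when the tag is contiguous there (lines 1146 and 1168), otherwise at a freshly allocated bounce buffer (the `dma` handle at line 1180) that finish_scattered_hmac() copies back on completion. A sketch of that fallback, with `lastlen` and `authsize` as assumed local names:

    if (unlikely(lastlen < authsize)) {
        dma_addr_t dma;

        /* The tag straddles scatterlist entries; hash it into a safe
         * bounce buffer and let finish_scattered_hmac() copy it back: */
        req_ctx->hmac_virt = dma_pool_alloc(buffer_pool, flags, &dma);
        if (unlikely(!req_ctx->hmac_virt))
            goto free_buf_dst;
        crypt->icv_rev_aes = dma;
    }
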