Lines matching identifier rctx in drivers/crypto/qce/aead.c (Linux kernel, Qualcomm crypto engine AEAD driver); the leading number on each line is the source line it was taken from.
27 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
49 dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
51 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
53 if (IS_CCM(rctx->flags)) {
55 sg_free_table(&rctx->src_tbl);
57 sg_free_table(&rctx->dst_tbl);
59 if (!(IS_DECRYPT(rctx->flags) && !diff_dst))
60 sg_free_table(&rctx->dst_tbl);
63 sg_free_table(&rctx->dst_tbl);
70 if (IS_ENCRYPT(rctx->flags)) {
72 if (IS_CCM(rctx->flags))
73 scatterwalk_map_and_copy(rctx->ccmresult_buf, req->dst,
79 } else if (!IS_CCM(rctx->flags)) {
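
The fragments above come from the request-completion callback (qce_aead_done() in this driver). The three sg_free_table(&rctx->dst_tbl) calls at source lines 57, 60, and 63 sit on different branches that the flat listing hides; a sketch of the likely branch structure, with the conditions on req->assoclen and diff_dst inferred from the line numbering rather than quoted:

    if (IS_CCM(rctx->flags)) {
            if (req->assoclen) {
                    sg_free_table(&rctx->src_tbl);          /* line 55 */
                    if (diff_dst)
                            sg_free_table(&rctx->dst_tbl);  /* line 57 */
            } else {
                    /* in-place CCM decrypt never allocated a dst table */
                    if (!(IS_DECRYPT(rctx->flags) && !diff_dst))
                            sg_free_table(&rctx->dst_tbl);  /* line 60 */
            }
    } else {
            sg_free_table(&rctx->dst_tbl);                  /* line 63 */
    }

Lines 70-79 then handle the tag: on encryption it is copied out behind the payload in req->dst (for CCM from the per-request ccmresult_buf), while non-CCM decryption compares the engine's computed tag against the one trailing the ciphertext.
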
95 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
99 sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
100 return qce_sgtable_add(tbl, &rctx->result_sg, QCE_RESULT_BUF_SZ);
106 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
108 sg_init_one(&rctx->result_sg, rctx->ccmresult_buf, QCE_BAM_BURST_SIZE);
109 return qce_sgtable_add(tbl, &rctx->result_sg, QCE_BAM_BURST_SIZE);
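
Both result-buffer helpers follow the same pattern: wrap one linear buffer in a single-entry scatterlist, then append it to the table being assembled. The non-CCM variant points at the shared qce->dma.result_buf dump region; the CCM variant points at the per-request rctx->ccmresult_buf, sized to one BAM burst so the engine can DMA the tag out. A minimal sketch of the shared pattern (the helper name here is hypothetical; sg_init_one() and the driver's qce_sgtable_add() are as shown above):

    static struct scatterlist *append_linear_buf(struct sg_table *tbl,
                                                 struct scatterlist *sg,
                                                 void *buf, unsigned int len)
    {
            sg_init_one(sg, buf, len);      /* one-entry scatterlist */
            return qce_sgtable_add(tbl, sg, len);
    }
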
115 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
124 totallen = rctx->cryptlen + assoclen;
125 rctx->dst_nents = sg_nents_for_len(req->dst, totallen);
126 if (rctx->dst_nents < 0) {
130 if (IS_CCM(rctx->flags))
131 rctx->dst_nents += 2;
133 rctx->dst_nents += 1;
137 ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);
141 if (IS_CCM(rctx->flags) && assoclen) {
145 sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->adata_sg,
146 rctx->assoclen);
152 sg = qce_sgtable_add(&rctx->dst_tbl, msg_sg, rctx->cryptlen);
157 totallen = rctx->cryptlen + rctx->assoclen;
160 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, totallen);
165 if (IS_CCM(rctx->flags))
166 sg = qce_aead_prepare_ccm_result_buf(&rctx->dst_tbl, req);
168 sg = qce_aead_prepare_result_buf(&rctx->dst_tbl, req);
174 rctx->dst_sg = rctx->dst_tbl.sgl;
175 rctx->dst_nents = sg_nents_for_len(rctx->dst_sg, totallen) + 1;
180 sg_free_table(&rctx->dst_tbl);
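
This cluster builds the destination sg_table. The sizing at lines 130-133 reserves extra entries beyond the payload: two for CCM (the formatted associated data plus the CCM result buffer), one otherwise (the result dump only). A sketch of the branch the fragments at lines 141-160 likely belong to; scatterwalk_ffwd() and the error handling are inferred from context, not quoted:

    if (IS_CCM(rctx->flags) && assoclen) {
            /* skip the raw AAD in req->dst; the formatted copy behind
             * rctx->adata_sg is appended in its place */
            msg_sg = scatterwalk_ffwd(__sg, req->dst, assoclen);

            sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->adata_sg,
                                 rctx->assoclen);               /* line 145 */
            if (IS_ERR(sg))
                    goto dst_tbl_free;
            sg = qce_sgtable_add(&rctx->dst_tbl, msg_sg,
                                 rctx->cryptlen);               /* line 152 */
            if (IS_ERR(sg))
                    goto dst_tbl_free;
            totallen = rctx->cryptlen + rctx->assoclen;         /* line 157 */
    } else {
            sg = qce_sgtable_add(&rctx->dst_tbl, req->dst,
                                 totallen);                     /* line 160 */
            if (IS_ERR(sg))
                    goto dst_tbl_free;
    }

Line 175 then recounts dst_nents over the finished table, plus one for the result entry appended at lines 166/168.
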
189 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
191 unsigned int assoclen = rctx->assoclen;
197 if (IS_DECRYPT(rctx->flags))
198 cryptlen = rctx->cryptlen + ctx->authsize;
200 cryptlen = rctx->cryptlen;
206 rctx->adata = kzalloc((ALIGN(assoclen, 16) + MAX_CCM_ADATA_HEADER_LEN) *
208 if (!rctx->adata)
220 *(__be16 *)rctx->adata = cpu_to_be16(assoclen);
223 *(__be16 *)rctx->adata = cpu_to_be16(0xfffe);
224 *(__be32 *)(rctx->adata + 2) = cpu_to_be32(assoclen);
229 rctx->adata + adata_header_len,
234 rctx->assoclen = ALIGN(assoclen + adata_header_len, 16);
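
Line 198 widens cryptlen by the tag size on decryption, since for CCM the tag trails the ciphertext in the source buffer. Lines 220-224 prepend the RFC 3610 / NIST SP 800-38C associated-data length header to the AAD copied into rctx->adata, and line 234 pads the result to the 16-byte block the engine expects. A standalone, runnable illustration of that header encoding (the function name is hypothetical):

    #include <stdint.h>
    #include <stdio.h>

    /* AAD length header per RFC 3610: a 2-byte big-endian length for
     * values below 0xff00, otherwise the 0xff 0xfe marker followed by
     * a 4-byte big-endian length. assoclen is at most 2^32 - 1 because
     * aead_request carries it as an unsigned int. */
    static size_t encode_adata_header(uint8_t *out, uint32_t assoclen)
    {
            if (assoclen < 0xff00) {
                    out[0] = (assoclen >> 8) & 0xff;
                    out[1] = assoclen & 0xff;
                    return 2;
            }
            out[0] = 0xff;
            out[1] = 0xfe;
            out[2] = (assoclen >> 24) & 0xff;
            out[3] = (assoclen >> 16) & 0xff;
            out[4] = (assoclen >> 8) & 0xff;
            out[5] = assoclen & 0xff;
            return 6;
    }

    int main(void)
    {
            uint8_t hdr[6];
            size_t i, n = encode_adata_header(hdr, 24);

            for (i = 0; i < n; i++)
                    printf("%02x ", hdr[i]);    /* prints: 00 18 */
            printf("\n");
            return 0;
    }
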
239 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 1;
241 rctx->src_nents = sg_nents_for_len(req->src, totallen) + 2;
244 ret = sg_alloc_table(&rctx->src_tbl, rctx->src_nents, gfp);
249 sg_init_one(&rctx->adata_sg, rctx->adata, rctx->assoclen);
250 sg = qce_sgtable_add(&rctx->src_tbl, &rctx->adata_sg,
251 rctx->assoclen);
257 sg = qce_sgtable_add(&rctx->src_tbl, msg_sg, cryptlen);
268 if (!IS_DECRYPT(rctx->flags)) {
269 sg = qce_aead_prepare_ccm_result_buf(&rctx->src_tbl, req);
277 rctx->src_sg = rctx->src_tbl.sgl;
278 totallen = cryptlen + rctx->assoclen;
279 rctx->src_nents = sg_nents_for_len(rctx->src_sg, totallen);
288 if (IS_ENCRYPT(rctx->flags))
289 rctx->dst_nents = rctx->src_nents + 1;
291 rctx->dst_nents = rctx->src_nents;
292 rctx->dst_sg = rctx->src_sg;
297 sg_free_table(&rctx->src_tbl);
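
Lines 239-241 size the source table with one spare entry for the formatted AAD, plus a second when the request is in-place and the CCM result buffer must ride in the same list; line 268 shows that, in place, the result buffer joins the source list only when encrypting, since on decryption the tag already sits at the end of req->src. The tail of the function, reconstructed from the fragments at lines 277-297 (the diff_dst conditions, helper name, and error handling are inferred):

    sg_mark_end(sg);
    rctx->src_sg = rctx->src_tbl.sgl;
    totallen = cryptlen + rctx->assoclen;
    rctx->src_nents = sg_nents_for_len(rctx->src_sg, totallen);

    if (diff_dst) {
            sg = qce_aead_prepare_dst_buf(req); /* dst builder, lines 115-180 */
            if (IS_ERR(sg)) {
                    ret = PTR_ERR(sg);
                    goto err_free;
            }
    } else {
            if (IS_ENCRYPT(rctx->flags))
                    rctx->dst_nents = rctx->src_nents + 1;  /* + result buf */
            else
                    rctx->dst_nents = rctx->src_nents;
            rctx->dst_sg = rctx->src_sg;        /* in-place: alias the src */
    }
    return 0;

    err_free:
            sg_free_table(&rctx->src_tbl);
            return ret;
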
303 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
310 totallen = rctx->cryptlen + rctx->assoclen;
316 rctx->src_nents = sg_nents_for_len(req->src, totallen);
317 if (rctx->src_nents < 0) {
321 rctx->src_sg = req->src;
323 rctx->src_nents = rctx->dst_nents - 1;
324 rctx->src_sg = rctx->dst_sg;
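
This short cluster is the generic (non-CCM, and CCM-encrypt) buffer prep: build the destination table first, then either walk req->src on its own or, for in-place requests, alias the just-built destination list minus its trailing result entry. Inferred structure around lines 310-324 (helper name assumed from context):

    sg = qce_aead_prepare_dst_buf(req);
    if (IS_ERR(sg))
            return PTR_ERR(sg);
    if (diff_dst) {
            rctx->src_nents = sg_nents_for_len(req->src, totallen);
            if (rctx->src_nents < 0)
                    return -EINVAL;             /* line 317's error branch */
            rctx->src_sg = req->src;
    } else {
            rctx->src_nents = rctx->dst_nents - 1;  /* drop result entry */
            rctx->src_sg = rctx->dst_sg;
    }
    return 0;
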
331 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
338 if (rctx->assoclen)
341 if (IS_ENCRYPT(rctx->flags))
344 cryptlen = rctx->cryptlen + ctx->authsize;
346 rctx->src_nents = sg_nents_for_len(req->src, cryptlen);
347 rctx->src_sg = req->src;
352 rctx->src_nents = sg_nents_for_len(req->src, cryptlen);
353 rctx->src_sg = req->src;
354 rctx->dst_nents = rctx->src_nents;
355 rctx->dst_sg = rctx->src_sg;
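
These lines look like the CCM buffer-prep dispatcher: requests with AAD take the assoclen path above, encryption takes the generic path, and CCM decryption without AAD maps req->src directly, widening cryptlen by the tag so the engine also reads the trailing authentication bytes. Inferred structure (callee names assumed):

    if (rctx->assoclen)                                     /* line 338 */
            return qce_aead_ccm_prepare_buf_assoclen(req);
    if (IS_ENCRYPT(rctx->flags))                            /* line 341 */
            return qce_aead_prepare_buf(req);

    cryptlen = rctx->cryptlen + ctx->authsize;              /* tag included */
    if (diff_dst) {
            rctx->src_nents = sg_nents_for_len(req->src, cryptlen);
            rctx->src_sg = req->src;
            sg = qce_aead_prepare_dst_buf(req);     /* separate dst table */
            if (IS_ERR(sg))
                    return PTR_ERR(sg);
    } else {
            rctx->src_nents = sg_nents_for_len(req->src, cryptlen);
            rctx->src_sg = req->src;
            rctx->dst_nents = rctx->src_nents;
            rctx->dst_sg = rctx->src_sg;
    }
    return 0;
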
361 static int qce_aead_create_ccm_nonce(struct qce_aead_reqctx *rctx, struct qce_aead_ctx *ctx)
367 if (!rctx || !rctx->iv)
370 msglen_size = rctx->iv[0] + 1;
376 ivsize = rctx->ivsize;
382 if (!IS_CCM_RFC4309(rctx->flags)) {
384 rctx->iv[ivsize - i - 1] = 0;
394 memcpy(&msg_len[0], &rctx->cryptlen, 4);
396 memcpy(&rctx->ccm_nonce[0], rctx->iv, rctx->ivsize);
397 if (rctx->assoclen)
398 rctx->ccm_nonce[0] |= 1 << CCM_NONCE_ADATA_SHIFT;
399 rctx->ccm_nonce[0] |= ((ctx->authsize - 2) / 2) <<
402 rctx->ccm_nonce[QCE_MAX_NONCE - i - 1] = msg_len[i];
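
For non-RFC 4309 requests, lines 382-384 first zero the message-length octets at the tail of the caller's IV so stale bytes cannot leak into the nonce. Lines 396-402 then build the CCM B0/flags byte per RFC 3610: bit 6 flags the presence of AAD (CCM_NONCE_ADATA_SHIFT), bits 5..3 encode (authsize - 2) / 2 (CCM_NONCE_AUTHSIZE_SHIFT), and bits 2..0 already hold q - 1 from iv[0], while the message length lands big-endian in the trailing bytes. A standalone, runnable sketch of the same assembly (NONCE_LEN = 16 mirrors QCE_MAX_NONCE by assumption):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define NONCE_LEN 16    /* assumed value of QCE_MAX_NONCE */

    static void build_ccm_nonce(uint8_t nonce[NONCE_LEN], const uint8_t *iv,
                                size_t ivsize, uint32_t cryptlen,
                                int have_adata, unsigned int authsize)
    {
            size_t msglen_size = (size_t)iv[0] + 1; /* q, from flags byte */
            size_t i;

            if (msglen_size > 4)
                    msglen_size = 4;        /* cryptlen is 32 bits wide */

            memcpy(nonce, iv, ivsize);
            if (have_adata)
                    nonce[0] |= 1 << 6;                     /* Adata flag */
            nonce[0] |= ((authsize - 2) / 2) << 3;          /* M' field */
            for (i = 0; i < msglen_size; i++)               /* big-endian Q */
                    nonce[NONCE_LEN - i - 1] = (cryptlen >> (8 * i)) & 0xff;
    }

    int main(void)
    {
            uint8_t iv[NONCE_LEN] = { 2 };  /* q - 1 = 2: 3 length octets */
            uint8_t nonce[NONCE_LEN];

            build_ccm_nonce(nonce, iv, sizeof(iv), 0x1234, 1, 8);
            printf("flags byte: 0x%02x\n", nonce[0]);       /* 0x5a */
            return 0;
    }

The kernel version copies the 32-bit cryptlen with memcpy() and so relies on a little-endian host; the shift form above is endianness-neutral.
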
411 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
420 if (IS_CCM_RFC4309(rctx->flags)) {
421 memset(rctx->ccm_rfc4309_iv, 0, QCE_MAX_IV_SIZE);
422 rctx->ccm_rfc4309_iv[0] = 3;
423 memcpy(&rctx->ccm_rfc4309_iv[1], ctx->ccm4309_salt, QCE_CCM4309_SALT_SIZE);
424 memcpy(&rctx->ccm_rfc4309_iv[4], req->iv, 8);
425 rctx->iv = rctx->ccm_rfc4309_iv;
426 rctx->ivsize = AES_BLOCK_SIZE;
428 rctx->iv = req->iv;
429 rctx->ivsize = crypto_aead_ivsize(tfm);
431 if (IS_CCM_RFC4309(rctx->flags))
432 rctx->assoclen = req->assoclen - 8;
434 rctx->assoclen = req->assoclen;
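
Lines 420-426 synthesize the full 16-byte CCM IV for RFC 4309 requests: byte 0 carries q - 1 = 3 (a 4-octet length field), bytes 1..3 the salt derived from the key, and bytes 4..11 the 8-byte per-request IV; the zeroed tail is later overwritten with the message length. Lines 431-434 subtract those same 8 IV bytes from the AAD length, since the rfc4309 template counts them in req->assoclen. A standalone sketch of the layout (names illustrative; SALT_LEN = 3 assumed for QCE_CCM4309_SALT_SIZE):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define IV_LEN   16     /* AES_BLOCK_SIZE */
    #define SALT_LEN 3      /* assumed QCE_CCM4309_SALT_SIZE */

    static void build_rfc4309_iv(uint8_t out[IV_LEN],
                                 const uint8_t salt[SALT_LEN],
                                 const uint8_t req_iv[8])
    {
            memset(out, 0, IV_LEN);
            out[0] = 3;                     /* q - 1: 4 length octets */
            memcpy(&out[1], salt, SALT_LEN);
            memcpy(&out[4], req_iv, 8);     /* bytes 12..15 stay zero */
    }

    int main(void)
    {
            const uint8_t salt[SALT_LEN] = { 0xde, 0xad, 0xbe };
            const uint8_t req_iv[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
            uint8_t iv[IV_LEN];
            int i;

            build_rfc4309_iv(iv, salt, req_iv);
            for (i = 0; i < IV_LEN; i++)
                    printf("%02x ", iv[i]);
            printf("\n");
            return 0;
    }
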
440 if (IS_CCM(rctx->flags)) {
441 ret = qce_aead_create_ccm_nonce(rctx, ctx);
445 if (IS_CCM(rctx->flags))
452 dst_nents = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
459 src_nents = dma_map_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
465 if (IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags))
471 ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, src_nents, rctx->dst_sg, dst_nents,
488 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src);
490 dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
492 if (IS_CCM(rctx->flags) && rctx->assoclen) {
493 sg_free_table(&rctx->src_tbl);
495 sg_free_table(&rctx->dst_tbl);
497 sg_free_table(&rctx->dst_tbl);
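
Lines 452-471 map the assembled tables for DMA and hand them to the engine; the branch at line 465 exists because an in-place CCM decryption never appended a result entry, so the source spans exactly the same entries as the destination (src_nents = dst_nents) instead of one fewer. Lines 488-497 are the failure path; a sketch of the inferred unwind ladder (label names assumed):

    error_unmap_src:
            if (diff_dst)
                    dma_unmap_sg(qce->dev, req->src, rctx->src_nents,
                                 dir_src);                      /* line 488 */
    error_unmap_dst:
            dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents,
                         dir_dst);                              /* line 490 */
    error_free:
            if (IS_CCM(rctx->flags) && rctx->assoclen) {        /* line 492 */
                    sg_free_table(&rctx->src_tbl);
                    if (diff_dst)
                            sg_free_table(&rctx->dst_tbl);      /* line 495 */
            } else {
                    sg_free_table(&rctx->dst_tbl);              /* line 497 */
            }
            return ret;

Each failure point jumps to the deepest label whose resources were already acquired, so nothing is unmapped or freed twice.
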
505 struct qce_aead_reqctx *rctx = aead_request_ctx_dma(req);
510 rctx->flags = tmpl->alg_flags;
511 rctx->flags |= encrypt ? QCE_ENCRYPT : QCE_DECRYPT;
514 rctx->cryptlen = req->cryptlen;
516 rctx->cryptlen = req->cryptlen - ctx->authsize;
519 if (!rctx->cryptlen) {
520 if (!(IS_CCM(rctx->flags) && IS_DECRYPT(rctx->flags)))
529 aead_request_set_tfm(&rctx->fallback_req, ctx->fallback);
530 aead_request_set_callback(&rctx->fallback_req, req->base.flags,
532 aead_request_set_crypt(&rctx->fallback_req, req->src,
534 aead_request_set_ad(&rctx->fallback_req, req->assoclen);
536 return encrypt ? crypto_aead_encrypt(&rctx->fallback_req) :
537 crypto_aead_decrypt(&rctx->fallback_req);
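
The listing truncates the argument lists at lines 530-534. Restored from the standard crypto-API fallback idiom (the completion wiring is inferred, not quoted), the hand-off to the software implementation reads:

    aead_request_set_tfm(&rctx->fallback_req, ctx->fallback);
    aead_request_set_callback(&rctx->fallback_req, req->base.flags,
                              req->base.complete, req->base.data);
    aead_request_set_crypt(&rctx->fallback_req, req->src, req->dst,
                           req->cryptlen, req->iv);
    aead_request_set_ad(&rctx->fallback_req, req->assoclen);

    return encrypt ? crypto_aead_encrypt(&rctx->fallback_req) :
                     crypto_aead_decrypt(&rctx->fallback_req);

Note that the original req->cryptlen and req->assoclen are passed through untouched; the adjusted rctx values matter only on the hardware path.
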
544 if (IS_CBC(rctx->flags) && !IS_ALIGNED(rctx->cryptlen, blocksize))
548 if (IS_CCM_RFC4309(rctx->flags))
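
The final two checks reject requests the engine cannot handle: CBC modes need block-aligned payloads, and RFC 4309 restricts the AAD to the ESP sizes crypto_ipsec_check_assoclen() accepts (16 or 20 bytes). Inferred form:

    if (IS_CBC(rctx->flags) && !IS_ALIGNED(rctx->cryptlen, blocksize))
            return -EINVAL;                         /* line 544 */

    if (IS_CCM_RFC4309(rctx->flags))                /* line 548 */
            if (crypto_ipsec_check_assoclen(req->assoclen))
                    return -EINVAL;
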