Lines matching refs: rctx
41 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
53 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
54 dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
56 memcpy(rctx->digest, result->auth_iv, digestsize);
57 if (req->result && rctx->last_blk)
60 rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);
61 rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);
67 req->src = rctx->src_orig;
68 req->nbytes = rctx->nbytes_orig;
69 rctx->last_blk = false;
70 rctx->first_blk = false;
78 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
82 unsigned long flags = rctx->flags;
86 rctx->authkey = ctx->authkey;
87 rctx->authklen = QCE_SHA_HMAC_KEY_SIZE;
89 rctx->authkey = ctx->authkey;
90 rctx->authklen = AES_KEYSIZE_128;
93 rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);
94 if (rctx->src_nents < 0) {
96 return rctx->src_nents;
99 ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
103 sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
105 ret = dma_map_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
111 ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents,
112 &rctx->result_sg, 1, qce_ahash_done, async_req);
127 dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
129 dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
135 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
139 memset(rctx, 0, sizeof(*rctx));
140 rctx->first_blk = true;
141 rctx->last_blk = false;
142 rctx->flags = tmpl->alg_flags;
143 memcpy(rctx->digest, std_iv, sizeof(rctx->digest));
150 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
153 memcpy(export_state->pending_buf, rctx->buf, rctx->buflen);
154 memcpy(export_state->partial_digest, rctx->digest, sizeof(rctx->digest));
155 export_state->byte_count[0] = rctx->byte_count[0];
156 export_state->byte_count[1] = rctx->byte_count[1];
157 export_state->pending_buflen = rctx->buflen;
158 export_state->count = rctx->count;
159 export_state->first_blk = rctx->first_blk;
160 export_state->flags = rctx->flags;
167 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
170 memset(rctx, 0, sizeof(*rctx));
171 rctx->count = import_state->count;
172 rctx->buflen = import_state->pending_buflen;
173 rctx->first_blk = import_state->first_blk;
174 rctx->flags = import_state->flags;
175 rctx->byte_count[0] = import_state->byte_count[0];
176 rctx->byte_count[1] = import_state->byte_count[1];
177 memcpy(rctx->buf, import_state->pending_buf, rctx->buflen);
178 memcpy(rctx->digest, import_state->partial_digest, sizeof(rctx->digest));
186 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
196 rctx->count += req->nbytes;
199 total = req->nbytes + rctx->buflen;
202 scatterwalk_map_and_copy(rctx->buf + rctx->buflen, req->src,
204 rctx->buflen += req->nbytes;
209 rctx->src_orig = req->src;
210 rctx->nbytes_orig = req->nbytes;
216 if (rctx->buflen)
217 memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);
230 * since qce_ahash_final will see that rctx->buflen is 0 and return
234 * rctx->buflen is 0 because the crypto engine BAM does not allow
242 scatterwalk_map_and_copy(rctx->buf, req->src, src_offset,
249 len = rctx->buflen;
263 if (rctx->buflen) {
264 sg_init_table(rctx->sg, 2);
265 sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen);
266 sg_chain(rctx->sg, 2, req->src);
267 req->src = rctx->sg;
271 rctx->buflen = hash_later;
278 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
282 if (!rctx->buflen) {
289 rctx->last_blk = true;
291 rctx->src_orig = req->src;
292 rctx->nbytes_orig = req->nbytes;
294 memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);
295 sg_init_one(rctx->sg, rctx->tmpbuf, rctx->buflen);
297 req->src = rctx->sg;
298 req->nbytes = rctx->buflen;
305 struct qce_sha_reqctx *rctx = ahash_request_ctx_dma(req);
314 rctx->src_orig = req->src;
315 rctx->nbytes_orig = req->nbytes;
316 rctx->first_blk = true;
317 rctx->last_blk = true;
319 if (!rctx->nbytes_orig) {
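For orientation, every matched line above reads or writes a member of the per-request hash context. The sketch below is a hypothetical reconstruction of struct qce_sha_reqctx inferred only from those references; the exact member types, array sizes, and the QCE_SHA_MAX_* constants are assumptions and may differ from the driver's real header.

/*
 * Hypothetical reconstruction of the request context referenced above,
 * inferred from the matched lines. Sizes and the QCE_SHA_MAX_* constants
 * are placeholders, not the driver's actual definitions.
 */
#include <linux/scatterlist.h>
#include <linux/types.h>

#define QCE_SHA_MAX_BLOCKSIZE	128	/* assumed: largest supported block size */
#define QCE_SHA_MAX_DIGESTSIZE	64	/* assumed: largest supported digest size */

struct qce_sha_reqctx {
	u8 buf[QCE_SHA_MAX_BLOCKSIZE];		/* pending partial block (update/export/import) */
	u8 tmpbuf[QCE_SHA_MAX_BLOCKSIZE];	/* staging copy of buf chained ahead of req->src */
	u8 digest[QCE_SHA_MAX_DIGESTSIZE];	/* running digest; partial digest on export */
	unsigned int buflen;			/* bytes currently held in buf */
	unsigned long flags;			/* algorithm flags copied from tmpl->alg_flags */
	u64 count;				/* total bytes hashed so far */
	bool last_blk;				/* set for final/digest requests */
	bool first_blk;				/* set on init and for digest requests */
	struct scatterlist result_sg;		/* maps qce->dma.result_buf for DMA_FROM_DEVICE */
	struct scatterlist sg[2];		/* tmpbuf entry chained to the caller's req->src */
	u8 *authkey;				/* HMAC/CMAC key borrowed from the tfm context */
	unsigned int authklen;			/* QCE_SHA_HMAC_KEY_SIZE or AES_KEYSIZE_128 */
	struct scatterlist *src_orig;		/* caller's scatterlist, restored in the done path */
	unsigned int nbytes_orig;		/* caller's length, restored in the done path */
	int src_nents;				/* entries mapped with dma_map_sg() */
	__be32 byte_count[2];			/* auth byte counters, saved big-endian from hardware */
};

If this matches the real layout, the export/import matches (lines 153-160 and 170-178 above) are a symmetric field-for-field copy of buf, digest, byte_count, buflen, count, first_blk, and flags, which is what lets a partially hashed request be suspended and resumed on this hardware.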