Lines matching refs: req_ctx
350 struct caam_request *req_ctx = aead_request_ctx(req);
351 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
352 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
547 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
1110 struct caam_request *req_ctx = skcipher_request_ctx(req);
1111 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
1112 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
1236 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
1284 struct caam_request *req_ctx = to_caam_req(areq);
1285 struct aead_edesc *edesc = req_ctx->edesc;
1305 struct caam_request *req_ctx = to_caam_req(areq);
1306 struct aead_edesc *edesc = req_ctx->edesc;
1391 struct caam_request *req_ctx = to_caam_req(areq);
1394 struct skcipher_edesc *edesc = req_ctx->edesc;
1429 struct caam_request *req_ctx = to_caam_req(areq);
1432 struct skcipher_edesc *edesc = req_ctx->edesc;
3210 struct caam_request *req_ctx;
3219 req_ctx = kzalloc(sizeof(*req_ctx), GFP_KERNEL | GFP_DMA);
3220 if (!req_ctx)
3223 in_fle = &req_ctx->fd_flt[1];
3224 out_fle = &req_ctx->fd_flt[0];
3274 req_ctx->flc = flc;
3275 req_ctx->flc_dma = flc_dma;
3276 req_ctx->cbk = split_key_sh_done;
3277 req_ctx->ctx = &result;
3279 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
3296 kfree(req_ctx);
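Unlike the other call sites, the group at 3210-3296 does not embed the caam_request in a crypto request context: it is allocated on the fly for a one-shot split-key job and freed once that job completes. Reassembling those lines into a single flow (DMA mapping, frame-list programming and error paths elided; dpaa2_caam_enqueue() and the field names are taken from the listing, the comments are interpretation):

        struct caam_request *req_ctx;
        struct dpaa2_fl_entry *in_fle, *out_fle;
        int ret;

        req_ctx = kzalloc(sizeof(*req_ctx), GFP_KERNEL | GFP_DMA);
        if (!req_ctx)
                return -ENOMEM;

        in_fle = &req_ctx->fd_flt[1];   /* input frame-list entry */
        out_fle = &req_ctx->fd_flt[0];  /* output frame-list entry */
        /* ... program in_fle/out_fle with the DMA-mapped key buffers ... */

        req_ctx->flc = flc;             /* flow context holding the job descriptor */
        req_ctx->flc_dma = flc_dma;
        req_ctx->cbk = split_key_sh_done;
        req_ctx->ctx = &result;         /* completion cookie handed back to cbk */

        ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
        /* ... wait for split_key_sh_done() to report completion ... */
        kfree(req_ctx);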
3521 struct caam_request *req_ctx = &state->caam_req;
3522 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
3523 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
3598 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
3607 req_ctx->flc = &ctx->flc[UPDATE];
3608 req_ctx->flc_dma = ctx->flc_dma[UPDATE];
3609 req_ctx->cbk = ahash_done_bi;
3610 req_ctx->ctx = &req->base;
3611 req_ctx->edesc = edesc;
3613 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
3640 struct caam_request *req_ctx = &state->caam_req;
3641 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
3642 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
3680 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
3689 req_ctx->flc = &ctx->flc[FINALIZE];
3690 req_ctx->flc_dma = ctx->flc_dma[FINALIZE];
3691 req_ctx->cbk = ahash_done_ctx_src;
3692 req_ctx->ctx = &req->base;
3693 req_ctx->edesc = edesc;
3695 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
3711 struct caam_request *req_ctx = &state->caam_req;
3712 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
3713 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
3774 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
3783 req_ctx->flc = &ctx->flc[FINALIZE];
3784 req_ctx->flc_dma = ctx->flc_dma[FINALIZE];
3785 req_ctx->cbk = ahash_done_ctx_src;
3786 req_ctx->ctx = &req->base;
3787 req_ctx->edesc = edesc;
3789 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
3805 struct caam_request *req_ctx = &state->caam_req;
3806 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
3807 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
3842 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
3879 req_ctx->flc = &ctx->flc[DIGEST];
3880 req_ctx->flc_dma = ctx->flc_dma[DIGEST];
3881 req_ctx->cbk = ahash_done;
3882 req_ctx->ctx = &req->base;
3883 req_ctx->edesc = edesc;
3884 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
3900 struct caam_request *req_ctx = &state->caam_req;
3901 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
3902 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
3934 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
3951 req_ctx->flc = &ctx->flc[DIGEST];
3952 req_ctx->flc_dma = ctx->flc_dma[DIGEST];
3953 req_ctx->cbk = ahash_done;
3954 req_ctx->ctx = &req->base;
3955 req_ctx->edesc = edesc;
3957 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
3973 struct caam_request *req_ctx = &state->caam_req;
3974 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
3975 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
4048 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
4057 req_ctx->flc = &ctx->flc[UPDATE_FIRST];
4058 req_ctx->flc_dma = ctx->flc_dma[UPDATE_FIRST];
4059 req_ctx->cbk = ahash_done_ctx_dst;
4060 req_ctx->ctx = &req->base;
4061 req_ctx->edesc = edesc;
4063 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
4094 struct caam_request *req_ctx = &state->caam_req;
4095 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
4096 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
4159 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
4168 req_ctx->flc = &ctx->flc[DIGEST];
4169 req_ctx->flc_dma = ctx->flc_dma[DIGEST];
4170 req_ctx->cbk = ahash_done;
4171 req_ctx->ctx = &req->base;
4172 req_ctx->edesc = edesc;
4173 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
4190 struct caam_request *req_ctx = &state->caam_req;
4191 struct dpaa2_fl_entry *in_fle = &req_ctx->fd_flt[1];
4192 struct dpaa2_fl_entry *out_fle = &req_ctx->fd_flt[0];
4239 memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
4279 req_ctx->flc = &ctx->flc[UPDATE_FIRST];
4280 req_ctx->flc_dma = ctx->flc_dma[UPDATE_FIRST];
4281 req_ctx->cbk = ahash_done_ctx_dst;
4282 req_ctx->ctx = &req->base;
4283 req_ctx->edesc = edesc;
4285 ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
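Taken together, the ahash references from 3521 onwards repeat one submission recipe: zero both frame-list entries, program input and output, select the flow context for the operation (UPDATE, UPDATE_FIRST, FINALIZE or DIGEST), record the callback, the async request and the extended descriptor, and enqueue. A condensed sketch of that recipe follows; the caam_request layout is inferred from the fields used above, and the exact member types and ordering are an assumption:

struct caam_request {                           /* layout inferred from the references above */
        struct dpaa2_fl_entry fd_flt[2];        /* [0] = output, [1] = input frame-list entry */
        struct caam_flc *flc;                   /* flow context with the shared descriptor */
        dma_addr_t flc_dma;
        void (*cbk)(void *ctx, u32 err);        /* completion callback */
        void *ctx;                              /* callback cookie, typically &req->base */
        void *edesc;                            /* extended descriptor released in the callback */
};

        /* e.g. the DIGEST path around 3879-3884 */
        memset(&req_ctx->fd_flt, 0, sizeof(req_ctx->fd_flt));
        /* ... program &req_ctx->fd_flt[1] (input) and &req_ctx->fd_flt[0] (output) ... */
        req_ctx->flc = &ctx->flc[DIGEST];
        req_ctx->flc_dma = ctx->flc_dma[DIGEST];
        req_ctx->cbk = ahash_done;
        req_ctx->ctx = &req->base;
        req_ctx->edesc = edesc;
        ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);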