Lines Matching refs:ctx
30 #define HPRE_DEV(ctx) (&((ctx)->qp->qm->pdev->dev))
39 typedef void (*hpre_cb)(struct hpre_ctx *ctx, void *sqe);
90 struct hpre_ctx *ctx;
101 static int hpre_alloc_req_id(struct hpre_ctx *ctx)
106 spin_lock_irqsave(&ctx->req_lock, flags);
107 id = idr_alloc(&ctx->req_idr, NULL, 0, QM_Q_DEPTH, GFP_ATOMIC);
108 spin_unlock_irqrestore(&ctx->req_lock, flags);
113 static void hpre_free_req_id(struct hpre_ctx *ctx, int req_id)
117 spin_lock_irqsave(&ctx->req_lock, flags);
118 idr_remove(&ctx->req_idr, req_id);
119 spin_unlock_irqrestore(&ctx->req_lock, flags);
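
The two helpers above serialize an IDR with an irq-safe spinlock so request slots can be claimed and released from atomic context. A minimal sketch of the same pattern, with hypothetical names (my_ctx and MY_QUEUE_DEPTH stand in for the driver's hpre_ctx and QM_Q_DEPTH):

#include <linux/idr.h>
#include <linux/spinlock.h>

struct my_ctx {
        spinlock_t req_lock;
        struct idr req_idr;
};

#define MY_QUEUE_DEPTH  1024    /* stand-in for QM_Q_DEPTH */

/* Claim a free request id in [0, MY_QUEUE_DEPTH). GFP_ATOMIC because
 * the allocation happens under a spinlock and must not sleep. */
static int my_alloc_req_id(struct my_ctx *ctx)
{
        unsigned long flags;
        int id;

        spin_lock_irqsave(&ctx->req_lock, flags);
        id = idr_alloc(&ctx->req_idr, NULL, 0, MY_QUEUE_DEPTH, GFP_ATOMIC);
        spin_unlock_irqrestore(&ctx->req_lock, flags);

        return id;      /* negative errno if no slot is free */
}

static void my_free_req_id(struct my_ctx *ctx, int req_id)
{
        unsigned long flags;

        spin_lock_irqsave(&ctx->req_lock, flags);
        idr_remove(&ctx->req_idr, req_id);
        spin_unlock_irqrestore(&ctx->req_lock, flags);
}
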
124 struct hpre_ctx *ctx;
128 ctx = hpre_req->ctx;
129 id = hpre_alloc_req_id(ctx);
133 ctx->req_list[id] = hpre_req;
136 dfx = ctx->hpre->debug.dfx;
145 struct hpre_ctx *ctx = hpre_req->ctx;
150 ctx->req_list[id] = NULL;
151 hpre_free_req_id(ctx, id);
180 struct hpre_ctx *ctx = hpre_req->ctx;
181 struct device *dev = HPRE_DEV(ctx);
204 struct hpre_ctx *ctx = hpre_req->ctx;
205 struct device *dev = HPRE_DEV(ctx);
209 shift = ctx->key_sz - len;
213 ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC);
232 struct hpre_ctx *ctx = hpre_req->ctx;
237 if ((sg_is_last(data) && len == ctx->key_sz) &&
254 static void hpre_hw_data_clr_all(struct hpre_ctx *ctx,
259 struct device *dev = HPRE_DEV(ctx);
269 dma_free_coherent(dev, ctx->key_sz, req->src, tmp);
271 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE);
281 ctx->key_sz, 1);
282 dma_free_coherent(dev, ctx->key_sz, req->dst, tmp);
284 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE);
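
hpre_hw_data_clr_all undoes whichever DMA setup was chosen per buffer: data that was bounced through dma_alloc_coherent (see line 213) is freed, while scatterlists that were usable in place are released with dma_unmap_single. A hedged sketch of that dual teardown; the bounced flag and my_data_clr name are illustrative, not the driver's:

#include <linux/dma-mapping.h>

/* Illustrative teardown; the real driver decides per buffer whether it
 * was bounced through a coherent allocation or mapped directly. */
static void my_data_clr(struct device *dev, void *vaddr, dma_addr_t dma,
                        size_t key_sz, bool bounced, bool to_device)
{
        if (bounced)
                dma_free_coherent(dev, key_sz, vaddr, dma);
        else
                dma_unmap_single(dev, dma, key_sz,
                                 to_device ? DMA_TO_DEVICE : DMA_FROM_DEVICE);
}
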
288 static int hpre_alg_res_post_hf(struct hpre_ctx *ctx, struct hpre_sqe *sqe,
299 req = ctx->req_list[id];
315 static int hpre_ctx_set(struct hpre_ctx *ctx, struct hisi_qp *qp, int qlen)
319 if (!ctx || !qp || qlen < 0)
322 spin_lock_init(&ctx->req_lock);
323 ctx->qp = qp;
325 hpre = container_of(ctx->qp->qm, struct hpre, qm);
326 ctx->hpre = hpre;
327 ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL);
328 if (!ctx->req_list)
330 ctx->key_sz = 0;
331 ctx->crt_g2_mode = false;
332 idr_init(&ctx->req_idr);
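
hpre_ctx_set validates its arguments, links the context to its queue pair, derives the owning hpre device from the queue's qm via container_of, and sizes the request table to the queue depth. A condensed sketch of that initialization; my_ctx here is a fuller version of the stand-in used in the earlier sketch, still pared down from the real hpre_ctx:

#include <linux/errno.h>
#include <linux/idr.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

/* The real hpre_ctx also carries the queue pair, the owning hpre
 * device and the DH/RSA key material. */
struct my_ctx {
        spinlock_t req_lock;
        void **req_list;
        struct idr req_idr;
        unsigned int key_sz;
        bool crt_g2_mode;
};

static int my_ctx_set(struct my_ctx *ctx, int qlen)
{
        if (!ctx || qlen < 0)
                return -EINVAL;

        spin_lock_init(&ctx->req_lock);

        /* one slot per in-flight request, indexed by the IDR id */
        ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL);
        if (!ctx->req_list)
                return -ENOMEM;

        ctx->key_sz = 0;
        ctx->crt_g2_mode = false;
        idr_init(&ctx->req_idr);

        return 0;
}
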
337 static void hpre_ctx_clear(struct hpre_ctx *ctx, bool is_clear_all)
340 idr_destroy(&ctx->req_idr);
341 kfree(ctx->req_list);
342 hisi_qm_free_qps(&ctx->qp, 1);
345 ctx->crt_g2_mode = false;
346 ctx->key_sz = 0;
367 static void hpre_dh_cb(struct hpre_ctx *ctx, void *resp)
369 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
375 ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
377 areq->dst_len = ctx->key_sz;
383 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);
388 static void hpre_rsa_cb(struct hpre_ctx *ctx, void *resp)
390 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
396 ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
403 areq->dst_len = ctx->key_sz;
404 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);
411 struct hpre_ctx *ctx = qp->qp_ctx;
412 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
414 struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)];
422 req->cb(ctx, resp);
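
The queue-pair callback recovers the originating request from the tag the hardware echoes back in the sqe, then jumps through the per-algorithm callback (the hpre_cb typedef at line 39). A self-contained sketch of that dispatch; my_sqe and my_req are placeholder types:

#include <asm/byteorder.h>
#include <linux/types.h>

struct my_sqe {
        __le16 tag;             /* echoed back by the hardware */
};

struct my_req {
        void (*cb)(struct my_req *req, void *resp);
};

/* Dispatch a completion: the tag in the response descriptor indexes
 * the request table filled in at submission time. */
static void my_qp_cb(void **req_list, struct my_sqe *sqe)
{
        struct my_req *req = req_list[le16_to_cpu(sqe->tag)];

        req->cb(req, sqe);      /* per-algorithm handler, cf. hpre_cb */
}
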
425 static int hpre_ctx_init(struct hpre_ctx *ctx)
433 qp->qp_ctx = ctx;
436 return hpre_ctx_set(ctx, qp, QM_Q_DEPTH);
439 static int hpre_msg_request_set(struct hpre_ctx *ctx, void *req, bool is_rsa)
449 if (akreq->dst_len < ctx->key_sz) {
450 akreq->dst_len = ctx->key_sz;
463 if (kreq->dst_len < ctx->key_sz) {
464 kreq->dst_len = ctx->key_sz;
474 msg->key = cpu_to_le64(ctx->dh.dma_xa_p);
478 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
479 h_req->ctx = ctx;
490 static int hpre_send(struct hpre_ctx *ctx, struct hpre_sqe *msg)
492 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
498 ret = hisi_qp_send(ctx->qp, msg);
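
hpre_send wraps hisi_qp_send and, in the full source (only partially matched in this listing), retries a bounded number of times while the queue reports -EBUSY. A generic sketch of that bounded-retry submission; MY_TRY_SEND_TIMES and the qp_send callback are stand-ins:

#include <linux/errno.h>

#define MY_TRY_SEND_TIMES       100     /* stand-in for the driver's limit */

/* Retry a queue submission while the hardware queue is momentarily
 * full, giving up after a bounded number of attempts. */
static int my_send(void *qp, void *msg, int (*qp_send)(void *qp, void *msg))
{
        int ctr = 0;
        int ret;

        do {
                ret = qp_send(qp, msg);
        } while (ret == -EBUSY && ctr++ < MY_TRY_SEND_TIMES);

        return ret;
}
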
517 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
523 ret = hpre_msg_request_set(ctx, req, false);
532 msg->in = cpu_to_le64(ctx->dh.dma_g);
539 if (ctx->crt_g2_mode && !req->src)
545 ret = hpre_send(ctx, msg);
551 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
577 static int hpre_dh_set_params(struct hpre_ctx *ctx, struct dh *params)
579 struct device *dev = HPRE_DEV(ctx);
589 sz = ctx->key_sz = params->p_size;
590 ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1,
591 &ctx->dh.dma_xa_p, GFP_KERNEL);
592 if (!ctx->dh.xa_p)
595 memcpy(ctx->dh.xa_p + sz, params->p, sz);
599 ctx->crt_g2_mode = true;
603 ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL);
604 if (!ctx->dh.g) {
605 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,
606 ctx->dh.dma_xa_p);
607 ctx->dh.xa_p = NULL;
611 memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size);
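
hpre_dh_set_params keeps xa (the private value) and the prime p in one coherent buffer of twice the key size, p in the upper half, and right-aligns the generator g in a second key-size buffer; if the second allocation fails, the first is unwound. A sketch under those assumptions (my_dh and my_dh_set_params are illustrative names):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/string.h>

struct my_dh {
        char *xa_p;             /* xa in the low half, p in the high half */
        dma_addr_t dma_xa_p;
        char *g;
        dma_addr_t dma_g;
};

static int my_dh_set_params(struct device *dev, struct my_dh *dh,
                            const void *p, size_t p_size,
                            const void *g, size_t g_size)
{
        size_t sz = p_size;     /* key size == size of the prime p */

        dh->xa_p = dma_alloc_coherent(dev, sz << 1, &dh->dma_xa_p,
                                      GFP_KERNEL);
        if (!dh->xa_p)
                return -ENOMEM;
        memcpy(dh->xa_p + sz, p, sz);   /* p goes in the upper half */

        dh->g = dma_alloc_coherent(dev, sz, &dh->dma_g, GFP_KERNEL);
        if (!dh->g) {
                /* unwind the first allocation on failure */
                dma_free_coherent(dev, sz << 1, dh->xa_p, dh->dma_xa_p);
                dh->xa_p = NULL;
                return -ENOMEM;
        }
        /* right-align the generator inside a key-size buffer */
        memcpy(dh->g + (sz - g_size), g, g_size);

        return 0;
}
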
616 static void hpre_dh_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)
618 struct device *dev = HPRE_DEV(ctx);
619 unsigned int sz = ctx->key_sz;
622 hisi_qm_stop_qp(ctx->qp);
624 if (ctx->dh.g) {
625 dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g);
626 ctx->dh.g = NULL;
629 if (ctx->dh.xa_p) {
630 memzero_explicit(ctx->dh.xa_p, sz);
631 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,
632 ctx->dh.dma_xa_p);
633 ctx->dh.xa_p = NULL;
636 hpre_ctx_clear(ctx, is_clear_all);
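
Before the xa_p buffer is returned, the secret half is cleared with memzero_explicit, which unlike memset cannot be optimized away by the compiler. The same wipe-then-free idiom as a small helper sketch (my_free_secret is not a driver function, just the idiom extracted):

#include <linux/dma-mapping.h>
#include <linux/string.h>

static void my_free_secret(struct device *dev, void *buf, dma_addr_t dma,
                           size_t sz)
{
        memzero_explicit(buf, sz);      /* cannot be elided like memset() */
        dma_free_coherent(dev, sz, buf, dma);
}
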
642 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
650 hpre_dh_clear_ctx(ctx, false);
652 ret = hpre_dh_set_params(ctx, &params);

656 memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key,
662 hpre_dh_clear_ctx(ctx, false);
668 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
670 return ctx->key_sz;
675 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
677 return hpre_ctx_init(ctx);
682 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
684 hpre_dh_clear_ctx(ctx, true);
719 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
726 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
727 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {
728 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);
734 if (unlikely(!ctx->rsa.pubkey))
737 ret = hpre_msg_request_set(ctx, req, true);
742 msg->key = cpu_to_le64(ctx->rsa.dma_pubkey);
753 ret = hpre_send(ctx, msg);
759 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
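
For key sizes the accelerator does not handle (512 and 1536 bits, per the check at lines 726-727), the request is handed to the software fallback tfm. A sketch of that delegation; the hypothetical my_rsa_soft_enc below also restores the original tfm on the request before returning, as the full driver does:

#include <crypto/akcipher.h>

static int my_rsa_soft_enc(struct akcipher_request *req,
                           struct crypto_akcipher *hw_tfm,
                           struct crypto_akcipher *soft_tfm)
{
        int ret;

        akcipher_request_set_tfm(req, soft_tfm);
        ret = crypto_akcipher_encrypt(req);
        akcipher_request_set_tfm(req, hw_tfm); /* restore for the caller */

        return ret;
}
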
767 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
774 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
775 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {
776 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);
782 if (unlikely(!ctx->rsa.prikey))
785 ret = hpre_msg_request_set(ctx, req, true);
789 if (ctx->crt_g2_mode) {
790 msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey);
794 msg->key = cpu_to_le64(ctx->rsa.dma_prikey);
808 ret = hpre_send(ctx, msg);
814 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
819 static int hpre_rsa_set_n(struct hpre_ctx *ctx, const char *value,
826 ctx->key_sz = vlen;
829 if (!hpre_rsa_key_size_is_support(ctx->key_sz))
832 ctx->rsa.pubkey = dma_alloc_coherent(HPRE_DEV(ctx), vlen << 1,
833 &ctx->rsa.dma_pubkey,
835 if (!ctx->rsa.pubkey)
839 ctx->rsa.prikey = dma_alloc_coherent(HPRE_DEV(ctx), vlen << 1,
840 &ctx->rsa.dma_prikey,
842 if (!ctx->rsa.prikey) {
843 dma_free_coherent(HPRE_DEV(ctx), vlen << 1,
844 ctx->rsa.pubkey,
845 ctx->rsa.dma_pubkey);
846 ctx->rsa.pubkey = NULL;
849 memcpy(ctx->rsa.prikey + vlen, ptr, vlen);
851 memcpy(ctx->rsa.pubkey + vlen, ptr, vlen);
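
hpre_rsa_set_n sizes both key buffers at twice the modulus length, stores n in the upper half of each, and rolls the public-key allocation back if the private-key one fails. A sketch with an illustrative my_rsa container:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/string.h>

struct my_rsa {
        char *pubkey;
        dma_addr_t dma_pubkey;
        char *prikey;
        dma_addr_t dma_prikey;
};

/* Both buffers are 2 * vlen: key material in one half, n in the other. */
static int my_rsa_set_n(struct device *dev, struct my_rsa *rsa,
                        const char *n, size_t vlen, bool private)
{
        rsa->pubkey = dma_alloc_coherent(dev, vlen << 1, &rsa->dma_pubkey,
                                         GFP_KERNEL);
        if (!rsa->pubkey)
                return -ENOMEM;

        if (private) {
                rsa->prikey = dma_alloc_coherent(dev, vlen << 1,
                                                 &rsa->dma_prikey, GFP_KERNEL);
                if (!rsa->prikey) {
                        /* roll back the public-key allocation */
                        dma_free_coherent(dev, vlen << 1, rsa->pubkey,
                                          rsa->dma_pubkey);
                        rsa->pubkey = NULL;
                        return -ENOMEM;
                }
                memcpy(rsa->prikey + vlen, n, vlen);
        }
        memcpy(rsa->pubkey + vlen, n, vlen);

        return 0;
}
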
857 static int hpre_rsa_set_e(struct hpre_ctx *ctx, const char *value,
864 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
867 memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen);
872 static int hpre_rsa_set_d(struct hpre_ctx *ctx, const char *value,
879 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
882 memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen);
902 static int hpre_rsa_setkey_crt(struct hpre_ctx *ctx, struct rsa_key *rsa_key)
904 unsigned int hlf_ksz = ctx->key_sz >> 1;
905 struct device *dev = HPRE_DEV(ctx);
909 ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS,
910 &ctx->rsa.dma_crt_prikey,
912 if (!ctx->rsa.crt_prikey)
915 ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz,
921 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
927 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
933 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
939 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
944 ctx->crt_g2_mode = true;
950 memzero_explicit(ctx->rsa.crt_prikey, offset);
951 dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey,
952 ctx->rsa.dma_crt_prikey);
953 ctx->rsa.crt_prikey = NULL;
954 ctx->crt_g2_mode = false;
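
hpre_rsa_setkey_crt packs the five CRT parameters into one coherent buffer at half-key-size strides (HPRE_CRT_PRMS slots; the exact parameter order is not visible in this listing), and on any failure wipes what was already copied before freeing the buffer. A sketch of the per-parameter copy, assuming each value is right-aligned in its slot (my_crt_para_get is hypothetical):

#include <linux/errno.h>
#include <linux/string.h>

#define MY_CRT_PRMS     5       /* stand-in for HPRE_CRT_PRMS */

/* Right-align one CRT value in its half-key-size slot inside the
 * packed buffer of MY_CRT_PRMS slots. */
static int my_crt_para_get(char *para, size_t para_sz,
                           const char *raw, size_t raw_sz)
{
        if (!raw || !raw_sz || raw_sz > para_sz)
                return -EINVAL;

        memcpy(para + (para_sz - raw_sz), raw, raw_sz);
        return 0;
}
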
960 static void hpre_rsa_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)
962 unsigned int half_key_sz = ctx->key_sz >> 1;
963 struct device *dev = HPRE_DEV(ctx);
966 hisi_qm_stop_qp(ctx->qp);
968 if (ctx->rsa.pubkey) {
969 dma_free_coherent(dev, ctx->key_sz << 1,
970 ctx->rsa.pubkey, ctx->rsa.dma_pubkey);
971 ctx->rsa.pubkey = NULL;
974 if (ctx->rsa.crt_prikey) {
975 memzero_explicit(ctx->rsa.crt_prikey,
978 ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey);
979 ctx->rsa.crt_prikey = NULL;
982 if (ctx->rsa.prikey) {
983 memzero_explicit(ctx->rsa.prikey, ctx->key_sz);
984 dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey,
985 ctx->rsa.dma_prikey);
986 ctx->rsa.prikey = NULL;
989 hpre_ctx_clear(ctx, is_clear_all);
1007 static int hpre_rsa_setkey(struct hpre_ctx *ctx, const void *key,
1013 hpre_rsa_clear_ctx(ctx, false);
1022 ret = hpre_rsa_set_n(ctx, rsa_key.n, rsa_key.n_sz, private);
1027 ret = hpre_rsa_set_d(ctx, rsa_key.d, rsa_key.d_sz);
1032 ret = hpre_rsa_setkey_crt(ctx, &rsa_key);
1038 ret = hpre_rsa_set_e(ctx, rsa_key.e, rsa_key.e_sz);
1042 if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) {
1050 hpre_rsa_clear_ctx(ctx, false);
1057 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1060 ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen);
1064 return hpre_rsa_setkey(ctx, key, keylen, false);
1070 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1073 ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen);
1077 return hpre_rsa_setkey(ctx, key, keylen, true);
1082 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1085 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
1086 ctx->key_sz == HPRE_RSA_1536BITS_KSZ)
1087 return crypto_akcipher_maxsize(ctx->rsa.soft_tfm);
1089 return ctx->key_sz;
1094 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1097 ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0);
1098 if (IS_ERR(ctx->rsa.soft_tfm)) {
1100 return PTR_ERR(ctx->rsa.soft_tfm);
1103 ret = hpre_ctx_init(ctx);
1105 crypto_free_akcipher(ctx->rsa.soft_tfm);
1112 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1114 hpre_rsa_clear_ctx(ctx, true);
1115 crypto_free_akcipher(ctx->rsa.soft_tfm);
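
hpre_rsa_init_tfm allocates the "rsa-generic" software fallback before initializing the hardware context and frees it again if that initialization fails; hpre_rsa_exit_tfm tears both down. A sketch of that lifecycle; my_hw_init is a placeholder for the real context setup:

#include <crypto/akcipher.h>
#include <linux/err.h>

static struct crypto_akcipher *my_soft_tfm;

static int my_hw_init(void)
{
        return 0;       /* placeholder for hpre_ctx_init() */
}

static int my_rsa_init(void)
{
        int ret;

        my_soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0);
        if (IS_ERR(my_soft_tfm))
                return PTR_ERR(my_soft_tfm);

        ret = my_hw_init();
        if (ret)
                crypto_free_akcipher(my_soft_tfm);      /* unwind on failure */

        return ret;
}

static void my_rsa_exit(void)
{
        crypto_free_akcipher(my_soft_tfm);
}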