Lines matching refs:ctx in the HiSilicon HPRE crypto driver (hpre_crypto.c)

60 typedef void (*hpre_cb)(struct hpre_ctx *ctx, void *sqe);
137 struct hpre_ctx *ctx;
160 static int hpre_alloc_req_id(struct hpre_ctx *ctx)
165 spin_lock_irqsave(&ctx->req_lock, flags);
166 id = idr_alloc(&ctx->req_idr, NULL, 0, ctx->qp->sq_depth, GFP_ATOMIC);
167 spin_unlock_irqrestore(&ctx->req_lock, flags);
172 static void hpre_free_req_id(struct hpre_ctx *ctx, int req_id)
176 spin_lock_irqsave(&ctx->req_lock, flags);
177 idr_remove(&ctx->req_idr, req_id);
178 spin_unlock_irqrestore(&ctx->req_lock, flags);
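
The fragments above (source lines 160-178) show how per-request slot ids are handed out: an IDR in the context, guarded by ctx->req_lock, bounded by the queue depth, and allocated with GFP_ATOMIC so it is safe from the request path. A minimal sketch of that pattern, reconstructed from the listed lines (the return convention and any struct fields beyond req_lock, req_idr and qp are assumptions):

#include <linux/idr.h>
#include <linux/spinlock.h>

static int hpre_alloc_req_id(struct hpre_ctx *ctx)
{
	unsigned long flags;
	int id;

	spin_lock_irqsave(&ctx->req_lock, flags);
	/* NULL placeholder for now; the request pointer is stored once known */
	id = idr_alloc(&ctx->req_idr, NULL, 0, ctx->qp->sq_depth, GFP_ATOMIC);
	spin_unlock_irqrestore(&ctx->req_lock, flags);

	return id;	/* slot index, or a negative errno from idr_alloc() */
}

static void hpre_free_req_id(struct hpre_ctx *ctx, int req_id)
{
	unsigned long flags;

	spin_lock_irqsave(&ctx->req_lock, flags);
	idr_remove(&ctx->req_idr, req_id);
	spin_unlock_irqrestore(&ctx->req_lock, flags);
}

The id doubles as the index into ctx->req_list (lines 192 and 209 below), so completion handlers can map a hardware tag straight back to the request.
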
183 struct hpre_ctx *ctx;
187 ctx = hpre_req->ctx;
188 id = hpre_alloc_req_id(ctx);
192 ctx->req_list[id] = hpre_req;
195 dfx = ctx->hpre->debug.dfx;
204 struct hpre_ctx *ctx = hpre_req->ctx;
209 ctx->req_list[id] = NULL;
210 hpre_free_req_id(ctx, id);
239 struct device *dev = hpre_req->ctx->dev;
262 struct hpre_ctx *ctx = hpre_req->ctx;
263 struct device *dev = ctx->dev;
267 shift = ctx->key_sz - len;
271 ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC);
290 struct hpre_ctx *ctx = hpre_req->ctx;
295 if ((sg_is_last(data) && len == ctx->key_sz) &&
312 static void hpre_hw_data_clr_all(struct hpre_ctx *ctx,
317 struct device *dev = ctx->dev;
327 dma_free_coherent(dev, ctx->key_sz, req->src, tmp);
329 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE);
339 ctx->key_sz, 1);
340 dma_free_coherent(dev, ctx->key_sz, req->dst, tmp);
342 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE);
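
Lines 262-342 imply a two-path data scheme in hpre_hw_data_init()/hpre_hw_data_clr_all(): a single scatterlist entry of exactly key_sz bytes is DMA-mapped in place, while anything shorter or fragmented is copied into a front-padded dma_alloc_coherent() bounce buffer (line 271). Teardown therefore has to undo the right one. A hedged sketch of that branch; the bounced flag and helper name are illustrative, the driver itself infers the path from the stored pointers:

#include <linux/dma-mapping.h>

static void hpre_clr_buf(struct device *dev, void *vaddr, dma_addr_t dma,
			 unsigned int key_sz, bool bounced,
			 enum dma_data_direction dir)
{
	if (bounced)
		/* bounce copy that was allocated with dma_alloc_coherent() */
		dma_free_coherent(dev, key_sz, vaddr, dma);
	else
		/* scatterlist entry that was mapped directly */
		dma_unmap_single(dev, dma, key_sz, dir);
}
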
346 static int hpre_alg_res_post_hf(struct hpre_ctx *ctx, struct hpre_sqe *sqe,
359 req = ctx->req_list[id];
373 dev_err_ratelimited(ctx->dev, "alg[0x%x] error: done[0x%x], etype[0x%x]\n",
379 static int hpre_ctx_set(struct hpre_ctx *ctx, struct hisi_qp *qp, int qlen)
383 if (!ctx || !qp || qlen < 0)
386 spin_lock_init(&ctx->req_lock);
387 ctx->qp = qp;
388 ctx->dev = &qp->qm->pdev->dev;
390 hpre = container_of(ctx->qp->qm, struct hpre, qm);
391 ctx->hpre = hpre;
392 ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL);
393 if (!ctx->req_list)
395 ctx->key_sz = 0;
396 ctx->crt_g2_mode = false;
397 idr_init(&ctx->req_idr);
402 static void hpre_ctx_clear(struct hpre_ctx *ctx, bool is_clear_all)
405 idr_destroy(&ctx->req_idr);
406 kfree(ctx->req_list);
407 hisi_qm_free_qps(&ctx->qp, 1);
410 ctx->crt_g2_mode = false;
411 ctx->key_sz = 0;
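
hpre_ctx_set() (lines 379-397) binds a context to its queue pair and derives everything else from it: the device comes from the QM's PCI device, the parent hpre instance via container_of(), and req_list is sized to the queue depth so slot ids from the IDR always fit. hpre_ctx_clear() (lines 402-411) releases the same resources in reverse. A sketch assembled from those fragments; the error codes and the is_clear_all handling are assumptions:

#include <linux/idr.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

static int hpre_ctx_set(struct hpre_ctx *ctx, struct hisi_qp *qp, int qlen)
{
	if (!ctx || !qp || qlen < 0)
		return -EINVAL;		/* assumed error code */

	spin_lock_init(&ctx->req_lock);
	ctx->qp = qp;
	ctx->dev = &qp->qm->pdev->dev;
	ctx->hpre = container_of(qp->qm, struct hpre, qm);

	ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL);
	if (!ctx->req_list)
		return -ENOMEM;

	ctx->key_sz = 0;
	ctx->crt_g2_mode = false;
	idr_init(&ctx->req_idr);

	return 0;
}
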
432 static void hpre_dh_cb(struct hpre_ctx *ctx, void *resp)
434 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
440 ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
442 areq->dst_len = ctx->key_sz;
448 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);
453 static void hpre_rsa_cb(struct hpre_ctx *ctx, void *resp)
455 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
461 ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
468 areq->dst_len = ctx->key_sz;
469 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);
476 struct hpre_ctx *ctx = qp->qp_ctx;
477 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
479 struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)];
486 req->cb(ctx, resp);
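
The completion path (lines 476-486) routes a hardware response back to its request purely through the tag: qp->qp_ctx recovers the context, le16_to_cpu(sqe->tag) indexes req_list, and the per-algorithm callback stored in the request (hpre_dh_cb, hpre_rsa_cb, hpre_ecdh_cb, hpre_curve25519_cb) finishes the job. A hedged sketch; the wrapper's name follows the driver's hpre_ prefix but is not in the listing, and the dfx counter updates and the guard on an unknown tag are assumptions:

static void hpre_alg_cb(struct hisi_qp *qp, void *resp)
{
	struct hpre_ctx *ctx = qp->qp_ctx;
	struct hpre_sqe *sqe = resp;
	struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)];

	if (unlikely(!req))
		return;		/* assumed handling of an unknown tag */

	req->cb(ctx, resp);	/* hpre_dh_cb / hpre_rsa_cb / hpre_ecdh_cb / ... */
}
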
495 static int hpre_ctx_init(struct hpre_ctx *ctx, u8 type)
504 qp->qp_ctx = ctx;
507 ret = hpre_ctx_set(ctx, qp, qp->sq_depth);
514 static int hpre_msg_request_set(struct hpre_ctx *ctx, void *req, bool is_rsa)
524 if (akreq->dst_len < ctx->key_sz) {
525 akreq->dst_len = ctx->key_sz;
538 if (kreq->dst_len < ctx->key_sz) {
539 kreq->dst_len = ctx->key_sz;
549 msg->key = cpu_to_le64(ctx->dh.dma_xa_p);
555 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
556 h_req->ctx = ctx;
567 static int hpre_send(struct hpre_ctx *ctx, struct hpre_sqe *msg)
569 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
575 ret = hisi_qp_send(ctx->qp, msg);
593 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
599 ret = hpre_msg_request_set(ctx, req, false);
608 msg->in = cpu_to_le64(ctx->dh.dma_g);
615 if (ctx->crt_g2_mode && !req->src)
621 ret = hpre_send(ctx, msg);
627 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
653 static int hpre_dh_set_params(struct hpre_ctx *ctx, struct dh *params)
655 struct device *dev = ctx->dev;
665 sz = ctx->key_sz = params->p_size;
666 ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1,
667 &ctx->dh.dma_xa_p, GFP_KERNEL);
668 if (!ctx->dh.xa_p)
671 memcpy(ctx->dh.xa_p + sz, params->p, sz);
675 ctx->crt_g2_mode = true;
679 ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL);
680 if (!ctx->dh.g) {
681 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,
682 ctx->dh.dma_xa_p);
683 ctx->dh.xa_p = NULL;
687 memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size);
692 static void hpre_dh_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)
694 struct device *dev = ctx->dev;
695 unsigned int sz = ctx->key_sz;
698 hisi_qm_stop_qp(ctx->qp);
700 if (ctx->dh.g) {
701 dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g);
702 ctx->dh.g = NULL;
705 if (ctx->dh.xa_p) {
706 memzero_explicit(ctx->dh.xa_p, sz);
707 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,
708 ctx->dh.dma_xa_p);
709 ctx->dh.xa_p = NULL;
712 hpre_ctx_clear(ctx, is_clear_all);
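
The DH key material lives in two coherent buffers (lines 653-712): xa_p is 2 * key_sz, with the private value Xa right-aligned in the first half and the prime p in the second, while g gets its own key_sz buffer, also right-aligned; teardown zeroises only the secret-bearing first half with memzero_explicit() before freeing. A sketch of the fill step under those assumptions (the helper name is hypothetical; the driver spreads these copies across hpre_dh_set_params() and hpre_dh_set_secret()):

#include <crypto/dh.h>
#include <linux/string.h>

static int hpre_dh_fill_sketch(struct hpre_ctx *ctx, const struct dh *params)
{
	unsigned int sz = ctx->key_sz;	/* set to params->p_size beforehand */

	/* second half of xa_p: prime modulus p */
	memcpy(ctx->dh.xa_p + sz, params->p, params->p_size);
	/* first half of xa_p: private key, right-aligned (front stays zero) */
	memcpy(ctx->dh.xa_p + (sz - params->key_size), params->key,
	       params->key_size);
	/* generator g, right-aligned in its own buffer */
	memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size);

	return 0;
}
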
718 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
726 hpre_dh_clear_ctx(ctx, false);
728 ret = hpre_dh_set_params(ctx, &params);
732 memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key,
738 hpre_dh_clear_ctx(ctx, false);
744 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
746 return ctx->key_sz;
751 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
755 return hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);
760 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
762 hpre_dh_clear_ctx(ctx, true);
796 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
803 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
804 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {
805 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);
811 if (unlikely(!ctx->rsa.pubkey))
814 ret = hpre_msg_request_set(ctx, req, true);
819 msg->key = cpu_to_le64(ctx->rsa.dma_pubkey);
830 ret = hpre_send(ctx, msg);
836 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
844 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
851 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
852 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {
853 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);
859 if (unlikely(!ctx->rsa.prikey))
862 ret = hpre_msg_request_set(ctx, req, true);
866 if (ctx->crt_g2_mode) {
867 msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey);
871 msg->key = cpu_to_le64(ctx->rsa.dma_prikey);
885 ret = hpre_send(ctx, msg);
891 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
896 static int hpre_rsa_set_n(struct hpre_ctx *ctx, const char *value,
903 ctx->key_sz = vlen;
906 if (!hpre_rsa_key_size_is_support(ctx->key_sz))
909 ctx->rsa.pubkey = dma_alloc_coherent(ctx->dev, vlen << 1,
910 &ctx->rsa.dma_pubkey,
912 if (!ctx->rsa.pubkey)
916 ctx->rsa.prikey = dma_alloc_coherent(ctx->dev, vlen << 1,
917 &ctx->rsa.dma_prikey,
919 if (!ctx->rsa.prikey) {
920 dma_free_coherent(ctx->dev, vlen << 1,
921 ctx->rsa.pubkey,
922 ctx->rsa.dma_pubkey);
923 ctx->rsa.pubkey = NULL;
926 memcpy(ctx->rsa.prikey + vlen, ptr, vlen);
928 memcpy(ctx->rsa.pubkey + vlen, ptr, vlen);
934 static int hpre_rsa_set_e(struct hpre_ctx *ctx, const char *value,
941 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
944 memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen);
949 static int hpre_rsa_set_d(struct hpre_ctx *ctx, const char *value,
956 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
959 memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen);
979 static int hpre_rsa_setkey_crt(struct hpre_ctx *ctx, struct rsa_key *rsa_key)
981 unsigned int hlf_ksz = ctx->key_sz >> 1;
982 struct device *dev = ctx->dev;
986 ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS,
987 &ctx->rsa.dma_crt_prikey,
989 if (!ctx->rsa.crt_prikey)
992 ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz,
998 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
1004 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
1010 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
1016 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
1021 ctx->crt_g2_mode = true;
1027 memzero_explicit(ctx->rsa.crt_prikey, offset);
1028 dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey,
1029 ctx->rsa.dma_crt_prikey);
1030 ctx->rsa.crt_prikey = NULL;
1031 ctx->crt_g2_mode = false;
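
hpre_rsa_setkey_crt() (lines 979-1031) packs five CRT parameters, each half the key size, back to back into a single coherent buffer, stepping the offset in hlf_ksz increments; on failure the partially filled buffer is wiped with memzero_explicit() before being freed and crt_g2_mode is reset. A small sketch of the slot arithmetic; the dq/dp/q/p/qinv ordering is an assumption, since the matched lines only show the offset stepping:

#include <linux/types.h>

#define HPRE_CRT_PRMS	5	/* five CRT parameters per key */

static void *hpre_crt_slot(void *crt_prikey, unsigned int key_sz,
			   unsigned int idx)
{
	unsigned int hlf_ksz = key_sz >> 1;

	/* idx 0..4, each slot hlf_ksz bytes wide (assumed dq/dp/q/p/qinv) */
	return (u8 *)crt_prikey + (size_t)idx * hlf_ksz;
}
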
1037 static void hpre_rsa_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)
1039 unsigned int half_key_sz = ctx->key_sz >> 1;
1040 struct device *dev = ctx->dev;
1043 hisi_qm_stop_qp(ctx->qp);
1045 if (ctx->rsa.pubkey) {
1046 dma_free_coherent(dev, ctx->key_sz << 1,
1047 ctx->rsa.pubkey, ctx->rsa.dma_pubkey);
1048 ctx->rsa.pubkey = NULL;
1051 if (ctx->rsa.crt_prikey) {
1052 memzero_explicit(ctx->rsa.crt_prikey,
1055 ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey);
1056 ctx->rsa.crt_prikey = NULL;
1059 if (ctx->rsa.prikey) {
1060 memzero_explicit(ctx->rsa.prikey, ctx->key_sz);
1061 dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey,
1062 ctx->rsa.dma_prikey);
1063 ctx->rsa.prikey = NULL;
1066 hpre_ctx_clear(ctx, is_clear_all);
1084 static int hpre_rsa_setkey(struct hpre_ctx *ctx, const void *key,
1090 hpre_rsa_clear_ctx(ctx, false);
1099 ret = hpre_rsa_set_n(ctx, rsa_key.n, rsa_key.n_sz, private);
1104 ret = hpre_rsa_set_d(ctx, rsa_key.d, rsa_key.d_sz);
1109 ret = hpre_rsa_setkey_crt(ctx, &rsa_key);
1115 ret = hpre_rsa_set_e(ctx, rsa_key.e, rsa_key.e_sz);
1119 if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) {
1127 hpre_rsa_clear_ctx(ctx, false);
1134 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1137 ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen);
1141 return hpre_rsa_setkey(ctx, key, keylen, false);
1147 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1150 ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen);
1154 return hpre_rsa_setkey(ctx, key, keylen, true);
1159 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1162 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
1163 ctx->key_sz == HPRE_RSA_1536BITS_KSZ)
1164 return crypto_akcipher_maxsize(ctx->rsa.soft_tfm);
1166 return ctx->key_sz;
1171 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1174 ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0);
1175 if (IS_ERR(ctx->rsa.soft_tfm)) {
1177 return PTR_ERR(ctx->rsa.soft_tfm);
1183 ret = hpre_ctx_init(ctx, HPRE_V2_ALG_TYPE);
1185 crypto_free_akcipher(ctx->rsa.soft_tfm);
1192 struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
1194 hpre_rsa_clear_ctx(ctx, true);
1195 crypto_free_akcipher(ctx->rsa.soft_tfm);
1208 static void hpre_ecc_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all,
1211 struct device *dev = ctx->dev;
1212 unsigned int sz = ctx->key_sz;
1216 hisi_qm_stop_qp(ctx->qp);
1218 if (is_ecdh && ctx->ecdh.p) {
1220 memzero_explicit(ctx->ecdh.p + shift, sz);
1221 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p);
1222 ctx->ecdh.p = NULL;
1223 } else if (!is_ecdh && ctx->curve25519.p) {
1225 memzero_explicit(ctx->curve25519.p + shift, sz);
1226 dma_free_coherent(dev, sz << 2, ctx->curve25519.p,
1227 ctx->curve25519.dma_p);
1228 ctx->curve25519.p = NULL;
1231 hpre_ctx_clear(ctx, is_clear_all);
1270 static int hpre_ecdh_fill_curve(struct hpre_ctx *ctx, struct ecdh *params,
1273 unsigned int shifta = ctx->key_sz << 1;
1274 unsigned int shiftb = ctx->key_sz << 2;
1275 void *p = ctx->ecdh.p + ctx->key_sz - cur_sz;
1276 void *a = ctx->ecdh.p + shifta - cur_sz;
1277 void *b = ctx->ecdh.p + shiftb - cur_sz;
1278 void *x = ctx->ecdh.g + ctx->key_sz - cur_sz;
1279 void *y = ctx->ecdh.g + shifta - cur_sz;
1280 const struct ecc_curve *curve = ecc_get_curve(ctx->curve_id);
1286 n = kzalloc(ctx->key_sz, GFP_KERNEL);
1322 static int hpre_ecdh_set_param(struct hpre_ctx *ctx, struct ecdh *params)
1324 struct device *dev = ctx->dev;
1328 ctx->key_sz = hpre_ecdh_supported_curve(ctx->curve_id);
1329 if (!ctx->key_sz)
1332 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
1336 sz = ctx->key_sz;
1338 if (!ctx->ecdh.p) {
1339 ctx->ecdh.p = dma_alloc_coherent(dev, sz << 3, &ctx->ecdh.dma_p,
1341 if (!ctx->ecdh.p)
1346 ctx->ecdh.g = ctx->ecdh.p + shift;
1347 ctx->ecdh.dma_g = ctx->ecdh.dma_p + shift;
1349 ret = hpre_ecdh_fill_curve(ctx, params, curve_sz);
1352 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p);
1353 ctx->ecdh.p = NULL;
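
For ECDH (lines 1322-1353) the context picks key_sz from curve_id, then lays out one 8 * key_sz coherent buffer: the curve constants sit at the front and the generator point at ctx->ecdh.g, which simply aliases an offset into the same allocation (lines 1346-1347), so a single dma_free_coherent() releases everything. hpre_ecdh_fill_curve() (lines 1270-1286) right-aligns each curve value inside its key_sz-wide slot. A hedged helper illustrating that alignment; the name and signature are illustrative, not from the driver:

#include <linux/string.h>
#include <linux/types.h>

static void hpre_fill_slot(void *slot_base, unsigned int key_sz,
			   const void *val, unsigned int cur_sz)
{
	/*
	 * Each operand occupies a fixed key_sz-wide slot; the cur_sz-byte
	 * value is copied to the tail so the hardware sees it front-padded
	 * with zeroes (the coherent buffer starts out zeroed).
	 */
	memcpy((u8 *)slot_base + key_sz - cur_sz, val, cur_sz);
}
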
1371 static int ecdh_gen_privkey(struct hpre_ctx *ctx, struct ecdh *params)
1373 struct device *dev = ctx->dev;
1394 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1396 struct device *dev = ctx->dev;
1409 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
1416 ret = ecdh_gen_privkey(ctx, &params);
1426 hpre_ecc_clear_ctx(ctx, false, true);
1428 ret = hpre_ecdh_set_param(ctx, &params);
1434 sz = ctx->key_sz;
1436 memcpy(ctx->ecdh.p + sz_shift, params.key, params.key_size);
1441 static void hpre_ecdh_hw_data_clr_all(struct hpre_ctx *ctx,
1446 struct device *dev = ctx->dev;
1455 dma_free_coherent(dev, ctx->key_sz << 2, req->src, dma);
1462 dma_free_coherent(dev, ctx->key_sz << 1, req->dst, dma);
1464 dma_unmap_single(dev, dma, ctx->key_sz << 1, DMA_FROM_DEVICE);
1467 static void hpre_ecdh_cb(struct hpre_ctx *ctx, void *resp)
1469 unsigned int curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id);
1470 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
1477 ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
1479 areq->dst_len = ctx->key_sz << 1;
1486 memmove(p, p + ctx->key_sz - curve_sz, curve_sz);
1489 hpre_ecdh_hw_data_clr_all(ctx, req, areq->dst, areq->src);
1495 static int hpre_ecdh_msg_request_set(struct hpre_ctx *ctx,
1503 if (req->dst_len < ctx->key_sz << 1) {
1504 req->dst_len = ctx->key_sz << 1;
1516 msg->key = cpu_to_le64(ctx->ecdh.dma_p);
1519 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
1520 h_req->ctx = ctx;
1534 struct hpre_ctx *ctx = hpre_req->ctx;
1535 struct device *dev = ctx->dev;
1542 shift = ctx->key_sz - (len >> 1);
1546 ptr = dma_alloc_coherent(dev, ctx->key_sz << 2, &dma, GFP_KERNEL);
1550 tmpshift = ctx->key_sz << 1;
1553 memcpy(ptr + ctx->key_sz + shift, ptr + tmpshift + (len >> 1), len >> 1);
1564 struct hpre_ctx *ctx = hpre_req->ctx;
1565 struct device *dev = ctx->dev;
1568 if (unlikely(!data || !sg_is_last(data) || len != ctx->key_sz << 1)) {
1587 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1588 struct device *dev = ctx->dev;
1594 ret = hpre_ecdh_msg_request_set(ctx, req);
1607 msg->in = cpu_to_le64(ctx->ecdh.dma_g);
1617 ret = hpre_send(ctx, msg);
1623 hpre_ecdh_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
1629 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1632 return ctx->key_sz << 1;
1637 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1639 ctx->curve_id = ECC_CURVE_NIST_P192;
1643 return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
1648 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1650 ctx->curve_id = ECC_CURVE_NIST_P256;
1654 return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
1659 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1661 ctx->curve_id = ECC_CURVE_NIST_P384;
1665 return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
1670 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1672 hpre_ecc_clear_ctx(ctx, true, true);
1675 static void hpre_curve25519_fill_curve(struct hpre_ctx *ctx, const void *buf,
1679 unsigned int sz = ctx->key_sz;
1694 p = ctx->curve25519.p + sz - len;
1706 static int hpre_curve25519_set_param(struct hpre_ctx *ctx, const void *buf,
1709 struct device *dev = ctx->dev;
1710 unsigned int sz = ctx->key_sz;
1714 if (!ctx->curve25519.p) {
1715 ctx->curve25519.p = dma_alloc_coherent(dev, sz << 2,
1716 &ctx->curve25519.dma_p,
1718 if (!ctx->curve25519.p)
1722 ctx->curve25519.g = ctx->curve25519.p + shift + sz;
1723 ctx->curve25519.dma_g = ctx->curve25519.dma_p + shift + sz;
1725 hpre_curve25519_fill_curve(ctx, buf, len);
1733 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1734 struct device *dev = ctx->dev;
1744 hpre_ecc_clear_ctx(ctx, false, false);
1746 ctx->key_sz = CURVE25519_KEY_SIZE;
1747 ret = hpre_curve25519_set_param(ctx, buf, CURVE25519_KEY_SIZE);
1750 hpre_ecc_clear_ctx(ctx, false, false);
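
Curve25519 reuses the same machinery with fixed sizes (lines 1706-1750): key_sz is pinned to CURVE25519_KEY_SIZE, the parameters live in a 4 * key_sz coherent buffer with g aliased at an offset inside it, and set_secret always clears the previous key material first. A sketch of that flow built only from the listed calls; the length check, error codes and helper split are assumptions:

#include <crypto/curve25519.h>

static int hpre_curve25519_set_secret_sketch(struct hpre_ctx *ctx,
					     const void *buf, unsigned int len)
{
	int ret;

	if (len != CURVE25519_KEY_SIZE)
		return -EINVAL;		/* assumed validation */

	/* drop any previously programmed key material */
	hpre_ecc_clear_ctx(ctx, false, false);

	ctx->key_sz = CURVE25519_KEY_SIZE;
	ret = hpre_curve25519_set_param(ctx, buf, CURVE25519_KEY_SIZE);
	if (ret) {
		/* undo the partial setup on failure */
		hpre_ecc_clear_ctx(ctx, false, false);
		return ret;
	}

	return 0;
}
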
1757 static void hpre_curve25519_hw_data_clr_all(struct hpre_ctx *ctx,
1762 struct device *dev = ctx->dev;
1771 dma_free_coherent(dev, ctx->key_sz, req->src, dma);
1778 dma_free_coherent(dev, ctx->key_sz, req->dst, dma);
1780 dma_unmap_single(dev, dma, ctx->key_sz, DMA_FROM_DEVICE);
1783 static void hpre_curve25519_cb(struct hpre_ctx *ctx, void *resp)
1785 struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
1791 ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
1793 areq->dst_len = ctx->key_sz;
1801 hpre_curve25519_hw_data_clr_all(ctx, req, areq->dst, areq->src);
1807 static int hpre_curve25519_msg_request_set(struct hpre_ctx *ctx,
1815 if (unlikely(req->dst_len < ctx->key_sz)) {
1816 req->dst_len = ctx->key_sz;
1828 msg->key = cpu_to_le64(ctx->curve25519.dma_p);
1831 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
1832 h_req->ctx = ctx;
1857 struct hpre_ctx *ctx = hpre_req->ctx;
1858 struct device *dev = ctx->dev;
1869 ptr = dma_alloc_coherent(dev, ctx->key_sz, &dma, GFP_KERNEL);
1896 if (memcmp(ptr, p, ctx->key_sz) == 0) {
1899 } else if (memcmp(ptr, p, ctx->key_sz) > 0) {
1908 dma_free_coherent(dev, ctx->key_sz, ptr, dma);
1916 struct hpre_ctx *ctx = hpre_req->ctx;
1917 struct device *dev = ctx->dev;
1920 if (!data || !sg_is_last(data) || len != ctx->key_sz) {
1939 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1940 struct device *dev = ctx->dev;
1946 ret = hpre_curve25519_msg_request_set(ctx, req);
1960 msg->in = cpu_to_le64(ctx->curve25519.dma_g);
1970 ret = hpre_send(ctx, msg);
1976 hpre_curve25519_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);
1982 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1984 return ctx->key_sz;
1989 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
1993 return hpre_ctx_init(ctx, HPRE_V3_ECC_ALG_TYPE);
1998 struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
2000 hpre_ecc_clear_ctx(ctx, true, false);