Lines matching refs:cryp (StarFive JH7110 AES driver)
13 #include "jh7110-cryp.h"
52 static inline int starfive_aes_wait_busy(struct starfive_cryp_dev *cryp)
56 return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
60 static inline int starfive_aes_wait_keydone(struct starfive_cryp_dev *cryp)
64 return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
68 static inline int starfive_aes_wait_gcmdone(struct starfive_cryp_dev *cryp)
72 return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
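The three wait helpers above (lines 52-72) all follow the same pattern: poll the AES control/status register with readl_relaxed_poll_timeout() until the relevant status bit flips or the timeout expires. The listing cuts each call off after the register argument; a minimal sketch of the busy-wait variant, in which the STARFIVE_AES_BUSY bit name and the 10 us poll / 100 ms timeout values are assumptions not shown in the matched lines:

#include <linux/iopoll.h>

static inline int starfive_aes_wait_busy(struct starfive_cryp_dev *cryp)
{
	u32 status;

	/* Spin on the CSR until the engine clears its busy flag or we time out. */
	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  !(status & STARFIVE_AES_BUSY), 10, 100000);
}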
76 static inline int is_gcm(struct starfive_cryp_dev *cryp)
78 return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM;
81 static inline int is_encrypt(struct starfive_cryp_dev *cryp)
83 return cryp->flags & FLG_ENCRYPT;
88 struct starfive_cryp_dev *cryp = ctx->cryp;
93 value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
95 writel(value, cryp->base + STARFIVE_AES_CSR);
96 starfive_aes_wait_gcmdone(cryp);
99 value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
101 writel(value, cryp->base + STARFIVE_AES_CSR);
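Lines 93-101 are a read-modify-write of the same control register: read the CSR, OR in a start bit, write it back, and (for GCM) wait for the pre-processing to complete. A sketch of the GCM branch, assuming a STARFIVE_AES_GCM_START bit that the matched lines do not show:

	u32 value;

	/* Kick off GCM pre-processing and wait for the engine to finish it. */
	value = readl(cryp->base + STARFIVE_AES_CSR);
	value |= STARFIVE_AES_GCM_START;		/* assumed bit name */
	writel(value, cryp->base + STARFIVE_AES_CSR);
	starfive_aes_wait_gcmdone(cryp);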
108 struct starfive_cryp_dev *cryp = ctx->cryp;
110 if (is_gcm(cryp))
111 writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
113 writel(AES_BLOCK_SIZE, cryp->base + STARFIVE_AES_IVLEN);
118 struct starfive_cryp_dev *cryp = ctx->cryp;
120 writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);
121 writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);
126 struct starfive_cryp_dev *cryp = ctx->cryp;
128 writel(upper_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN0);
129 writel(lower_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN1);
143 struct starfive_cryp_dev *cryp = ctx->cryp;
145 writel(iv[0], cryp->base + STARFIVE_AES_IV0);
146 writel(iv[1], cryp->base + STARFIVE_AES_IV1);
147 writel(iv[2], cryp->base + STARFIVE_AES_IV2);
149 if (is_gcm(cryp)) {
150 if (starfive_aes_wait_gcmdone(cryp))
156 writel(iv[3], cryp->base + STARFIVE_AES_IV3);
161 static inline void starfive_aes_get_iv(struct starfive_cryp_dev *cryp, u32 *iv)
163 iv[0] = readl(cryp->base + STARFIVE_AES_IV0);
164 iv[1] = readl(cryp->base + STARFIVE_AES_IV1);
165 iv[2] = readl(cryp->base + STARFIVE_AES_IV2);
166 iv[3] = readl(cryp->base + STARFIVE_AES_IV3);
171 struct starfive_cryp_dev *cryp = ctx->cryp;
173 writel(nonce[0], cryp->base + STARFIVE_AES_NONCE0);
174 writel(nonce[1], cryp->base + STARFIVE_AES_NONCE1);
175 writel(nonce[2], cryp->base + STARFIVE_AES_NONCE2);
176 writel(nonce[3], cryp->base + STARFIVE_AES_NONCE3);
181 struct starfive_cryp_dev *cryp = ctx->cryp;
185 writel(key[0], cryp->base + STARFIVE_AES_KEY0);
186 writel(key[1], cryp->base + STARFIVE_AES_KEY1);
187 writel(key[2], cryp->base + STARFIVE_AES_KEY2);
188 writel(key[3], cryp->base + STARFIVE_AES_KEY3);
192 writel(key[4], cryp->base + STARFIVE_AES_KEY4);
193 writel(key[5], cryp->base + STARFIVE_AES_KEY5);
197 writel(key[6], cryp->base + STARFIVE_AES_KEY6);
198 writel(key[7], cryp->base + STARFIVE_AES_KEY7);
201 if (starfive_aes_wait_keydone(cryp))
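Lines 181-201 program the key registers word by word and then wait for the engine's key setup to finish. The KEY4/KEY5 and KEY6/KEY7 writes appear in separate groups, which suggests they are gated on the key length; the sketch below makes that gating explicit, with ctx->key, ctx->keylen and the shape of the conditionals being assumptions rather than text shown in the listing:

static int starfive_aes_write_key(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 *key = (u32 *)ctx->key;		/* assumed field names */

	if (ctx->keylen >= AES_KEYSIZE_128) {
		writel(key[0], cryp->base + STARFIVE_AES_KEY0);
		writel(key[1], cryp->base + STARFIVE_AES_KEY1);
		writel(key[2], cryp->base + STARFIVE_AES_KEY2);
		writel(key[3], cryp->base + STARFIVE_AES_KEY3);
	}

	if (ctx->keylen >= AES_KEYSIZE_192) {
		writel(key[4], cryp->base + STARFIVE_AES_KEY4);
		writel(key[5], cryp->base + STARFIVE_AES_KEY5);
	}

	if (ctx->keylen >= AES_KEYSIZE_256) {
		writel(key[6], cryp->base + STARFIVE_AES_KEY6);
		writel(key[7], cryp->base + STARFIVE_AES_KEY7);
	}

	/* The hardware expands the key internally; wait for it to report done. */
	if (starfive_aes_wait_keydone(cryp))
		return -ETIMEDOUT;

	return 0;
}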
209 struct starfive_cryp_dev *cryp = ctx->cryp;
213 memcpy(iv, cryp->req.areq->iv, AES_BLOCK_SIZE);
219 b0[0] |= (8 * ((cryp->authsize - 2) / 2));
221 if (cryp->assoclen)
224 textlen = cryp->total_in;
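Lines 209-224 build the CCM B0 block described in RFC 3610: the flags byte encodes the tag length in bits 3-5, bit 6 is set when associated data is present, and the trailing bytes of the block carry the payload length. A sketch of that construction, assuming a 2-byte length field (payload under 64 KiB) and starting from the request IV, as the memcpy on line 213 suggests:

	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	unsigned int textlen;

	/* Start from the request IV: flags byte (L') plus the CCM nonce. */
	memcpy(iv, cryp->req.areq->iv, AES_BLOCK_SIZE);
	memcpy(b0, iv, AES_BLOCK_SIZE);

	/* Bits 3..5 of the flags byte encode (tag_len - 2) / 2. */
	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	/* Bit 6 (0x40) flags the presence of associated data. */
	if (cryp->assoclen)
		b0[0] |= 0x40;

	/* Trailing bytes: payload length, big endian. */
	textlen = cryp->total_in;
	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;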
237 struct starfive_cryp_dev *cryp = ctx->cryp;
243 writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);
246 hw_mode = cryp->flags & FLG_MODE_MASK;
263 rctx->csr.aes.cmode = !is_encrypt(cryp);
272 if (cryp->side_chan) {
277 writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);
279 cryp->err = starfive_aes_write_key(ctx);
280 if (cryp->err)
281 return cryp->err;
289 starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv);
301 starfive_aes_write_iv(ctx, (void *)cryp->req.sreq->iv);
307 return cryp->err;
310 static int starfive_aes_read_authtag(struct starfive_cryp_dev *cryp)
314 if (starfive_aes_wait_busy(cryp))
315 return dev_err_probe(cryp->dev, -ETIMEDOUT,
320 if (is_gcm(cryp))
322 cryp->tag_out[i] = readl(cryp->base + start_addr);
325 cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
327 if (is_encrypt(cryp)) {
328 scatterwalk_copychunks(cryp->tag_out, &cryp->out_walk, cryp->authsize, 1);
330 scatterwalk_copychunks(cryp->tag_in, &cryp->in_walk, cryp->authsize, 0);
332 if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
333 return dev_err_probe(cryp->dev, -EBADMSG, "Failed tag verification\n");
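starfive_aes_read_authtag() (lines 310-333) waits for the engine, reads the computed tag (from a run of result registers for GCM, from the data FIFO for CCM), then either appends it to the ciphertext on encryption or compares it against the tag taken from the input on decryption. A sketch of that flow; the GCM tag register base and the AES_BLOCK_SIZE / sizeof(u32) word count are assumptions, since the listing shows only start_addr:

static int starfive_aes_read_authtag(struct starfive_cryp_dev *cryp)
{
	int i;

	if (starfive_aes_wait_busy(cryp))
		return -ETIMEDOUT;

	if (is_gcm(cryp)) {
		/* GCM: computed tag sits in consecutive result registers. */
		u32 start_addr = STARFIVE_AES_GCM_TAG_BASE;	/* assumed name */

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++, start_addr += 4)
			cryp->tag_out[i] = readl(cryp->base + start_addr);
	} else {
		/* CCM: computed tag is read out of the data FIFO. */
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
			cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
	}

	if (is_encrypt(cryp)) {
		/* Append the tag to the ciphertext. */
		scatterwalk_copychunks(cryp->tag_out, &cryp->out_walk, cryp->authsize, 1);
	} else {
		/* Pull the expected tag from the input and compare in constant time. */
		scatterwalk_copychunks(cryp->tag_in, &cryp->in_walk, cryp->authsize, 0);

		if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
			return -EBADMSG;
	}

	return 0;
}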
339 static void starfive_aes_finish_req(struct starfive_cryp_dev *cryp)
342 int err = cryp->err;
344 if (!err && cryp->authsize)
345 err = starfive_aes_read_authtag(cryp);
347 if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC ||
348 (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR))
349 starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv);
354 writel(csr.v, cryp->base + STARFIVE_AES_CSR);
356 if (cryp->authsize)
357 crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err);
359 crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq,
365 struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)param;
371 block[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);
373 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, AES_BLOCK_SIZE,
374 cryp->total_out), 1);
376 cryp->total_out -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_out);
378 if (!cryp->total_out) {
379 starfive_aes_finish_req(cryp);
384 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
385 cryp->total_in), 0);
386 cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);
389 writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);
391 stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
393 writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
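Lines 365-393 are the per-block completion path: drain one finished block from the data FIFO into the output walk, and if the request is not yet done, feed the next (zero-padded) input block and re-enable the completion interrupt. A sketch of that path as a tasklet handler; STARFIVE_IE_MASK_AES_DONE is an assumed bit name, since the listing only shows the mask register being read and written back:

static void starfive_aes_done_task(unsigned long param)
{
	struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)param;
	u32 block[AES_BLOCK_SIZE / sizeof(u32)];
	u32 stat;
	int i;

	/* Drain the finished block from the data FIFO... */
	for (i = 0; i < ARRAY_SIZE(block); i++)
		block[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);

	/* ...and copy only as many bytes as the request still expects. */
	scatterwalk_copychunks(block, &cryp->out_walk,
			       min_t(size_t, AES_BLOCK_SIZE, cryp->total_out), 1);
	cryp->total_out -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_out);

	if (!cryp->total_out) {
		starfive_aes_finish_req(cryp);
		return;
	}

	/* Queue the next input block, zero-padded for a short tail. */
	memset(block, 0, sizeof(block));
	scatterwalk_copychunks(block, &cryp->in_walk,
			       min_t(size_t, AES_BLOCK_SIZE, cryp->total_in), 0);
	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);

	for (i = 0; i < ARRAY_SIZE(block); i++)
		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);

	/* Unmask the AES done interrupt for the next block. */
	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	stat &= ~STARFIVE_IE_MASK_AES_DONE;		/* assumed bit name */
	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
}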
398 struct starfive_cryp_dev *cryp = ctx->cryp;
403 total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(unsigned int);
407 writel(*buffer, cryp->base + STARFIVE_AES_NONCE0);
409 writel(*buffer, cryp->base + STARFIVE_AES_NONCE1);
411 writel(*buffer, cryp->base + STARFIVE_AES_NONCE2);
413 writel(*buffer, cryp->base + STARFIVE_AES_NONCE3);
417 if (starfive_aes_wait_gcmdone(cryp))
418 return dev_err_probe(cryp->dev, -ETIMEDOUT,
426 struct starfive_cryp_dev *cryp = ctx->cryp;
432 total_len = cryp->assoclen;
435 writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
437 writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
443 writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);
449 writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);
454 if (starfive_aes_wait_busy(cryp))
455 return dev_err_probe(cryp->dev, -ETIMEDOUT,
466 struct starfive_cryp_dev *cryp;
474 cryp = ctx->cryp;
478 cryp->req.sreq = req;
479 cryp->total_in = req->cryptlen;
480 cryp->total_out = req->cryptlen;
481 cryp->assoclen = 0;
482 cryp->authsize = 0;
484 cryp->req.areq = areq;
485 cryp->assoclen = areq->assoclen;
486 cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
487 if (is_encrypt(cryp)) {
488 cryp->total_in = areq->cryptlen;
489 cryp->total_out = areq->cryptlen;
491 cryp->total_in = areq->cryptlen - cryp->authsize;
492 cryp->total_out = cryp->total_in;
497 scatterwalk_start(&cryp->in_walk, rctx->in_sg);
500 scatterwalk_start(&cryp->out_walk, rctx->out_sg);
502 if (cryp->assoclen) {
503 rctx->adata = kzalloc(ALIGN(cryp->assoclen, AES_BLOCK_SIZE), GFP_KERNEL);
505 return dev_err_probe(cryp->dev, -ENOMEM,
508 scatterwalk_copychunks(rctx->adata, &cryp->in_walk, cryp->assoclen, 0);
509 scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->assoclen, 2);
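In the request setup (lines 466-509), any associated data is bounced out of the scatterlist walk into a block-aligned kzalloc() buffer so it can later be pushed to the engine word by word, and the same number of bytes is skipped in the output walk (the final argument 2 to scatterwalk_copychunks() means advance without copying). A condensed sketch of that branch:

	if (cryp->assoclen) {
		rctx->adata = kzalloc(ALIGN(cryp->assoclen, AES_BLOCK_SIZE), GFP_KERNEL);
		if (!rctx->adata)
			return -ENOMEM;

		/* Copy the AAD out of the input walk, then skip past it on output. */
		scatterwalk_copychunks(rctx->adata, &cryp->in_walk, cryp->assoclen, 0);
		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->assoclen, 2);
	}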
523 struct starfive_cryp_dev *cryp = ctx->cryp;
537 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
538 cryp->total_in), 0);
539 cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);
542 writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);
544 stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
546 writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
555 ctx->cryp = starfive_cryp_find_dev(ctx);
556 if (!ctx->cryp)
571 struct starfive_cryp_dev *cryp = ctx->cryp;
582 if (!cryp->assoclen)
585 if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM)
586 cryp->err = starfive_aes_ccm_write_adata(ctx);
588 cryp->err = starfive_aes_gcm_write_adata(ctx);
592 if (cryp->err)
593 return cryp->err;
596 if (!cryp->total_in)
603 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
604 cryp->total_in), 0);
605 cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);
608 writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);
610 stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
612 writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
617 starfive_aes_finish_req(cryp);
624 struct starfive_cryp_dev *cryp = ctx->cryp;
628 ctx->cryp = starfive_cryp_find_dev(ctx);
629 if (!ctx->cryp)
636 return dev_err_probe(cryp->dev, PTR_ERR(ctx->aead_fbk),
658 struct starfive_cryp_dev *cryp = ctx->cryp;
661 cryp->flags = flags;
663 if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
664 (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
668 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
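starfive_aes_crypt() (lines 658-668) stores the requested mode in cryp->flags, rejects non-block-aligned lengths for ECB and CBC, and hands the request to the crypto engine queue. A sketch of that dispatcher; the exact form of the alignment check is an assumption, since the matched lines stop at the mode comparison:

static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int blocksize_align = crypto_skcipher_blocksize(tfm) - 1;

	cryp->flags = flags;

	/* ECB/CBC cannot handle partial blocks; reject unaligned lengths. */
	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
	    (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
		if (req->cryptlen & blocksize_align)
			return -EINVAL;

	/* Queue the request; the engine later calls back into do_one_request(). */
	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}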
674 struct starfive_cryp_dev *cryp = ctx->cryp;
676 cryp->flags = flags;
682 if (ctx->aead_fbk && !is_encrypt(cryp)) {
695 return crypto_transfer_aead_request_to_engine(cryp->engine, req);
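The AEAD entry point (lines 674-695) follows the same shape, but when a fallback transform was set up at init time it delegates decryption to that software implementation instead of the hardware. A sketch of the delegation using the standard aead_request helpers; placing the sub-request in the request context is an assumption:

	if (ctx->aead_fbk && !is_encrypt(cryp)) {
		struct aead_request *subreq = aead_request_ctx(req);	/* assumed placement */

		/* Mirror the original request onto the software fallback. */
		aead_request_set_tfm(subreq, ctx->aead_fbk);
		aead_request_set_callback(subreq, req->base.flags,
					  req->base.complete, req->base.data);
		aead_request_set_crypt(subreq, req->src, req->dst,
				       req->cryptlen, req->iv);
		aead_request_set_ad(subreq, req->assoclen);

		return crypto_aead_decrypt(subreq);
	}

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);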