// SPDX-License-Identifier: GPL-2.0
/*
 * StarFive AES acceleration driver
 *
 * Copyright (c) 2022 StarFive Technology
 */

#include <crypto/engine.h>
#include <crypto/gcm.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include "jh7110-cryp.h"
#include <linux/err.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/slab.h>
#include <linux/string.h>

#define STARFIVE_AES_REGS_OFFSET	0x100
#define STARFIVE_AES_AESDIO0R		(STARFIVE_AES_REGS_OFFSET + 0x0)
#define STARFIVE_AES_KEY0		(STARFIVE_AES_REGS_OFFSET + 0x4)
#define STARFIVE_AES_KEY1		(STARFIVE_AES_REGS_OFFSET + 0x8)
#define STARFIVE_AES_KEY2		(STARFIVE_AES_REGS_OFFSET + 0xC)
#define STARFIVE_AES_KEY3		(STARFIVE_AES_REGS_OFFSET + 0x10)
#define STARFIVE_AES_KEY4		(STARFIVE_AES_REGS_OFFSET + 0x14)
#define STARFIVE_AES_KEY5		(STARFIVE_AES_REGS_OFFSET + 0x18)
#define STARFIVE_AES_KEY6		(STARFIVE_AES_REGS_OFFSET + 0x1C)
#define STARFIVE_AES_KEY7		(STARFIVE_AES_REGS_OFFSET + 0x20)
#define STARFIVE_AES_CSR		(STARFIVE_AES_REGS_OFFSET + 0x24)
#define STARFIVE_AES_IV0		(STARFIVE_AES_REGS_OFFSET + 0x28)
#define STARFIVE_AES_IV1		(STARFIVE_AES_REGS_OFFSET + 0x2C)
#define STARFIVE_AES_IV2		(STARFIVE_AES_REGS_OFFSET + 0x30)
#define STARFIVE_AES_IV3		(STARFIVE_AES_REGS_OFFSET + 0x34)
#define STARFIVE_AES_NONCE0		(STARFIVE_AES_REGS_OFFSET + 0x3C)
#define STARFIVE_AES_NONCE1		(STARFIVE_AES_REGS_OFFSET + 0x40)
#define STARFIVE_AES_NONCE2		(STARFIVE_AES_REGS_OFFSET + 0x44)
#define STARFIVE_AES_NONCE3		(STARFIVE_AES_REGS_OFFSET + 0x48)
#define STARFIVE_AES_ALEN0		(STARFIVE_AES_REGS_OFFSET + 0x4C)
#define STARFIVE_AES_ALEN1		(STARFIVE_AES_REGS_OFFSET + 0x50)
#define STARFIVE_AES_MLEN0		(STARFIVE_AES_REGS_OFFSET + 0x54)
#define STARFIVE_AES_MLEN1		(STARFIVE_AES_REGS_OFFSET + 0x58)
#define STARFIVE_AES_IVLEN		(STARFIVE_AES_REGS_OFFSET + 0x5C)

#define FLG_MODE_MASK			GENMASK(2, 0)
#define FLG_ENCRYPT			BIT(4)

/* Misc */
#define CCM_B0_ADATA			0x40
#define AES_BLOCK_32			(AES_BLOCK_SIZE / sizeof(u32))

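/*
 * Polling helpers: the AES engine reports its state in the CSR.
 * Poll every 10us with a 100ms timeout for the busy bit to clear,
 * key setup to complete, or the GCM phase to finish.
 */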
static inline int starfive_aes_wait_busy(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  !(status & STARFIVE_AES_BUSY), 10, 100000);
}

static inline int starfive_aes_wait_keydone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_KEY_DONE), 10, 100000);
}

static inline int starfive_aes_wait_gcmdone(struct starfive_cryp_dev *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->base + STARFIVE_AES_CSR, status,
					  (status & STARFIVE_AES_GCM_DONE), 10, 100000);
}

static inline int is_gcm(struct starfive_cryp_dev *cryp)
{
	return (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_GCM;
}

static inline int is_encrypt(struct starfive_cryp_dev *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

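/*
 * Kick off AEAD processing by setting the GCM or CCM start bit in the
 * CSR. For GCM, wait for the hardware to report the GCM phase done
 * before any further parameters are programmed.
 */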
static void starfive_aes_aead_hw_start(struct starfive_cryp_ctx *ctx, u32 hw_mode)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int value;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_GCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		starfive_aes_wait_gcmdone(cryp);
		break;
	case STARFIVE_AES_MODE_CCM:
		value = readl(ctx->cryp->base + STARFIVE_AES_CSR);
		value |= STARFIVE_AES_CCM_START;
		writel(value, cryp->base + STARFIVE_AES_CSR);
		break;
	}
}

static inline void starfive_aes_set_ivlen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	if (is_gcm(cryp))
		writel(GCM_AES_IV_SIZE, cryp->base + STARFIVE_AES_IVLEN);
	else
		writel(AES_BLOCK_SIZE, cryp->base + STARFIVE_AES_IVLEN);
}

static inline void starfive_aes_set_alen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN0);
	writel(lower_32_bits(cryp->assoclen), cryp->base + STARFIVE_AES_ALEN1);
}

static inline void starfive_aes_set_mlen(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(upper_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN0);
	writel(lower_32_bits(cryp->total_in), cryp->base + STARFIVE_AES_MLEN1);
}

static inline int starfive_aes_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int starfive_aes_write_iv(struct starfive_cryp_ctx *ctx, u32 *iv)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(iv[0], cryp->base + STARFIVE_AES_IV0);
	writel(iv[1], cryp->base + STARFIVE_AES_IV1);
	writel(iv[2], cryp->base + STARFIVE_AES_IV2);

	if (is_gcm(cryp)) {
		if (starfive_aes_wait_gcmdone(cryp))
			return -ETIMEDOUT;

		return 0;
	}

	writel(iv[3], cryp->base + STARFIVE_AES_IV3);

	return 0;
}

static inline void starfive_aes_get_iv(struct starfive_cryp_dev *cryp, u32 *iv)
{
	iv[0] = readl(cryp->base + STARFIVE_AES_IV0);
	iv[1] = readl(cryp->base + STARFIVE_AES_IV1);
	iv[2] = readl(cryp->base + STARFIVE_AES_IV2);
	iv[3] = readl(cryp->base + STARFIVE_AES_IV3);
}

static inline void starfive_aes_write_nonce(struct starfive_cryp_ctx *ctx, u32 *nonce)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;

	writel(nonce[0], cryp->base + STARFIVE_AES_NONCE0);
	writel(nonce[1], cryp->base + STARFIVE_AES_NONCE1);
	writel(nonce[2], cryp->base + STARFIVE_AES_NONCE2);
	writel(nonce[3], cryp->base + STARFIVE_AES_NONCE3);
}

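/*
 * Program the key registers according to the key length, then wait for
 * the hardware to signal that key setup has completed.
 */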
static int starfive_aes_write_key(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 *key = (u32 *)ctx->key;

	if (ctx->keylen >= AES_KEYSIZE_128) {
		writel(key[0], cryp->base + STARFIVE_AES_KEY0);
		writel(key[1], cryp->base + STARFIVE_AES_KEY1);
		writel(key[2], cryp->base + STARFIVE_AES_KEY2);
		writel(key[3], cryp->base + STARFIVE_AES_KEY3);
	}

	if (ctx->keylen >= AES_KEYSIZE_192) {
		writel(key[4], cryp->base + STARFIVE_AES_KEY4);
		writel(key[5], cryp->base + STARFIVE_AES_KEY5);
	}

	if (ctx->keylen >= AES_KEYSIZE_256) {
		writel(key[6], cryp->base + STARFIVE_AES_KEY6);
		writel(key[7], cryp->base + STARFIVE_AES_KEY7);
	}

	if (starfive_aes_wait_keydone(cryp))
		return -ETIMEDOUT;

	return 0;
}

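/*
 * Build the CCM B0 block from the request IV: zero the length/counter
 * field at the end of the IV, encode the tag length (and the adata flag
 * when AAD is present) in the flags byte, place the message length in
 * the last two bytes, then load the block into the nonce registers.
 */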
static int starfive_aes_ccm_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	unsigned int textlen;

	memcpy(iv, cryp->req.areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->assoclen)
		b0[0] |= CCM_B0_ADATA;

	textlen = cryp->total_in;

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	starfive_aes_write_nonce(ctx, (u32 *)b0);

	return 0;
}

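/*
 * Reset the AES engine and program the control register for the
 * requested mode, key size and direction, then load the key and, where
 * the mode needs them, the IV/nonce and associated/message lengths.
 */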
static int starfive_aes_hw_init(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 hw_mode;

	/* reset */
	rctx->csr.aes.v = 0;
	rctx->csr.aes.aesrst = 1;
	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	/* csr setup */
	hw_mode = cryp->flags & FLG_MODE_MASK;

	rctx->csr.aes.v = 0;

	switch (ctx->keylen) {
	case AES_KEYSIZE_128:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_128;
		break;
	case AES_KEYSIZE_192:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_192;
		break;
	case AES_KEYSIZE_256:
		rctx->csr.aes.keymode = STARFIVE_AES_KEYMODE_256;
		break;
	}

	rctx->csr.aes.mode  = hw_mode;
	rctx->csr.aes.cmode = !is_encrypt(cryp);
	rctx->csr.aes.ie = 1;

	if (hw_mode == STARFIVE_AES_MODE_CFB ||
	    hw_mode == STARFIVE_AES_MODE_OFB)
		rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_128;
	else
		rctx->csr.aes.stmode = STARFIVE_AES_MODE_XFB_1;

	if (cryp->side_chan) {
		rctx->csr.aes.delay_aes = 1;
		rctx->csr.aes.vaes_start = 1;
	}

	writel(rctx->csr.aes.v, cryp->base + STARFIVE_AES_CSR);

	cryp->err = starfive_aes_write_key(ctx);
	if (cryp->err)
		return cryp->err;

	switch (hw_mode) {
	case STARFIVE_AES_MODE_GCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		starfive_aes_set_ivlen(ctx);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		starfive_aes_write_iv(ctx, (void *)cryp->req.areq->iv);
		break;
	case STARFIVE_AES_MODE_CCM:
		starfive_aes_set_alen(ctx);
		starfive_aes_set_mlen(ctx);
		starfive_aes_ccm_init(ctx);
		starfive_aes_aead_hw_start(ctx, hw_mode);
		break;
	case STARFIVE_AES_MODE_OFB:
	case STARFIVE_AES_MODE_CFB:
	case STARFIVE_AES_MODE_CBC:
	case STARFIVE_AES_MODE_CTR:
		starfive_aes_write_iv(ctx, (void *)cryp->req.sreq->iv);
		break;
	default:
		break;
	}

	return cryp->err;
}

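/*
 * Read the authentication tag once the engine is idle. GCM exposes the
 * tag through the NONCE registers, CCM through the data I/O register.
 * On encryption the tag is appended to the output; on decryption it is
 * compared against the tag found at the end of the input.
 */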
static int starfive_aes_read_authtag(struct starfive_cryp_dev *cryp)
{
	int i, start_addr;

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout waiting for tag generation.");

	start_addr = STARFIVE_AES_NONCE0;

	if (is_gcm(cryp))
		for (i = 0; i < AES_BLOCK_32; i++, start_addr += 4)
			cryp->tag_out[i] = readl(cryp->base + start_addr);
	else
		for (i = 0; i < AES_BLOCK_32; i++)
			cryp->tag_out[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);

	if (is_encrypt(cryp)) {
		scatterwalk_copychunks(cryp->tag_out, &cryp->out_walk, cryp->authsize, 1);
	} else {
		scatterwalk_copychunks(cryp->tag_in, &cryp->in_walk, cryp->authsize, 0);

		if (crypto_memneq(cryp->tag_in, cryp->tag_out, cryp->authsize))
			return dev_err_probe(cryp->dev, -EBADMSG, "Failed tag verification\n");
	}

	return 0;
}

static void starfive_aes_finish_req(struct starfive_cryp_dev *cryp)
{
	union starfive_aes_csr csr;
	int err = cryp->err;

	if (!err && cryp->authsize)
		err = starfive_aes_read_authtag(cryp);

	if (!err && ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC ||
		     (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CTR))
		starfive_aes_get_iv(cryp, (void *)cryp->req.sreq->iv);

	/* reset irq flags */
	csr.v = 0;
	csr.aesrst = 1;
	writel(csr.v, cryp->base + STARFIVE_AES_CSR);

	if (cryp->authsize)
		crypto_finalize_aead_request(cryp->engine, cryp->req.areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req.sreq,
						 err);
}

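/*
 * Tasklet run on AES completion: drain one block of output, finish the
 * request when no output remains, otherwise feed the next input block
 * and clear the AES done bit in the interrupt mask so the following
 * completion raises an interrupt again.
 */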
void starfive_aes_done_task(unsigned long param)
{
	struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)param;
	u32 block[AES_BLOCK_32];
	u32 stat;
	int i;

	for (i = 0; i < AES_BLOCK_32; i++)
		block[i] = readl(cryp->base + STARFIVE_AES_AESDIO0R);

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, AES_BLOCK_SIZE,
							     cryp->total_out), 1);

	cryp->total_out -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_out);

	if (!cryp->total_out) {
		starfive_aes_finish_req(cryp);
		return;
	}

	memset(block, 0, AES_BLOCK_SIZE);
	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
							    cryp->total_in), 0);
	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);

	for (i = 0; i < AES_BLOCK_32; i++)
		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);

	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	stat &= ~STARFIVE_IE_MASK_AES_DONE;
	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);
}

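/*
 * Feed the associated data for GCM: the AAD was copied into a buffer
 * padded up to the AES block size, so write it one block at a time
 * through the NONCE registers and wait for the hardware to finish the
 * AAD phase.
 */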
static int starfive_aes_gcm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	int total_len, loop;

	total_len = ALIGN(cryp->assoclen, AES_BLOCK_SIZE) / sizeof(unsigned int);
	buffer = (u32 *)rctx->adata;

	for (loop = 0; loop < total_len; loop += 4) {
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE0);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE1);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE2);
		buffer++;
		writel(*buffer, cryp->base + STARFIVE_AES_NONCE3);
		buffer++;
	}

	if (starfive_aes_wait_gcmdone(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing gcm aad block");

	return 0;
}

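/*
 * Feed the associated data for CCM through the data I/O register. The
 * first 14 bytes are written as two single bytes plus three words,
 * which appears to leave room for the two-byte AAD length encoding of
 * the first CBC-MAC block; the remainder is written block by block
 * before waiting for the engine to become idle.
 */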
static int starfive_aes_ccm_write_adata(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 *buffer;
	u8 *ci;
	int total_len, loop;

	total_len = cryp->assoclen;

	ci = rctx->adata;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	writeb(*ci, cryp->base + STARFIVE_AES_AESDIO0R);
	ci++;
	total_len -= 2;
	buffer = (u32 *)ci;

	for (loop = 0; loop < 3; loop++, buffer++)
		writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

	total_len -= 12;

	while (total_len > 0) {
		for (loop = 0; loop < AES_BLOCK_32; loop++, buffer++)
			writel(*buffer, cryp->base + STARFIVE_AES_AESDIO0R);

		total_len -= AES_BLOCK_SIZE;
	}

	if (starfive_aes_wait_busy(cryp))
		return dev_err_probe(cryp->dev, -ETIMEDOUT,
				     "Timeout processing ccm aad block");

	return 0;
}

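/*
 * Common request setup for the skcipher and AEAD paths: record the
 * input/output lengths, start the scatterwalks over source and
 * destination, copy any associated data into a block-aligned bounce
 * buffer (advancing the output walk past it), and initialise the
 * hardware.
 */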
static int starfive_aes_prepare_req(struct skcipher_request *req,
				    struct aead_request *areq)
{
	struct starfive_cryp_ctx *ctx;
	struct starfive_cryp_request_ctx *rctx;
	struct starfive_cryp_dev *cryp;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;
	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);

	if (req) {
		cryp->req.sreq = req;
		cryp->total_in = req->cryptlen;
		cryp->total_out = req->cryptlen;
		cryp->assoclen = 0;
		cryp->authsize = 0;
	} else {
		cryp->req.areq = areq;
		cryp->assoclen = areq->assoclen;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->total_in = areq->cryptlen;
			cryp->total_out = areq->cryptlen;
		} else {
			cryp->total_in = areq->cryptlen - cryp->authsize;
			cryp->total_out = cryp->total_in;
		}
	}

	rctx->in_sg = req ? req->src : areq->src;
	scatterwalk_start(&cryp->in_walk, rctx->in_sg);

	rctx->out_sg = req ? req->dst : areq->dst;
	scatterwalk_start(&cryp->out_walk, rctx->out_sg);

	if (cryp->assoclen) {
		rctx->adata = kzalloc(ALIGN(cryp->assoclen, AES_BLOCK_SIZE), GFP_KERNEL);
		if (!rctx->adata)
			return dev_err_probe(cryp->dev, -ENOMEM,
					     "Failed to alloc memory for adata");

		scatterwalk_copychunks(rctx->adata, &cryp->in_walk, cryp->assoclen, 0);
		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->assoclen, 2);
	}

	ctx->rctx = rctx;

	return starfive_aes_hw_init(ctx);
}

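/* crypto_engine callback for skcipher requests */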
static int starfive_aes_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req =
		container_of(areq, struct skcipher_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	u32 block[AES_BLOCK_32];
	u32 stat;
	int err;
	int i;

	err = starfive_aes_prepare_req(req, NULL);
	if (err)
		return err;

	/*
	 * Write first plain/ciphertext block to start the module
	 * then let irq tasklet handle the rest of the data blocks.
	 */
	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
							    cryp->total_in), 0);
	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);

	for (i = 0; i < AES_BLOCK_32; i++)
		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);

	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	stat &= ~STARFIVE_IE_MASK_AES_DONE;
	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);

	return 0;
}

static int starfive_aes_init_tfm(struct crypto_skcipher *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	crypto_skcipher_set_reqsize(tfm, sizeof(struct starfive_cryp_request_ctx) +
				    sizeof(struct skcipher_request));

	return 0;
}

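/* crypto_engine callback for AEAD requests */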
static int starfive_aes_aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req =
		container_of(areq, struct aead_request, base);
	struct starfive_cryp_ctx *ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct starfive_cryp_request_ctx *rctx = ctx->rctx;
	u32 block[AES_BLOCK_32];
	u32 stat;
	int err;
	int i;

	err = starfive_aes_prepare_req(NULL, req);
	if (err)
		return err;

	if (!cryp->assoclen)
		goto write_text;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CCM)
		cryp->err = starfive_aes_ccm_write_adata(ctx);
	else
		cryp->err = starfive_aes_gcm_write_adata(ctx);

	kfree(rctx->adata);

	if (cryp->err)
		return cryp->err;

write_text:
	if (!cryp->total_in)
		goto finish_req;

	/*
	 * Write first plain/ciphertext block to start the module
	 * then let irq tasklet handle the rest of the data blocks.
	 */
	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, AES_BLOCK_SIZE,
							    cryp->total_in), 0);
	cryp->total_in -= min_t(size_t, AES_BLOCK_SIZE, cryp->total_in);

	for (i = 0; i < AES_BLOCK_32; i++)
		writel(block[i], cryp->base + STARFIVE_AES_AESDIO0R);

	stat = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	stat &= ~STARFIVE_IE_MASK_AES_DONE;
	writel(stat, cryp->base + STARFIVE_IE_MASK_OFFSET);

	return 0;

finish_req:
	starfive_aes_finish_req(cryp);
	return 0;
}

static int starfive_aes_aead_init_tfm(struct crypto_aead *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	struct crypto_tfm *aead = crypto_aead_tfm(tfm);
	struct crypto_alg *alg = aead->__crt_alg;

	ctx->cryp = starfive_cryp_find_dev(ctx);
	if (!ctx->cryp)
		return -ENODEV;

	if (alg->cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
		ctx->aead_fbk = crypto_alloc_aead(alg->cra_name, 0,
						  CRYPTO_ALG_NEED_FALLBACK);
		if (IS_ERR(ctx->aead_fbk))
			return dev_err_probe(cryp->dev, PTR_ERR(ctx->aead_fbk),
					     "%s() failed to allocate fallback for %s\n",
					     __func__, alg->cra_name);
	}

	crypto_aead_set_reqsize(tfm, sizeof(struct starfive_cryp_ctx) +
				sizeof(struct aead_request));

	return 0;
}

static void starfive_aes_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->aead_fbk);
}

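/*
 * Queue a skcipher request on the crypto engine. ECB and CBC require
 * the request length to be a multiple of the AES block size.
 */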
static int starfive_aes_crypt(struct skcipher_request *req, unsigned long flags)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct starfive_cryp_dev *cryp = ctx->cryp;
	unsigned int blocksize_align = crypto_skcipher_blocksize(tfm) - 1;

	cryp->flags = flags;

	if ((cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_ECB ||
	    (cryp->flags & FLG_MODE_MASK) == STARFIVE_AES_MODE_CBC)
		if (req->cryptlen & blocksize_align)
			return -EINVAL;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int starfive_aes_aead_crypt(struct aead_request *req, unsigned long flags)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct starfive_cryp_dev *cryp = ctx->cryp;

	cryp->flags = flags;

	/*
	 * The HW engine cannot perform CCM tag verification on text that
	 * is not block-size aligned, so use the fallback algorithm instead.
	 */
	if (ctx->aead_fbk && !is_encrypt(cryp)) {
		struct aead_request *subreq = aead_request_ctx(req);

		aead_request_set_tfm(subreq, ctx->aead_fbk);
		aead_request_set_callback(subreq, req->base.flags,
					  req->base.complete, req->base.data);
		aead_request_set_crypt(subreq, req->src,
				       req->dst, req->cryptlen, req->iv);
		aead_request_set_ad(subreq, req->assoclen);

		return crypto_aead_decrypt(subreq);
	}

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int starfive_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct starfive_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (!key || !keylen)
		return -EINVAL;

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int starfive_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				    unsigned int keylen)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (!key || !keylen)
		return -EINVAL;

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	if (ctx->aead_fbk)
		return crypto_aead_setkey(ctx->aead_fbk, key, keylen);

	return 0;
}

static int starfive_aes_gcm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

static int starfive_aes_ccm_setauthsize(struct crypto_aead *tfm,
					unsigned int authsize)
{
	struct starfive_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->aead_fbk, authsize);
}

static int starfive_aes_ecb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB | FLG_ENCRYPT);
}

static int starfive_aes_ecb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_ECB);
}

static int starfive_aes_cbc_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC | FLG_ENCRYPT);
}

static int starfive_aes_cbc_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CBC);
}

static int starfive_aes_cfb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CFB | FLG_ENCRYPT);
}

static int starfive_aes_cfb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CFB);
}

static int starfive_aes_ofb_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_OFB | FLG_ENCRYPT);
}

static int starfive_aes_ofb_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_OFB);
}

static int starfive_aes_ctr_encrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR | FLG_ENCRYPT);
}

static int starfive_aes_ctr_decrypt(struct skcipher_request *req)
{
	return starfive_aes_crypt(req, STARFIVE_AES_MODE_CTR);
}

static int starfive_aes_gcm_encrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM | FLG_ENCRYPT);
}

static int starfive_aes_gcm_decrypt(struct aead_request *req)
{
	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_GCM);
}

static int starfive_aes_ccm_encrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM | FLG_ENCRYPT);
}

static int starfive_aes_ccm_decrypt(struct aead_request *req)
{
	int ret;

	ret = starfive_aes_ccm_check_iv(req->iv);
	if (ret)
		return ret;

	return starfive_aes_aead_crypt(req, STARFIVE_AES_MODE_CCM);
}

static struct skcipher_engine_alg skcipher_algs[] = {
{
	.base.init			= starfive_aes_init_tfm,
	.base.setkey			= starfive_aes_setkey,
	.base.encrypt			= starfive_aes_ecb_encrypt,
	.base.decrypt			= starfive_aes_ecb_decrypt,
	.base.min_keysize		= AES_MIN_KEY_SIZE,
	.base.max_keysize		= AES_MAX_KEY_SIZE,
	.base.base = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "starfive-ecb-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
}, {
	.base.init			= starfive_aes_init_tfm,
	.base.setkey			= starfive_aes_setkey,
	.base.encrypt			= starfive_aes_cbc_encrypt,
	.base.decrypt			= starfive_aes_cbc_decrypt,
	.base.min_keysize		= AES_MIN_KEY_SIZE,
	.base.max_keysize		= AES_MAX_KEY_SIZE,
	.base.ivsize			= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "cbc(aes)",
		.cra_driver_name	= "starfive-cbc-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
}, {
	.base.init			= starfive_aes_init_tfm,
	.base.setkey			= starfive_aes_setkey,
	.base.encrypt			= starfive_aes_ctr_encrypt,
	.base.decrypt			= starfive_aes_ctr_decrypt,
	.base.min_keysize		= AES_MIN_KEY_SIZE,
	.base.max_keysize		= AES_MAX_KEY_SIZE,
	.base.ivsize			= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "starfive-ctr-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
}, {
	.base.init			= starfive_aes_init_tfm,
	.base.setkey			= starfive_aes_setkey,
	.base.encrypt			= starfive_aes_cfb_encrypt,
	.base.decrypt			= starfive_aes_cfb_decrypt,
	.base.min_keysize		= AES_MIN_KEY_SIZE,
	.base.max_keysize		= AES_MAX_KEY_SIZE,
	.base.ivsize			= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "cfb(aes)",
		.cra_driver_name	= "starfive-cfb-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
}, {
	.base.init			= starfive_aes_init_tfm,
	.base.setkey			= starfive_aes_setkey,
	.base.encrypt			= starfive_aes_ofb_encrypt,
	.base.decrypt			= starfive_aes_ofb_decrypt,
	.base.min_keysize		= AES_MIN_KEY_SIZE,
	.base.max_keysize		= AES_MAX_KEY_SIZE,
	.base.ivsize			= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "ofb(aes)",
		.cra_driver_name	= "starfive-ofb-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_do_one_req,
	},
},
};

static struct aead_engine_alg aead_algs[] = {
{
	.base.setkey			= starfive_aes_aead_setkey,
	.base.setauthsize		= starfive_aes_gcm_setauthsize,
	.base.encrypt			= starfive_aes_gcm_encrypt,
	.base.decrypt			= starfive_aes_gcm_decrypt,
	.base.init			= starfive_aes_aead_init_tfm,
	.base.exit			= starfive_aes_aead_exit_tfm,
	.base.ivsize			= GCM_AES_IV_SIZE,
	.base.maxauthsize		= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "starfive-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_aead_do_one_req,
	},
}, {
	.base.setkey			= starfive_aes_aead_setkey,
	.base.setauthsize		= starfive_aes_ccm_setauthsize,
	.base.encrypt			= starfive_aes_ccm_encrypt,
	.base.decrypt			= starfive_aes_ccm_decrypt,
	.base.init			= starfive_aes_aead_init_tfm,
	.base.exit			= starfive_aes_aead_exit_tfm,
	.base.ivsize			= AES_BLOCK_SIZE,
	.base.maxauthsize		= AES_BLOCK_SIZE,
	.base.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "starfive-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct starfive_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = starfive_aes_aead_do_one_req,
	},
},
};

int starfive_aes_register_algs(void)
{
	int ret;

	ret = crypto_engine_register_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
	if (ret)
		return ret;

	ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));

	return ret;
}

void starfive_aes_unregister_algs(void)
{
	crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}