// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bits [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bits [31..16] status */
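
/*
 * A request's mode is built by OR-ing one algorithm flag, one chaining-mode
 * flag and, for encryption, FLG_ENCRYPT: e.g. AES-CBC encryption runs with
 * FLG_AES | FLG_CBC | FLG_ENCRYPT (see the encrypt/decrypt entry points
 * below).
 */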

/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070

/* Register values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NPBLB_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
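/*
 * Per the GCM specification (NIST SP 800-38D), counter value 1 is consumed
 * to encrypt the authentication tag, so payload encryption starts with the
 * 32-bit counter at 2.
 */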
#define GCM_CTR_INIT            2
#define CRYP_AUTOSUSPEND_DELAY  50

struct stm32_cryp_caps {
	bool                    swap_final;
	bool                    padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp       *cryp;
	int                     keylen;
	__be32                  key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long           flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head        list;
	struct device           *dev;
	void __iomem            *regs;
	struct clk              *clk;
	unsigned long           flags;
	u32                     irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx   *ctx;

	struct crypto_engine    *engine;

	struct skcipher_request *req;
	struct aead_request     *areq;

	size_t                  authsize;
	size_t                  hw_blocksize;

	size_t                  payload_in;
	size_t                  header_in;
	size_t                  payload_out;

	struct scatterlist      *out_sg;

	struct scatter_walk     in_walk;
	struct scatter_walk     out_walk;

	__be32                  last_ctr[4];
	u32                     gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head        dev_list;
	spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++));

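	/* Only AES uses a full 128-bit IV; DES/TDES IVs fit in the IV0 pair */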
	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	__be32 *tmp = (void *)req->iv;

	if (!tmp)
		return;

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
	}
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, be32_to_cpu(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, be32_to_cpu(c->ctx->key[1]));
	} else {
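		/*
		 * Keys are right-aligned in the key registers: write the
		 * words backwards from K3RR so that shorter keys simply
		 * leave the upper registers unused.
		 */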
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 be32_to_cpu(c->ctx->key[i - 1]));
	}
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

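/*
 * For AEAD requests, ->cryptlen covers only the plaintext on encryption but
 * ciphertext plus the appended tag on decryption, so the payload length
 * excludes authsize in the decrypt case.
 */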
static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	__be32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (gcm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER;
		stm32_cryp_write(cryp, CRYP_CR, cfg);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, CRYP_CR, cfg);
	}

	return 0;
}

static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
{
	u32 cfg;
	int err;

	/* Check if whole header written */
	if (!cryp->header_in) {
		/* Wait for completion */
		err = stm32_cryp_wait_busy(cryp);
		if (err) {
			dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, err);
			return;
		}

		if (stm32_cryp_get_input_text_len(cryp)) {
			/* Phase 3 : payload */
			cfg = stm32_cryp_read(cryp, CRYP_CR);
			cfg &= ~CR_CRYPEN;
			stm32_cryp_write(cryp, CRYP_CR, cfg);

			cfg &= ~CR_PH_MASK;
			cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
			stm32_cryp_write(cryp, CRYP_CR, cfg);
		} else {
			/*
			 * Phase 4 : tag.
			 * Nothing to read, nothing to write: the caller has
			 * to end the request.
			 */
		}
	}
}

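/*
 * Per RFC 3610, the first CCM header block (B1) starts with the encoded
 * associated data length: two bytes when the length is below 2^16 - 2^8
 * (65280), otherwise the 0xFFFE marker followed by a four-byte length.
 */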
static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
{
	unsigned int i;
	size_t written;
	size_t len;
	u32 alen = cryp->areq->assoclen;
	u32 block[AES_BLOCK_32] = {0};
	u8 *b8 = (u8 *)block;

	if (alen < 65280) {
		/* Write first u32 of B1 */
		b8[0] = (alen >> 8) & 0xFF;
		b8[1] = alen & 0xFF;
		len = 2;
	} else {
		/* Build the first two u32 of B1 */
		b8[0] = 0xFF;
		b8[1] = 0xFE;
		b8[2] = (alen & 0xFF000000) >> 24;
		b8[3] = (alen & 0x00FF0000) >> 16;
		b8[4] = (alen & 0x0000FF00) >> 8;
		b8[5] = alen & 0x000000FF;
		len = 6;
	}

	written = min_t(size_t, AES_BLOCK_SIZE - len, alen);

	scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
	u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
	__be32 *bd;
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. First, set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
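	/*
	 * iv[0] holds the CCM L' value (L - 1): zero the trailing L counter
	 * bytes of CTR0, then start the counter at 1 (counter 0 is reserved
	 * for encrypting the MAC).
	 */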
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

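	/* Flags byte of B0: bits [5:3] hold M' = (authsize - 2) / 2 (RFC 3610) */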
	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;
	bd = (__be32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		u32 xd = d[i];

		if (!cryp->caps->padding_wa)
			xd = be32_to_cpu(bd[i]);
		stm32_cryp_write(cryp, CRYP_DIN, xd);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (ccm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER | CR_CRYPEN;
		stm32_cryp_write(cryp, CRYP_CR, cfg);

		/* Write first (special) block (may move to next phase [payload]) */
		stm32_cryp_write_ccm_first_header(cryp);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, CRYP_CR, cfg);
	}

	return 0;
}

static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set key */
	stm32_cryp_hw_write_key(cryp);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}
	}

	cfg |= hw_mode;

	if (is_decrypt(cryp))
		cfg |= CR_DEC_NOT_ENC;

	/* Apply config and flush (valid when CRYPEN = 0) */
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
		break;

	default:
		break;
	}

	/* Enable now */
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
		stm32_cryp_get_iv(cryp);

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req,
						 err);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
	ctx->enginectx.op.unprepare_request = NULL;
	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
				       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_skcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	struct scatterlist *in_sg;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->header_in = 0;
		cryp->payload_in = req->cryptlen;
		cryp->payload_out = req->cryptlen;
		cryp->authsize = 0;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  = AssocData   ||     PlainText
		 *          <- assoclen ->  <- cryptlen ->
		 *
		 *  OUTPUT = AssocData    ||   CipherText   ||      AuthTag
		 *          <- assoclen ->  <-- cryptlen -->  <- authsize ->
		 *
		 * Decryption case:
		 *  INPUT  = AssocData    ||    CipherText   ||     AuthTag
		 *          <- assoclen ->  <--------- cryptlen --------->
		 *
		 *  OUTPUT = AssocData    ||               PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->payload_in = areq->cryptlen;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = areq->cryptlen;
		} else {
			cryp->payload_in = areq->cryptlen - cryp->authsize;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = cryp->payload_in;
		}
	}

	in_sg = req ? req->src : areq->src;
	scatterwalk_start(&cryp->in_walk, in_sg);

	cryp->out_sg = req ? req->dst : areq->dst;
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
	}

	if (is_ctr(cryp))
		memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));

	ret = stm32_cryp_hw_init(cryp);
	return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq)
{
	struct skcipher_request *req = container_of(areq,
						      struct skcipher_request,
						      base);

	return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq,
						      struct skcipher_request,
						      base);
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	if (unlikely(!cryp->payload_in && !cryp->header_in)) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit;
	unsigned int i;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, CRYP_CR);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - cryp->authsize;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
	} else {
		/* CCM: write CTR0 */
		u32 iv32[AES_BLOCK_32];
		u8 *iv = (u8 *)iv32;
		__be32 *biv = (__be32 *)iv32;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			u32 xiv = iv32[i];

			if (!cryp->caps->padding_wa)
				xiv = be32_to_cpu(biv[i]);
			stm32_cryp_write(cryp, CRYP_DIN, xiv);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		u32 out_tag[AES_BLOCK_32];

		/* Get and write tag */
		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);

		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return ret;
}

static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
		/*
		 * In this case we need to manually increment the CTR counter,
		 * as the HW doesn't handle the 32-bit carry.
		 */
		crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	/* The IV registers are BE */
	cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
	cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
	cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
}

static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i;
	u32 block[AES_BLOCK_32];

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
		block[i] = stm32_cryp_read(cryp, CRYP_DOUT);

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i;
	u32 block[AES_BLOCK_32] = {0};

	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
							    cryp->payload_in), 0);
	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);

	cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
}

static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm last data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we also keep the
	 * block value: it is written back as padded data below.
	 */
	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
		block[i] = stm32_cryp_read(cryp, CRYP_DOUT);

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) write padded data */
	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, CRYP_DOUT);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

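/*
 * On CRYP revisions that do not need the padding workaround, the NPBLB
 * bitfield ("number of padding bytes in the last block") tells the hardware
 * how many trailing bytes of the final GCM/CCM block are padding, so it can
 * exclude them from the output and the tag computation.
 */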
static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg;

	/* Disable the IP, set NPBLB, then re-enable the IP */
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NPBLB_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
	u32 block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we also keep the
	 * block value: it is XORed and written back below.
	 */
	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
		block[i] = stm32_cryp_read(cryp, CRYP_DOUT);

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(block); i++) {
		block[i] ^= cstmp1[i];
		block[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->payload_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa) {
			/* Special case 1 */
			stm32_cryp_irq_write_gcm_padded_data(cryp);
			return;
		}

		/* Set padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa) {
			/* Special case 2 */
			stm32_cryp_irq_write_ccm_padded_data(cryp);
			return;
		}

		/* Set padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
{
	unsigned int i;
	u32 block[AES_BLOCK_32] = {0};
	size_t written;

	written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);

	scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_write(cryp, CRYP_DIN, block[i]);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;
	u32 it_mask = stm32_cryp_read(cryp, CRYP_IMSCR);

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		stm32_cryp_irq_read_data(cryp);

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp) || is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcmccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			if (is_gcm(cryp))
				cryp->gcm_ctr++;
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	/* Mask useless interrupts */
	if (!cryp->payload_in && !cryp->header_in)
		it_mask &= ~IMSCR_IN;
	if (!cryp->payload_out)
		it_mask &= ~IMSCR_OUT;
	stm32_cryp_write(cryp, CRYP_IMSCR, it_mask);

	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
		stm32_cryp_finish_req(cryp, 0);

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

static struct skcipher_alg crypto_algs[] = {
{
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "stm32-ecb-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_ecb_encrypt,
	.decrypt		= stm32_cryp_aes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "stm32-cbc-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_cbc_encrypt,
	.decrypt		= stm32_cryp_aes_cbc_decrypt,
},
{
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "stm32-ctr-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_ctr_encrypt,
	.decrypt		= stm32_cryp_aes_ctr_decrypt,
},
{
	.base.cra_name		= "ecb(des)",
	.base.cra_driver_name	= "stm32-ecb-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_BLOCK_SIZE,
	.max_keysize		= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_ecb_encrypt,
	.decrypt		= stm32_cryp_des_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des)",
	.base.cra_driver_name	= "stm32-cbc-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_BLOCK_SIZE,
	.max_keysize		= DES_BLOCK_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_cbc_encrypt,
	.decrypt		= stm32_cryp_des_cbc_decrypt,
},
{
	.base.cra_name		= "ecb(des3_ede)",
	.base.cra_driver_name	= "stm32-ecb-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= 3 * DES_BLOCK_SIZE,
	.max_keysize		= 3 * DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_ecb_encrypt,
	.decrypt		= stm32_cryp_tdes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des3_ede)",
	.base.cra_driver_name	= "stm32-cbc-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= 3 * DES_BLOCK_SIZE,
	.max_keysize		= 3 * DES_BLOCK_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_cbc_encrypt,
	.decrypt		= stm32_cryp_tdes_cbc_decrypt,
},
};

static struct aead_alg aead_algs[] = {
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.encrypt	= stm32_cryp_aes_gcm_encrypt,
	.decrypt	= stm32_cryp_aes_gcm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= 12,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.encrypt	= stm32_cryp_aes_ccm_encrypt,
	.decrypt	= stm32_cryp_aes_ccm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0,
		.cra_module		= THIS_MODULE,
	},
},
};
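
/*
 * These transforms are reached through the generic crypto API; a minimal
 * usage sketch (error handling omitted) could look like:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_crypt(req, src, dst, len, iv);
 *	crypto_skcipher_encrypt(req);
 *	crypto_free_skcipher(tfm);
 */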

static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_resume_and_get(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm		= &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");