// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (c) 2021 Aspeed Technology Inc.
 */

#include "aspeed-hace.h"
#include <crypto/des.h>
#include <crypto/engine.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

#ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_CRYPTO_DEBUG
#define CIPHER_DBG(h, fmt, ...)	\
	dev_info((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#else
#define CIPHER_DBG(h, fmt, ...)	\
	dev_dbg((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#endif

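/*
 * Forward the request to the software fallback tfm, preserving the
 * caller's callback, flags and completion data.
 */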
static int aspeed_crypto_do_fallback(struct skcipher_request *areq)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);

	if (rctx->enc_cmd & HACE_CMD_ENCRYPT)
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_decrypt(&rctx->fallback_req);

	return err;
}

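/*
 * Zero-length requests and requests that are not block-aligned cannot be
 * handled by the engine, so they must go through the software fallback.
 */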
static bool aspeed_crypto_need_fallback(struct skcipher_request *areq)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);

	if (areq->cryptlen == 0)
		return true;

	if ((rctx->enc_cmd & HACE_CMD_DES_SELECT) &&
	    !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE))
		return true;

	if ((!(rctx->enc_cmd & HACE_CMD_DES_SELECT)) &&
	    !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE))
		return true;

	return false;
}

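/*
 * On AST2500, requests the engine cannot process are handled by the
 * software fallback; everything else is queued to the crypto engine.
 */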
static int aspeed_hace_crypto_handle_queue(struct aspeed_hace_dev *hace_dev,
					   struct skcipher_request *req)
{
	if (hace_dev->version == AST2500_VERSION &&
	    aspeed_crypto_need_fallback(req)) {
		CIPHER_DBG(hace_dev, "SW fallback\n");
		return aspeed_crypto_do_fallback(req);
	}

	return crypto_transfer_skcipher_request_to_engine(
			hace_dev->crypt_engine_crypto, req);
}

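/* crypto_engine callback: start processing one queued skcipher request */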
static int aspeed_crypto_do_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	struct aspeed_engine_crypto *crypto_engine;
	int rc;

	crypto_engine = &hace_dev->crypto_engine;
	crypto_engine->req = req;
	crypto_engine->flags |= CRYPTO_FLAGS_BUSY;

	rc = ctx->start(hace_dev);

	if (rc != -EINPROGRESS)
		return -EIO;

	return 0;
}

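/*
 * Completion path: copy the saved IV back into the request (if the mode
 * needs one), clear the busy flag and finalize the request on the engine.
 */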
static int aspeed_sk_complete(struct aspeed_hace_dev *hace_dev, int err)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
			memcpy(req->iv, crypto_engine->cipher_ctx +
			       DES_KEY_SIZE, DES_KEY_SIZE);
		else
			memcpy(req->iv, crypto_engine->cipher_ctx,
			       AES_BLOCK_SIZE);
	}

	crypto_engine->flags &= ~CRYPTO_FLAGS_BUSY;

	crypto_finalize_skcipher_request(hace_dev->crypt_engine_crypto, req,
					 err);

	return err;
}

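/*
 * Unmap the scatterlists used for scatter-gather mode and complete
 * the request.
 */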
static int aspeed_sk_transfer_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct device *dev = hace_dev->dev;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	if (req->src == req->dst) {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, req->dst, rctx->dst_nents, DMA_FROM_DEVICE);
	}

	return aspeed_sk_complete(hace_dev, 0);
}

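/*
 * Non-SG completion: copy the engine's output buffer back into the
 * destination scatterlist before finalizing the request.
 */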
static int aspeed_sk_transfer(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *out_sg;
	int nbytes = 0;
	int rc = 0;

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	out_sg = req->dst;

	/* Copy output buffer to dst scatter-gather lists */
	nbytes = sg_copy_from_buffer(out_sg, rctx->dst_nents,
				     crypto_engine->cipher_addr, req->cryptlen);
	if (!nbytes) {
		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
			 "nbytes", nbytes, "cryptlen", req->cryptlen);
		rc = -EINVAL;
	}

	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
		   "nb_out_sg", rctx->dst_nents,
		   "cipher addr", crypto_engine->cipher_addr);

	return aspeed_sk_complete(hace_dev, rc);
}

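/*
 * Non-SG path: stage the input data in a contiguous DMA buffer and
 * trigger the engine with in-place source/destination addresses.
 */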
static int aspeed_sk_start(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *in_sg;
	int nbytes;

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	in_sg = req->src;

	nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents,
				   crypto_engine->cipher_addr, req->cryptlen);

	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
		   "nb_in_sg", rctx->src_nents,
		   "cipher addr", crypto_engine->cipher_addr);

	if (!nbytes) {
		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
			 "nbytes", nbytes, "cryptlen", req->cryptlen);
		return -EINVAL;
	}

	crypto_engine->resume = aspeed_sk_transfer;

	/* Trigger engines */
	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
		       ASPEED_HACE_SRC);
	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
		       ASPEED_HACE_DEST);
	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);

	return -EINPROGRESS;
}

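/*
 * SG path (AST2600): map the source/destination scatterlists, build the
 * hardware scatter-gather descriptor lists (BIT(31) marks the last
 * descriptor) and trigger the engine.
 */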
static int aspeed_sk_start_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_sg_list *src_list, *dst_list;
	dma_addr_t src_dma_addr, dst_dma_addr;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *s;
	int src_sg_len;
	int dst_sg_len;
	int total, i;
	int rc;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	rctx->enc_cmd |= HACE_CMD_DES_SG_CTRL | HACE_CMD_SRC_SG_CTRL |
			 HACE_CMD_AES_KEY_HW_EXP | HACE_CMD_MBUS_REQ_SYNC_EN;

	/* BIDIRECTIONAL */
	if (req->dst == req->src) {
		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
					rctx->src_nents, DMA_BIDIRECTIONAL);
		dst_sg_len = src_sg_len;
		if (!src_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
			return -EINVAL;
		}

	} else {
		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
					rctx->src_nents, DMA_TO_DEVICE);
		if (!src_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
			return -EINVAL;
		}

		dst_sg_len = dma_map_sg(hace_dev->dev, req->dst,
					rctx->dst_nents, DMA_FROM_DEVICE);
		if (!dst_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() dst error\n");
			rc = -EINVAL;
			goto free_req_src;
		}
	}

	src_list = (struct aspeed_sg_list *)crypto_engine->cipher_addr;
	src_dma_addr = crypto_engine->cipher_dma_addr;
	total = req->cryptlen;

	for_each_sg(req->src, s, src_sg_len, i) {
		u32 phy_addr = sg_dma_address(s);
		u32 len = sg_dma_len(s);

		if (total > len)
			total -= len;
		else {
			/* last sg list */
			len = total;
			len |= BIT(31);
			total = 0;
		}

		src_list[i].phy_addr = cpu_to_le32(phy_addr);
		src_list[i].len = cpu_to_le32(len);
	}

	if (total != 0) {
		rc = -EINVAL;
		goto free_req;
	}

	if (req->dst == req->src) {
		dst_list = src_list;
		dst_dma_addr = src_dma_addr;

	} else {
		dst_list = (struct aspeed_sg_list *)crypto_engine->dst_sg_addr;
		dst_dma_addr = crypto_engine->dst_sg_dma_addr;
		total = req->cryptlen;

		for_each_sg(req->dst, s, dst_sg_len, i) {
			u32 phy_addr = sg_dma_address(s);
			u32 len = sg_dma_len(s);

			if (total > len)
				total -= len;
			else {
				/* last sg list */
				len = total;
				len |= BIT(31);
				total = 0;
			}

			dst_list[i].phy_addr = cpu_to_le32(phy_addr);
			dst_list[i].len = cpu_to_le32(len);
		}

		dst_list[dst_sg_len].phy_addr = 0;
		dst_list[dst_sg_len].len = 0;
	}

	if (total != 0) {
		rc = -EINVAL;
		goto free_req;
	}

	crypto_engine->resume = aspeed_sk_transfer_sg;

	/* Memory barrier to ensure all data setup before engine starts */
	mb();

	/* Trigger engines */
	ast_hace_write(hace_dev, src_dma_addr, ASPEED_HACE_SRC);
	ast_hace_write(hace_dev, dst_dma_addr, ASPEED_HACE_DEST);
	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);

	return -EINPROGRESS;

free_req:
	if (req->dst == req->src) {
		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
			     DMA_BIDIRECTIONAL);

	} else {
		dma_unmap_sg(hace_dev->dev, req->dst, rctx->dst_nents,
			     DMA_FROM_DEVICE);
		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
			     DMA_TO_DEVICE);
	}

	return rc;

free_req_src:
	dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);

	return rc;
}

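/*
 * Common setup before starting the engine: enable the completion
 * interrupt, load the IV and key into the context buffer and dispatch
 * to the SG or non-SG start routine depending on the HACE version.
 */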
static int aspeed_hace_skcipher_trigger(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct crypto_skcipher *cipher;
	struct aspeed_cipher_ctx *ctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	cipher = crypto_skcipher_reqtfm(req);
	ctx = crypto_skcipher_ctx(cipher);

	/* enable interrupt */
	rctx->enc_cmd |= HACE_CMD_ISR_EN;

	rctx->dst_nents = sg_nents(req->dst);
	rctx->src_nents = sg_nents(req->src);

	ast_hace_write(hace_dev, crypto_engine->cipher_ctx_dma,
		       ASPEED_HACE_CONTEXT);

	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
			memcpy(crypto_engine->cipher_ctx + DES_BLOCK_SIZE,
			       req->iv, DES_BLOCK_SIZE);
		else
			memcpy(crypto_engine->cipher_ctx, req->iv,
			       AES_BLOCK_SIZE);
	}

	if (hace_dev->version == AST2600_VERSION) {
		memcpy(crypto_engine->cipher_ctx + 16, ctx->key, ctx->key_len);

		return aspeed_sk_start_sg(hace_dev);
	}

	memcpy(crypto_engine->cipher_ctx + 16, ctx->key, AES_MAX_KEYLENGTH);

	return aspeed_sk_start(hace_dev);
}

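/*
 * Validate the request length for block modes, then queue a DES/3DES
 * operation with the given command word.
 */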
static int aspeed_des_crypt(struct skcipher_request *req, u32 cmd)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;

	CIPHER_DBG(hace_dev, "\n");

	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
		if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE))
			return -EINVAL;
	}

	rctx->enc_cmd = cmd | HACE_CMD_DES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
			HACE_CMD_DES | HACE_CMD_CONTEXT_LOAD_ENABLE |
			HACE_CMD_CONTEXT_SAVE_ENABLE;

	return aspeed_hace_crypto_handle_queue(hace_dev, req);
}

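/*
 * Verify and store the DES/3DES key, and propagate it to the fallback
 * tfm so both paths stay in sync.
 */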
static int aspeed_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	int rc;

	CIPHER_DBG(hace_dev, "keylen: %d bytes\n", keylen);

	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
		dev_warn(hace_dev->dev, "invalid keylen: %d bytes\n", keylen);
		return -EINVAL;
	}

	if (keylen == DES_KEY_SIZE) {
		rc = crypto_des_verify_key(tfm, key);
		if (rc)
			return rc;

	} else if (keylen == DES3_EDE_KEY_SIZE) {
		rc = crypto_des3_ede_verify_key(tfm, key);
		if (rc)
			return rc;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_len = keylen;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int aspeed_tdes_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ofb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ofb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cfb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cfb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_des_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ofb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ofb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cfb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cfb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
				HACE_CMD_SINGLE_DES);
}

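/*
 * Validate the request length for block modes, select the AES key size
 * and queue the request with the assembled command word.
 */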
static int aspeed_aes_crypt(struct skcipher_request *req, u32 cmd)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;

	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
		if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
			return -EINVAL;
	}

	CIPHER_DBG(hace_dev, "%s\n",
		   (cmd & HACE_CMD_ENCRYPT) ? "encrypt" : "decrypt");

	cmd |= HACE_CMD_AES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
	       HACE_CMD_CONTEXT_LOAD_ENABLE | HACE_CMD_CONTEXT_SAVE_ENABLE;

	switch (ctx->key_len) {
	case AES_KEYSIZE_128:
		cmd |= HACE_CMD_AES128;
		break;
	case AES_KEYSIZE_192:
		cmd |= HACE_CMD_AES192;
		break;
	case AES_KEYSIZE_256:
		cmd |= HACE_CMD_AES256;
		break;
	default:
		return -EINVAL;
	}

	rctx->enc_cmd = cmd;

	return aspeed_hace_crypto_handle_queue(hace_dev, req);
}

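/*
 * Store the AES key. The AST2500 engine does not expand the key in
 * hardware, so the expanded key schedule is stored there instead.
 */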
static int aspeed_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	struct crypto_aes_ctx gen_aes_key;

	CIPHER_DBG(hace_dev, "keylen: %d bits\n", (keylen * 8));

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	if (ctx->hace_dev->version == AST2500_VERSION) {
		aes_expandkey(&gen_aes_key, key, keylen);
		memcpy(ctx->key, gen_aes_key.key_enc, AES_MAX_KEYLENGTH);

	} else {
		memcpy(ctx->key, key, keylen);
	}

	ctx->key_len = keylen;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}

static int aspeed_aes_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR);
}

static int aspeed_aes_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR);
}

static int aspeed_aes_ofb_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_OFB);
}

static int aspeed_aes_ofb_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_OFB);
}

static int aspeed_aes_cfb_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CFB);
}

static int aspeed_aes_cfb_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CFB);
}

static int aspeed_aes_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC);
}

static int aspeed_aes_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC);
}

static int aspeed_aes_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB);
}

static int aspeed_aes_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB);
}

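/*
 * Allocate the software fallback tfm and size the request context to
 * hold the fallback request as well.
 */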
static int aspeed_crypto_cra_init(struct crypto_skcipher *tfm)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct aspeed_hace_alg *crypto_alg;

	crypto_alg = container_of(alg, struct aspeed_hace_alg, alg.skcipher.base);
	ctx->hace_dev = crypto_alg->hace_dev;
	ctx->start = aspeed_hace_skcipher_trigger;

	CIPHER_DBG(ctx->hace_dev, "%s\n", name);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		dev_err(ctx->hace_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(ctx->fallback_tfm));
		return PTR_ERR(ctx->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct aspeed_cipher_reqctx) +
			 crypto_skcipher_reqsize(ctx->fallback_tfm));

	return 0;
}

static void aspeed_crypto_cra_exit(struct crypto_skcipher *tfm)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;

	CIPHER_DBG(hace_dev, "%s\n", crypto_tfm_alg_name(&tfm->base));
	crypto_free_skcipher(ctx->fallback_tfm);
}

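/* Algorithms available on all supported HACE versions */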
static struct aspeed_hace_alg aspeed_crypto_algs[] = {
	{
		.alg.skcipher.base = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aspeed_aes_setkey,
			.encrypt	= aspeed_aes_ecb_encrypt,
			.decrypt	= aspeed_aes_ecb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ecb(aes)",
				.cra_driver_name	= "aspeed-ecb-aes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= AES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= AES_BLOCK_SIZE,
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aspeed_aes_setkey,
			.encrypt	= aspeed_aes_cbc_encrypt,
			.decrypt	= aspeed_aes_cbc_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cbc(aes)",
				.cra_driver_name	= "aspeed-cbc-aes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= AES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= AES_BLOCK_SIZE,
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aspeed_aes_setkey,
			.encrypt	= aspeed_aes_cfb_encrypt,
			.decrypt	= aspeed_aes_cfb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cfb(aes)",
				.cra_driver_name	= "aspeed-cfb-aes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= 1,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= AES_BLOCK_SIZE,
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aspeed_aes_setkey,
			.encrypt	= aspeed_aes_ofb_encrypt,
			.decrypt	= aspeed_aes_ofb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ofb(aes)",
				.cra_driver_name	= "aspeed-ofb-aes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= 1,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_des_ecb_encrypt,
			.decrypt	= aspeed_des_ecb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ecb(des)",
				.cra_driver_name	= "aspeed-ecb-des",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_des_cbc_encrypt,
			.decrypt	= aspeed_des_cbc_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cbc(des)",
				.cra_driver_name	= "aspeed-cbc-des",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_des_cfb_encrypt,
			.decrypt	= aspeed_des_cfb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cfb(des)",
				.cra_driver_name	= "aspeed-cfb-des",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_des_ofb_encrypt,
			.decrypt	= aspeed_des_ofb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ofb(des)",
				.cra_driver_name	= "aspeed-ofb-des",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_tdes_ecb_encrypt,
			.decrypt	= aspeed_tdes_ecb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ecb(des3_ede)",
				.cra_driver_name	= "aspeed-ecb-tdes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_tdes_cbc_encrypt,
			.decrypt	= aspeed_tdes_cbc_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cbc(des3_ede)",
				.cra_driver_name	= "aspeed-cbc-tdes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_tdes_cfb_encrypt,
			.decrypt	= aspeed_tdes_cfb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cfb(des3_ede)",
				.cra_driver_name	= "aspeed-cfb-tdes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_tdes_ofb_encrypt,
			.decrypt	= aspeed_tdes_ofb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ofb(des3_ede)",
				.cra_driver_name	= "aspeed-ofb-tdes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
};

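/* CTR-mode algorithms, registered only on the AST2600 (g6) engine */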
static struct aspeed_hace_alg aspeed_crypto_algs_g6[] = {
	{
		.alg.skcipher.base = {
			.ivsize		= AES_BLOCK_SIZE,
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aspeed_aes_setkey,
			.encrypt	= aspeed_aes_ctr_encrypt,
			.decrypt	= aspeed_aes_ctr_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ctr(aes)",
				.cra_driver_name	= "aspeed-ctr-aes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC,
				.cra_blocksize		= 1,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_des_ctr_encrypt,
			.decrypt	= aspeed_des_ctr_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ctr(des)",
				.cra_driver_name	= "aspeed-ctr-des",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC,
				.cra_blocksize		= 1,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_tdes_ctr_encrypt,
			.decrypt	= aspeed_tdes_ctr_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ctr(des3_ede)",
				.cra_driver_name	= "aspeed-ctr-tdes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC,
				.cra_blocksize		= 1,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
};

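/* Unregister every skcipher algorithm that was registered for this HACE */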
void aspeed_unregister_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++)
		crypto_engine_unregister_skcipher(&aspeed_crypto_algs[i].alg.skcipher);

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++)
		crypto_engine_unregister_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
}

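/*
 * Register the skcipher algorithms with the crypto engine; the AST2600
 * additionally gets the CTR-mode algorithms.
 */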
void aspeed_register_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
{
	int rc, i;

	CIPHER_DBG(hace_dev, "\n");

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++) {
		aspeed_crypto_algs[i].hace_dev = hace_dev;
		rc = crypto_engine_register_skcipher(&aspeed_crypto_algs[i].alg.skcipher);
		if (rc) {
			CIPHER_DBG(hace_dev, "Failed to register %s\n",
				   aspeed_crypto_algs[i].alg.skcipher.base.base.cra_name);
		}
	}

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++) {
		aspeed_crypto_algs_g6[i].hace_dev = hace_dev;
		rc = crypto_engine_register_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
		if (rc) {
			CIPHER_DBG(hace_dev, "Failed to register %s\n",
				   aspeed_crypto_algs_g6[i].alg.skcipher.base.base.cra_name);
		}
	}
}