1/* SPDX-License-Identifier: GPL-2.0-or-later */
2/*
3 * SM3 Secure Hash Algorithm, AVX assembler accelerated.
4 * specified in: https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02
5 *
6 * Copyright (C) 2021 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
7 */
8
9#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt
10
11#include <crypto/internal/hash.h>
12#include <crypto/internal/simd.h>
13#include <linux/init.h>
14#include <linux/module.h>
15#include <linux/types.h>
16#include <crypto/sm3.h>
17#include <crypto/sm3_base.h>
18#include <asm/simd.h>
19
/*
 * SM3 compression function implemented in AVX/BMI2 assembly.
 * Consumes 'nblocks' 64-byte message blocks from 'data' and updates the
 * digest words at the start of 'state' (layout asserted via BUILD_BUG_ON
 * in sm3_avx_update). NOTE(review): exact register/clobber contract lives
 * in the accompanying .S file, not visible here.
 */
asmlinkage void sm3_transform_avx(struct sm3_state *state,
			const u8 *data, int nblocks);
22
23static int sm3_avx_update(struct shash_desc *desc, const u8 *data,
24			 unsigned int len)
25{
26	struct sm3_state *sctx = shash_desc_ctx(desc);
27
28	if (!crypto_simd_usable() ||
29			(sctx->count % SM3_BLOCK_SIZE) + len < SM3_BLOCK_SIZE) {
30		sm3_update(sctx, data, len);
31		return 0;
32	}
33
34	/*
35	 * Make sure struct sm3_state begins directly with the SM3
36	 * 256-bit internal state, as this is what the asm functions expect.
37	 */
38	BUILD_BUG_ON(offsetof(struct sm3_state, state) != 0);
39
40	kernel_fpu_begin();
41	sm3_base_do_update(desc, data, len, sm3_transform_avx);
42	kernel_fpu_end();
43
44	return 0;
45}
46
47static int sm3_avx_finup(struct shash_desc *desc, const u8 *data,
48		      unsigned int len, u8 *out)
49{
50	if (!crypto_simd_usable()) {
51		struct sm3_state *sctx = shash_desc_ctx(desc);
52
53		if (len)
54			sm3_update(sctx, data, len);
55
56		sm3_final(sctx, out);
57		return 0;
58	}
59
60	kernel_fpu_begin();
61	if (len)
62		sm3_base_do_update(desc, data, len, sm3_transform_avx);
63	sm3_base_do_finalize(desc, sm3_transform_avx);
64	kernel_fpu_end();
65
66	return sm3_base_finish(desc, out);
67}
68
69static int sm3_avx_final(struct shash_desc *desc, u8 *out)
70{
71	if (!crypto_simd_usable()) {
72		sm3_final(shash_desc_ctx(desc), out);
73		return 0;
74	}
75
76	kernel_fpu_begin();
77	sm3_base_do_finalize(desc, sm3_transform_avx);
78	kernel_fpu_end();
79
80	return sm3_base_finish(desc, out);
81}
82
/*
 * shash algorithm descriptor for the AVX-accelerated SM3 driver.
 * Priority 300 ranks it above the generic C "sm3" implementation.
 */
static struct shash_alg sm3_avx_alg = {
	.digestsize	=	SM3_DIGEST_SIZE,
	.init		=	sm3_base_init,
	.update		=	sm3_avx_update,
	.final		=	sm3_avx_final,
	.finup		=	sm3_avx_finup,
	.descsize	=	sizeof(struct sm3_state),
	.base		=	{
		.cra_name	=	"sm3",
		.cra_driver_name =	"sm3-avx",
		.cra_priority	=	300,
		.cra_blocksize	=	SM3_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
98
99static int __init sm3_avx_mod_init(void)
100{
101	const char *feature_name;
102
103	if (!boot_cpu_has(X86_FEATURE_AVX)) {
104		pr_info("AVX instruction are not detected.\n");
105		return -ENODEV;
106	}
107
108	if (!boot_cpu_has(X86_FEATURE_BMI2)) {
109		pr_info("BMI2 instruction are not detected.\n");
110		return -ENODEV;
111	}
112
113	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
114				&feature_name)) {
115		pr_info("CPU feature '%s' is not supported.\n", feature_name);
116		return -ENODEV;
117	}
118
119	return crypto_register_shash(&sm3_avx_alg);
120}
121
/* Module exit: unregister the shash algorithm registered at init. */
static void __exit sm3_avx_mod_exit(void)
{
	crypto_unregister_shash(&sm3_avx_alg);
}
126
/* Module entry/exit hooks and metadata; aliases let the crypto API
 * auto-load this module for "sm3" / "sm3-avx" requests.
 */
module_init(sm3_avx_mod_init);
module_exit(sm3_avx_mod_exit);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_DESCRIPTION("SM3 Secure Hash Algorithm, AVX assembler accelerated");
MODULE_ALIAS_CRYPTO("sm3");
MODULE_ALIAS_CRYPTO("sm3-avx");
135