1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Shared glue code for 128bit block ciphers
4 */
5
6#ifndef _CRYPTO_GLUE_HELPER_H
7#define _CRYPTO_GLUE_HELPER_H
8
9#include <crypto/internal/skcipher.h>
10#include <linux/kernel.h>
11#include <asm/fpu/api.h>
12#include <crypto/b128ops.h>
13
/* One-shot ECB-style transform: encrypt/decrypt @src into @dst using @ctx. */
typedef void (*common_glue_func_t)(const void *ctx, u8 *dst, const u8 *src);
/* CBC transform; same signature as ECB (chaining handled by the caller). */
typedef void (*common_glue_cbc_func_t)(const void *ctx, u8 *dst, const u8 *src);
/* CTR transform; @iv is the little-endian counter, updated by the callee. */
typedef void (*common_glue_ctr_func_t)(const void *ctx, u8 *dst, const u8 *src,
				       le128 *iv);
/* XTS transform; @iv is the little-endian tweak, updated by the callee. */
typedef void (*common_glue_xts_func_t)(const void *ctx, u8 *dst, const u8 *src,
				       le128 *iv);

/* One implementation variant: a function plus how many blocks it handles. */
struct common_glue_func_entry {
	unsigned int num_blocks; /* number of blocks that @fn will process */
	union {
		common_glue_func_t ecb;
		common_glue_cbc_func_t cbc;
		common_glue_ctr_func_t ctr;
		common_glue_xts_func_t xts;
	} fn_u;
};
30
/*
 * Per-cipher table of implementation variants, ordered from widest
 * (most blocks per call) to narrowest (single block).
 */
struct common_glue_ctx {
	unsigned int num_funcs;  /* number of valid entries in funcs[] */
	int fpu_blocks_limit; /* -1 means fpu not needed at all */

	/*
	 * First funcs entry must have largest num_blocks and last funcs entry
	 * must have num_blocks == 1!
	 */
	struct common_glue_func_entry funcs[];
};
41
42static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
43				  struct skcipher_walk *walk,
44				  bool fpu_enabled, unsigned int nbytes)
45{
46	if (likely(fpu_blocks_limit < 0))
47		return false;
48
49	if (fpu_enabled)
50		return true;
51
52	/*
53	 * Vector-registers are only used when chunk to be processed is large
54	 * enough, so do not enable FPU until it is necessary.
55	 */
56	if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
57		return false;
58
59	/* prevent sleeping if FPU is in use */
60	skcipher_walk_atomise(walk);
61
62	kernel_fpu_begin();
63	return true;
64}
65
66static inline void glue_fpu_end(bool fpu_enabled)
67{
68	if (fpu_enabled)
69		kernel_fpu_end();
70}
71
72static inline void le128_to_be128(be128 *dst, const le128 *src)
73{
74	dst->a = cpu_to_be64(le64_to_cpu(src->a));
75	dst->b = cpu_to_be64(le64_to_cpu(src->b));
76}
77
78static inline void be128_to_le128(le128 *dst, const be128 *src)
79{
80	dst->a = cpu_to_le64(be64_to_cpu(src->a));
81	dst->b = cpu_to_le64(be64_to_cpu(src->b));
82}
83
84static inline void le128_inc(le128 *i)
85{
86	u64 a = le64_to_cpu(i->a);
87	u64 b = le64_to_cpu(i->b);
88
89	b++;
90	if (!b)
91		a++;
92
93	i->a = cpu_to_le64(a);
94	i->b = cpu_to_le64(b);
95}
96
/* Walk an skcipher request and run the ECB variants from @gctx. */
extern int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
			       struct skcipher_request *req);

/* CBC encryption; inherently serial, so only a single-block @fn is taken. */
extern int glue_cbc_encrypt_req_128bit(const common_glue_func_t fn,
				       struct skcipher_request *req);

/* CBC decryption; parallelizable, so a full @gctx variant table is used. */
extern int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
				       struct skcipher_request *req);

/* CTR mode over an skcipher request using the variants from @gctx. */
extern int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
			       struct skcipher_request *req);

/*
 * XTS mode: @tweak_fn/@tweak_ctx encrypt the initial tweak, @crypt_ctx
 * is passed to the data transforms; @decrypt selects the direction.
 */
extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
			       struct skcipher_request *req,
			       common_glue_func_t tweak_fn, void *tweak_ctx,
			       void *crypt_ctx, bool decrypt);

/* Helper: XTS-process one block with @fn, updating the tweak in @iv. */
extern void glue_xts_crypt_128bit_one(const void *ctx, u8 *dst,
				      const u8 *src, le128 *iv,
				      common_glue_func_t fn);
117
118#endif /* _CRYPTO_GLUE_HELPER_H */
119