Searched for refs:AES_BLOCK_SIZE (results 1 - 25 of 211), sorted by relevance


/kernel/linux/linux-6.6/arch/arm64/crypto/
aes-glue.c
134 u8 dg[AES_BLOCK_SIZE];
189 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
194 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
209 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
214 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
227 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
232 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
256 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
261 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
282 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) in cts_cbc_encrypt()
[all...]
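The pattern these aes-glue.c hits repeat: each pass of the skcipher walk handles walk.nbytes / AES_BLOCK_SIZE whole blocks, then hands the sub-block remainder back through skcipher_walk_done(). A minimal userspace sketch of just that arithmetic (process_blocks() is a hypothetical stand-in for the accelerated cipher core, not a kernel API):

#include <stdio.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16

/* Hypothetical stand-in for the accelerated cipher core. */
static void process_blocks(const unsigned char *src, size_t blocks)
{
    (void)src;
    printf("processing %zu block(s)\n", blocks);
}

int main(void)
{
    unsigned char buf[100] = { 0 };
    size_t nbytes = sizeof(buf);    /* plays the role of walk.nbytes */
    size_t blocks;

    /* mirrors: while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { ... } */
    while ((blocks = nbytes / AES_BLOCK_SIZE)) {
        process_blocks(buf, blocks);
        nbytes -= blocks * AES_BLOCK_SIZE;
    }
    /* kernel: skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE) */
    printf("%zu tail byte(s) handed back\n", nbytes);
    return 0;
}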
aes-neonbs-glue.c
59 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32];
61 } __aligned(AES_BLOCK_SIZE);
105 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
106 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
110 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
117 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
165 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
166 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
174 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
188 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
[all...]
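The bit-sliced NEON glue adds a twist: the per-call block count is also clamped to a multiple of walk.stride / AES_BLOCK_SIZE, eight blocks for the bit-sliced core, as the truncated round-down hits suggest. A sketch of that clamping, assuming the eight-block stride:

#include <stdio.h>

#define AES_BLOCK_SIZE 16
/* assumption: the bit-sliced core handles 8 blocks per invocation */
#define WALK_STRIDE (8 * AES_BLOCK_SIZE)

/* round_down(), as in the kernel: largest multiple of d not above x */
static unsigned int round_down_u(unsigned int x, unsigned int d)
{
    return x - (x % d);
}

int main(void)
{
    unsigned int nbytes = 23 * AES_BLOCK_SIZE + 5;
    unsigned int blocks = nbytes / AES_BLOCK_SIZE;               /* 23 */

    blocks = round_down_u(blocks, WALK_STRIDE / AES_BLOCK_SIZE); /* 16 */
    printf("%u blocks this pass, %u bytes left over\n",
           blocks, nbytes - blocks * AES_BLOCK_SIZE);
    return 0;
}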
aes-ce-ccm-glue.c
62 __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8]; in ccm_init_mac()
80 memcpy(maciv, req->iv, AES_BLOCK_SIZE - l); in ccm_init_mac()
93 memset(&req->iv[AES_BLOCK_SIZE - l], 0, l); in ccm_init_mac()
151 u8 __aligned(8) mac[AES_BLOCK_SIZE]; in ccm_encrypt()
152 u8 buf[AES_BLOCK_SIZE]; in ccm_encrypt()
161 memcpy(buf, req->iv, AES_BLOCK_SIZE); in ccm_encrypt()
171 u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_encrypt()
205 u8 __aligned(8) mac[AES_BLOCK_SIZE]; in ccm_decrypt()
206 u8 buf[AES_BLOCK_SIZE]; in ccm_decrypt()
215 memcpy(buf, req->iv, AES_BLOCK_SIZE); in ccm_decrypt()
[all...]
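ccm_init_mac() assembles CCM's B0 block per RFC 3610: flags and nonce fill the first AES_BLOCK_SIZE - l bytes, and the message length occupies the trailing l bytes in big-endian order, hence the __be32 store at maciv[AES_BLOCK_SIZE - 8]. A standalone sketch of the layout (ccm_b0() is illustrative, not the kernel helper):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Illustrative B0 builder: iv[0] carries the flags (low 3 bits encode
 * l - 1), iv[1 .. 15-l] the nonce; the last l bytes get the length. */
static void ccm_b0(uint8_t b0[AES_BLOCK_SIZE],
                   const uint8_t iv[AES_BLOCK_SIZE], uint32_t msglen)
{
    unsigned int l = (iv[0] & 7) + 1;       /* width of the length field */

    memcpy(b0, iv, AES_BLOCK_SIZE - l);     /* flags + nonce */
    memset(b0 + AES_BLOCK_SIZE - l, 0, l);
    for (unsigned int i = 0; i < 4 && i < l; i++)
        b0[AES_BLOCK_SIZE - 1 - i] = (uint8_t)(msglen >> (8 * i));
}

int main(void)
{
    uint8_t iv[AES_BLOCK_SIZE] = { 0x03 };  /* flags: l = 4 */
    uint8_t b0[AES_BLOCK_SIZE];

    ccm_b0(b0, iv, 1000);
    for (int i = 0; i < AES_BLOCK_SIZE; i++)
        printf("%02x", b0[i]);
    printf("\n");
    return 0;
}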
/kernel/linux/linux-5.10/arch/arm64/crypto/
aes-neonbs-glue.c
57 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32];
59 } __aligned(AES_BLOCK_SIZE);
108 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
109 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
113 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
120 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
168 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
169 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
177 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
191 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
[all...]
aes-glue.c
128 u8 dg[AES_BLOCK_SIZE];
183 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
188 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
203 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
208 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
221 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
226 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
250 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
255 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
276 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) in cts_cbc_encrypt()
[all...]
aes-ce-ccm-glue.c
64 __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8]; in ccm_init_mac()
82 memcpy(maciv, req->iv, AES_BLOCK_SIZE - l); in ccm_init_mac()
95 memset(&req->iv[AES_BLOCK_SIZE - l], 0, l); in ccm_init_mac()
108 if (*macp > 0 && *macp < AES_BLOCK_SIZE) { in ccm_update_mac()
109 int added = min(abytes, AES_BLOCK_SIZE - *macp); in ccm_update_mac()
118 while (abytes >= AES_BLOCK_SIZE) { in ccm_update_mac()
120 crypto_xor(mac, in, AES_BLOCK_SIZE); in ccm_update_mac()
122 in += AES_BLOCK_SIZE; in ccm_update_mac()
123 abytes -= AES_BLOCK_SIZE; in ccm_update_mac()
177 u8 buf[AES_BLOCK_SIZE]; in ccm_crypt_fallback()
[all...]
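ccm_update_mac() folds data into a CBC-MAC: first top up any partially filled MAC block, then XOR and encrypt whole blocks, buffering whatever is left. A simplified userspace accumulator in the same spirit (aes_enc() is a cipher stub; the kernel version threads a key through and orders the encrypt/XOR steps slightly differently):

#include <stdint.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16

static void aes_enc(uint8_t blk[AES_BLOCK_SIZE]) { (void)blk; /* cipher stub */ }

static void xor_bytes(uint8_t *dst, const uint8_t *src, size_t n)
{
    while (n--)
        *dst++ ^= *src++;
}

/* mac: running CBC-MAC block; *macp: bytes already folded into it */
static void mac_update(uint8_t mac[AES_BLOCK_SIZE], unsigned int *macp,
                       const uint8_t *in, size_t abytes)
{
    if (*macp > 0 && *macp < AES_BLOCK_SIZE) {  /* top up a partial block */
        size_t added = abytes < AES_BLOCK_SIZE - *macp
                     ? abytes : AES_BLOCK_SIZE - *macp;
        xor_bytes(mac + *macp, in, added);
        *macp += added;
        in += added;
        abytes -= added;
        if (*macp == AES_BLOCK_SIZE) {
            aes_enc(mac);
            *macp = 0;
        }
    }
    while (abytes >= AES_BLOCK_SIZE) {          /* whole blocks */
        xor_bytes(mac, in, AES_BLOCK_SIZE);
        aes_enc(mac);
        in += AES_BLOCK_SIZE;
        abytes -= AES_BLOCK_SIZE;
    }
    if (abytes) {                               /* leave the tail buffered */
        xor_bytes(mac, in, abytes);
        *macp = (unsigned int)abytes;
    }
}

int main(void)
{
    uint8_t mac[AES_BLOCK_SIZE] = { 0 };
    unsigned int macp = 0;
    uint8_t aad[40] = { 0xaa };

    mac_update(mac, &macp, aad, sizeof(aad));  /* 2 full blocks + 8 buffered */
    return macp == 8 ? 0 : 1;
}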
ghash-ce-glue.c
241 aes_encrypt(&ctx->aes_key, key, (u8[AES_BLOCK_SIZE]){}); in gcm_setkey()
347 u8 buf[AES_BLOCK_SIZE]; in gcm_encrypt()
348 u8 iv[AES_BLOCK_SIZE]; in gcm_encrypt()
373 if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) { in gcm_encrypt()
377 nbytes &= ~(AES_BLOCK_SIZE - 1); in gcm_encrypt()
390 if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) in gcm_encrypt()
397 while (walk.nbytes >= AES_BLOCK_SIZE) { in gcm_encrypt()
398 int blocks = walk.nbytes / AES_BLOCK_SIZE; in gcm_encrypt()
405 crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE); in gcm_encrypt()
406 crypto_inc(iv, AES_BLOCK_SIZE); in gcm_encrypt()
[all...]
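The gcm_encrypt() hits show GCM's CTR core: encrypt the counter block, XOR it into the data with crypto_xor_cpy(), then advance the counter with crypto_inc(). crypto_inc() is a big-endian increment that ripples from the last byte; a standalone sketch:

#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Big-endian increment across the whole block, like crypto_inc(). */
static void ctr_inc(uint8_t ctr[AES_BLOCK_SIZE])
{
    for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
        if (++ctr[i] != 0)      /* stop once a byte doesn't wrap */
            break;
}

int main(void)
{
    uint8_t iv[AES_BLOCK_SIZE] = { [14] = 0x00, [15] = 0xff };

    ctr_inc(iv);                            /* ...00ff -> ...0100 */
    printf("%02x %02x\n", iv[14], iv[15]);  /* prints: 01 00 */
    return 0;
}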
/kernel/linux/linux-5.10/arch/arm/crypto/
aes-neonbs-glue.c
46 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
96 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
97 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
101 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
108 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
170 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
171 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
175 walk.stride / AES_BLOCK_SIZE); in cbc_decrypt()
183 walk.nbytes - blocks * AES_BLOCK_SIZE); in cbc_decrypt()
[all...]
aes-ce-glue.c
54 u8 b[AES_BLOCK_SIZE];
177 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
182 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
197 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
202 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
215 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
221 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
245 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
251 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
271 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) in cts_cbc_encrypt()
[all...]
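cts_cbc_encrypt() sizes its work with DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE), i.e. ceiling division via the usual (n + d - 1) / d idiom:

#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
    /* 17 bytes of plaintext span 2 CBC blocks */
    printf("%d\n", DIV_ROUND_UP(17, AES_BLOCK_SIZE));  /* 2 */
    printf("%d\n", DIV_ROUND_UP(16, AES_BLOCK_SIZE));  /* 1 */
    return 0;
}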
/kernel/linux/linux-6.6/arch/arm/crypto/
aes-neonbs-glue.c
49 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
99 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
100 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
104 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
111 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
173 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
174 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
178 walk.stride / AES_BLOCK_SIZE); in cbc_decrypt()
186 walk.nbytes - blocks * AES_BLOCK_SIZE); in cbc_decrypt()
[all...]
aes-ce-glue.c
54 u8 b[AES_BLOCK_SIZE];
177 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
182 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
197 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
202 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
215 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
221 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
245 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
251 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
271 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) in cts_cbc_encrypt()
[all...]
/kernel/linux/linux-5.10/net/mac80211/
fils_aead.c
28 u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {}; in aes_s2v()
35 crypto_shash_digest(desc, tmp, AES_BLOCK_SIZE, d); in aes_s2v()
41 crypto_xor(d, tmp, AES_BLOCK_SIZE); in aes_s2v()
46 if (len[i] >= AES_BLOCK_SIZE) { in aes_s2v()
49 crypto_shash_update(desc, addr[i], len[i] - AES_BLOCK_SIZE); in aes_s2v()
50 crypto_xor(d, addr[i] + len[i] - AES_BLOCK_SIZE, in aes_s2v()
51 AES_BLOCK_SIZE); in aes_s2v()
60 crypto_shash_finup(desc, d, AES_BLOCK_SIZE, v); in aes_s2v()
71 u8 v[AES_BLOCK_SIZE]; in aes_siv_encrypt()
[all...]
/kernel/linux/linux-6.6/net/mac80211/
fils_aead.c
28 u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {}; in aes_s2v()
35 crypto_shash_digest(desc, tmp, AES_BLOCK_SIZE, d); in aes_s2v()
41 crypto_xor(d, tmp, AES_BLOCK_SIZE); in aes_s2v()
46 if (len[i] >= AES_BLOCK_SIZE) { in aes_s2v()
49 crypto_shash_update(desc, addr[i], len[i] - AES_BLOCK_SIZE); in aes_s2v()
50 crypto_xor(d, addr[i] + len[i] - AES_BLOCK_SIZE, in aes_s2v()
51 AES_BLOCK_SIZE); in aes_s2v()
60 crypto_shash_finup(desc, d, AES_BLOCK_SIZE, v); in aes_s2v()
71 u8 v[AES_BLOCK_SIZE]; in aes_siv_encrypt()
[all...]
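aes_s2v() is the S2V construction from RFC 5297 (AES-SIV): each input vector is folded into d, short vectors via doubling and XOR, and vectors of at least AES_BLOCK_SIZE bytes via the xorend path that XORs d into the vector's final block (lines 49-51). The doubling primitive, dbl(), is multiplication by x in GF(2^128); a sketch:

#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* dbl() from RFC 5297: shift the 128-bit value left by one bit; if the
 * top bit fell off, reduce by XORing 0x87 into the low byte. */
static void gf128_dbl(uint8_t d[AES_BLOCK_SIZE])
{
    uint8_t carry = d[0] >> 7;

    for (int i = 0; i < AES_BLOCK_SIZE - 1; i++)
        d[i] = (uint8_t)((d[i] << 1) | (d[i + 1] >> 7));
    d[AES_BLOCK_SIZE - 1] = (uint8_t)(d[AES_BLOCK_SIZE - 1] << 1);
    if (carry)
        d[AES_BLOCK_SIZE - 1] ^= 0x87;
}

int main(void)
{
    uint8_t d[AES_BLOCK_SIZE] = { 0x80 };   /* top bit set: reduction fires */

    gf128_dbl(d);
    printf("%02x ... %02x\n", d[0], d[AES_BLOCK_SIZE - 1]);  /* 00 ... 87 */
    return 0;
}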
/kernel/linux/linux-5.10/arch/powerpc/crypto/
aes-spe-glue.c
192 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_ecb_crypt()
231 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_cbc_crypt()
273 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_ctr_crypt()
300 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_xts_crypt()
324 int tail = req->cryptlen % AES_BLOCK_SIZE; in ppc_xts_encrypt()
325 int offset = req->cryptlen - tail - AES_BLOCK_SIZE; in ppc_xts_encrypt()
327 u8 b[2][AES_BLOCK_SIZE]; in ppc_xts_encrypt()
330 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_encrypt()
344 scatterwalk_map_and_copy(b[0], req->dst, offset, AES_BLOCK_SIZE, 0); in ppc_xts_encrypt()
346 scatterwalk_map_and_copy(b[0], req->src, offset + AES_BLOCK_SIZE, tai in ppc_xts_encrypt()
[all...]
/kernel/linux/linux-6.6/arch/powerpc/crypto/
aes-spe-glue.c
192 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_ecb_crypt()
231 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_cbc_crypt()
273 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_ctr_crypt()
300 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in ppc_xts_crypt()
324 int tail = req->cryptlen % AES_BLOCK_SIZE; in ppc_xts_encrypt()
325 int offset = req->cryptlen - tail - AES_BLOCK_SIZE; in ppc_xts_encrypt()
327 u8 b[2][AES_BLOCK_SIZE]; in ppc_xts_encrypt()
330 if (req->cryptlen < AES_BLOCK_SIZE) in ppc_xts_encrypt()
344 scatterwalk_map_and_copy(b[0], req->dst, offset, AES_BLOCK_SIZE, 0); in ppc_xts_encrypt()
346 scatterwalk_map_and_copy(b[0], req->src, offset + AES_BLOCK_SIZE, tai in ppc_xts_encrypt()
[all...]
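ppc_xts_encrypt() implements XTS ciphertext stealing: tail = cryptlen % AES_BLOCK_SIZE bytes are left over, and the last full block, at offset cryptlen - tail - AES_BLOCK_SIZE, donates its trailing ciphertext bytes to complete them, which is what the scatterwalk copies around offset are doing. The index arithmetic on its own:

#include <stdio.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
    int cryptlen = 45;                              /* not a block multiple */
    int tail = cryptlen % AES_BLOCK_SIZE;           /* 13 stolen bytes */
    int offset = cryptlen - tail - AES_BLOCK_SIZE;  /* 16: last full block */

    printf("bulk: [0, %d)  steal-from block at %d  tail: %d byte(s)\n",
           offset, offset, tail);
    return 0;
}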
/kernel/linux/linux-5.10/drivers/crypto/nx/
nx-aes-xcbc.c
23 u8 state[AES_BLOCK_SIZE];
25 u8 buffer[AES_BLOCK_SIZE];
64 u8 keys[2][AES_BLOCK_SIZE]; in nx_xcbc_empty()
71 memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE); in nx_xcbc_empty()
72 memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
107 memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE); in nx_xcbc_empty()
114 len = AES_BLOCK_SIZE; in nx_xcbc_empty()
118 if (len != AES_BLOCK_SIZE) in nx_xcbc_empty()
132 memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
186 * 1: <= AES_BLOCK_SIZE in nx_xcbc_update()
[all...]
/kernel/linux/linux-6.6/drivers/crypto/nx/
nx-aes-xcbc.c
23 u8 state[AES_BLOCK_SIZE];
25 u8 buffer[AES_BLOCK_SIZE];
64 u8 keys[2][AES_BLOCK_SIZE]; in nx_xcbc_empty()
71 memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE); in nx_xcbc_empty()
72 memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
107 memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE); in nx_xcbc_empty()
114 len = AES_BLOCK_SIZE; in nx_xcbc_empty()
118 if (len != AES_BLOCK_SIZE) in nx_xcbc_empty()
132 memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
186 * 1: <= AES_BLOCK_SIZE in nx_xcbc_update()
[all...]
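nx_xcbc_empty() computes the XCBC-MAC (RFC 3566) of an empty message, which requires the derived subkeys: K1 and K3 are the encryptions of the constant blocks 0x01..01 and 0x03..03 under the main key, stored in the keys[2][AES_BLOCK_SIZE] array seen at line 64. A sketch of the derivation step (aes_ecb_enc() is a stub standing in for the NX ECB operation):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Stub standing in for one-block AES-ECB under the XCBC main key. */
static void aes_ecb_enc(const uint8_t key[AES_BLOCK_SIZE],
                        const uint8_t in[AES_BLOCK_SIZE],
                        uint8_t out[AES_BLOCK_SIZE])
{
    (void)key;
    memcpy(out, in, AES_BLOCK_SIZE);
}

int main(void)
{
    uint8_t key[AES_BLOCK_SIZE] = { 0 };
    uint8_t keys[2][AES_BLOCK_SIZE];
    uint8_t c1[AES_BLOCK_SIZE], c3[AES_BLOCK_SIZE];

    memset(c1, 0x01, sizeof(c1));   /* RFC 3566: K1 = E(K, 0x0101..01) */
    memset(c3, 0x03, sizeof(c3));   /*           K3 = E(K, 0x0303..03) */
    aes_ecb_enc(key, c1, keys[0]);
    aes_ecb_enc(key, c3, keys[1]);
    printf("K1[0]=%02x K3[0]=%02x\n", keys[0][0], keys[1][0]);
    return 0;
}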
/kernel/linux/linux-5.10/drivers/crypto/vmx/
aes_ctr.c
73 u8 keystream[AES_BLOCK_SIZE]; in p8_aes_ctr_final()
87 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
107 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt()
113 nbytes / AES_BLOCK_SIZE, in p8_aes_ctr_crypt()
120 crypto_inc(walk.iv, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
121 } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
147 .ivsize = AES_BLOCK_SIZE,
148 .chunksize = AES_BLOCK_SIZE,
/kernel/linux/linux-6.6/drivers/crypto/vmx/
aes_ctr.c
73 u8 keystream[AES_BLOCK_SIZE]; in p8_aes_ctr_final()
87 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
107 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt()
113 nbytes / AES_BLOCK_SIZE, in p8_aes_ctr_crypt()
120 crypto_inc(walk.iv, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
121 } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
147 .ivsize = AES_BLOCK_SIZE,
148 .chunksize = AES_BLOCK_SIZE,
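p8_aes_ctr_final() finishes CTR mode when a sub-block tail remains: it encrypts one last counter block into keystream[] and XORs only the trailing nbytes into the data, so no padding ever reaches the output. A hedged sketch with a stub cipher:

#include <stdint.h>
#include <stddef.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Stub for a single-block AES encryption. */
static void aes_enc_block(const uint8_t in[AES_BLOCK_SIZE],
                          uint8_t out[AES_BLOCK_SIZE])
{
    memcpy(out, in, AES_BLOCK_SIZE);
}

/* Finish CTR: one more keystream block, XOR only the tail bytes. */
static void ctr_final(uint8_t *dst, const uint8_t *src, size_t nbytes,
                      const uint8_t ctr[AES_BLOCK_SIZE])
{
    uint8_t keystream[AES_BLOCK_SIZE];

    aes_enc_block(ctr, keystream);
    for (size_t i = 0; i < nbytes; i++)
        dst[i] = src[i] ^ keystream[i];
}

int main(void)
{
    const uint8_t tail[5] = { 1, 2, 3, 4, 5 };
    uint8_t ctr[AES_BLOCK_SIZE] = { 0 }, out[5];

    ctr_final(out, tail, sizeof(tail), ctr);   /* 5-byte trailing fragment */
    return 0;
}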
/kernel/linux/linux-5.10/arch/s390/crypto/
paes_s390.c
223 n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_paes_crypt()
253 .base.cra_blocksize = AES_BLOCK_SIZE,
323 u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt()
331 memcpy(param.iv, walk.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
338 n = nbytes & ~(AES_BLOCK_SIZE - 1); in cbc_paes_crypt()
342 memcpy(walk.iv, param.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
370 .base.cra_blocksize = AES_BLOCK_SIZE,
378 .ivsize = AES_BLOCK_SIZE,
515 n = nbytes & ~(AES_BLOCK_SIZE - 1); in xts_paes_crypt()
547 .base.cra_blocksize = AES_BLOCK_SIZE,
[all...]
/kernel/linux/linux-6.6/arch/s390/crypto/
paes_s390.c
233 n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_paes_crypt()
263 .base.cra_blocksize = AES_BLOCK_SIZE,
335 u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt()
343 memcpy(param.iv, walk.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
350 n = nbytes & ~(AES_BLOCK_SIZE - 1); in cbc_paes_crypt()
354 memcpy(walk.iv, param.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
382 .base.cra_blocksize = AES_BLOCK_SIZE,
390 .ivsize = AES_BLOCK_SIZE,
530 n = nbytes & ~(AES_BLOCK_SIZE - 1); in xts_paes_crypt()
562 .base.cra_blocksize = AES_BLOCK_SIZE,
[all...]
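Both paes paths round down with nbytes & ~(AES_BLOCK_SIZE - 1). Since AES_BLOCK_SIZE is a power of two, clearing the low four bits rounds down to a block multiple, the same result the sparc glue below obtains from round_down():

#include <stdio.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
    unsigned int nbytes = 1000;
    /* power-of-two trick: clearing the low 4 bits == round_down(n, 16) */
    unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);

    printf("%u bytes of full blocks, %u-byte tail\n", n, nbytes - n);  /* 992, 8 */
    return 0;
}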
/kernel/linux/linux-5.10/arch/sparc/crypto/
aes_glue.c
235 round_down(nbytes, AES_BLOCK_SIZE)); in ecb_encrypt()
236 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
260 round_down(nbytes, AES_BLOCK_SIZE)); in ecb_decrypt()
261 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
284 round_down(nbytes, AES_BLOCK_SIZE), in cbc_encrypt()
286 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
310 round_down(nbytes, AES_BLOCK_SIZE), in cbc_decrypt()
312 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in cbc_decrypt()
323 u64 keystream[AES_BLOCK_SIZE / sizeof(u64)]; in ctr_crypt_final()
329 keystream, AES_BLOCK_SIZE); in ctr_crypt_final()
[all...]
/kernel/linux/linux-6.6/arch/sparc/crypto/
aes_glue.c
235 round_down(nbytes, AES_BLOCK_SIZE)); in ecb_encrypt()
236 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
260 round_down(nbytes, AES_BLOCK_SIZE)); in ecb_decrypt()
261 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
284 round_down(nbytes, AES_BLOCK_SIZE), in cbc_encrypt()
286 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
310 round_down(nbytes, AES_BLOCK_SIZE), in cbc_decrypt()
312 err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE); in cbc_decrypt()
323 u64 keystream[AES_BLOCK_SIZE / sizeof(u64)]; in ctr_crypt_final()
329 keystream, AES_BLOCK_SIZE); in ctr_crypt_final()
[all...]
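A detail in ctr_crypt_final() here: the keystream is declared as u64 keystream[AES_BLOCK_SIZE / sizeof(u64)], two 64-bit words rather than sixteen bytes, which (presumably) guarantees 8-byte alignment for the buffer the cipher writes. Spelled out:

#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
    /* two u64 words instead of 16 bytes: natural 8-byte alignment */
    uint64_t keystream[AES_BLOCK_SIZE / sizeof(uint64_t)];

    printf("%zu words, alignment %zu\n",
           sizeof(keystream) / sizeof(keystream[0]),
           _Alignof(uint64_t));
    return 0;
}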
/kernel/linux/linux-5.10/drivers/crypto/ccp/
ccp-crypto.h
108 u8 k1[AES_BLOCK_SIZE];
109 u8 k2[AES_BLOCK_SIZE];
114 u8 iv[AES_BLOCK_SIZE];
117 u8 tag[AES_BLOCK_SIZE];
121 u8 rfc3686_iv[AES_BLOCK_SIZE];
141 u8 iv[AES_BLOCK_SIZE];
145 u8 buf[AES_BLOCK_SIZE];
149 u8 pad[AES_BLOCK_SIZE];
157 u8 iv[AES_BLOCK_SIZE];
160 u8 buf[AES_BLOCK_SIZE];
[all...]
/kernel/linux/linux-6.6/drivers/crypto/ccp/
ccp-crypto.h
109 u8 k1[AES_BLOCK_SIZE];
110 u8 k2[AES_BLOCK_SIZE];
115 u8 iv[AES_BLOCK_SIZE];
118 u8 tag[AES_BLOCK_SIZE];
122 u8 rfc3686_iv[AES_BLOCK_SIZE];
142 u8 iv[AES_BLOCK_SIZE];
146 u8 buf[AES_BLOCK_SIZE];
150 u8 pad[AES_BLOCK_SIZE];
158 u8 iv[AES_BLOCK_SIZE];
161 u8 buf[AES_BLOCK_SIZE];
[all...]
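The ccp request contexts reserve an AES_BLOCK_SIZE buffer for every per-request quantity: CMAC subkeys k1 and k2, the chaining IV, the authentication tag, the RFC 3686 counter block, plus staging and padding buffers. A condensed, illustrative layout (the grouping into one struct is a guess; only the field names follow the hits):

#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Illustrative only: field names follow the search hits above, but the
 * grouping into one struct is a guess, not the driver's real layout. */
struct aes_req_ctx_sketch {
    uint8_t k1[AES_BLOCK_SIZE];          /* CMAC subkey 1 */
    uint8_t k2[AES_BLOCK_SIZE];          /* CMAC subkey 2 */
    uint8_t iv[AES_BLOCK_SIZE];          /* chaining value */
    uint8_t tag[AES_BLOCK_SIZE];         /* authentication tag */
    uint8_t rfc3686_iv[AES_BLOCK_SIZE];  /* CTR-mode nonce/IV/counter block */
    uint8_t buf[AES_BLOCK_SIZE];         /* partial-block staging */
    uint8_t pad[AES_BLOCK_SIZE];
};

int main(void)
{
    printf("%zu bytes of block-sized state\n",
           sizeof(struct aes_req_ctx_sketch));
    return 0;
}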
