Lines matching refs:iv

93 			      const u8 *in, unsigned int len, u8 *iv);
95 const u8 *in, unsigned int len, u8 *iv);
101 const u8 *in, unsigned int len, u8 *iv);
104 const u8 *in, unsigned int len, u8 *iv);
109 const u8 *in, unsigned int len, u8 *iv);
111 const u8 *in, unsigned int len, u8 *iv);
119 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
130 const u8 *in, unsigned long plaintext_len, u8 *iv,
140 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
152 const u8 *in, unsigned long ciphertext_len, u8 *iv,
159 u8 *iv,
174 void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
191 asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
193 asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
195 asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
204 u8 *iv,
222 const u8 *in, unsigned long plaintext_len, u8 *iv,
228 const u8 *in, unsigned long ciphertext_len, u8 *iv,
246 u8 *iv,
264 const u8 *in, unsigned long plaintext_len, u8 *iv,
270 const u8 *in, unsigned long ciphertext_len, u8 *iv,
429 nbytes & AES_BLOCK_MASK, walk.iv);
451 nbytes & AES_BLOCK_MASK, walk.iv);
464 u8 *ctrblk = walk->iv;
477 const u8 *in, unsigned int len, u8 *iv)
486 aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
488 aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
490 aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
506 nbytes & AES_BLOCK_MASK, walk.iv);
543 static void aesni_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
545 glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_enc);
548 static void aesni_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
550 glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_dec);
553 static void aesni_xts_enc32(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
555 aesni_xts_encrypt(ctx, dst, src, 32 * AES_BLOCK_SIZE, (u8 *)iv);
558 static void aesni_xts_dec32(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
560 aesni_xts_decrypt(ctx, dst, src, 32 * AES_BLOCK_SIZE, (u8 *)iv);
687 u8 *iv, void *aes_ctx)
742 gcm_tfm->init(aes_ctx, data, iv, hash_subkey, assoc, assoclen);
816 u8 *hash_subkey, u8 *iv, void *aes_ctx)
818 return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
823 u8 *hash_subkey, u8 *iv, void *aes_ctx)
825 return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
835 u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
847 *(iv+i) = ctx->nonce[i];
849 *(iv+4+i) = req->iv[i];
850 *((__be32 *)(iv+12)) = counter;
852 return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
863 u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
875 *(iv+i) = ctx->nonce[i];
877 *(iv+4+i) = req->iv[i];
878 *((__be32 *)(iv+12)) = counter;
880 return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
994 u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
997 memcpy(iv, req->iv, 12);
998 *((__be32 *)(iv+12)) = counter;
1000 return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
1011 u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1013 memcpy(iv, req->iv, 12);
1014 *((__be32 *)(iv+12)) = counter;
1016 return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
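The matches above around the rfc4106 and plain gcm(aes) encrypt/decrypt paths show how the 16-byte pre-counter block (the "j0" mentioned in the prototype comments) is assembled before the GCM helpers are called: the rfc4106 path fills the first 4 bytes from the key's nonce, the next 8 from the per-request IV, and writes a big-endian 32-bit counter of 1 into the last 4 bytes, while the plain gcm(aes) path copies a 12-byte IV and appends the same counter. The standalone sketch below mirrors that layout only; the build_j0_rfc4106/build_j0_gcm helper names are illustrative, not kernel APIs.

	#include <stdint.h>
	#include <string.h>
	#include <stdio.h>

	/*
	 * Illustrative sketch, not kernel code: build the 16-byte
	 * pre-counter block j0 in the layout suggested by the
	 * matched lines above.
	 */

	/* rfc4106(gcm(aes)): 4-byte key nonce || 8-byte per-request IV || 0x00000001 */
	static void build_j0_rfc4106(uint8_t j0[16], const uint8_t nonce[4],
				     const uint8_t req_iv[8])
	{
		memcpy(j0, nonce, 4);        /* ctx->nonce[i]  -> iv[0..3]  */
		memcpy(j0 + 4, req_iv, 8);   /* req->iv[i]     -> iv[4..11] */
		j0[12] = 0; j0[13] = 0;      /* big-endian counter = 1      */
		j0[14] = 0; j0[15] = 1;
	}

	/* gcm(aes) with a 96-bit IV: 12-byte IV || 0x00000001 */
	static void build_j0_gcm(uint8_t j0[16], const uint8_t req_iv[12])
	{
		memcpy(j0, req_iv, 12);
		j0[12] = 0; j0[13] = 0;
		j0[14] = 0; j0[15] = 1;
	}

	int main(void)
	{
		uint8_t nonce[4] = {0}, iv8[8] = {0}, iv12[12] = {0}, j0[16];
		int i;

		build_j0_rfc4106(j0, nonce, iv8);
		for (i = 0; i < 16; i++)
			printf("%02x", j0[i]);
		printf("\n");

		build_j0_gcm(j0, iv12);
		for (i = 0; i < 16; i++)
			printf("%02x", j0[i]);
		printf("\n");
		return 0;
	}

The trailing counter of 1 follows the GCM convention for 96-bit IVs: j0 itself is used for the final tag computation, and the counter is incremented from there for the actual CTR-mode encryption of the payload.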