Lines matching refs: iv
92 const u8 *in, unsigned int len, u8 *iv);
94 const u8 *in, unsigned int len, u8 *iv);
96 const u8 *in, unsigned int len, u8 *iv);
98 const u8 *in, unsigned int len, u8 *iv);
104 const u8 *in, unsigned int len, u8 *iv);
107 const u8 *in, unsigned int len, u8 *iv);
112 const u8 *in, unsigned int len, u8 *iv);
118 u8 *iv,
132 asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
134 asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
136 asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
140 asmlinkage void aes_xctr_enc_128_avx_by8(const u8 *in, const u8 *iv,
144 asmlinkage void aes_xctr_enc_192_avx_by8(const u8 *in, const u8 *iv,
148 asmlinkage void aes_xctr_enc_256_avx_by8(const u8 *in, const u8 *iv,
159 u8 *iv,
182 u8 *iv,
348 nbytes & AES_BLOCK_MASK, walk.iv);
370 nbytes & AES_BLOCK_MASK, walk.iv);
403 req->iv);
421 req->iv);
429 walk.nbytes, walk.iv);
459 req->iv);
477 req->iv);
485 walk.nbytes, walk.iv);
493 const u8 *in, unsigned int len, u8 *iv)
502 aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
504 aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
506 aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
526 walk.iv);
530 aesni_enc(ctx, keystream, walk.iv);
534 crypto_inc(walk.iv, AES_BLOCK_SIZE);
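The matches around file lines 493-534 show the CTR glue: one of the aes_ctr_enc_{128,192,256}_avx_by8 routines is picked by key length, and the leftover partial block is handled by encrypting walk.iv into a keystream block with aesni_enc() and advancing the counter with crypto_inc(). A minimal user-space sketch of that tail handling follows; the callback type and helper names (block_enc_fn, ctr_inc_be, ctr_final_partial_block) are invented for the sketch and only stand in for the kernel helpers visible above.

    #include <stdint.h>
    #include <stddef.h>

    #define AES_BLOCK_SIZE 16

    /* Hypothetical single-block encryptor standing in for the aesni_enc()
     * call seen in the listing; any AES implementation could back it. */
    typedef void (*block_enc_fn)(const void *key, uint8_t out[AES_BLOCK_SIZE],
                                 const uint8_t in[AES_BLOCK_SIZE]);

    /* Big-endian increment of the 16-byte counter block, mirroring what
     * crypto_inc(walk.iv, AES_BLOCK_SIZE) does in the listed code. */
    static void ctr_inc_be(uint8_t ctr[AES_BLOCK_SIZE])
    {
            for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--) {
                    if (++ctr[i] != 0)
                            break;
            }
    }

    /* CTR tail: encrypt the counter block into a keystream block, XOR only
     * the bytes that remain, then advance the counter as the listed code
     * does with crypto_inc(). */
    static void ctr_final_partial_block(block_enc_fn enc, const void *key,
                                        uint8_t ctr[AES_BLOCK_SIZE],
                                        uint8_t *dst, const uint8_t *src,
                                        size_t nbytes)
    {
            uint8_t keystream[AES_BLOCK_SIZE];

            enc(key, keystream, ctr);
            for (size_t i = 0; i < nbytes; i++)
                    dst[i] = src[i] ^ keystream[i];
            ctr_inc_be(ctr);
    }

Full AES blocks are expected to be consumed by the by-8 AVX routines chosen in the key-length dispatch at file lines 502-506, leaving only a sub-block remainder for a path like this.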
544 const u8 *in, unsigned int len, u8 *iv,
548 aes_xctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len,
551 aes_xctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len,
554 aes_xctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len,
576 walk.iv, byte_ctr);
581 memcpy(block, walk.iv, AES_BLOCK_SIZE);
671 u8 *iv, void *aes_ctx, u8 *auth_tag,
709 aesni_gcm_init_avx_gen4(aes_ctx, data, iv, hash_subkey, assoc,
712 aesni_gcm_init_avx_gen2(aes_ctx, data, iv, hash_subkey, assoc,
715 aesni_gcm_init(aes_ctx, data, iv, hash_subkey, assoc, assoclen);
780 u8 *hash_subkey, u8 *iv, void *aes_ctx)
787 err = gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv, aes_ctx,
799 u8 *hash_subkey, u8 *iv, void *aes_ctx)
807 err = gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv, aes_ctx,
831 u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
843 *(iv+i) = ctx->nonce[i];
845 *(iv+4+i) = req->iv[i];
846 *((__be32 *)(iv+12)) = counter;
848 return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
859 u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
871 *(iv+i) = ctx->nonce[i];
873 *(iv+4+i) = req->iv[i];
874 *((__be32 *)(iv+12)) = counter;
876 return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
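The matches at file lines 831-876 assemble the 16-byte GCM counter block from a 4-byte nonce held in the context, the 8-byte per-request IV, and a 32-bit counter stored at offset 12, i.e. the RFC 4106 layout of salt || explicit IV || counter. A minimal sketch of that assembly, assuming the counter value is 1 as the standard 96-bit-IV GCM J0 construction uses (the listing shows only the store, not the value); the function and parameter names are invented for the sketch:

    #include <stdint.h>
    #include <string.h>

    /* RFC 4106 style counter block: 4-byte salt from the key material,
     * 8-byte explicit IV from the packet, big-endian counter of 1. */
    static void rfc4106_build_j0(uint8_t j0[16], const uint8_t salt[4],
                                 const uint8_t explicit_iv[8])
    {
            memcpy(j0, salt, 4);            /* mirrors *(iv+i)   = ctx->nonce[i] */
            memcpy(j0 + 4, explicit_iv, 8); /* mirrors *(iv+4+i) = req->iv[i]    */
            j0[12] = 0;                     /* mirrors *((__be32 *)(iv+12)) = counter */
            j0[13] = 0;
            j0[14] = 0;
            j0[15] = 1;
    }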
929 blocks * AES_BLOCK_SIZE, req->iv);
942 aesni_enc(aes_ctx(ctx->raw_tweak_ctx), walk.iv, walk.iv);
953 nbytes, walk.iv);
957 nbytes, walk.iv);
975 req->iv);
985 walk.nbytes, walk.iv);
989 walk.nbytes, walk.iv);
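At file line 942 the IV is encrypted in place with the separate tweak key (aesni_enc on walk.iv), which is how XTS derives its first tweak; the per-block stepping of that tweak is not visible in these matches. For reference, the standard IEEE P1619 tweak update is a multiplication by alpha in GF(2^128) with the 16 bytes read as a little-endian value; a standalone sketch, with the function name invented here:

    #include <stdint.h>

    /* Multiply the 128-bit tweak by x (alpha) in GF(2^128), byte 0 being the
     * least-significant byte, with the 0x87 reduction on carry-out. */
    static void xts_mul_alpha(uint8_t tweak[16])
    {
            uint8_t carry = 0;

            for (int i = 0; i < 16; i++) {
                    uint8_t next_carry = tweak[i] >> 7;

                    tweak[i] = (uint8_t)((tweak[i] << 1) | carry);
                    carry = next_carry;
            }
            if (carry)
                    tweak[0] ^= 0x87;   /* x^128 = x^7 + x^2 + x + 1 */
    }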
1160 u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1163 memcpy(iv, req->iv, 12);
1164 *((__be32 *)(iv+12)) = counter;
1166 return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
1177 u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1179 memcpy(iv, req->iv, 12);
1180 *((__be32 *)(iv+12)) = counter;
1182 return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
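The final group (file lines 1160-1182) is the plain gcm(aes) path: the request's 12-byte IV is copied as-is and a 32-bit counter is appended at offset 12, giving the usual J0 = IV || 0^31 || 1 for 96-bit IVs. A compact sketch, mirroring the RFC 4106 one above except that all 12 bytes come from the request; the counter value of 1 is again an assumption based on the standard construction, and the function name is invented:

    #include <stdint.h>
    #include <string.h>

    /* gcm(aes) with a 96-bit IV: J0 is the IV with a big-endian 1 appended. */
    static void gcm_build_j0(uint8_t j0[16], const uint8_t iv[12])
    {
            memcpy(j0, iv, 12);     /* mirrors memcpy(iv, req->iv, 12) */
            j0[12] = 0;
            j0[13] = 0;
            j0[14] = 0;
            j0[15] = 1;             /* mirrors *((__be32 *)(iv+12)) = counter */
    }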