Lines matching references to ctx (all hits below appear to come from the arm64 SM4 crypto-extensions glue driver; the left-hand numbers are the source file's own line numbers).
79 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
85 sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
94 struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
105 sm4_ce_expand_key(key, ctx->key1.rkey_enc,
106 ctx->key1.rkey_dec, crypto_sm4_fk, crypto_sm4_ck);
107 sm4_ce_expand_key(&key[SM4_KEY_SIZE], ctx->key2.rkey_enc,
108 ctx->key2.rkey_dec, crypto_sm4_fk, crypto_sm4_ck);
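The hits at lines 79-108 are the skcipher setkey paths: a plain struct sm4_ctx carries one pair of round-key schedules, while the XTS context embeds two (key1 for the data, key2 for the tweak), so its setkey expands both halves of the double-length key. A minimal sketch of the single-key pattern, assuming the sm4_ctx layout and constants from include/crypto/sm4.h and the NEON key-schedule helper declared in this glue file:

#include <linux/errno.h>
#include <linux/linkage.h>
#include <crypto/sm4.h>
#include <crypto/internal/skcipher.h>
#include <asm/neon.h>

/* Assumed signature of the asm helper; it derives both the encryption
 * and decryption round keys in one pass. */
asmlinkage void sm4_ce_expand_key(const u8 *key, u32 *rkey_enc,
                                  u32 *rkey_dec, const u32 *fk,
                                  const u32 *ck);

static int sm4_setkey_sketch(struct crypto_skcipher *tfm, const u8 *key,
                             unsigned int key_len)
{
        struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);

        if (key_len != SM4_KEY_SIZE)
                return -EINVAL;

        kernel_neon_begin();
        sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
                          crypto_sm4_fk, crypto_sm4_ck);
        kernel_neon_end();
        return 0;
}

The XTS variant at lines 105-108 simply runs the same expansion twice, once per SM4_KEY_SIZE half of the supplied key.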
146 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
148 return sm4_ecb_do_crypt(req, ctx->rkey_enc);
154 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
156 return sm4_ecb_do_crypt(req, ctx->rkey_dec);
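Lines 146-156 show that ECB encryption and decryption share one helper and differ only in which schedule they pass: rkey_enc versus rkey_dec. A sketch of that shared walk loop, assuming a bulk helper of the form sm4_ce_crypt(rkey, dst, src, nblocks):

asmlinkage void sm4_ce_crypt(const u32 *rkey, u8 *dst, const u8 *src,
                             unsigned int nblocks); /* assumed signature */

static int sm4_ecb_do_crypt_sketch(struct skcipher_request *req,
                                   const u32 *rkey)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                unsigned int nblocks = nbytes / SM4_BLOCK_SIZE;

                if (nblocks) {
                        kernel_neon_begin();
                        sm4_ce_crypt(rkey, walk.dst.virt.addr,
                                     walk.src.virt.addr, nblocks);
                        kernel_neon_end();
                }
                err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
        }
        return err;
}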
160 struct sm4_ctx *ctx, bool encrypt)
180 sm4_ce_cbc_enc(ctx->rkey_enc, dst, src,
183 sm4_ce_cbc_dec(ctx->rkey_dec, dst, src,
198 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
200 return sm4_cbc_crypt(req, ctx, true);
206 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
208 return sm4_cbc_crypt(req, ctx, false);
214 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
227 return sm4_cbc_crypt(req, ctx, encrypt);
240 err = sm4_cbc_crypt(&subreq, ctx, encrypt);
262 sm4_ce_cbc_cts_enc(ctx->rkey_enc, walk.dst.virt.addr,
265 sm4_ce_cbc_cts_dec(ctx->rkey_dec, walk.dst.virt.addr,
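Lines 160-265 belong to the CBC family: sm4_cbc_crypt() takes the context plus an encrypt flag so the encrypt/decrypt entry points at 198-208 stay one-liners, the CTS wrapper at 214-240 reuses it for everything but the stolen tail via a subrequest, and lines 262-265 hand that tail to dedicated CTS helpers. A sketch of the core dispatch; the chaining value lives in walk.iv across chunks:

asmlinkage void sm4_ce_cbc_enc(const u32 *rkey, u8 *dst, const u8 *src,
                               u8 *iv, unsigned int nblocks); /* assumed */
asmlinkage void sm4_ce_cbc_dec(const u32 *rkey, u8 *dst, const u8 *src,
                               u8 *iv, unsigned int nblocks); /* assumed */

static int sm4_cbc_crypt_sketch(struct skcipher_request *req,
                                struct sm4_ctx *ctx, bool encrypt)
{
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) > 0) {
                unsigned int nblocks = nbytes / SM4_BLOCK_SIZE;

                if (nblocks) {
                        kernel_neon_begin();
                        if (encrypt)
                                sm4_ce_cbc_enc(ctx->rkey_enc,
                                               walk.dst.virt.addr,
                                               walk.src.virt.addr,
                                               walk.iv, nblocks);
                        else
                                sm4_ce_cbc_dec(ctx->rkey_dec,
                                               walk.dst.virt.addr,
                                               walk.src.virt.addr,
                                               walk.iv, nblocks);
                        kernel_neon_end();
                }
                err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
        }
        return err;
}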
286 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
302 sm4_ce_cfb_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
312 sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
328 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
344 sm4_ce_cfb_dec(ctx->rkey_enc, dst, src, walk.iv, nblks);
354 sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
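The CFB hits at 286-354 illustrate two points: both directions use ctx->rkey_enc, because CFB only ever runs the block cipher forward, and a sub-block tail is finished by turning the feedback register into one keystream block with sm4_ce_crypt_block() (lines 312 and 354) and XORing just the remaining bytes. A sketch of that tail step; the feedback register need not be advanced, since nothing follows the tail:

#include <crypto/algapi.h>   /* crypto_xor_cpy() */

asmlinkage void sm4_ce_crypt_block(const u32 *rkey, u8 *dst,
                                   const u8 *src); /* assumed signature */

static void sm4_cfb_tail_sketch(const struct sm4_ctx *ctx, u8 *dst,
                                const u8 *src, const u8 *iv,
                                unsigned int nbytes /* < SM4_BLOCK_SIZE */)
{
        u8 keystream[SM4_BLOCK_SIZE];

        kernel_neon_begin();
        /* One forward SM4 call on the feedback register yields keystream. */
        sm4_ce_crypt_block(ctx->rkey_enc, keystream, iv);
        kernel_neon_end();

        crypto_xor_cpy(dst, src, keystream, nbytes);
}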
370 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
386 sm4_ce_ctr_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
396 sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
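Lines 370-396 are the CTR path, which shares the same keystream-tail trick but must also advance the counter; in the kernel that big-endian increment is crypto_inc() from crypto/algapi.h. A sketch of the tail with the counter bump included, reusing the declarations from the CFB sketch above:

static void sm4_ctr_tail_sketch(const struct sm4_ctx *ctx, u8 *dst,
                                const u8 *src, u8 *ctr,
                                unsigned int nbytes /* < SM4_BLOCK_SIZE */)
{
        u8 keystream[SM4_BLOCK_SIZE];

        kernel_neon_begin();
        sm4_ce_crypt_block(ctx->rkey_enc, keystream, ctr);
        kernel_neon_end();

        /* Big-endian increment of the full-width counter block. */
        crypto_inc(ctr, SM4_BLOCK_SIZE);
        crypto_xor_cpy(dst, src, keystream, nbytes);
}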
413 struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
415 const u32 *rkey2_enc = ctx->key2.rkey_enc;
456 sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
460 sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
492 sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
496 sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
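The XTS hits at 413-496 show the two-key split: rkey2_enc (line 415) is key2's encryption schedule, used only to turn the IV into the initial tweak, while key1's enc or dec schedule processes the data. A sketch of the dispatch, assuming a helper signature of sm4_ce_xts_enc(rkey1, dst, src, tweak, nbytes, rkey2_enc) and a block-aligned request; the real code also covers ciphertext stealing, which is why there are two call sites per direction at 456-496:

asmlinkage void sm4_ce_xts_enc(const u32 *rkey1, u8 *dst, const u8 *src,
                               u8 *tweak, unsigned int nbytes,
                               const u32 *rkey2_enc); /* assumed */
asmlinkage void sm4_ce_xts_dec(const u32 *rkey1, u8 *dst, const u8 *src,
                               u8 *tweak, unsigned int nbytes,
                               const u32 *rkey2_enc); /* assumed */

static int sm4_xts_crypt_sketch(struct skcipher_request *req, bool encrypt)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        const u32 *rkey2_enc = ctx->key2.rkey_enc;
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes) {
                kernel_neon_begin();
                if (encrypt)
                        sm4_ce_xts_enc(ctx->key1.rkey_enc,
                                       walk.dst.virt.addr,
                                       walk.src.virt.addr, walk.iv,
                                       walk.nbytes, rkey2_enc);
                else
                        sm4_ce_xts_dec(ctx->key1.rkey_dec,
                                       walk.dst.virt.addr,
                                       walk.src.virt.addr, walk.iv,
                                       walk.nbytes, rkey2_enc);
                kernel_neon_end();

                /* key2 is consumed once; later chunks reuse the
                 * already-encrypted tweak held in walk.iv. */
                rkey2_enc = NULL;
                err = skcipher_walk_done(&walk, 0);
        }
        return err;
}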
615 struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
621 sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
631 struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
632 be128 *consts = (be128 *)ctx->consts;
642 sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
646 sm4_ce_crypt_block(ctx->key.rkey_enc, (u8 *)consts, (const u8 *)consts);
667 struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
680 sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
683 sm4_ce_crypt_block(ctx->key.rkey_enc, key2, ks[0]);
684 sm4_ce_crypt(ctx->key.rkey_enc, ctx->consts, ks[1], 2);
686 sm4_ce_expand_key(key2, ctx->key.rkey_enc, ctx->key.rkey_dec,
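Lines 615-686 cover the three MAC setkey variants: plain CBC-MAC (615-621) only expands the key; CMAC (631-646) additionally encrypts an all-zero block and derives the two padding constants by doubling it in GF(2^128); XCBC (667-686) encrypts fixed one-block patterns (ks) to derive a second key and its constants, then re-expands with that derived key (line 686). A sketch of the CMAC derivation, assuming a tfm context of the shape { struct sm4_ctx key; u8 consts[]; } sized for two blocks:

#include <crypto/b128ops.h>          /* be128 */
#include <crypto/internal/hash.h>    /* crypto_shash_ctx() */

struct sm4_mac_tfm_ctx {             /* assumed layout */
        struct sm4_ctx key;
        u8 __aligned(8) consts[];
};

static int sm4_cmac_setkey_sketch(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int key_len)
{
        struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        be128 *consts = (be128 *)ctx->consts;
        u64 a, b;

        if (key_len != SM4_KEY_SIZE)
                return -EINVAL;

        memset(consts, 0, SM4_BLOCK_SIZE);

        kernel_neon_begin();
        sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
                          crypto_sm4_fk, crypto_sm4_ck);
        /* L = E_K(0^128), the base value for both CMAC subkeys. */
        sm4_ce_crypt_block(ctx->key.rkey_enc, (u8 *)consts,
                           (const u8 *)consts);
        kernel_neon_end();

        /* K1 = L*x and K2 = L*x^2 in GF(2^128), reduced modulo
         * x^128 + x^7 + x^2 + x + 1 (hence the 0x87 constant). */
        a = be64_to_cpu(consts[0].a);
        b = be64_to_cpu(consts[0].b);
        consts[0].a = cpu_to_be64((a << 1) | (b >> 63));
        consts[0].b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));

        a = be64_to_cpu(consts[0].a);
        b = be64_to_cpu(consts[0].b);
        consts[1].a = cpu_to_be64((a << 1) | (b >> 63));
        consts[1].b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));

        return 0;
}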
696 struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
698 memset(ctx->digest, 0, SM4_BLOCK_SIZE);
699 ctx->len = 0;
708 struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
714 if (ctx->len || ctx->len + len < SM4_BLOCK_SIZE) {
715 l = min(len, SM4_BLOCK_SIZE - ctx->len);
717 crypto_xor(ctx->digest + ctx->len, p, l);
718 ctx->len += l;
723 if (len && (ctx->len % SM4_BLOCK_SIZE) == 0) {
726 if (len < SM4_BLOCK_SIZE && ctx->len == SM4_BLOCK_SIZE) {
728 ctx->digest, ctx->digest);
729 ctx->len = 0;
734 sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, p,
735 nblocks, (ctx->len == SM4_BLOCK_SIZE),
741 ctx->len = SM4_BLOCK_SIZE;
747 crypto_xor(ctx->digest, p, len);
748 ctx->len = len;
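The update path at 696-748 maintains one invariant: ctx->digest holds the CBC-MAC chaining value XORed with any buffered partial block, and ctx->len == SM4_BLOCK_SIZE means a full block is buffered but deliberately not yet encrypted, so that final() can treat the last block specially (CMAC XORs a subkey into it first). Lines 714-718 do the XOR-buffering, 734-735 push whole blocks through the bulk NEON helper, and 747-748 stash the tail. A simplified, block-at-a-time sketch of the same discipline (the real code batches blocks with sm4_ce_mac_update()):

struct sm4_mac_desc_ctx {            /* assumed layout */
        unsigned int len;
        u8 digest[SM4_BLOCK_SIZE];
};

static void sm4_mac_update_sketch(const struct sm4_mac_tfm_ctx *tctx,
                                  struct sm4_mac_desc_ctx *ctx,
                                  const u8 *p, unsigned int len)
{
        while (len) {
                unsigned int l = min(len, SM4_BLOCK_SIZE - ctx->len);

                crypto_xor(ctx->digest + ctx->len, p, l);
                ctx->len += l;
                p += l;
                len -= l;

                /* Encrypt only when more input is known to follow, so
                 * the last full block stays buffered for final(). */
                if (ctx->len == SM4_BLOCK_SIZE && len) {
                        kernel_neon_begin();
                        sm4_ce_crypt_block(tctx->key.rkey_enc,
                                           ctx->digest, ctx->digest);
                        kernel_neon_end();
                        ctx->len = 0;
                }
        }
}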
758 struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
761 if (ctx->len != SM4_BLOCK_SIZE) {
762 ctx->digest[ctx->len] ^= 0x80;
767 sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, consts, 1,
771 memcpy(out, ctx->digest, SM4_BLOCK_SIZE);
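The CMAC finalisation at 758-771 shows the standard padding rule: a short final block gets the 10* pad (the 0x80 at line 762) and selects the second constant K2; a full block uses K1. Either constant is XORed in along with the final encryption. A sketch, reusing the types from the sketches above and assuming the helper signature sm4_ce_mac_update(rkey, digest, src, nblocks, enc_before, enc_after):

asmlinkage void sm4_ce_mac_update(const u32 *rkey_enc, u8 *digest,
                                  const u8 *src, unsigned int nblocks,
                                  bool enc_before, bool enc_after);

static int sm4_cmac_final_sketch(struct shash_desc *desc, u8 *out)
{
        struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
        const u8 *consts = tctx->consts;

        if (ctx->len != SM4_BLOCK_SIZE) {
                /* 10* padding, then switch from K1 to K2. */
                ctx->digest[ctx->len] ^= 0x80;
                consts += SM4_BLOCK_SIZE;
        }

        kernel_neon_begin();
        sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, consts, 1,
                          false, true);
        kernel_neon_end();

        memcpy(out, ctx->digest, SM4_BLOCK_SIZE);
        return 0;
}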
779 struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
781 if (ctx->len) {
783 sm4_ce_crypt_block(tctx->key.rkey_enc, ctx->digest,
784 ctx->digest);
788 memcpy(out, ctx->digest, SM4_BLOCK_SIZE);
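The CBC-MAC finalisation at 779-788 is simpler: there is no padding constant, so if a buffered block remains in the digest (ctx->len nonzero at line 781) it is encrypted once more, and the digest is the MAC. A sketch with the same assumed types:

static int sm4_cbcmac_final_sketch(struct shash_desc *desc, u8 *out)
{
        struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);

        if (ctx->len) {
                kernel_neon_begin();
                sm4_ce_crypt_block(tctx->key.rkey_enc, ctx->digest,
                                   ctx->digest);
                kernel_neon_end();
        }

        memcpy(out, ctx->digest, SM4_BLOCK_SIZE);
        return 0;
}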