Lines matching refs: req
82 static int xts_xor_tweak(struct skcipher_request *req, bool second_pass,
85 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
86 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
87 const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
94 req = &rctx->subreq;
96 skcipher_request_set_tfm(req, tfm);
98 err = skcipher_walk_virt(&w, req, false);
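The fragments above (lines 82-98) appear to come from the tweak-XOR helper of the kernel's generic XTS template. Below is a hedged sketch of the surrounding function, reconstructed from these fragments plus the general shape of that template rather than quoted verbatim; the ciphertext-stealing special case inside the loop (the only place the enc flag matters) is omitted, and the _pre/_post wrappers listed next simply select the pass.

	/*
	 * Sketch (not verbatim source): XOR the running tweak T into every
	 * 16-byte block.  The helper runs twice per request, before and after
	 * the inner ecb() pass; on the second pass the walk is redirected to
	 * the cached subrequest, bound to the template's own tfm.
	 */
	static int xts_xor_tweak(struct skcipher_request *req, bool second_pass,
				 bool enc)
	{
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		struct skcipher_walk w;
		le128 t = rctx->t;
		int err;

		if (second_pass) {
			/* second pass: walk the subrequest set up earlier */
			req = &rctx->subreq;
			skcipher_request_set_tfm(req, tfm);
		}
		err = skcipher_walk_virt(&w, req, false);

		while (w.nbytes) {
			unsigned int avail = w.nbytes;
			le128 *wsrc = w.src.virt.addr;
			le128 *wdst = w.dst.virt.addr;

			do {
				le128_xor(wdst++, &t, wsrc++);
				/* advance the tweak: t = t * x in GF(2^128) */
				gf128mul_x_ble(&t, &t);
			} while ((avail -= XTS_BLOCK_SIZE) >= XTS_BLOCK_SIZE);

			err = skcipher_walk_done(&w, avail);
		}

		return err;
	}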
133 static int xts_xor_tweak_pre(struct skcipher_request *req, bool enc)
135 return xts_xor_tweak(req, false, enc);
138 static int xts_xor_tweak_post(struct skcipher_request *req, bool enc)
140 return xts_xor_tweak(req, true, enc);
145 struct skcipher_request *req = data;
149 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
156 skcipher_request_complete(req, err);
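Lines 145-156 belong to the completion callback used when the ciphertext-stealing subrequest finishes asynchronously. A hedged reconstruction, assuming the original request pointer travels in the callback's data argument as line 145 suggests:

	static void xts_cts_done(void *data, int err)
	{
		struct skcipher_request *req = data;
		le128 b;

		if (!err) {
			struct xts_request_ctx *rctx = skcipher_request_ctx(req);

			/* fold the saved tweak into the stolen final block in place */
			scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
			le128_xor(&b, &rctx->t, &b);
			scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
		}

		skcipher_request_complete(req, err);
	}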
159 static int xts_cts_final(struct skcipher_request *req,
160 int (*crypt)(struct skcipher_request *req))
163 crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
164 int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
165 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
167 int tail = req->cryptlen % XTS_BLOCK_SIZE;
171 rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
176 scatterwalk_map_and_copy(b, req->src, offset, tail, 0);
183 skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done,
184 req);
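Lines 159-184 are from the ciphertext-stealing step that handles requests whose length is not a multiple of XTS_BLOCK_SIZE: the destination is fast-forwarded to the last full output block, the partial tail is folded into it, and one extra block is pushed through the child cipher with xts_cts_done as its completion. A condensed, hedged sketch; type and field names (xts_tfm_ctx, ctx->child) follow the upstream template but are reconstructed here, not taken from the listing:

	static int xts_cts_final(struct skcipher_request *req,
				 int (*crypt)(struct skcipher_request *req))
	{
		const struct xts_tfm_ctx *ctx =
			crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
		int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);
		struct skcipher_request *subreq = &rctx->subreq;
		int tail = req->cryptlen % XTS_BLOCK_SIZE;
		le128 b[2];
		int err;

		/* point rctx->tail at the last full block already written to dst */
		rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
					      offset - XTS_BLOCK_SIZE);

		/* steal that block, append the partial tail from src, XOR the tweak */
		scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
		b[1] = b[0];
		scatterwalk_map_and_copy(b, req->src, offset, tail, 0);
		le128_xor(b, &rctx->t, b);
		scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);

		/* one more block through the raw child cipher */
		skcipher_request_set_tfm(subreq, ctx->child);
		skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done,
					      req);
		skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
					   XTS_BLOCK_SIZE, NULL);

		err = crypt(subreq);
		if (err)
			return err;

		/* synchronous completion: fold the tweak back in, as xts_cts_done does */
		scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
		le128_xor(b, &rctx->t, b);
		scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);

		return 0;
	}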
201 struct skcipher_request *req = data;
204 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
207 err = xts_xor_tweak_post(req, true);
209 if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
210 err = xts_cts_final(req, crypto_skcipher_encrypt);
216 skcipher_request_complete(req, err);
221 struct skcipher_request *req = data;
224 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
227 err = xts_xor_tweak_post(req, false);
229 if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
230 err = xts_cts_final(req, crypto_skcipher_decrypt);
236 skcipher_request_complete(req, err);
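Lines 201-216 and 221-236 are the mirror-image completion callbacks for an asynchronous ecb() pass: run the second tweak-XOR pass, then, if the length is not block-aligned, chain into the ciphertext-stealing step. A hedged sketch of the encrypt side; the decrypt side differs only in passing false and calling crypto_skcipher_decrypt:

	static void xts_encrypt_done(void *data, int err)
	{
		struct skcipher_request *req = data;

		if (!err) {
			struct xts_request_ctx *rctx = skcipher_request_ctx(req);

			/* keep only MAY_BACKLOG on the subrequest across completion */
			rctx->subreq.base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;
			err = xts_xor_tweak_post(req, true);

			if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
				err = xts_cts_final(req, crypto_skcipher_encrypt);
				if (err == -EINPROGRESS || err == -EBUSY)
					return;
			}
		}

		skcipher_request_complete(req, err);
	}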
239 static int xts_init_crypt(struct skcipher_request *req,
243 crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
244 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
247 if (req->cryptlen < XTS_BLOCK_SIZE)
251 skcipher_request_set_callback(subreq, req->base.flags, compl, req);
252 skcipher_request_set_crypt(subreq, req->dst, req->dst,
253 req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);
256 crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);
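Lines 239-256 come from the shared setup helper: it rejects requests shorter than one block, points the cached subrequest at the child transform over the block-aligned prefix of dst, and encrypts the IV with the separate tweak cipher to seed the running tweak in rctx->t. Hedged sketch, reconstructed around the listed lines:

	static int xts_init_crypt(struct skcipher_request *req,
				  crypto_completion_t compl)
	{
		const struct xts_tfm_ctx *ctx =
			crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);
		struct skcipher_request *subreq = &rctx->subreq;

		if (req->cryptlen < XTS_BLOCK_SIZE)
			return -EINVAL;

		skcipher_request_set_tfm(subreq, ctx->child);
		skcipher_request_set_callback(subreq, req->base.flags, compl, req);
		skcipher_request_set_crypt(subreq, req->dst, req->dst,
					   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

		/* first tweak value: T = E_Ktweak(IV), one block via the tweak cipher */
		crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

		return 0;
	}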
261 static int xts_encrypt(struct skcipher_request *req)
263 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
267 err = xts_init_crypt(req, xts_encrypt_done) ?:
268 xts_xor_tweak_pre(req, true) ?:
270 xts_xor_tweak_post(req, true);
272 if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
275 return xts_cts_final(req, crypto_skcipher_encrypt);
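Lines 261-275 are the encrypt entry point. The ?: chain is the GCC/Clang conditional-operator extension with the middle operand omitted: each step runs only if the previous one returned 0, and err ends up holding the first nonzero error. Hedged sketch (the decrypt entry point at lines 278-292 mirrors it with false and crypto_skcipher_decrypt):

	static int xts_encrypt(struct skcipher_request *req)
	{
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);
		struct skcipher_request *subreq = &rctx->subreq;
		int err;

		/* setup, pre-XOR, ecb() pass on the subrequest, post-XOR */
		err = xts_init_crypt(req, xts_encrypt_done) ?:
		      xts_xor_tweak_pre(req, true) ?:
		      crypto_skcipher_encrypt(subreq) ?:
		      xts_xor_tweak_post(req, true);

		if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
			return err;

		/* non-block-aligned length: finish with ciphertext stealing */
		return xts_cts_final(req, crypto_skcipher_encrypt);
	}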
278 static int xts_decrypt(struct skcipher_request *req)
280 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
284 err = xts_init_crypt(req, xts_decrypt_done) ?:
285 xts_xor_tweak_pre(req, false) ?:
287 xts_xor_tweak_post(req, false);
289 if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
292 return xts_cts_final(req, crypto_skcipher_decrypt);
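Both entry points can be exercised from userspace through the AF_ALG socket interface. The sketch below is illustrative only: the "xts(aes)" name, the 32-byte key (two AES-128 halves), the zero IV and the 512-byte buffer are assumptions, not taken from the listing, and error handling is omitted for brevity.

	/* Minimal AF_ALG sketch: encrypt one 512-byte buffer with xts(aes). */
	#include <stdio.h>
	#include <string.h>
	#include <unistd.h>
	#include <sys/socket.h>
	#include <linux/if_alg.h>

	int main(void)
	{
		struct sockaddr_alg sa = {
			.salg_family = AF_ALG,
			.salg_type   = "skcipher",
			.salg_name   = "xts(aes)",
		};
		unsigned char key[32];			/* data-unit key half + tweak key half */
		unsigned char iv[16]  = { 0 };		/* sector number / IV (illustrative) */
		unsigned char buf[512] = { 0 };
		int tfmfd, opfd;

		memset(key, 0x55, 16);			/* distinct halves (FIPS rejects equal halves) */
		memset(key + 16, 0xaa, 16);

		tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
		bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
		setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, sizeof(key));
		opfd = accept(tfmfd, NULL, 0);

		/* control messages carry the operation and the IV */
		char cbuf[CMSG_SPACE(sizeof(int)) +
			  CMSG_SPACE(sizeof(struct af_alg_iv) + sizeof(iv))] = { 0 };
		struct iovec iov = { .iov_base = buf, .iov_len = sizeof(buf) };
		struct msghdr msg = {
			.msg_control    = cbuf,
			.msg_controllen = sizeof(cbuf),
			.msg_iov        = &iov,
			.msg_iovlen     = 1,
		};
		struct cmsghdr *cmsg = CMSG_FIRSTHDR(&msg);

		cmsg->cmsg_level = SOL_ALG;
		cmsg->cmsg_type  = ALG_SET_OP;
		cmsg->cmsg_len   = CMSG_LEN(sizeof(int));
		*(int *)CMSG_DATA(cmsg) = ALG_OP_ENCRYPT;	/* ALG_OP_DECRYPT for the reverse */

		cmsg = CMSG_NXTHDR(&msg, cmsg);
		cmsg->cmsg_level = SOL_ALG;
		cmsg->cmsg_type  = ALG_SET_IV;
		cmsg->cmsg_len   = CMSG_LEN(sizeof(struct af_alg_iv) + sizeof(iv));
		{
			struct af_alg_iv *aiv = (void *)CMSG_DATA(cmsg);
			aiv->ivlen = sizeof(iv);
			memcpy(aiv->iv, iv, sizeof(iv));
		}

		sendmsg(opfd, &msg, 0);			/* submit plaintext */
		read(opfd, buf, sizeof(buf));		/* read back ciphertext */

		printf("first ciphertext byte: %02x\n", buf[0]);
		close(opfd);
		close(tfmfd);
		return 0;
	}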