Lines matching refs:req in crypto/xts.c (the Linux kernel XTS template)
81 static int xts_xor_tweak(struct skcipher_request *req, bool second_pass,
84 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
85 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
86 const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
93 req = &rctx->subreq;
95 skcipher_request_set_tfm(req, tfm);
97 err = skcipher_walk_virt(&w, req, false);
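
The hits above are from xts_xor_tweak(), the pass that XORs the running tweak
into every XTS_BLOCK_SIZE block of the request and advances the tweak by a
multiplication in GF(2^128). Below is a minimal sketch of that inner walk
loop, assuming the gf128mul/b128ops helpers the upstream file relies on; the
sketch_ name is mine, and the ciphertext-stealing edge case and the
second-pass switch to rctx->subreq (lines 93 and 95 above) are left out.

/* Hedged sketch, not the verbatim kernel loop: XOR the tweak into each
 * block in place and multiply the tweak by alpha for the next block. */
#include <crypto/internal/skcipher.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/xts.h>

static int sketch_xor_tweak_loop(struct skcipher_request *req, le128 t)
{
        struct skcipher_walk w;
        int err;

        err = skcipher_walk_virt(&w, req, false);

        while (w.nbytes) {
                unsigned int avail = w.nbytes;
                le128 *wsrc = w.src.virt.addr;
                le128 *wdst = w.dst.virt.addr;

                do {
                        le128_xor(wdst++, &t, wsrc++);  /* dst = src ^ T */
                        gf128mul_x_ble(&t, &t);         /* T = T * alpha */
                } while ((avail -= XTS_BLOCK_SIZE) >= XTS_BLOCK_SIZE);

                err = skcipher_walk_done(&w, avail);
        }

        return err;
}

The same pass runs twice per request, which is what the xts_xor_tweak_pre()
and xts_xor_tweak_post() wrappers below express: once over the input before
the child cipher and once over its output afterwards.
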
132 static int xts_xor_tweak_pre(struct skcipher_request *req, bool enc)
134 return xts_xor_tweak(req, false, enc);
137 static int xts_xor_tweak_post(struct skcipher_request *req, bool enc)
139 return xts_xor_tweak(req, true, enc);
144 struct skcipher_request *req = areq->data;
148 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
155 skcipher_request_complete(req, err);
158 static int xts_cts_final(struct skcipher_request *req,
159 int (*crypt)(struct skcipher_request *req))
162 crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
163 int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
164 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
166 int tail = req->cryptlen % XTS_BLOCK_SIZE;
170 rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
175 scatterwalk_map_and_copy(b, req->src, offset, tail, 0);
182 skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done,
183 req);
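
xts_cts_final() implements ciphertext stealing for requests whose cryptlen is
not a multiple of XTS_BLOCK_SIZE: the last full block and the leftover tail
are re-encrypted together as one stolen block. The sketch below only shows
how the scatterwalk helpers seen at lines 170 and 175 gather that data; the
tweak XOR, the child-cipher subrequest and the write-back are omitted, and
the two-element b[] buffer layout is an assumption modelled on the upstream
pattern rather than a verbatim copy.

/* Hedged sketch: locate the last full block already written to req->dst,
 * save it, and splice the partial input tail from req->src over its front. */
#include <crypto/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/b128ops.h>
#include <crypto/xts.h>

static struct scatterlist *sketch_cts_gather(struct skcipher_request *req,
                                             struct scatterlist sg[2],
                                             le128 b[2])
{
        int tail = req->cryptlen % XTS_BLOCK_SIZE;
        int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
        struct scatterlist *last;

        /* Fast-forward the dst scatterlist to the last full output block. */
        last = scatterwalk_ffwd(sg, req->dst, offset - XTS_BLOCK_SIZE);

        /* Read that block out of the scatterlist ... */
        scatterwalk_map_and_copy(b, last, 0, XTS_BLOCK_SIZE, 0);
        b[1] = b[0];                            /* keep a copy of it */

        /* ... and overwrite its front with the 'tail' leftover input bytes. */
        scatterwalk_map_and_copy(b, req->src, offset, tail, 0);

        return last;
}
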
200 struct skcipher_request *req = areq->data;
203 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
206 err = xts_xor_tweak_post(req, true);
208 if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
209 err = xts_cts_final(req, crypto_skcipher_encrypt);
215 skcipher_request_complete(req, err);
220 struct skcipher_request *req = areq->data;
223 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
226 err = xts_xor_tweak_post(req, false);
228 if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
229 err = xts_cts_final(req, crypto_skcipher_decrypt);
235 skcipher_request_complete(req, err);
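
xts_encrypt_done() and xts_decrypt_done() share one shape: when the child
cipher completes asynchronously, run the second tweak pass, fall into the CTS
step if the length is ragged, and only then complete the original request.
Roughly, with the encrypt-side arguments (a hedged sketch that leans on the
file's static helpers; housekeeping such as clearing CRYPTO_TFM_REQ_MAY_SLEEP
on the subrequest is dropped, and the crypto_async_request-based callback
signature simply mirrors the areq->data usage shown above):

static void sketch_crypt_done(struct crypto_async_request *areq, int err)
{
        struct skcipher_request *req = areq->data;

        if (!err) {
                err = xts_xor_tweak_post(req, true);    /* second XOR pass */

                if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
                        err = xts_cts_final(req, crypto_skcipher_encrypt);
                        if (err == -EINPROGRESS || err == -EBUSY)
                                return; /* xts_cts_done() completes req */
                }
        }

        skcipher_request_complete(req, err);
}
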
238 static int xts_init_crypt(struct skcipher_request *req,
242 crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
243 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
246 if (req->cryptlen < XTS_BLOCK_SIZE)
250 skcipher_request_set_callback(subreq, req->base.flags, compl, req);
251 skcipher_request_set_crypt(subreq, req->dst, req->dst,
252 req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);
255 crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);
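
xts_init_crypt() is the shared setup for both directions: reject requests
shorter than one block, point the subrequest at the child skcipher over the
full-block portion of the data in place (dst to dst, rounded down), and
derive the initial tweak by encrypting the IV with the separate tweak cipher,
T = E_K2(IV). Sketched below; the struct xts_tfm_ctx type and its child and
tweak fields come from the surrounding file and are assumptions here, as is
the exact -EINVAL return value.

static int sketch_init_crypt(struct skcipher_request *req,
                             crypto_completion_t compl)
{
        const struct xts_tfm_ctx *ctx =
                crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        struct xts_request_ctx *rctx = skcipher_request_ctx(req);
        struct skcipher_request *subreq = &rctx->subreq;

        if (req->cryptlen < XTS_BLOCK_SIZE)
                return -EINVAL;         /* XTS needs at least one full block */

        skcipher_request_set_tfm(subreq, ctx->child);
        skcipher_request_set_callback(subreq, req->base.flags, compl, req);
        skcipher_request_set_crypt(subreq, req->dst, req->dst,
                                   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

        /* T = E_K2(IV): the standalone tweak cipher encrypts the IV into rctx->t. */
        crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

        return 0;
}
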
260 static int xts_encrypt(struct skcipher_request *req)
262 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
266 err = xts_init_crypt(req, xts_encrypt_done) ?:
267 xts_xor_tweak_pre(req, true) ?:
269 xts_xor_tweak_post(req, true);
271 if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
274 return xts_cts_final(req, crypto_skcipher_encrypt);
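
The listing skips line 268 of xts_encrypt() because it does not name req
directly; the missing step is the child cipher run on the subrequest. A
hedged reconstruction of the whole path shows the error-short-circuiting
?: chain, with ciphertext stealing only as a fallback for ragged lengths:

static int sketch_xts_encrypt(struct skcipher_request *req)
{
        struct xts_request_ctx *rctx = skcipher_request_ctx(req);
        int err;

        err = xts_init_crypt(req, xts_encrypt_done) ?:
              xts_xor_tweak_pre(req, true) ?:
              crypto_skcipher_encrypt(&rctx->subreq) ?:  /* the elided step */
              xts_xor_tweak_post(req, true);

        if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
                return err;

        return xts_cts_final(req, crypto_skcipher_encrypt);
}

xts_decrypt() below is the mirror image, passing false to the tweak helpers
and crypto_skcipher_decrypt to the CTS step.
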
277 static int xts_decrypt(struct skcipher_request *req)
279 struct xts_request_ctx *rctx = skcipher_request_ctx(req);
283 err = xts_init_crypt(req, xts_decrypt_done) ?:
284 xts_xor_tweak_pre(req, false) ?:
286 xts_xor_tweak_post(req, false);
288 if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
291 return xts_cts_final(req, crypto_skcipher_decrypt);
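
For context, xts_encrypt() and xts_decrypt() are what a kernel-side caller
reaches through the skcipher API once the template has been instantiated,
e.g. as "xts(aes)". The usage sketch below is not taken from the file above:
the function name, buffer handling and flag choices are illustrative, and it
assumes the synchronous crypto_wait_req() helper for simplicity.

#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>
#include <crypto/skcipher.h>

static int sketch_xts_encrypt_oneshot(u8 *buf, unsigned int len,
                                      const u8 *key, unsigned int keylen,
                                      u8 iv[16])
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int err;

        tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* XTS keys are double length: data key followed by tweak key. */
        err = crypto_skcipher_setkey(tfm, key, keylen);
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        sg_init_one(&sg, buf, len);
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                      CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, len, iv);

        /* Lands in xts_encrypt() above when the generic template is used. */
        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}
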