Lines Matching defs:dst
89 be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };
102 crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
103 gf128mul_lle(&dst, &key->k);
106 dg[0] = be64_to_cpu(dst.b);
107 dg[1] = be64_to_cpu(dst.a);
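
The matches at lines 89-107 sit in the per-block GHASH update of what looks like the arm64 ghash/gcm glue code: the running digest dg[] is repacked into a big-endian be128, xored with the input block, multiplied by the hash key with gf128mul_lle(), and unpacked again with the halves swapped back. A minimal standalone sketch of that update, assuming the kernel's be128 type and the crypto_xor()/gf128mul_lle() helpers; the key is passed directly as a be128 here rather than through the file's key structure:

	#include <asm/byteorder.h>	/* cpu_to_be64(), be64_to_cpu() */
	#include <crypto/algapi.h>	/* crypto_xor()                 */
	#include <crypto/gf128mul.h>	/* be128, gf128mul_lle()        */
	#include <crypto/ghash.h>	/* GHASH_BLOCK_SIZE (16)        */

	/* One GHASH block: dg holds the 128-bit state as { low, high } u64s. */
	static void ghash_update_block(u64 dg[2], const u8 *in, const be128 *key)
	{
		/* Repack the state big-endian: .a = high half, .b = low half. */
		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

		crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);	/* dst ^= block              */
		gf128mul_lle(&dst, key);			/* dst *= H (lle convention) */

		/* Unpack back into host-endian halves. */
		dg[0] = be64_to_cpu(dst.b);
		dg[1] = be64_to_cpu(dst.a);
	}
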
144 static int ghash_final(struct shash_desc *desc, u8 *dst)
155 put_unaligned_be64(ctx->digest[1], dst);
156 put_unaligned_be64(ctx->digest[0], dst + 8);
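
Lines 144-156 are ghash_final() serializing that state: GHASH output is big-endian, and with digest[0] holding the low half, the high half (digest[1]) is written first. A short sketch of just the export step, assuming the same { low, high } layout; put_unaligned_be64() is the stock kernel helper:

	#include <linux/types.h>
	#include <asm/unaligned.h>	/* put_unaligned_be64(); <linux/unaligned.h> in newer trees */

	/* Emit the 128-bit digest big-endian: high half first, then low half. */
	static void ghash_export_digest(const u64 digest[2], u8 *dst)
	{
		put_unaligned_be64(digest[1], dst);	/* bytes 0..7  */
		put_unaligned_be64(digest[0], dst + 8);	/* bytes 8..15 */
	}
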
366 struct gcm_key const *k, char *dst,
374 struct gcm_key const *k, char *dst,
500 u8 *tag, *dst;
514 dst = walk.dst.virt.addr;
519 pmull_gcm_encrypt(nblocks, dg, src, ctx, dst, iv,
525 dst += nblocks * AES_BLOCK_SIZE;
537 dst = walk.dst.virt.addr;
550 * Bounce via a buffer unless we are encrypting in place and src/dst
557 if (unlikely(tail && (tail == walk.nbytes || src != dst)))
564 if (unlikely(tail && src != dst))
565 memcpy(dst, src, tail);
573 /* copy authtag to end of dst */
574 scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
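
Lines 500-574 are the bulk encrypt path: each walk chunk is split into whole AES blocks handed to pmull_gcm_encrypt() and a sub-block tail, the tail is bounced through a stack buffer unless it can be processed in place (lines 557-565), and the authentication tag is appended after the ciphertext in req->dst (line 574). A hedged, simplified sketch of just the bounce-and-copy-back pattern; the helper name and the callback standing in for the PMULL final-block step are hypothetical:

	#include <linux/string.h>	/* memcpy()       */
	#include <linux/types.h>
	#include <crypto/aes.h>		/* AES_BLOCK_SIZE */

	/* Hypothetical stand-in for the PMULL final-block step (works in place). */
	typedef void (*gcm_final_fn)(u8 *block, unsigned int len);

	static void gcm_handle_tail(const u8 *src, u8 *dst, unsigned int tail,
				    unsigned int walk_nbytes,
				    u8 buf[AES_BLOCK_SIZE], gcm_final_fn final)
	{
		const u8 *p = src;

		/*
		 * Bounce via the stack buffer unless it is safe to process the
		 * tail in place (src == dst and the tail is not the whole chunk).
		 */
		if (tail && (tail == walk_nbytes || src != dst))
			p = memcpy(buf, src, tail);

		final((u8 *)p, tail);	/* encrypt/authenticate the tail in place */

		/* If the tail was bounced (or never aliased dst), copy it out. */
		if (tail && p != dst)
			memcpy(dst, p, tail);
	}
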
592 u8 *tag, *dst;
610 dst = walk.dst.virt.addr;
615 pmull_gcm_decrypt(nblocks, dg, src, ctx, dst, iv,
621 dst += nblocks * AES_BLOCK_SIZE;
633 dst = walk.dst.virt.addr;
644 if (unlikely(tail && (tail == walk.nbytes || src != dst)))
651 if (unlikely(tail && src != dst))
652 memcpy(dst, src, tail);
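
Lines 592-652 are the decrypt mirror image: pmull_gcm_decrypt() handles the whole blocks and the tail follows the same bounce pattern (644-652 match 557-565 above), but nothing is appended to req->dst; instead the tag computed over the ciphertext has to be checked against the one stored after the ciphertext in req->src. A sketch of that check under the usual AEAD layout (req->cryptlen includes the tag); this is an assumption about the surrounding code, not a verbatim excerpt:

	#include <linux/errno.h>
	#include <crypto/aead.h>	/* struct aead_request        */
	#include <crypto/aes.h>		/* AES_BLOCK_SIZE             */
	#include <crypto/algapi.h>	/* crypto_memneq()            */
	#include <crypto/scatterwalk.h>	/* scatterwalk_map_and_copy() */

	/*
	 * Constant-time tag check: read the stored tag from the end of req->src
	 * and compare it with the tag just computed ('tag', 'authsize' bytes).
	 */
	static int gcm_check_tag(struct aead_request *req, const u8 *tag,
				 unsigned int authsize)
	{
		u8 otag[AES_BLOCK_SIZE];

		scatterwalk_map_and_copy(otag, req->src,
					 req->assoclen + req->cryptlen - authsize,
					 authsize, 0);

		return crypto_memneq(tag, otag, authsize) ? -EBADMSG : 0;
	}
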