Lines matching refs: src
49 asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
52 asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
55 asmlinkage void pmull_gcm_encrypt(int bytes, u8 dst[], const u8 src[],
59 asmlinkage void pmull_gcm_decrypt(int bytes, u8 dst[], const u8 src[],
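The prototypes above are the assembly entry points: pmull_ghash_update_p64 uses the 64x64-bit polynomial multiply (PMULL) from the Crypto Extensions, pmull_ghash_update_p8 is the fallback built from 8-bit polynomial multiplies, and pmull_gcm_encrypt/pmull_gcm_decrypt are the fused AES-GCM routines. Below is a minimal userspace sketch of picking one GHASH update implementation up front and calling it through a function pointer; the names, the feature probe and the untyped key/head parameters are placeholders, not the kernel's API.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder type mirroring the shape of the GHASH update entry points
 * above; the key and head pointers are deliberately left untyped here. */
typedef void (*ghash_update_fn)(int blocks, uint64_t dg[2], const char *src,
                                const void *key, const char *head);

static void update_p64(int blocks, uint64_t dg[2], const char *src,
                       const void *key, const char *head)
{
    /* would use the 64x64-bit PMULL instruction */
}

static void update_p8(int blocks, uint64_t dg[2], const char *src,
                      const void *key, const char *head)
{
    /* would fall back to 8-bit polynomial multiplies */
}

/* Hypothetical feature probe standing in for the kernel's CPU checks. */
static bool cpu_has_pmull64(void)
{
    return false;
}

int main(void)
{
    /* Pick one implementation once, then call through the pointer. */
    ghash_update_fn update = cpu_has_pmull64() ? update_p64 : update_p8;
    uint64_t dg[2] = { 0, 0 };
    char block[16] = { 0 };

    update(1, dg, block, NULL, NULL);
    printf("using the %s routine\n", cpu_has_pmull64() ? "p64" : "p8");
    return 0;
}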
71 static void ghash_do_update(int blocks, u64 dg[], const char *src,
77 const u8 *in = src;
84 src += GHASH_BLOCK_SIZE;
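ghash_do_update is the plain C block walk: it folds an optional buffered head block plus full blocks from src into the running digest, advancing src by GHASH_BLOCK_SIZE per block (lines 77 and 84). A self-contained sketch of that walk, with the GF(2^128) multiply stubbed out; BLOCK_SIZE, do_update and gf128_mul_stub are illustrative names, and the real code keeps the digest as a pair of u64s rather than a byte array.

#include <stdint.h>

#define BLOCK_SIZE 16   /* GHASH_BLOCK_SIZE */

/* Stub: the real step multiplies the state by H in GF(2^128). */
static void gf128_mul_stub(uint8_t state[BLOCK_SIZE])
{
    (void)state;
}

static void do_update(int blocks, uint8_t state[BLOCK_SIZE],
                      const char *src, const char *head)
{
    do {
        const uint8_t *in = (const uint8_t *)src;

        if (head) {                 /* fold the buffered block first */
            in = (const uint8_t *)head;
            blocks++;
            head = NULL;
        } else {
            src += BLOCK_SIZE;      /* cf. "src += GHASH_BLOCK_SIZE" above */
        }

        for (int i = 0; i < BLOCK_SIZE; i++)
            state[i] ^= in[i];      /* XOR the block into the digest ... */
        gf128_mul_stub(state);      /* ... then multiply by H            */
    } while (--blocks);
}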
96 void ghash_do_simd_update(int blocks, u64 dg[], const char *src,
99 const char *src,
105 simd_update(blocks, dg, src, key->h, head);
108 ghash_do_update(blocks, dg, src, key, head);
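ghash_do_simd_update dispatches between the accelerated callback (line 105, which is handed the precomputed key->h) and the plain C fallback (line 108, which takes the whole key). The fast path has to run with the NEON unit claimed; the sketch below models that with placeholder simd_usable()/neon_begin()/neon_end() helpers, stand-ins for the kernel's crypto_simd_usable() and kernel_neon_begin()/kernel_neon_end(), which is how such dispatch is normally guarded.

#include <stdbool.h>
#include <stdint.h>

typedef void (*simd_update_fn)(int blocks, uint64_t dg[2], const char *src,
                               const void *key, const char *head);

/* Hypothetical stand-ins for the kernel's SIMD-context helpers. */
static bool simd_usable(void) { return true; }
static void neon_begin(void)  { /* claim the FP/SIMD register file */ }
static void neon_end(void)    { /* release it again */ }

static void scalar_update(int blocks, uint64_t dg[2], const char *src,
                          const void *key, const char *head)
{
    /* plain C path, cf. ghash_do_update above */
}

static void do_simd_update(int blocks, uint64_t dg[2], const char *src,
                           const void *key, const char *head,
                           simd_update_fn simd_update)
{
    if (simd_usable()) {
        neon_begin();                               /* SIMD may be used  */
        simd_update(blocks, dg, src, key, head);    /* accelerated path  */
        neon_end();
    } else {
        scalar_update(blocks, dg, src, key, head);  /* works anywhere    */
    }
}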
115 static int ghash_update(struct shash_desc *desc, const u8 *src,
130 memcpy(ctx->buf + partial, src, p);
131 src += p;
141 ghash_do_simd_update(chunk, ctx->digest, src, key,
146 src += chunk * GHASH_BLOCK_SIZE;
151 memcpy(ctx->buf + partial, src, len);
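ghash_update shows the usual streaming pattern: top up the partial block held in ctx->buf (lines 130-131), bulk-process as many full blocks as possible (the chunk variable on lines 141 and 146 merely caps how many are handed to the SIMD helper per call), then stash the remainder for the next call (line 151); the buffered partial block appears to be passed to the block routine as its head argument. A simplified, self-contained model of that buffering, not a line-for-line copy of the kernel function; stream_ctx, process_blocks and the 16-byte BLOCK_SIZE are illustrative.

#include <stdint.h>
#include <string.h>

#define BLOCK_SIZE 16   /* GHASH_BLOCK_SIZE */

struct stream_ctx {
    uint8_t  digest[BLOCK_SIZE];
    uint8_t  buf[BLOCK_SIZE];   /* partial input block, cf. ctx->buf */
    uint64_t count;             /* total bytes fed in so far         */
};

/* Stand-in for the block routine: consumes `blocks` full blocks from src,
 * preceded by the buffered block in `head` when head is non-NULL. */
static void process_blocks(int blocks, uint8_t digest[BLOCK_SIZE],
                           const uint8_t *src, const uint8_t *head)
{
}

static void stream_update(struct stream_ctx *ctx, const uint8_t *src,
                          size_t len)
{
    size_t partial = ctx->count % BLOCK_SIZE;

    ctx->count += len;

    if (partial + len >= BLOCK_SIZE) {
        if (partial) {
            size_t p = BLOCK_SIZE - partial;    /* top up the buffer */

            memcpy(ctx->buf + partial, src, p);
            src += p;
            len -= p;
        }

        int blocks = (int)(len / BLOCK_SIZE);   /* bulk-process full blocks */

        len %= BLOCK_SIZE;
        process_blocks(blocks, ctx->digest, src,
                       partial ? ctx->buf : NULL);
        src += (size_t)blocks * BLOCK_SIZE;
        partial = 0;
    }
    if (len)                                    /* stash the remainder */
        memcpy(ctx->buf + partial, src, len);
}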
274 static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
280 memcpy(&buf[*buf_count], src, buf_added);
283 src += buf_added;
290 ghash_do_simd_update(blocks, dg, src, &ctx->ghash_key,
294 src += blocks * GHASH_BLOCK_SIZE;
300 memcpy(buf, src, count);
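gcm_update_mac buffers the associated data the same way, but the fill level is carried in the caller-provided *buf_count rather than derived from a running byte count. A sketch of that bookkeeping, mirroring the listed memcpy/update/advance steps; process_blocks is again a stub standing in for ghash_do_simd_update.

#include <stdint.h>
#include <string.h>

#define BLOCK_SIZE 16

static void process_blocks(int blocks, uint8_t digest[BLOCK_SIZE],
                           const uint8_t *src, const uint8_t *head)
{
    /* stand-in: full blocks from src, preceded by `head` if non-NULL */
}

static void update_mac(uint8_t digest[BLOCK_SIZE], const uint8_t *src,
                       int count, uint8_t buf[BLOCK_SIZE], int *buf_count)
{
    if (*buf_count > 0) {
        /* Top up the buffered partial block first. */
        int space = BLOCK_SIZE - *buf_count;
        int added = count < space ? count : space;

        memcpy(&buf[*buf_count], src, added);
        *buf_count += added;
        src += added;
        count -= added;
    }

    if (count >= BLOCK_SIZE || *buf_count == BLOCK_SIZE) {
        int blocks = count / BLOCK_SIZE;

        process_blocks(blocks, digest, src, *buf_count ? buf : NULL);
        src += blocks * BLOCK_SIZE;
        count %= BLOCK_SIZE;
        *buf_count = 0;
    }

    if (count > 0) {
        /* Stash the tail for the next call. */
        memcpy(buf, src, count);
        *buf_count = count;
    }
}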
314 scatterwalk_start(&walk, req->src);
367 const u8 *src = walk.src.virt.addr;
374 src = dst = memcpy(buf + sizeof(buf) - nbytes,
375 src, nbytes);
382 pmull_gcm_encrypt(nbytes, dst, src, ctx->ghash_key.h,
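When fewer than AES_BLOCK_SIZE bytes remain, the encrypt path parks them at the tail end of a stack buffer and points both src and dst at the copy (lines 374-375), presumably so the asm routine can issue block-granular loads and stores around the tail without touching memory it does not own; the decrypt path does the same at lines 488-489. A hedged sketch of the bounce: process_in_place stands in for the pmull_gcm_* call, and the oversized buffer and the copy-back of the result are assumptions, since neither is visible in this listing.

#include <stdint.h>
#include <string.h>

#define BLOCK_SIZE 16   /* AES_BLOCK_SIZE */

/* Stand-in for a routine that reads and writes in whole blocks. */
static void process_in_place(uint8_t *dst, const uint8_t *src, int nbytes)
{
}

static void process_chunk(uint8_t *dst, const uint8_t *src, int nbytes)
{
    /* Oversized so whole-block accesses around the tail stay in bounds. */
    uint8_t buf[3 * BLOCK_SIZE];

    if (nbytes > 0 && nbytes < BLOCK_SIZE) {
        /* Park the tail at the end of the bounce buffer, as in
         * "src = dst = memcpy(buf + sizeof(buf) - nbytes, src, nbytes)". */
        uint8_t *tmp = memcpy(buf + sizeof(buf) - nbytes, src, nbytes);

        process_in_place(tmp, tmp, nbytes);
        memcpy(dst, tmp, nbytes);   /* copy only nbytes of output back */
    } else {
        process_in_place(dst, src, nbytes);
    }
}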
399 const u8 *src = walk.src.virt.addr;
405 crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE);
409 src += AES_BLOCK_SIZE;
423 crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr,
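Lines 399-423 are the non-SIMD encrypt fallback: each encrypted counter block (held in buf; the AES call itself does not contain src and so is not listed) is XORed into the output with crypto_xor_cpy one AES_BLOCK_SIZE at a time, and the final call at line 423 passes the short tail length so no padding is needed. A self-contained model of that CTR walk; cipher_encrypt_block is a stub, and ctr_inc is a generic big-endian increment standing in for the kernel's crypto_inc().

#include <stdint.h>
#include <string.h>

#define BLOCK_SIZE 16

/* Stub: a real implementation would run AES on the counter block. */
static void cipher_encrypt_block(uint8_t out[BLOCK_SIZE],
                                 const uint8_t in[BLOCK_SIZE])
{
    memcpy(out, in, BLOCK_SIZE);
}

/* dst[i] = src[i] ^ ks[i]; models the crypto_xor_cpy() calls above. */
static void xor_cpy(uint8_t *dst, const uint8_t *src, const uint8_t *ks,
                    size_t len)
{
    for (size_t i = 0; i < len; i++)
        dst[i] = src[i] ^ ks[i];
}

/* Big-endian increment of the counter block. */
static void ctr_inc(uint8_t ctr[BLOCK_SIZE])
{
    for (int i = BLOCK_SIZE - 1; i >= 0 && ++ctr[i] == 0; i--)
        ;
}

/* Minimal CTR walk: full blocks first, then the sub-block tail, which is
 * XORed with only `len` keystream bytes so nothing is padded. */
static void ctr_crypt(uint8_t *dst, const uint8_t *src, size_t len,
                      uint8_t ctr[BLOCK_SIZE])
{
    uint8_t buf[BLOCK_SIZE];

    while (len >= BLOCK_SIZE) {
        cipher_encrypt_block(buf, ctr);
        xor_cpy(dst, src, buf, BLOCK_SIZE);
        ctr_inc(ctr);
        src += BLOCK_SIZE;
        dst += BLOCK_SIZE;
        len -= BLOCK_SIZE;
    }
    if (len) {
        cipher_encrypt_block(buf, ctr);
        xor_cpy(dst, src, buf, len);
    }
}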
481 const u8 *src = walk.src.virt.addr;
488 src = dst = memcpy(buf + sizeof(buf) - nbytes,
489 src, nbytes);
496 pmull_gcm_decrypt(nbytes, dst, src, ctx->ghash_key.h,
513 const u8 *src = walk.src.virt.addr;
516 ghash_do_update(blocks, dg, walk.src.virt.addr,
521 crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE);
525 src += AES_BLOCK_SIZE;
534 memcpy(buf, walk.src.virt.addr, walk.nbytes);
545 crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr,
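In the decrypt fallback the GHASH update at line 516 runs over the ciphertext before the crypto_xor_cpy at line 521 turns it into plaintext, which matches GCM's authenticate-the-ciphertext construction (on encryption the MAC is computed over the just-produced output instead). A one-block sketch of that ordering; ghash_absorb stubs out the real XOR-and-multiply step.

#include <stdint.h>

#define BLOCK_SIZE 16

static void ghash_absorb(uint8_t state[BLOCK_SIZE],
                         const uint8_t block[BLOCK_SIZE])
{
    /* XOR plus multiply by H in GF(2^128); only the XOR is shown here. */
    for (int i = 0; i < BLOCK_SIZE; i++)
        state[i] ^= block[i];
}

/* One block of the decrypt fallback: the MAC is updated over the
 * ciphertext before the keystream XOR reveals the plaintext. */
static void gcm_decrypt_block(uint8_t state[BLOCK_SIZE],
                              uint8_t *pt, const uint8_t *ct,
                              const uint8_t keystream[BLOCK_SIZE])
{
    ghash_absorb(state, ct);                 /* authenticate ciphertext */
    for (int i = 0; i < BLOCK_SIZE; i++)     /* then decrypt it         */
        pt[i] = ct[i] ^ keystream[i];
}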
562 scatterwalk_map_and_copy(buf, req->src,
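Line 562 copies the transmitted tag out of req->src during decryption so it can be checked against the tag computed over the message. The comparison itself is not part of this listing, but the usual kernel pattern (crypto_memneq) is a constant-time check, so timing does not reveal how many tag bytes matched. A minimal version of such a comparison:

#include <stddef.h>
#include <stdint.h>

/* Constant-time tag comparison: the run time must not depend on where
 * the first mismatching byte is. */
static int tag_mismatch(const uint8_t *a, const uint8_t *b, size_t len)
{
    uint8_t diff = 0;

    for (size_t i = 0; i < len; i++)
        diff |= a[i] ^ b[i];

    return diff != 0;   /* nonzero means reject, e.g. with -EBADMSG */
}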