/kernel/linux/linux-5.10/include/crypto/
sha256_base.h
    43  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;           in sha256_base_do_update()
    47  if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {             in sha256_base_do_update()
    51  int p = SHA256_BLOCK_SIZE - partial;                              in sha256_base_do_update()
    60  blocks = len / SHA256_BLOCK_SIZE;                                 in sha256_base_do_update()
    61  len %= SHA256_BLOCK_SIZE;                                         in sha256_base_do_update()
    65  data += blocks * SHA256_BLOCK_SIZE;                               in sha256_base_do_update()
    78  const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);        in sha256_base_do_finalize()
    81  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;           in sha256_base_do_finalize()
    85  memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);    in sha256_base_do_finalize()
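The matches above trace the shared buffering scheme: bytes collect in sctx->buf until a full 64-byte block exists, whole blocks are fed straight to the block function, and any tail is carried over. Below is a minimal userspace sketch of that control flow; struct ctx, block_fn_t, and do_update are illustrative names, not the kernel API.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define BLOCK_SIZE 64                 /* SHA256_BLOCK_SIZE */

    struct ctx {
        uint64_t count;                   /* total bytes consumed so far */
        uint8_t  buf[BLOCK_SIZE];         /* trailing partial block */
    };

    /* Consumes `blocks` consecutive 64-byte blocks (stand-in for the compress fn). */
    typedef void (*block_fn_t)(struct ctx *c, const uint8_t *data, size_t blocks);

    static void do_update(struct ctx *c, const uint8_t *data, size_t len,
                          block_fn_t block_fn)
    {
        size_t partial = c->count % BLOCK_SIZE;

        c->count += len;
        if (partial + len >= BLOCK_SIZE) {
            if (partial) {
                size_t p = BLOCK_SIZE - partial;   /* top up the buffered block */
                memcpy(c->buf + partial, data, p);
                data += p;
                len -= p;
                block_fn(c, c->buf, 1);
            }
            size_t blocks = len / BLOCK_SIZE;      /* bulk blocks, fed directly */
            len %= BLOCK_SIZE;
            if (blocks) {
                block_fn(c, data, blocks);
                data += blocks * BLOCK_SIZE;
            }
            partial = 0;
        }
        if (len)                                   /* stash the tail for later */
            memcpy(c->buf + partial, data, len);
    }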
sha.h
    18  #define SHA256_BLOCK_SIZE 64                                      (macro)
    87  u8 buf[SHA256_BLOCK_SIZE];
/kernel/linux/linux-6.6/include/crypto/
sha256_base.h
    42  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;           in lib_sha256_base_do_update()
    46  if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {             in lib_sha256_base_do_update()
    50  int p = SHA256_BLOCK_SIZE - partial;                              in lib_sha256_base_do_update()
    59  blocks = len / SHA256_BLOCK_SIZE;                                 in lib_sha256_base_do_update()
    60  len %= SHA256_BLOCK_SIZE;                                         in lib_sha256_base_do_update()
    64  data += blocks * SHA256_BLOCK_SIZE;                               in lib_sha256_base_do_update()
    87  const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);        in lib_sha256_base_do_finalize()
    89  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;           in lib_sha256_base_do_finalize()
    93  memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);    in lib_sha256_base_do_finalize()
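The do_finalize matches show the Merkle–Damgård tail: append a 0x80 byte, zero-fill, and store the message length in bits as a big-endian 64-bit word in the last 8 bytes of the final block (bit_offset = 64 - 8 = 56); if the 0x80 byte leaves no room for the length field, an extra all-padding block is emitted first. A hedged sketch, reusing struct ctx and block_fn_t from the previous example:

    static void store_be64(uint8_t *p, uint64_t v)
    {
        for (int i = 7; i >= 0; i--) {    /* big-endian byte order */
            p[i] = (uint8_t)v;
            v >>= 8;
        }
    }

    static void do_finalize(struct ctx *c, block_fn_t block_fn)
    {
        const size_t bit_offset = BLOCK_SIZE - sizeof(uint64_t);   /* 56 */
        size_t partial = c->count % BLOCK_SIZE;

        c->buf[partial++] = 0x80;         /* the mandatory '1' terminator bit */
        if (partial > bit_offset) {
            /* No room left for the length field: finish this block with
             * zeros, emit it, and continue padding in a fresh block. */
            memset(c->buf + partial, 0x00, BLOCK_SIZE - partial);
            partial = 0;
            block_fn(c, c->buf, 1);
        }
        memset(c->buf + partial, 0x00, bit_offset - partial);
        store_be64(c->buf + bit_offset, c->count << 3);   /* length in bits */
        block_fn(c, c->buf, 1);
    }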
sha2.h
    15  #define SHA256_BLOCK_SIZE 64                                      (macro)
    70  u8 buf[SHA256_BLOCK_SIZE];
/kernel/linux/linux-6.6/arch/sparc/crypto/
sha256_glue.c
    37  done = SHA256_BLOCK_SIZE - partial;                               in __sha256_sparc64_update()
    41  if (len - done >= SHA256_BLOCK_SIZE) {                            in __sha256_sparc64_update()
    42  const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;     in __sha256_sparc64_update()
    45  done += rounds * SHA256_BLOCK_SIZE;                               in __sha256_sparc64_update()
    55  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;           in sha256_sparc64_update()
    58  if (partial + len < SHA256_BLOCK_SIZE) {                          in sha256_sparc64_update()
    73  static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };           in sha256_sparc64_final()
    78  index = sctx->count % SHA256_BLOCK_SIZE;                          in sha256_sparc64_final()
    79  padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56) - index);   in sha256_sparc64_final()
   141  .cra_blocksize = SHA256_BLOCK_SIZE,
    [all...]
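The padlen expression on line 79 picks exactly enough padding so that the buffered data, the 0x80-led pad, and the 8-byte length field end on a block boundary; e.g. index 60 needs 60 pad bytes (60 + 60 + 8 = 128, two blocks). A self-contained check of that arithmetic:

    #include <assert.h>
    #include <stdio.h>

    #define SHA256_BLOCK_SIZE 64

    int main(void)
    {
        /* For every possible partial-block length, the 0x80... padding plus
         * the 8-byte bit-length field must land exactly on a block boundary. */
        for (unsigned index = 0; index < SHA256_BLOCK_SIZE; index++) {
            unsigned padlen = (index < 56) ? (56 - index)
                                           : ((SHA256_BLOCK_SIZE + 56) - index);
            assert((index + padlen + 8) % SHA256_BLOCK_SIZE == 0);
            assert(padlen >= 1);          /* always room for the 0x80 byte */
        }
        printf("padding arithmetic checks out for all 64 offsets\n");
        return 0;
    }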
/kernel/linux/linux-5.10/net/mptcp/
crypto.c
    46  u8 input[SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE];                 in mptcp_crypto_hmac_sha()
    58  memset(input, 0x36, SHA256_BLOCK_SIZE);                           in mptcp_crypto_hmac_sha()
    64  memcpy(&input[SHA256_BLOCK_SIZE], msg, len);                      in mptcp_crypto_hmac_sha()
    69  sha256(input, SHA256_BLOCK_SIZE + len, &input[SHA256_BLOCK_SIZE]);   in mptcp_crypto_hmac_sha()
    72  memset(input, 0x5C, SHA256_BLOCK_SIZE);                           in mptcp_crypto_hmac_sha()
    78  sha256(input, SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE, hmac);      in mptcp_crypto_hmac_sha()
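These matches are HMAC-SHA256 built from a one-shot hash: the key (at most one block here) is XORed into a 0x36-filled block for the inner pass and a 0x5C-filled block for the outer, and the inner digest is written straight after the padded key block so one buffer serves both passes. A userspace sketch of that layout; it assumes a linked-in one-shot sha256() with this prototype (the kernel's lib/crypto helper has this shape, but treat it as an assumption), and hmac_sha256 is an illustrative name.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define SHA256_BLOCK_SIZE  64
    #define SHA256_DIGEST_SIZE 32

    /* One-shot hash, assumed to be linked in. */
    void sha256(const uint8_t *data, size_t len, uint8_t *out);

    /* HMAC-SHA256 for keys of at most one block, mirroring the buffer
     * reuse visible in the matches above. */
    static void hmac_sha256(const uint8_t *key, size_t key_len,
                            const uint8_t *msg, size_t msg_len,
                            uint8_t *mac)
    {
        uint8_t input[SHA256_BLOCK_SIZE + 256];   /* block + room for msg/digest */
        size_t i;

        if (key_len > SHA256_BLOCK_SIZE ||
            msg_len > sizeof(input) - SHA256_BLOCK_SIZE)
            return;                               /* out of scope for this sketch */

        memset(input, 0x36, SHA256_BLOCK_SIZE);   /* ipad */
        for (i = 0; i < key_len; i++)
            input[i] ^= key[i];
        memcpy(&input[SHA256_BLOCK_SIZE], msg, msg_len);
        /* Inner digest lands right after the padded key block... */
        sha256(input, SHA256_BLOCK_SIZE + msg_len, &input[SHA256_BLOCK_SIZE]);

        memset(input, 0x5C, SHA256_BLOCK_SIZE);   /* opad */
        for (i = 0; i < key_len; i++)
            input[i] ^= key[i];
        /* ...so the outer pass hashes opad-block || inner digest in place. */
        sha256(input, SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE, mac);
    }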
/kernel/linux/linux-6.6/net/mptcp/
crypto.c
    46  u8 input[SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE];                 in mptcp_crypto_hmac_sha()
    58  memset(input, 0x36, SHA256_BLOCK_SIZE);                           in mptcp_crypto_hmac_sha()
    64  memcpy(&input[SHA256_BLOCK_SIZE], msg, len);                      in mptcp_crypto_hmac_sha()
    69  sha256(input, SHA256_BLOCK_SIZE + len, &input[SHA256_BLOCK_SIZE]);   in mptcp_crypto_hmac_sha()
    72  memset(input, 0x5C, SHA256_BLOCK_SIZE);                           in mptcp_crypto_hmac_sha()
    78  sha256(input, SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE, hmac);      in mptcp_crypto_hmac_sha()
/kernel/linux/linux-5.10/arch/sparc/crypto/
sha256_glue.c
    68  done = SHA256_BLOCK_SIZE - partial;                               in __sha256_sparc64_update()
    72  if (len - done >= SHA256_BLOCK_SIZE) {                            in __sha256_sparc64_update()
    73  const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;     in __sha256_sparc64_update()
    76  done += rounds * SHA256_BLOCK_SIZE;                               in __sha256_sparc64_update()
    86  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;           in sha256_sparc64_update()
    89  if (partial + len < SHA256_BLOCK_SIZE) {                          in sha256_sparc64_update()
   104  static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };           in sha256_sparc64_final()
   109  index = sctx->count % SHA256_BLOCK_SIZE;                          in sha256_sparc64_final()
   110  padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56) - index);   in sha256_sparc64_final()
   172  .cra_blocksize = SHA256_BLOCK_SIZE,
    [all...]
/kernel/linux/linux-5.10/arch/mips/cavium-octeon/crypto/
octeon-sha256.c
   107  partial = sctx->count % SHA256_BLOCK_SIZE;                        in __octeon_sha256_update()
   112  if ((partial + len) >= SHA256_BLOCK_SIZE) {                       in __octeon_sha256_update()
   116  done + SHA256_BLOCK_SIZE);                                        in __octeon_sha256_update()
   122  done += SHA256_BLOCK_SIZE;                                        in __octeon_sha256_update()
   124  } while (done + SHA256_BLOCK_SIZE <= len);                        in __octeon_sha256_update()
   143  if ((sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in octeon_sha256_update()
   238  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-6.6/arch/mips/cavium-octeon/crypto/
octeon-sha256.c
    74  partial = sctx->count % SHA256_BLOCK_SIZE;                        in __octeon_sha256_update()
    79  if ((partial + len) >= SHA256_BLOCK_SIZE) {                       in __octeon_sha256_update()
    83  done + SHA256_BLOCK_SIZE);                                        in __octeon_sha256_update()
    89  done += SHA256_BLOCK_SIZE;                                        in __octeon_sha256_update()
    91  } while (done + SHA256_BLOCK_SIZE <= len);                        in __octeon_sha256_update()
   110  if ((sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in octeon_sha256_update()
   205  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-6.6/drivers/crypto/nx/
nx-sha256.c
    22  u8 buf[SHA256_BLOCK_SIZE];
    73  u64 buf_len = (sctx->count % SHA256_BLOCK_SIZE);                  in nx_sha256_update()
    78  * 1: < SHA256_BLOCK_SIZE: copy into state, return 0               in nx_sha256_update()
    79  * 2: >= SHA256_BLOCK_SIZE: process X blocks, copy in leftover     in nx_sha256_update()
    81  total = (sctx->count % SHA256_BLOCK_SIZE) + len;                  in nx_sha256_update()
    82  if (total < SHA256_BLOCK_SIZE) {                                  in nx_sha256_update()
   125  /* to_process: SHA256_BLOCK_SIZE aligned chunk to be              in nx_sha256_update()
   134  to_process = to_process & ~(SHA256_BLOCK_SIZE - 1);               in nx_sha256_update()
   168  } while (leftover >= SHA256_BLOCK_SIZE);                          in nx_sha256_update()
   201  if (sctx->count >= SHA256_BLOCK_SIZE) {                           in nx_sha256_final()
    [all...]
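Line 134 rounds to_process down to a whole number of blocks with a bitmask, which is valid only because SHA256_BLOCK_SIZE is a power of two (clearing the low six bits rounds down to a multiple of 64). A quick standalone check of that identity:

    #include <assert.h>
    #include <stdio.h>

    #define SHA256_BLOCK_SIZE 64

    int main(void)
    {
        for (unsigned n = 0; n < 4 * SHA256_BLOCK_SIZE; n++) {
            unsigned aligned = n & ~(SHA256_BLOCK_SIZE - 1);
            assert(aligned % SHA256_BLOCK_SIZE == 0);
            assert(aligned <= n && n - aligned < SHA256_BLOCK_SIZE);
        }
        printf("mask rounds down to a block multiple for all inputs\n");
        return 0;
    }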
/kernel/linux/linux-5.10/arch/arm64/crypto/
sha256-glue.c
    77  .base.cra_blocksize = SHA256_BLOCK_SIZE,
   111  chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)      in sha256_update_neon()
   112  chunk = SHA256_BLOCK_SIZE -                                       in sha256_update_neon()
   113  sctx->count % SHA256_BLOCK_SIZE;                                  in sha256_update_neon()
   157  .base.cra_blocksize = SHA256_BLOCK_SIZE,
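Lines 111–113 cap each chunk so that a single NEON region never runs past the next block boundary, bounding how much work happens between kernel_neon_begin() and kernel_neon_end(); in the kernel this cap applies only on preemptible configurations. A standalone simulation of just the chunking arithmetic (the starting values are arbitrary):

    #include <assert.h>
    #include <stdio.h>

    #define BLOCK 64

    int main(void)
    {
        unsigned long long count = 13;    /* arbitrary partial starting state */
        unsigned len = 1000;

        while (len > 0) {
            unsigned chunk = len;
            /* Cap the chunk at the next block boundary, as the glue does. */
            if (chunk + count % BLOCK > BLOCK)
                chunk = BLOCK - count % BLOCK;
            assert(chunk >= 1 && chunk <= BLOCK);
            count += chunk;
            len -= chunk;
        }
        printf("every simulated NEON region is bounded by one block\n");
        return 0;
    }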
sha2-ce-glue.c
    46  src += (blocks - rem) * SHA256_BLOCK_SIZE;                        in __sha2_ce_transform()
    83  bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;   in sha256_ce_finup()
   150  .cra_blocksize = SHA256_BLOCK_SIZE,
   167  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-6.6/arch/arm64/crypto/
sha256-glue.c
    78  .base.cra_blocksize = SHA256_BLOCK_SIZE,
   112  chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)      in sha256_update_neon()
   113  chunk = SHA256_BLOCK_SIZE -                                       in sha256_update_neon()
   114  sctx->count % SHA256_BLOCK_SIZE;                                  in sha256_update_neon()
   158  .base.cra_blocksize = SHA256_BLOCK_SIZE,
sha2-ce-glue.c
    46  src += (blocks - rem) * SHA256_BLOCK_SIZE;                        in __sha2_ce_transform()
    83  bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;   in sha256_ce_finup()
   150  .cra_blocksize = SHA256_BLOCK_SIZE,
   167  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-5.10/drivers/crypto/nx/
nx-sha256.c
    68  u64 buf_len = (sctx->count % SHA256_BLOCK_SIZE);                  in nx_sha256_update()
    73  * 1: < SHA256_BLOCK_SIZE: copy into state, return 0               in nx_sha256_update()
    74  * 2: >= SHA256_BLOCK_SIZE: process X blocks, copy in leftover     in nx_sha256_update()
    76  total = (sctx->count % SHA256_BLOCK_SIZE) + len;                  in nx_sha256_update()
    77  if (total < SHA256_BLOCK_SIZE) {                                  in nx_sha256_update()
   120  /* to_process: SHA256_BLOCK_SIZE aligned chunk to be              in nx_sha256_update()
   129  to_process = to_process & ~(SHA256_BLOCK_SIZE - 1);               in nx_sha256_update()
   163  } while (leftover >= SHA256_BLOCK_SIZE);                          in nx_sha256_update()
   196  if (sctx->count >= SHA256_BLOCK_SIZE) {                           in nx_sha256_final()
   209  len = sctx->count & (SHA256_BLOCK_SIZE                            in nx_sha256_final()
    [all...]
/kernel/linux/linux-5.10/arch/arm/crypto/
sha2-ce-glue.c
    36  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)      in sha2_ce_update()
    79  .cra_blocksize = SHA256_BLOCK_SIZE,
    93  .cra_blocksize = SHA256_BLOCK_SIZE,
sha256_neon_glue.c
    33  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)      in crypto_sha256_neon_update()
    77  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-6.6/arch/arm/crypto/
sha2-ce-glue.c
    36  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)      in sha2_ce_update()
    79  .cra_blocksize = SHA256_BLOCK_SIZE,
    93  .cra_blocksize = SHA256_BLOCK_SIZE,
sha256_neon_glue.c
    33  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)      in crypto_sha256_neon_update()
    75  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-5.10/arch/x86/crypto/
sha256_ssse3_glue.c
    52  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)      in _sha256_update()
   112  .cra_blocksize = SHA256_BLOCK_SIZE,
   177  .cra_blocksize = SHA256_BLOCK_SIZE,
   253  .cra_blocksize = SHA256_BLOCK_SIZE,
   328  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-6.6/arch/x86/crypto/
sha256_ssse3_glue.c
    61  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)      in _sha256_update()
   121  .cra_blocksize = SHA256_BLOCK_SIZE,
   186  .cra_blocksize = SHA256_BLOCK_SIZE,
   262  .cra_blocksize = SHA256_BLOCK_SIZE,
   337  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-5.10/drivers/crypto/
padlock-sha.c
   154  leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;     in padlock_sha256_finup()
   155  space = SHA256_BLOCK_SIZE - leftover;                             in padlock_sha256_finup()
   260  .cra_blocksize = SHA256_BLOCK_SIZE,
   380  if ((partial + len) >= SHA256_BLOCK_SIZE) {                       in padlock_sha256_update_nano()
   386  done + SHA256_BLOCK_SIZE);                                        in padlock_sha256_update_nano()
   391  done += SHA256_BLOCK_SIZE;                                        in padlock_sha256_update_nano()
   396  if (len - done >= SHA256_BLOCK_SIZE) {                            in padlock_sha256_update_nano()
   487  .cra_blocksize = SHA256_BLOCK_SIZE,
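The leftover expression on line 154 is subtly different from a plain count % 64: for a nonzero count that is an exact multiple of the block size it yields 64 rather than 0, so a full final block is held back for the finup step instead of being treated as empty. A standalone check of that mapping:

    #include <assert.h>
    #include <stdio.h>

    #define SHA256_BLOCK_SIZE 64

    int main(void)
    {
        /* leftover always lands in 1..64, and what precedes it is a
         * whole number of blocks. */
        for (unsigned long long count = 1; count <= 256; count++) {
            unsigned leftover =
                ((count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;
            assert(leftover >= 1 && leftover <= SHA256_BLOCK_SIZE);
            assert((count - leftover) % SHA256_BLOCK_SIZE == 0);
        }
        printf("leftover always lands in 1..64\n");
        return 0;
    }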
/kernel/linux/linux-6.6/drivers/crypto/
padlock-sha.c
   155  leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;     in padlock_sha256_finup()
   156  space = SHA256_BLOCK_SIZE - leftover;                             in padlock_sha256_finup()
   261  .cra_blocksize = SHA256_BLOCK_SIZE,
   381  if ((partial + len) >= SHA256_BLOCK_SIZE) {                       in padlock_sha256_update_nano()
   387  done + SHA256_BLOCK_SIZE);                                        in padlock_sha256_update_nano()
   392  done += SHA256_BLOCK_SIZE;                                        in padlock_sha256_update_nano()
   397  if (len - done >= SHA256_BLOCK_SIZE) {                            in padlock_sha256_update_nano()
   488  .cra_blocksize = SHA256_BLOCK_SIZE,
/kernel/linux/linux-5.10/drivers/crypto/qce/
sha.h
    15  #define QCE_SHA_MAX_BLOCKSIZE SHA256_BLOCK_SIZE                   (macro)