/kernel/linux/linux-5.10/fs/ext4/
  indirect.c
    244   * @partial: pointer to the last triple within a chain
    252   Indirect *partial)  in ext4_find_goal()
    260   goal = ext4_find_near(inode, partial);  in ext4_find_goal()
    316   * we had read the existing part of chain and partial points to the last
    536   Indirect *partial;  in ext4_ind_map_blocks() local
    552   partial = ext4_get_branch(inode, depth, offsets, chain, &err);  in ext4_ind_map_blocks()
    555   if (!partial) {  in ext4_ind_map_blocks()
    578   * Count number blocks in a subtree under 'partial'. At each  in ext4_ind_map_blocks()
    584   for (i = partial - chain + 1; i < depth; i++)  in ext4_ind_map_blocks()
    618   ar.goal = ext4_find_goal(inode, map->m_lblk, partial);  in ext4_ind_map_blocks()
    251   ext4_find_goal(struct inode *inode, ext4_lblk_t block, Indirect *partial)  ext4_find_goal() argument
    805   Indirect *partial, *p;  ext4_find_shared() local
    1121  Indirect *partial;  ext4_ind_truncate() local
    1237  Indirect *partial, *partial2;  ext4_ind_remove_space() local
    [all...]
/kernel/linux/linux-6.6/fs/ext4/
  indirect.c
    244   * @partial: pointer to the last triple within a chain
    252   Indirect *partial)  in ext4_find_goal()
    260   goal = ext4_find_near(inode, partial);  in ext4_find_goal()
    316   * we had read the existing part of chain and partial points to the last
    538   Indirect *partial;  in ext4_ind_map_blocks() local
    554   partial = ext4_get_branch(inode, depth, offsets, chain, &err);  in ext4_ind_map_blocks()
    557   if (!partial) {  in ext4_ind_map_blocks()
    580   * Count number blocks in a subtree under 'partial'. At each  in ext4_ind_map_blocks()
    586   for (i = partial - chain + 1; i < depth; i++)  in ext4_ind_map_blocks()
    620   ar.goal = ext4_find_goal(inode, map->m_lblk, partial);  in ext4_ind_map_blocks()
    251   ext4_find_goal(struct inode *inode, ext4_lblk_t block, Indirect *partial)  ext4_find_goal() argument
    808   Indirect *partial, *p;  ext4_find_shared() local
    1126  Indirect *partial;  ext4_ind_truncate() local
    1242  Indirect *partial, *partial2;  ext4_ind_remove_space() local
    [all...]
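
The ext4 matches above all lean on one convention: ext4_get_branch() walks the block's offsets down the indirect chain and returns NULL when every level is present, or `partial`, a pointer to the last triple it managed to fill in, when it hits a hole; later code derives the allocation goal and the number of missing levels from that pointer. Below is a minimal userspace sketch of just that convention, not the ext4 code itself; `Indirect`, `get_branch`, the toy `levels` array and the fixed DEPTH are assumptions of the sketch.

#include <stdio.h>

#define DEPTH 3                    /* assumed chain depth for the sketch */

typedef struct {
    unsigned key;                  /* block number found at this level, 0 == hole */
    unsigned *p;                   /* where that key was read from */
} Indirect;

/* toy "disk": levels[i][offsets[i]] holds the next block number, 0 == missing */
static unsigned level0[4] = { 10,  0, 12, 0 };
static unsigned level1[4] = { 20, 21,  0, 0 };
static unsigned level2[4] = { 30, 31, 32, 0 };
static unsigned *levels[DEPTH] = { level0, level1, level2 };

/*
 * Returns NULL when the whole chain exists, otherwise a pointer to the
 * last triple that could be filled in, which is the role "partial"
 * plays in the matches above.
 */
static Indirect *get_branch(const int *offsets, Indirect chain[DEPTH])
{
    Indirect *p = chain;

    for (int i = 0; i < DEPTH; i++, p++) {
        p->p = &levels[i][offsets[i]];
        p->key = *p->p;
        if (!p->key)               /* hit a hole: the chain is only partial */
            return p;
    }
    return NULL;
}

int main(void)
{
    Indirect chain[DEPTH];
    int offsets[DEPTH] = { 2, 2, 1 };      /* level1[2] is a hole in the toy disk */
    Indirect *partial = get_branch(offsets, chain);

    if (!partial) {
        printf("fully mapped, data block %u\n", chain[DEPTH - 1].key);
    } else {
        long broke_at = partial - chain;   /* levels from here on still need blocks */
        printf("chain broke at level %ld, %ld block(s) still needed\n",
               broke_at, (long)DEPTH - broke_at);
    }
    return 0;
}

The pointer arithmetic is the whole point: `partial - chain` says where the chain broke, and expressions of the `(chain + depth) - partial` form, visible in the sysv and minix matches further down, say how much still has to be allocated.
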
/kernel/linux/linux-5.10/include/crypto/
  sha1_base.h
    40    unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;  in sha1_base_do_update() local
    44    if (unlikely((partial + len) >= SHA1_BLOCK_SIZE)) {  in sha1_base_do_update()
    47    if (partial) {  in sha1_base_do_update()
    48    int p = SHA1_BLOCK_SIZE - partial;  in sha1_base_do_update()
    50    memcpy(sctx->buffer + partial, data, p);  in sha1_base_do_update()
    64    partial = 0;  in sha1_base_do_update()
    67    memcpy(sctx->buffer + partial, data, len);  in sha1_base_do_update()
    78    unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;  in sha1_base_do_finalize() local
    80    sctx->buffer[partial++] = 0x80;  in sha1_base_do_finalize()
    81    if (partial > bit_offse  in sha1_base_do_finalize()
    [all...]

  sha512_base.h
    61    unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;  in sha512_base_do_update() local
    67    if (unlikely((partial + len) >= SHA512_BLOCK_SIZE)) {  in sha512_base_do_update()
    70    if (partial) {  in sha512_base_do_update()
    71    int p = SHA512_BLOCK_SIZE - partial;  in sha512_base_do_update()
    73    memcpy(sctx->buf + partial, data, p);  in sha512_base_do_update()
    87    partial = 0;  in sha512_base_do_update()
    90    memcpy(sctx->buf + partial, data, len);  in sha512_base_do_update()
    101   unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;  in sha512_base_do_finalize() local
    103   sctx->buf[partial++] = 0x80;  in sha512_base_do_finalize()
    104   if (partial > bit_offse  in sha512_base_do_finalize()
    [all...]

  sha256_base.h
    43    unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_base_do_update() local
    47    if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {  in sha256_base_do_update()
    50    if (partial) {  in sha256_base_do_update()
    51    int p = SHA256_BLOCK_SIZE - partial;  in sha256_base_do_update()
    53    memcpy(sctx->buf + partial, data, p);  in sha256_base_do_update()
    67    partial = 0;  in sha256_base_do_update()
    70    memcpy(sctx->buf + partial, data, len);  in sha256_base_do_update()
    81    unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_base_do_finalize() local
    83    sctx->buf[partial++] = 0x80;  in sha256_base_do_finalize()
    84    if (partial > bit_offse  in sha256_base_do_finalize()
    [all...]

  sm3_base.h
    43    unsigned int partial = sctx->count % SM3_BLOCK_SIZE;  in sm3_base_do_update() local
    47    if (unlikely((partial + len) >= SM3_BLOCK_SIZE)) {  in sm3_base_do_update()
    50    if (partial) {  in sm3_base_do_update()
    51    int p = SM3_BLOCK_SIZE - partial;  in sm3_base_do_update()
    53    memcpy(sctx->buffer + partial, data, p);  in sm3_base_do_update()
    67    partial = 0;  in sm3_base_do_update()
    70    memcpy(sctx->buffer + partial, data, len);  in sm3_base_do_update()
    81    unsigned int partial = sctx->count % SM3_BLOCK_SIZE;  in sm3_base_do_finalize() local
    83    sctx->buffer[partial++] = 0x80;  in sm3_base_do_finalize()
    84    if (partial > bit_offse  in sm3_base_do_finalize()
    [all...]
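
The four base headers matched above share one buffering scheme in their *_base_do_update() helpers: `partial` is how many bytes of an unfinished block sit in the context buffer; an update first tops that buffer up and hashes it, then hashes whole blocks straight from the caller's data, and finally stashes the tail for next time. The following is a self-contained sketch of that flow under assumed names (BLOCK_SIZE, struct hash_ctx, toy_block_fn), not the kernel API.

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define BLOCK_SIZE 64

struct hash_ctx {
    uint64_t count;               /* total bytes fed in so far */
    uint8_t buffer[BLOCK_SIZE];   /* staging area for an unfinished block */
    uint32_t state;               /* stand-in for the real digest state */
};

/* Stand-in for the real compression function: consumes whole blocks only. */
static void toy_block_fn(struct hash_ctx *ctx, const uint8_t *data, size_t blocks)
{
    for (size_t b = 0; b < blocks; b++)
        for (size_t i = 0; i < BLOCK_SIZE; i++)
            ctx->state = ctx->state * 31 + data[b * BLOCK_SIZE + i];
}

static void do_update(struct hash_ctx *ctx, const uint8_t *data, size_t len)
{
    size_t partial = ctx->count % BLOCK_SIZE;

    ctx->count += len;

    if (partial + len >= BLOCK_SIZE) {
        if (partial) {
            /* top up the buffered partial block and consume it */
            size_t p = BLOCK_SIZE - partial;
            memcpy(ctx->buffer + partial, data, p);
            data += p;
            len -= p;
            toy_block_fn(ctx, ctx->buffer, 1);
        }
        /* consume as many whole blocks as possible straight from the input */
        size_t blocks = len / BLOCK_SIZE;
        if (blocks) {
            toy_block_fn(ctx, data, blocks);
            data += blocks * BLOCK_SIZE;
            len -= blocks * BLOCK_SIZE;
        }
        partial = 0;
    }
    /* stash whatever is left for the next call */
    if (len)
        memcpy(ctx->buffer + partial, data, len);
}

int main(void)
{
    struct hash_ctx ctx = { 0 };
    uint8_t msg[150] = { 0 };

    do_update(&ctx, msg, 10);     /* leaves 10 bytes buffered */
    do_update(&ctx, msg, 140);    /* completes the buffered block, hashes one more, buffers 22 */
    printf("count=%llu buffered=%llu\n",
           (unsigned long long)ctx.count,
           (unsigned long long)(ctx.count % BLOCK_SIZE));
    return 0;
}

The only differences between the four headers in the matches are the block size and the name of the staging field (`buffer` in sha1/sm3, `buf` in sha256/sha512).
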
/kernel/linux/linux-6.6/include/crypto/
  sha256_base.h
    42    unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in lib_sha256_base_do_update() local
    46    if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {  in lib_sha256_base_do_update()
    49    if (partial) {  in lib_sha256_base_do_update()
    50    int p = SHA256_BLOCK_SIZE - partial;  in lib_sha256_base_do_update()
    52    memcpy(sctx->buf + partial, data, p);  in lib_sha256_base_do_update()
    66    partial = 0;  in lib_sha256_base_do_update()
    69    memcpy(sctx->buf + partial, data, len);  in lib_sha256_base_do_update()
    89    unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in lib_sha256_base_do_finalize() local
    91    sctx->buf[partial++] = 0x80;  in lib_sha256_base_do_finalize()
    92    if (partial > bit_offse  in lib_sha256_base_do_finalize()
    [all...]

  sha512_base.h
    62    unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;  in sha512_base_do_update() local
    68    if (unlikely((partial + len) >= SHA512_BLOCK_SIZE)) {  in sha512_base_do_update()
    71    if (partial) {  in sha512_base_do_update()
    72    int p = SHA512_BLOCK_SIZE - partial;  in sha512_base_do_update()
    74    memcpy(sctx->buf + partial, data, p);  in sha512_base_do_update()
    88    partial = 0;  in sha512_base_do_update()
    91    memcpy(sctx->buf + partial, data, len);  in sha512_base_do_update()
    102   unsigned int partial = sctx->count[0] % SHA512_BLOCK_SIZE;  in sha512_base_do_finalize() local
    104   sctx->buf[partial++] = 0x80;  in sha512_base_do_finalize()
    105   if (partial > bit_offse  in sha512_base_do_finalize()
    [all...]

  sha1_base.h
    41    unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;  in sha1_base_do_update() local
    45    if (unlikely((partial + len) >= SHA1_BLOCK_SIZE)) {  in sha1_base_do_update()
    48    if (partial) {  in sha1_base_do_update()
    49    int p = SHA1_BLOCK_SIZE - partial;  in sha1_base_do_update()
    51    memcpy(sctx->buffer + partial, data, p);  in sha1_base_do_update()
    65    partial = 0;  in sha1_base_do_update()
    68    memcpy(sctx->buffer + partial, data, len);  in sha1_base_do_update()
    79    unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;  in sha1_base_do_finalize() local
    81    sctx->buffer[partial++] = 0x80;  in sha1_base_do_finalize()
    82    if (partial > bit_offse  in sha1_base_do_finalize()
    [all...]

  sm3_base.h
    44    unsigned int partial = sctx->count % SM3_BLOCK_SIZE;  in sm3_base_do_update() local
    48    if (unlikely((partial + len) >= SM3_BLOCK_SIZE)) {  in sm3_base_do_update()
    51    if (partial) {  in sm3_base_do_update()
    52    int p = SM3_BLOCK_SIZE - partial;  in sm3_base_do_update()
    54    memcpy(sctx->buffer + partial, data, p);  in sm3_base_do_update()
    68    partial = 0;  in sm3_base_do_update()
    71    memcpy(sctx->buffer + partial, data, len);  in sm3_base_do_update()
    82    unsigned int partial = sctx->count % SM3_BLOCK_SIZE;  in sm3_base_do_finalize() local
    84    sctx->buffer[partial++] = 0x80;  in sm3_base_do_finalize()
    85    if (partial > bit_offse  in sm3_base_do_finalize()
    [all...]
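
The *_base_do_finalize() matches implement the usual Merkle-Damgard padding: append 0x80, zero-fill, and put the message length in bits at the end of the last block, spilling into one extra block when `partial` has already moved past that slot (SHA-1, SHA-256 and SM3 reserve eight bytes for the length; SHA-512 reserves sixteen and uses count[0]/count[1]). A sketch of the eight-byte variant, reusing the assumed struct hash_ctx and toy_block_fn from the update sketch above (both are stand-ins, not kernel API):

static void do_finalize(struct hash_ctx *ctx)
{
    const size_t bit_offset = BLOCK_SIZE - sizeof(uint64_t);
    size_t partial = ctx->count % BLOCK_SIZE;

    ctx->buffer[partial++] = 0x80;          /* mandatory end-of-message marker */
    if (partial > bit_offset) {
        /* no room for the length: pad this block out and start a fresh one */
        memset(ctx->buffer + partial, 0, BLOCK_SIZE - partial);
        toy_block_fn(ctx, ctx->buffer, 1);
        partial = 0;
    }
    memset(ctx->buffer + partial, 0, bit_offset - partial);

    /* big-endian bit count in the last 8 bytes, as SHA-1/SHA-256/SM3 do */
    uint64_t bits = ctx->count << 3;
    for (int i = 0; i < 8; i++)
        ctx->buffer[bit_offset + i] = (uint8_t)(bits >> (56 - 8 * i));
    toy_block_fn(ctx, ctx->buffer, 1);
}
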
/kernel/linux/linux-5.10/fs/sysv/
  itree.c
    213   Indirect *partial;  in get_block() local
    222   partial = get_branch(inode, depth, offsets, chain, &err);  in get_block()
    226   if (!partial) {  in get_block()
    231   partial = chain+depth-1; /* the whole chain */  in get_block()
    238   while (partial > chain) {  in get_block()
    239   brelse(partial->bh);  in get_block()
    240   partial--;  in get_block()
    254   left = (chain + depth) - partial;  in get_block()
    255   err = alloc_branch(inode, left, offsets+(partial-chain), partial);  in get_block()
    287   Indirect *partial, *p;  find_shared() local
    373   Indirect *partial;  sysv_truncate() local
    [all...]
/kernel/linux/linux-5.10/fs/minix/
  itree_common.c
    158   Indirect *partial;  in get_block() local
    166   partial = get_branch(inode, depth, offsets, chain, &err);  in get_block()
    169   if (!partial) {  in get_block()
    173   partial = chain+depth-1; /* the whole chain */  in get_block()
    180   while (partial > chain) {  in get_block()
    181   brelse(partial->bh);  in get_block()
    182   partial--;  in get_block()
    196   left = (chain + depth) - partial;  in get_block()
    197   err = alloc_branch(inode, left, offsets+(partial-chain), partial);  in get_block()
    229   Indirect *partial, *p;  find_shared() local
    306   Indirect *partial;  truncate() local
    [all...]
/kernel/linux/linux-6.6/fs/minix/
  itree_common.c
    158   Indirect *partial;  in get_block() local
    166   partial = get_branch(inode, depth, offsets, chain, &err);  in get_block()
    169   if (!partial) {  in get_block()
    173   partial = chain+depth-1; /* the whole chain */  in get_block()
    180   while (partial > chain) {  in get_block()
    181   brelse(partial->bh);  in get_block()
    182   partial--;  in get_block()
    196   left = (chain + depth) - partial;  in get_block()
    197   err = alloc_branch(inode, left, offsets+(partial-chain), partial);  in get_block()
    229   Indirect *partial, *p;  find_shared() local
    306   Indirect *partial;  truncate() local
    [all...]
/kernel/linux/linux-6.6/fs/sysv/
  itree.c
    213   Indirect *partial;  in get_block() local
    222   partial = get_branch(inode, depth, offsets, chain, &err);  in get_block()
    226   if (!partial) {  in get_block()
    231   partial = chain+depth-1; /* the whole chain */  in get_block()
    238   while (partial > chain) {  in get_block()
    239   brelse(partial->bh);  in get_block()
    240   partial--;  in get_block()
    254   left = (chain + depth) - partial;  in get_block()
    255   err = alloc_branch(inode, left, offsets+(partial-chain), partial);  in get_block()
    287   Indirect *partial, *p;  find_shared() local
    373   Indirect *partial;  sysv_truncate() local
    [all...]
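
The minix and sysv get_block() matches show the cleanup side of the same `partial` convention: every buffer read while walking the chain is released by stepping `partial` back toward `chain` with brelse(), and `(chain + depth) - partial` gives the number of levels that still need to be allocated before alloc_branch() runs. The ext2_get_blocks() matches further down reuse the identical unwind loop when the chain has to be re-verified. A toy illustration, with `struct indirect` and `toy_release()` standing in for the real types:

#include <stdio.h>

#define DEPTH 3

struct indirect {
    int level;                 /* stand-in for the buffer_head reference */
};

static void toy_release(struct indirect *p)
{
    /* the kernel would call brelse(p->bh) here */
    printf("releasing buffer read for level %d\n", p->level);
}

int main(void)
{
    struct indirect chain[DEPTH] = { { 0 }, { 1 }, { 2 } };
    struct indirect *partial = &chain[2];      /* lookup got as far as level 2 */
    long left = (chain + DEPTH) - partial;     /* levels still to allocate */

    printf("%ld level(s) left to allocate\n", left);

    /* mirrors: while (partial > chain) { brelse(partial->bh); partial--; } */
    while (partial > chain) {
        toy_release(partial);
        partial--;
    }
    /* chain[0] points into the inode itself, so there is nothing to release */
    return 0;
}
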
/kernel/linux/linux-5.10/drivers/crypto/
  padlock-sha.c
    283   unsigned int partial, done;  in padlock_sha1_update_nano() local
    290   partial = sctx->count & 0x3f;  in padlock_sha1_update_nano()
    296   if ((partial + len) >= SHA1_BLOCK_SIZE) {  in padlock_sha1_update_nano()
    299   if (partial) {  in padlock_sha1_update_nano()
    300   done = -partial;  in padlock_sha1_update_nano()
    301   memcpy(sctx->buffer + partial, data,  in padlock_sha1_update_nano()
    320   partial = 0;  in padlock_sha1_update_nano()
    323   memcpy(sctx->buffer + partial, src, len - done);  in padlock_sha1_update_nano()
    331   unsigned int partial, padlen;  in padlock_sha1_final_nano() local
    338   partial  in padlock_sha1_final_nano()
    367   unsigned int partial, done;  padlock_sha256_update_nano() local
    416   unsigned int partial, padlen;  padlock_sha256_final_nano() local
    [all...]
/kernel/linux/linux-6.6/drivers/crypto/
  padlock-sha.c
    284   unsigned int partial, done;  in padlock_sha1_update_nano() local
    291   partial = sctx->count & 0x3f;  in padlock_sha1_update_nano()
    297   if ((partial + len) >= SHA1_BLOCK_SIZE) {  in padlock_sha1_update_nano()
    300   if (partial) {  in padlock_sha1_update_nano()
    301   done = -partial;  in padlock_sha1_update_nano()
    302   memcpy(sctx->buffer + partial, data,  in padlock_sha1_update_nano()
    321   partial = 0;  in padlock_sha1_update_nano()
    324   memcpy(sctx->buffer + partial, src, len - done);  in padlock_sha1_update_nano()
    332   unsigned int partial, padlen;  in padlock_sha1_final_nano() local
    339   partial  in padlock_sha1_final_nano()
    368   unsigned int partial, done;  padlock_sha256_update_nano() local
    417   unsigned int partial, padlen;  padlock_sha256_final_nano() local
    [all...]
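
The padlock driver (like the powerpc and Octeon SHA-1 code further down) deals with the buffered partial block through a small offset trick: `done` starts at `-partial`, so copying `done + 64` bytes exactly completes the staged block, and from then on `data + done` always names the next unconsumed byte of the caller's input. A sketch of that idiom under assumed names (BLOCK, toy_transform, a file-scope buffer), not the driver's code:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define BLOCK 64

static uint8_t buffer[BLOCK];      /* stand-in for sctx->buffer */
static unsigned processed_blocks;  /* stand-in for updating the digest state */

static void toy_transform(const uint8_t *block)
{
    (void)block;
    processed_blocks++;
}

/* partial = bytes already buffered; returns how many bytes end up buffered */
static size_t update(const uint8_t *data, size_t len, size_t partial)
{
    long done = 0;
    const uint8_t *src = data;

    if (partial + len >= BLOCK) {
        if (partial) {
            done = -(long)partial;                 /* the negative-offset trick */
            memcpy(buffer + partial, data, done + BLOCK);
            src = buffer;                          /* first block comes from the buffer */
        }
        do {
            toy_transform(src);
            done += BLOCK;
            src = data + done;                     /* back into the caller's data */
        } while (done + BLOCK <= (long)len);
        partial = 0;
    }
    memcpy(buffer + partial, src, len - done);     /* stash the tail */
    return partial + (len - done);
}

int main(void)
{
    uint8_t msg[200] = { 0 };
    size_t tail;

    memcpy(buffer, msg, 10);                       /* pretend 10 bytes are already buffered */
    tail = update(msg + 10, 150, 10);
    printf("blocks processed: %u, tail buffered: %zu\n", processed_blocks, tail);
    return 0;
}
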
/kernel/linux/linux-5.10/arch/arm64/crypto/
  sha3-ce-glue.c
    43    if ((sctx->partial + len) >= sctx->rsiz) {  in sha3_update()
    46    if (sctx->partial) {  in sha3_update()
    47    int p = sctx->rsiz - sctx->partial;  in sha3_update()
    49    memcpy(sctx->buf + sctx->partial, data, p);  in sha3_update()
    56    sctx->partial = 0;  in sha3_update()
    75    memcpy(sctx->buf + sctx->partial, data, len);  in sha3_update()
    76    sctx->partial += len;  in sha3_update()
    91    sctx->buf[sctx->partial++] = 0x06;  in sha3_final()
    92    memset(sctx->buf + sctx->partial, 0, sctx->rsiz - sctx->partial);  in sha3_final()
    [all...]
/kernel/linux/linux-6.6/arch/arm64/crypto/
  sha3-ce-glue.c
    43    if ((sctx->partial + len) >= sctx->rsiz) {  in sha3_update()
    46    if (sctx->partial) {  in sha3_update()
    47    int p = sctx->rsiz - sctx->partial;  in sha3_update()
    49    memcpy(sctx->buf + sctx->partial, data, p);  in sha3_update()
    56    sctx->partial = 0;  in sha3_update()
    75    memcpy(sctx->buf + sctx->partial, data, len);  in sha3_update()
    76    sctx->partial += len;  in sha3_update()
    91    sctx->buf[sctx->partial++] = 0x06;  in sha3_final()
    92    memset(sctx->buf + sctx->partial, 0, sctx->rsiz - sctx->partial);  in sha3_final()
    [all...]
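
The arm64 SHA-3 glue keeps `partial` inside the hash state and measures it against the sponge rate `rsiz` instead of a fixed block size; finalization appends the 0x06 domain-separation byte, zero-fills the rest of the rate block and, in the real driver, also sets the closing 0x80 bit of the pad10*1 padding before the last permutation. A rough, self-contained sketch of that absorb/pad flow, where struct sha3_ctx and toy_keccak() are stand-ins rather than the driver's types:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/* stand-ins for the driver's state: rsiz is the sponge rate in bytes */
struct sha3_ctx {
    uint8_t  buf[200];
    unsigned rsiz;          /* e.g. 136 for SHA3-256 */
    unsigned partial;       /* bytes of an unfinished rate block in buf */
    unsigned absorbed;      /* toy stand-in for the Keccak state */
};

static void toy_keccak(struct sha3_ctx *ctx, const uint8_t *data, unsigned blocks)
{
    (void)data;
    ctx->absorbed += blocks;            /* a real driver would permute here */
}

static void sha3_toy_update(struct sha3_ctx *ctx, const uint8_t *data, unsigned len)
{
    if (ctx->partial + len >= ctx->rsiz) {
        if (ctx->partial) {
            unsigned p = ctx->rsiz - ctx->partial;   /* top up the buffered block */
            memcpy(ctx->buf + ctx->partial, data, p);
            toy_keccak(ctx, ctx->buf, 1);
            data += p;
            len -= p;
            ctx->partial = 0;
        }
        unsigned blocks = len / ctx->rsiz;
        if (blocks) {
            toy_keccak(ctx, data, blocks);
            data += blocks * ctx->rsiz;
            len -= blocks * ctx->rsiz;
        }
    }
    memcpy(ctx->buf + ctx->partial, data, len);
    ctx->partial += len;
}

static void sha3_toy_final(struct sha3_ctx *ctx)
{
    /* pad10*1 with the 0x06 SHA-3 domain byte, then absorb the final block */
    ctx->buf[ctx->partial++] = 0x06;
    memset(ctx->buf + ctx->partial, 0, ctx->rsiz - ctx->partial);
    ctx->buf[ctx->rsiz - 1] |= 0x80;
    toy_keccak(ctx, ctx->buf, 1);
}

int main(void)
{
    struct sha3_ctx ctx = { .rsiz = 136 };
    uint8_t msg[300] = { 0 };

    sha3_toy_update(&ctx, msg, 300);    /* 2 full rate blocks, 28 bytes buffered */
    sha3_toy_final(&ctx);
    printf("blocks absorbed (incl. padded final block): %u\n", ctx.absorbed);
    return 0;
}
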
/kernel/linux/linux-6.6/fs/ext2/
  inode.c
    325   * @partial: pointer to the last triple within a chain
    331   Indirect *partial)  in ext2_find_goal()
    346   return ext2_find_near(inode, partial);  in ext2_find_goal()
    466   * we had read the existing part of chain and partial points to the last
    632   Indirect *partial;  in ext2_get_blocks() local
    648   partial = ext2_get_branch(inode, depth, offsets, chain, &err);  in ext2_get_blocks()
    650   if (!partial) {  in ext2_get_blocks()
    666   partial = chain + depth - 1;  in ext2_get_blocks()
    696   if (err == -EAGAIN || !verify_chain(chain, partial)) {  in ext2_get_blocks()
    697   while (partial > chai  in ext2_get_blocks()
    330   ext2_find_goal(struct inode *inode, long block, Indirect *partial)  ext2_find_goal() argument
    1034  Indirect *partial, *p;  ext2_find_shared() local
    1170  Indirect *partial;  __ext2_truncate_blocks() local
    [all...]
/kernel/linux/linux-5.10/arch/powerpc/crypto/
  sha1.c
    40    unsigned int partial, done;  in powerpc_sha1_update() local
    43    partial = sctx->count & 0x3f;  in powerpc_sha1_update()
    48    if ((partial + len) > 63) {  in powerpc_sha1_update()
    50    if (partial) {  in powerpc_sha1_update()
    51    done = -partial;  in powerpc_sha1_update()
    52    memcpy(sctx->buffer + partial, data, done + 64);  in powerpc_sha1_update()
    62    partial = 0;  in powerpc_sha1_update()
    64    memcpy(sctx->buffer + partial, src, len - done);  in powerpc_sha1_update()
/kernel/linux/linux-6.6/arch/powerpc/crypto/
  sha1.c
    30    unsigned int partial, done;  in powerpc_sha1_update() local
    33    partial = sctx->count & 0x3f;  in powerpc_sha1_update()
    38    if ((partial + len) > 63) {  in powerpc_sha1_update()
    40    if (partial) {  in powerpc_sha1_update()
    41    done = -partial;  in powerpc_sha1_update()
    42    memcpy(sctx->buffer + partial, data, done + 64);  in powerpc_sha1_update()
    52    partial = 0;  in powerpc_sha1_update()
    54    memcpy(sctx->buffer + partial, src, len - done);  in powerpc_sha1_update()
/kernel/linux/linux-5.10/arch/sparc/crypto/
  md5_glue.c
    46    unsigned int len, unsigned int partial)  in __md5_sparc64_update()
    51    if (partial) {  in __md5_sparc64_update()
    52    done = MD5_HMAC_BLOCK_SIZE - partial;  in __md5_sparc64_update()
    53    memcpy((u8 *)sctx->block + partial, data, done);  in __md5_sparc64_update()
    70    unsigned int partial = sctx->byte_count % MD5_HMAC_BLOCK_SIZE;  in md5_sparc64_update() local
    73    if (partial + len < MD5_HMAC_BLOCK_SIZE) {  in md5_sparc64_update()
    75    memcpy((u8 *)sctx->block + partial, data, len);  in md5_sparc64_update()
    77    __md5_sparc64_update(sctx, data, len, partial);  in md5_sparc64_update()
    45    __md5_sparc64_update(struct md5_state *sctx, const u8 *data, unsigned int len, unsigned int partial)  __md5_sparc64_update() argument

  sha1_glue.c
    41    unsigned int len, unsigned int partial)  in __sha1_sparc64_update()
    46    if (partial) {  in __sha1_sparc64_update()
    47    done = SHA1_BLOCK_SIZE - partial;  in __sha1_sparc64_update()
    48    memcpy(sctx->buffer + partial, data, done);  in __sha1_sparc64_update()
    65    unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;  in sha1_sparc64_update() local
    68    if (partial + len < SHA1_BLOCK_SIZE) {  in sha1_sparc64_update()
    70    memcpy(sctx->buffer + partial, data, len);  in sha1_sparc64_update()
    72    __sha1_sparc64_update(sctx, data, len, partial);  in sha1_sparc64_update()
    40    __sha1_sparc64_update(struct sha1_state *sctx, const u8 *data, unsigned int len, unsigned int partial)  __sha1_sparc64_update() argument
/kernel/linux/linux-6.6/arch/mips/cavium-octeon/crypto/
  octeon-sha1.c
    78    unsigned int partial;  in __octeon_sha1_update() local
    82    partial = sctx->count % SHA1_BLOCK_SIZE;  in __octeon_sha1_update()
    87    if ((partial + len) >= SHA1_BLOCK_SIZE) {  in __octeon_sha1_update()
    88    if (partial) {  in __octeon_sha1_update()
    89    done = -partial;  in __octeon_sha1_update()
    90    memcpy(sctx->buffer + partial, data,  in __octeon_sha1_update()
    101   partial = 0;  in __octeon_sha1_update()
    103   memcpy(sctx->buffer + partial, src, len - done);  in __octeon_sha1_update()
/kernel/linux/linux-6.6/arch/sparc/crypto/
  md5_glue.c
    47    unsigned int len, unsigned int partial)  in __md5_sparc64_update()
    52    if (partial) {  in __md5_sparc64_update()
    53    done = MD5_HMAC_BLOCK_SIZE - partial;  in __md5_sparc64_update()
    54    memcpy((u8 *)sctx->block + partial, data, done);  in __md5_sparc64_update()
    71    unsigned int partial = sctx->byte_count % MD5_HMAC_BLOCK_SIZE;  in md5_sparc64_update() local
    74    if (partial + len < MD5_HMAC_BLOCK_SIZE) {  in md5_sparc64_update()
    76    memcpy((u8 *)sctx->block + partial, data, len);  in md5_sparc64_update()
    78    __md5_sparc64_update(sctx, data, len, partial);  in md5_sparc64_update()
    46    __md5_sparc64_update(struct md5_state *sctx, const u8 *data, unsigned int len, unsigned int partial)  __md5_sparc64_update() argument
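
The sparc glue files matched above split update into two layers: the exported md5_sparc64_update()/sha1_sparc64_update() handle only the cheap case where the new bytes still fit beside the buffered ones, and everything else goes to a __*_sparc64_update() helper that completes the staged block and hands whole blocks, as a single rounds count, to the assembly transform. A sketch of that wrapper shape with toy names throughout (BLOCK, struct toy_ctx, toy_transform), not the sparc code:

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define BLOCK 64

struct toy_ctx {
    uint64_t count;
    uint8_t  block[BLOCK];
    unsigned blocks_run;       /* stand-in for the asm-updated digest state */
};

/* stand-in for the assembly transform: consumes `rounds` whole blocks */
static void toy_transform(struct toy_ctx *ctx, const uint8_t *data, unsigned rounds)
{
    (void)data;
    ctx->blocks_run += rounds;
}

static void __toy_update(struct toy_ctx *ctx, const uint8_t *data,
                         unsigned len, unsigned partial)
{
    unsigned done = 0;

    ctx->count += len;
    if (partial) {
        done = BLOCK - partial;                    /* finish the buffered block */
        memcpy(ctx->block + partial, data, done);
        toy_transform(ctx, ctx->block, 1);
    }
    if (len - done >= BLOCK) {
        unsigned rounds = (len - done) / BLOCK;    /* bulk work in a single call */
        toy_transform(ctx, data + done, rounds);
        done += rounds * BLOCK;
    }
    memcpy(ctx->block, data + done, len - done);   /* stash the tail */
}

static void toy_update(struct toy_ctx *ctx, const uint8_t *data, unsigned len)
{
    unsigned partial = ctx->count % BLOCK;

    if (partial + len < BLOCK) {                   /* fast path: just buffer */
        ctx->count += len;
        memcpy(ctx->block + partial, data, len);
    } else {
        __toy_update(ctx, data, len, partial);
    }
}

int main(void)
{
    struct toy_ctx ctx = { 0 };
    uint8_t msg[200] = { 0 };

    toy_update(&ctx, msg, 30);      /* fast path */
    toy_update(&ctx, msg, 170);     /* slow path: 1 completed + 2 bulk blocks */
    printf("count=%llu blocks=%u\n", (unsigned long long)ctx.count, ctx.blocks_run);
    return 0;
}
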