/kernel/linux/linux-5.10/fs/ubifs/

scan.c
    30  int pad_len = 0, max_pad_len = min_t(int, UBIFS_PAD_NODE_SZ, len);   [scan_padding_bytes(), local]
    35  while (pad_len < max_pad_len && *p++ == UBIFS_PADDING_BYTE)
    36          pad_len += 1;
    38  if (!pad_len || (pad_len & 7))
    41  dbg_scan("%d padding bytes", pad_len);
    43  return pad_len;
    84  int pad_len = le32_to_cpu(pad->pad_len);   [ubifs_scan_a_node(), local]
    88  if (pad_len <
    [all ...]

crypto.c
    32  unsigned int pad_len = round_up(in_len, UBIFS_CIPHER_BLOCK_SIZE);   [ubifs_encrypt(), local]
    35  ubifs_assert(c, pad_len <= *out_len);
    39  if (pad_len != in_len)
    40          memset(p + in_len, 0, pad_len - in_len);
    42  err = fscrypt_encrypt_block_inplace(inode, virt_to_page(p), pad_len,
    48  *out_len = pad_len;
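The ubifs_encrypt() hits above show the usual block-cipher padding idiom: the plaintext length is rounded up to the cipher block size and the tail is zero-filled before the buffer is encrypted in place. A minimal userspace sketch of that round-up-and-zero-pad step (BLOCK_SIZE, the buffer size, and the helper names are illustrative stand-ins, not the UBIFS API):

    #include <stdio.h>
    #include <string.h>

    #define BLOCK_SIZE 16u          /* stand-in for UBIFS_CIPHER_BLOCK_SIZE */

    /* Round len up to the next multiple of BLOCK_SIZE (a power of two). */
    static unsigned int round_up_block(unsigned int len)
    {
            return (len + BLOCK_SIZE - 1) & ~(BLOCK_SIZE - 1);
    }

    /* Zero-fill buf from in_len to the padded length; return the padded length. */
    static unsigned int pad_to_block(unsigned char *buf, unsigned int in_len)
    {
            unsigned int pad_len = round_up_block(in_len);

            if (pad_len != in_len)
                    memset(buf + in_len, 0, pad_len - in_len);
            return pad_len;
    }

    int main(void)
    {
            unsigned char buf[64] = "hello";        /* 5 payload bytes */

            printf("padded length: %u\n", pad_to_block(buf, 5));    /* prints 16 */
            return 0;
    }

In the real function the padded length is then handed to fscrypt_encrypt_block_inplace() and written back through *out_len.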
recovery.c
   432  int empty_offs, pad_len;   [clean_buf(), local]
   438  pad_len = empty_offs - *offs;
   439  ubifs_pad(c, *buf, pad_len);
   440  *offs += pad_len;
   441  *buf += pad_len;
   442  *len -= pad_len;
   538  int pad_len = len - ALIGN(endpt, 8);   [fix_unclean_leb(), local]
   540  if (pad_len > 0) {
   541          void *buf = sleb->buf + len - pad_len;
   543  ubifs_pad(c, buf, pad_len);
  1046  int pad_len = len - ALIGN(ucleb->endpt, 8);   [clean_an_unclean_leb(), local]
    [all ...]
lpt_commit.c
  1035  int offs, pad_len;   [get_pad_len(), local]
  1040  pad_len = ALIGN(offs, c->min_io_size) - offs;
  1041  return pad_len;
  1118  int pad_len;   [lpt_gc_lnum(), local]
  1120  pad_len = get_pad_len(c, buf, len);
  1121  if (pad_len) {
  1122          buf += pad_len;
  1123          len -= pad_len;
  1613  int i, pad_len;   [dbg_check_ltab_lnum(), local]
  1615  pad_len
  1861  int pad_len;   [dump_lpt_leb(), local]
    [all ...]
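get_pad_len() above (and clean_buf() in recovery.c) pads the write buffer out to the next minimum-I/O-unit boundary using the kernel's ALIGN() macro. A small standalone sketch of that arithmetic; the ALIGN_UP macro and the 2048-byte I/O size are illustrative stand-ins:

    #include <stdio.h>

    /* Kernel-style ALIGN(): round x up to the next multiple of a (a is a power of two). */
    #define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((a) - 1))

    /* Padding needed so that offs + pad_len lands on a min_io_size boundary. */
    static int get_pad_len(int offs, int min_io_size)
    {
            return ALIGN_UP(offs, min_io_size) - offs;
    }

    int main(void)
    {
            int min_io_size = 2048;         /* illustrative NAND page / min. I/O unit */

            printf("%d %d %d\n",
                   get_pad_len(1500, min_io_size),    /* 548  */
                   get_pad_len(2048, min_io_size),    /* 0    */
                   get_pad_len(2049, min_io_size));   /* 2047 */
            return 0;
    }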
/kernel/linux/linux-6.6/fs/ubifs/

scan.c
    30  int pad_len = 0, max_pad_len = min_t(int, UBIFS_PAD_NODE_SZ, len);   [scan_padding_bytes(), local]
    35  while (pad_len < max_pad_len && *p++ == UBIFS_PADDING_BYTE)
    36          pad_len += 1;
    38  if (!pad_len || (pad_len & 7))
    41  dbg_scan("%d padding bytes", pad_len);
    43  return pad_len;
    84  int pad_len = le32_to_cpu(pad->pad_len);   [ubifs_scan_a_node(), local]
    88  if (pad_len <
    [all ...]

crypto.c
    43  unsigned int pad_len = round_up(in_len, UBIFS_CIPHER_BLOCK_SIZE);   [ubifs_encrypt(), local]
    46  ubifs_assert(c, pad_len <= *out_len);
    50  if (pad_len != in_len)
    51          memset(p + in_len, 0, pad_len - in_len);
    53  err = fscrypt_encrypt_block_inplace(inode, virt_to_page(p), pad_len,
    59  *out_len = pad_len;
recovery.c
   432  int empty_offs, pad_len;   [clean_buf(), local]
   438  pad_len = empty_offs - *offs;
   439  ubifs_pad(c, *buf, pad_len);
   440  *offs += pad_len;
   441  *buf += pad_len;
   442  *len -= pad_len;
   538  int pad_len = len - ALIGN(endpt, 8);   [fix_unclean_leb(), local]
   540  if (pad_len > 0) {
   541          void *buf = sleb->buf + len - pad_len;
   543  ubifs_pad(c, buf, pad_len);
  1046  int pad_len = len - ALIGN(ucleb->endpt, 8);   [clean_an_unclean_leb(), local]
    [all ...]

lpt_commit.c
  1035  int offs, pad_len;   [get_pad_len(), local]
  1040  pad_len = ALIGN(offs, c->min_io_size) - offs;
  1041  return pad_len;
  1118  int pad_len;   [lpt_gc_lnum(), local]
  1120  pad_len = get_pad_len(c, buf, len);
  1121  if (pad_len) {
  1122          buf += pad_len;
  1123          len -= pad_len;
  1613  int i, pad_len;   [dbg_check_ltab_lnum(), local]
  1615  pad_len
  1861  int pad_len;   [dump_lpt_leb(), local]
    [all ...]
/kernel/linux/linux-6.6/drivers/s390/crypto/

zcrypt_cca_key.h
    62  * and modulus, in that order, where pad_len is the modulo 8
    83  unsigned short pad_len;   [struct member]
   174  int short_len, long_len, pad_len, key_len, size;   [zcrypt_type6_crt_key(), local]
   189  pad_len = -(3 * long_len + 2 * short_len) & 7;
   190  key_len = 3 * long_len + 2 * short_len + pad_len + crt->inputdatalength;
   209  key->pvt.pad_len = pad_len;
   222  memset(key->key_parts + 3 * long_len + 2 * short_len + pad_len,
/kernel/linux/linux-5.10/drivers/s390/crypto/

zcrypt_cca_key.h
    62  * and modulus, in that order, where pad_len is the modulo 8
    83  unsigned short pad_len;   [struct member]
   186  int short_len, long_len, pad_len, key_len, size;   [zcrypt_type6_crt_key(), local]
   201  pad_len = -(3*long_len + 2*short_len) & 7;
   202  key_len = 3*long_len + 2*short_len + pad_len + crt->inputdatalength;
   221  key->pvt.pad_len = pad_len;
   234  memset(key->key_parts + 3*long_len + 2*short_len + pad_len,
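In both kernels, zcrypt_type6_crt_key() computes the modulo-8 padding with the negate-and-mask idiom: for an unsigned n, -n & 7 is the number of bytes needed to reach the next multiple of 8, and 0 when n already is one. A standalone check of that identity, using illustrative CRT component sizes rather than real key material:

    #include <stdio.h>

    /* Bytes needed to pad n up to the next multiple of 8; 0 when n is already aligned. */
    static unsigned int pad_to_8(unsigned int n)
    {
            return -n & 7;
    }

    int main(void)
    {
            unsigned int long_len = 64, short_len = 33;         /* illustrative sizes */
            unsigned int body = 3 * long_len + 2 * short_len;   /* 258 */
            unsigned int pad_len = pad_to_8(body);

            printf("body %u + pad %u = %u\n", body, pad_len, body + pad_len);   /* 258 + 6 = 264 */
            return 0;
    }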
/kernel/linux/linux-6.6/drivers/net/can/spi/mcp251xfd/

mcp251xfd-tx.c
   101  int pad_len;   [mcp251xfd_tx_obj_from_skb(), local]
   103  pad_len = len_sanitized - cfd->len;
   104  if (pad_len)
   105          memset(hw_tx_obj->data + cfd->len, 0x0, pad_len);
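Here the padding brings a CAN FD payload up to the next length the frame format can actually encode (0-8, 12, 16, 20, 24, 32, 48 or 64 bytes) and zeroes the added bytes. A rough userspace sketch of the same idea; canfd_sanitize_len() is an illustrative helper, not the driver's API:

    #include <stdio.h>
    #include <string.h>

    /* Payload sizes a CAN FD frame can actually carry (DLC 0..15). */
    static const unsigned char canfd_sizes[] = {
            0, 1, 2, 3, 4, 5, 6, 7, 8, 12, 16, 20, 24, 32, 48, 64
    };

    /* Smallest encodable CAN FD payload size that holds len bytes. */
    static unsigned int canfd_sanitize_len(unsigned int len)
    {
            for (unsigned int i = 0; i < sizeof(canfd_sizes); i++)
                    if (canfd_sizes[i] >= len)
                            return canfd_sizes[i];
            return 64;
    }

    int main(void)
    {
            unsigned char data[64] = { 0xde, 0xad, 0xbe, 0xef, 1, 2, 3, 4, 5 };
            unsigned int len = 9;                                   /* actual payload */
            unsigned int len_sanitized = canfd_sanitize_len(len);   /* 12 */
            unsigned int pad_len = len_sanitized - len;             /* 3  */

            if (pad_len)
                    memset(data + len, 0x0, pad_len);   /* zero the pad, as the driver does */
            printf("len %u -> %u (pad %u)\n", len, len_sanitized, pad_len);
            return 0;
    }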
/kernel/linux/linux-6.6/drivers/net/ethernet/qualcomm/

qca_uart.c
   202  u8 pad_len = 0;   [qcauart_netdev_xmit(), local]
   219  pad_len = QCAFRM_MIN_LEN - skb->len;
   221  pos += qcafrm_create_header(pos, skb->len + pad_len);
   226  if (pad_len) {
   227          memset(pos, 0, pad_len);
   228          pos += pad_len;
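qcauart_netdev_xmit() pads short packets with zeros up to QCAFRM_MIN_LEN before serialising the frame, and announces the padded length in the frame header. A minimal sketch of pad-to-minimum-length framing; FRM_MIN_LEN and put_payload() are illustrative stand-ins, and the real driver also wraps the payload in a frame header and footer:

    #include <stdio.h>
    #include <string.h>

    #define FRM_MIN_LEN 60u         /* illustrative stand-in for QCAFRM_MIN_LEN */

    /* Copy a payload into the frame buffer, zero-padding short payloads to the minimum length. */
    static unsigned int put_payload(unsigned char *out, const unsigned char *payload,
                                    unsigned int len)
    {
            unsigned int pad_len = 0;

            if (len < FRM_MIN_LEN)
                    pad_len = FRM_MIN_LEN - len;

            memcpy(out, payload, len);
            if (pad_len)
                    memset(out + len, 0, pad_len);
            return len + pad_len;   /* length announced in the frame header */
    }

    int main(void)
    {
            unsigned char frame[128];
            unsigned char payload[42] = { 0 };

            printf("on-wire payload: %u bytes\n",
                   put_payload(frame, payload, sizeof(payload)));   /* prints 60 */
            return 0;
    }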
/kernel/linux/linux-5.10/include/linux/

if_rmnet.h
    10  u8 pad_len:6;   [struct member]
    16  u8 pad_len:6;
/kernel/linux/linux-5.10/drivers/net/caif/

caif_virtio.c
   221  u32 cfpkt_len, pad_len;   [cfv_alloc_and_copy_skb(), local]
   235  pad_len = (unsigned long)(frm + cfv->rx_hr) & (IP_HDR_ALIGN - 1);
   237  skb = netdev_alloc_skb(cfv->ndev, frm_len + pad_len);
   243  skb_reserve(skb, cfv->rx_hr + pad_len);
   485  u8 pad_len, hdr_ofs;   [cfv_alloc_and_copy_to_shm(), local]
   502  pad_len = hdr_ofs & (IP_HDR_ALIGN - 1);
   503  buf_info->size = cfv->tx_hr + skb->len + cfv->tx_tr + pad_len;
   511  skb_copy_bits(skb, 0, buf_info->vaddr + cfv->tx_hr + pad_len, skb->len);
   512  sg_init_one(sg, buf_info->vaddr + pad_len,
/kernel/linux/linux-6.6/drivers/net/caif/

caif_virtio.c
   221  u32 cfpkt_len, pad_len;   [cfv_alloc_and_copy_skb(), local]
   235  pad_len = (unsigned long)(frm + cfv->rx_hr) & (IP_HDR_ALIGN - 1);
   237  skb = netdev_alloc_skb(cfv->ndev, frm_len + pad_len);
   243  skb_reserve(skb, cfv->rx_hr + pad_len);
   485  u8 pad_len, hdr_ofs;   [cfv_alloc_and_copy_to_shm(), local]
   502  pad_len = hdr_ofs & (IP_HDR_ALIGN - 1);
   503  buf_info->size = cfv->tx_hr + skb->len + cfv->tx_tr + pad_len;
   511  skb_copy_bits(skb, 0, buf_info->vaddr + cfv->tx_hr + pad_len, skb->len);
   512  sg_init_one(sg, buf_info->vaddr + pad_len,
/kernel/linux/linux-6.6/arch/mips/cavium-octeon/crypto/

octeon-sha1.c
   138  unsigned int pad_len;   [octeon_sha1_final(), local]
   149  pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
   154  __octeon_sha1_update(sctx, padding, pad_len);
octeon-sha256.c
   130  unsigned int pad_len;   [octeon_sha256_final(), local]
   141  pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
   146  __octeon_sha256_update(sctx, padding, pad_len);
octeon-sha512.c
   144  unsigned int pad_len;   [octeon_sha512_final(), local]
   156  pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
   161  __octeon_sha512_update(sctx, padding, pad_len);
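These *_final() helpers all compute the classic Merkle-Damgard padding length: with index = bytes already buffered modulo the block size, enough padding (0x80 then zeros) is appended so the message-length field ends exactly on a block boundary. That is 8 length bytes in a 64-byte block for SHA-1/SHA-256 (hence 56 and 64+56), and 16 length bytes in a 128-byte block for SHA-512 (hence 112 and 128+112). A small self-check of the 64-byte case:

    #include <stdio.h>

    /*
     * Padding length for a 64-byte-block hash (SHA-1, SHA-256), given
     * index = total_bytes % 64 already buffered in the current block.
     */
    static unsigned int sha_pad_len(unsigned int index)
    {
            return (index < 56) ? (56 - index) : ((64 + 56) - index);
    }

    int main(void)
    {
            printf("%u %u %u\n", sha_pad_len(0), sha_pad_len(55), sha_pad_len(60));
            /* 56 1 60 -- in each case index + pad_len + 8 is a multiple of 64 */
            return 0;
    }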
/kernel/linux/linux-5.10/drivers/net/ethernet/qualcomm/

qca_uart.c
   203  u8 pad_len = 0;   [qcauart_netdev_xmit(), local]
   220  pad_len = QCAFRM_MIN_LEN - skb->len;
   222  pos += qcafrm_create_header(pos, skb->len + pad_len);
   227  if (pad_len) {
   228          memset(pos, 0, pad_len);
   229          pos += pad_len;
/kernel/linux/linux-5.10/arch/mips/cavium-octeon/crypto/

octeon-sha256.c
   163  unsigned int pad_len;   [octeon_sha256_final(), local]
   174  pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
   179  __octeon_sha256_update(sctx, padding, pad_len);
octeon-sha512.c
   177  unsigned int pad_len;   [octeon_sha512_final(), local]
   189  pad_len = (index < 112) ? (112 - index) : ((128+112) - index);
   194  __octeon_sha512_update(sctx, padding, pad_len);
octeon-sha1.c
   151  unsigned int pad_len;   [octeon_sha1_final(), local]
   162  pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
   167  __octeon_sha1_update(sctx, padding, pad_len);
/kernel/linux/linux-5.10/drivers/crypto/bcm/

spu.c
   467  u32 pad_len = 0;   [spum_gcm_ccm_pad_len(), local]
   472  pad_len = ((data_size + m1) & ~m1) - data_size;
   474  return pad_len;
   608  hash_parms->pad_len;   [spum_create_request()]
   621  hash_parms->pad_len);
   639  auth_len -= hash_parms->pad_len;
   640  cipher_len -= hash_parms->pad_len;
   661  flow_log(" hash_pad_len:%u\n", hash_parms->pad_len);
/kernel/linux/linux-6.6/drivers/crypto/bcm/

spu.c
   465  u32 pad_len = 0;   [spum_gcm_ccm_pad_len(), local]
   470  pad_len = ((data_size + m1) & ~m1) - data_size;
   472  return pad_len;
   606  hash_parms->pad_len;   [spum_create_request()]
   619  hash_parms->pad_len);
   637  auth_len -= hash_parms->pad_len;
   638  cipher_len -= hash_parms->pad_len;
   659  flow_log(" hash_pad_len:%u\n", hash_parms->pad_len);
/kernel/linux/linux-5.10/drivers/infiniband/hw/hfi1/

vnic_main.c
   260  u8 pad_len, q_idx = skb->queue_mapping;   [hfi1_netdev_start_xmit(), local]
   282  pad_len = -(skb->len + OPA_VNIC_ICRC_TAIL_LEN) & 0x7;
   283  pad_len += OPA_VNIC_ICRC_TAIL_LEN;
   290  pkt_len = (skb->len + pad_len) >> 2;
   296  v_dbg("pbc 0x%016llX len %d pad_len %d\n", pbc, skb->len, pad_len);
   297  err = dd->process_vnic_dma_send(dd, q_idx, vinfo, skb, pbc, pad_len);
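hfi1_netdev_start_xmit() combines the same -x & 0x7 trick with the ICRC tail: frame plus tail is padded to an 8-byte boundary, the tail itself is counted into pad_len, and the total is expressed in 32-bit words for the send descriptor. A worked example of that arithmetic; ICRC_TAIL_LEN and the frame length are illustrative stand-ins, not the driver's constants:

    #include <stdio.h>

    #define ICRC_TAIL_LEN 4u        /* illustrative stand-in for OPA_VNIC_ICRC_TAIL_LEN */

    int main(void)
    {
            unsigned int skb_len = 98;      /* illustrative frame length in bytes */

            /* Pad so that frame + ICRC tail ends on an 8-byte boundary, then count the tail itself. */
            unsigned int pad_len = -(skb_len + ICRC_TAIL_LEN) & 0x7;
            pad_len += ICRC_TAIL_LEN;

            /* Total padded length in 32-bit words. */
            unsigned int pkt_dwords = (skb_len + pad_len) >> 2;

            printf("pad_len %u, packet %u dwords\n", pad_len, pkt_dwords);   /* pad_len 6, 26 dwords */
            return 0;
    }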