/kernel/linux/linux-5.10/arch/loongarch/mm/
  cache.c
     45  struct cache_desc *cdesc = current_cpu_data.cache_leaves + leaf;   in flush_cache_leaf() (local)
     47  nr_nodes = cache_private(cdesc) ? 1 : loongson_sysconf.nr_nodes;   in flush_cache_leaf()
     50  for (i = 0; i < cdesc->sets; i++) {   in flush_cache_leaf()
     51  for (j = 0; j < cdesc->ways; j++) {   in flush_cache_leaf()
     56  addr -= cdesc->ways;   in flush_cache_leaf()
     57  addr += cdesc->linesz;   in flush_cache_leaf()
     66  struct cache_desc *cdesc = current_cpu_data.cache_leaves;   in __flush_cache_all() (local)
     70  if (cache_inclusive(cdesc + leaf)) {   in __flush_cache_all()
     91  #define populate_cache_properties(cfg0, cdesc, level, leaf) \
     97  cdesc
    116  struct cache_desc *cdesc = current_cpu_data.cache_leaves;   in cpu_cache_init() (local)
    [all...]
/kernel/linux/linux-6.6/arch/loongarch/mm/
  cache.c
     48  struct cache_desc *cdesc = current_cpu_data.cache_leaves + leaf;   in flush_cache_leaf() (local)
     50  nr_nodes = cache_private(cdesc) ? 1 : loongson_sysconf.nr_nodes;   in flush_cache_leaf()
     53  for (i = 0; i < cdesc->sets; i++) {   in flush_cache_leaf()
     54  for (j = 0; j < cdesc->ways; j++) {   in flush_cache_leaf()
     59  addr -= cdesc->ways;   in flush_cache_leaf()
     60  addr += cdesc->linesz;   in flush_cache_leaf()
     69  struct cache_desc *cdesc = current_cpu_data.cache_leaves;   in __flush_cache_all() (local)
     73  if (cache_inclusive(cdesc + leaf)) {   in __flush_cache_all()
     94  #define populate_cache_properties(cfg0, cdesc, level, leaf) \
    100  cdesc
    119  struct cache_desc *cdesc = current_cpu_data.cache_leaves;   in cpu_cache_init() (local)
    [all...]
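Note: the cache.c hits in both trees are mostly from flush_cache_leaf(), which flushes one cache leaf by walking every set and way described by its struct cache_desc (and, judging by the nr_nodes line, repeats the walk per node when the leaf is not private). A rough sketch of that set/way walk, not the kernel code itself: struct cache_desc is trimmed down here, and flush_cache_line_indexed() is a placeholder for the real LoongArch index-invalidate operation.

    #include <stdint.h>

    /* Trimmed-down stand-in for the kernel's struct cache_desc. */
    struct cache_desc {
        unsigned int sets;   /* sets per way       */
        unsigned int ways;   /* associativity      */
        unsigned int linesz; /* line size in bytes */
    };

    /* Placeholder: the real code issues a LoongArch cache-op instruction
     * keyed on the encoded address; here it is a no-op stub. */
    static void flush_cache_line_indexed(int leaf, uintptr_t addr)
    {
        (void)leaf;
        (void)addr;
    }

    /* For every set, step through each way (consecutive addresses select
     * ways), then rewind and advance by one line size to reach the next
     * set: the same addr arithmetic visible in the matches above. */
    static void flush_leaf_sketch(int leaf, const struct cache_desc *cdesc)
    {
        uintptr_t addr = 0;
        unsigned int i, j;

        for (i = 0; i < cdesc->sets; i++) {
            for (j = 0; j < cdesc->ways; j++)
                flush_cache_line_indexed(leaf, addr++);
            addr -= cdesc->ways;
            addr += cdesc->linesz;
        }
    }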
/kernel/linux/linux-5.10/drivers/clk/ti/
  clk-dra7-atl.c
     64  struct dra7_atl_desc *cdesc;   (member)
     82  struct dra7_atl_desc *cdesc = to_atl_desc(hw);   in atl_clk_enable() (local)
     84  if (!cdesc->probed)   in atl_clk_enable()
     87  if (unlikely(!cdesc->valid))   in atl_clk_enable()
     88  dev_warn(cdesc->cinfo->dev, "atl%d has not been configured\n",   in atl_clk_enable()
     89  cdesc->id);   in atl_clk_enable()
     90  pm_runtime_get_sync(cdesc->cinfo->dev);   in atl_clk_enable()
     92  atl_write(cdesc->cinfo, DRA7_ATL_ATLCR_REG(cdesc->id),   in atl_clk_enable()
     93  cdesc   in atl_clk_enable()
    104  struct dra7_atl_desc *cdesc = to_atl_desc(hw);   in atl_clk_disable() (local)
    118  struct dra7_atl_desc *cdesc = to_atl_desc(hw);   in atl_clk_is_enabled() (local)
    126  struct dra7_atl_desc *cdesc = to_atl_desc(hw);   in atl_clk_recalc_rate() (local)
    146  struct dra7_atl_desc *cdesc;   in atl_clk_set_rate() (local)
    245  struct dra7_atl_desc *cdesc;   in of_dra7_atl_clk_probe() (local)
    [all...]
/kernel/linux/linux-6.6/drivers/clk/ti/
  clk-dra7-atl.c
     56  struct dra7_atl_desc *cdesc;   (member)
     74  struct dra7_atl_desc *cdesc = to_atl_desc(hw);   in atl_clk_enable() (local)
     76  if (!cdesc->probed)   in atl_clk_enable()
     79  if (unlikely(!cdesc->valid))   in atl_clk_enable()
     80  dev_warn(cdesc->cinfo->dev, "atl%d has not been configured\n",   in atl_clk_enable()
     81  cdesc->id);   in atl_clk_enable()
     82  pm_runtime_get_sync(cdesc->cinfo->dev);   in atl_clk_enable()
     84  atl_write(cdesc->cinfo, DRA7_ATL_ATLCR_REG(cdesc->id),   in atl_clk_enable()
     85  cdesc   in atl_clk_enable()
     96  struct dra7_atl_desc *cdesc = to_atl_desc(hw);   in atl_clk_disable() (local)
    110  struct dra7_atl_desc *cdesc = to_atl_desc(hw);   in atl_clk_is_enabled() (local)
    118  struct dra7_atl_desc *cdesc = to_atl_desc(hw);   in atl_clk_recalc_rate() (local)
    138  struct dra7_atl_desc *cdesc;   in atl_clk_set_rate() (local)
    227  struct dra7_atl_desc *cdesc;   in of_dra7_atl_clk_probe() (local)
    [all...]
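Note: both clk-dra7-atl.c versions show the usual clk_ops shape, where each callback first recovers the driver's private descriptor from the generic struct clk_hw (to_atl_desc() is presumably the customary container_of() wrapper) before touching hardware. A hedged sketch of that shape; my_clk_desc, my_clk_enable and the enable register offset are invented names, not the TI driver's own.

    #include <linux/bits.h>
    #include <linux/clk-provider.h>
    #include <linux/io.h>
    #include <linux/printk.h>

    /* Hypothetical private clock descriptor embedding the generic clk_hw. */
    struct my_clk_desc {
        struct clk_hw hw;
        void __iomem *base;
        int id;
        bool valid;
    };

    #define to_my_clk_desc(_hw) container_of(_hw, struct my_clk_desc, hw)

    /* .enable callback: recover the descriptor, warn if it was never
     * configured, then set a (made-up) enable bit in a (made-up) register. */
    static int my_clk_enable(struct clk_hw *hw)
    {
        struct my_clk_desc *cdesc = to_my_clk_desc(hw);

        if (unlikely(!cdesc->valid))
            pr_warn("clk%d has not been configured\n", cdesc->id);

        writel(BIT(0), cdesc->base);    /* hypothetical enable register */
        return 0;
    }

    static const struct clk_ops my_clk_ops = {
        .enable = my_clk_enable,
    };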
/kernel/linux/linux-5.10/drivers/crypto/inside-secure/
  safexcel_ring.c
     18  struct safexcel_command_desc *cdesc;   in safexcel_init_ring_descriptors() (local)
     48  cdesc = cdr->base;   in safexcel_init_ring_descriptors()
     51  cdesc->atok_lo = lower_32_bits(atok);   in safexcel_init_ring_descriptors()
     52  cdesc->atok_hi = upper_32_bits(atok);   in safexcel_init_ring_descriptors()
     53  cdesc = (void *)cdesc + cdr->offset;   in safexcel_init_ring_descriptors()
    187  struct safexcel_command_desc *cdesc;   in safexcel_add_cdesc() (local)
    189  cdesc = safexcel_ring_next_cwptr(priv, &priv->ring[ring_id].cdr,   in safexcel_add_cdesc()
    191  if (IS_ERR(cdesc))   in safexcel_add_cdesc()
    192  return cdesc;   in safexcel_add_cdesc()
    [all...]
  safexcel_cipher.c
     78  struct safexcel_command_desc *cdesc)   in safexcel_skcipher_iv()
     81  cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;   in safexcel_skcipher_iv()
     83  cdesc->control_data.token[0] = ctx->nonce;   in safexcel_skcipher_iv()
     85  memcpy(&cdesc->control_data.token[1], iv, 8);   in safexcel_skcipher_iv()
     87  cdesc->control_data.token[3] =   in safexcel_skcipher_iv()
     92  cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;   in safexcel_skcipher_iv()
     94  memcpy(&cdesc->control_data.token[0], &iv[4], 12);   in safexcel_skcipher_iv()
     96  cdesc->control_data.token[3] = *(u32 *)iv;   in safexcel_skcipher_iv()
    100  cdesc->control_data.options |= ctx->ivmask;   in safexcel_skcipher_iv()
    101  memcpy(cdesc   in safexcel_skcipher_iv()
     77  safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv, struct safexcel_command_desc *cdesc)   in safexcel_skcipher_iv() (argument)
    105  safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv, struct safexcel_command_desc *cdesc, struct safexcel_token *atoken, u32 length)   in safexcel_skcipher_token() (argument)
    134  safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv, struct safexcel_command_desc *cdesc)   in safexcel_aead_iv() (argument)
    160  safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv, struct safexcel_command_desc *cdesc, struct safexcel_token *atoken, enum safexcel_cipher_direction direction, u32 cryptlen, u32 assoclen, u32 digestsize)   in safexcel_aead_token() (argument)
    505  safexcel_context_control(struct safexcel_cipher_ctx *ctx, struct crypto_async_request *async, struct safexcel_cipher_req *sreq, struct safexcel_command_desc *cdesc)   in safexcel_context_control() (argument)
    678  struct safexcel_command_desc *cdesc;   in safexcel_send_req() (local)
    [all...]
  safexcel.c
    816  int ret, nreq = 0, cdesc = 0, rdesc = 0, commands, results;   in safexcel_dequeue() (local)
    854  cdesc += commands;   in safexcel_dequeue()
    886  writel((cdesc * priv->config.cd_offset),   in safexcel_dequeue()
    952  struct safexcel_command_desc *cdesc;   in safexcel_complete() (local)
    956  cdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].cdr);   in safexcel_complete()
    957  if (IS_ERR(cdesc)) {   in safexcel_complete()
    962  } while (!cdesc->last_seg);   in safexcel_complete()
    980  struct safexcel_command_desc *cdesc;   in safexcel_invalidate_cache() (local)
    986  cdesc = safexcel_add_cdesc(priv, ring, true, true, 0, 0, 0, ctxr_dma,   in safexcel_invalidate_cache()
    988  if (IS_ERR(cdesc))   in safexcel_invalidate_cache()
    [all...]
/kernel/linux/linux-6.6/drivers/crypto/inside-secure/
  safexcel_ring.c
     18  struct safexcel_command_desc *cdesc;   in safexcel_init_ring_descriptors() (local)
     48  cdesc = cdr->base;   in safexcel_init_ring_descriptors()
     51  cdesc->atok_lo = lower_32_bits(atok);   in safexcel_init_ring_descriptors()
     52  cdesc->atok_hi = upper_32_bits(atok);   in safexcel_init_ring_descriptors()
     53  cdesc = (void *)cdesc + cdr->offset;   in safexcel_init_ring_descriptors()
    187  struct safexcel_command_desc *cdesc;   in safexcel_add_cdesc() (local)
    189  cdesc = safexcel_ring_next_cwptr(priv, &priv->ring[ring_id].cdr,   in safexcel_add_cdesc()
    191  if (IS_ERR(cdesc))   in safexcel_add_cdesc()
    192  return cdesc;   in safexcel_add_cdesc()
    [all...]
  safexcel_cipher.c
     78  struct safexcel_command_desc *cdesc)   in safexcel_skcipher_iv()
     81  cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;   in safexcel_skcipher_iv()
     83  cdesc->control_data.token[0] = ctx->nonce;   in safexcel_skcipher_iv()
     85  memcpy(&cdesc->control_data.token[1], iv, 8);   in safexcel_skcipher_iv()
     87  cdesc->control_data.token[3] =   in safexcel_skcipher_iv()
     92  cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;   in safexcel_skcipher_iv()
     94  memcpy(&cdesc->control_data.token[0], &iv[4], 12);   in safexcel_skcipher_iv()
     96  cdesc->control_data.token[3] = *(u32 *)iv;   in safexcel_skcipher_iv()
    100  cdesc->control_data.options |= ctx->ivmask;   in safexcel_skcipher_iv()
    101  memcpy(cdesc   in safexcel_skcipher_iv()
     77  safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv, struct safexcel_command_desc *cdesc)   in safexcel_skcipher_iv() (argument)
    105  safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv, struct safexcel_command_desc *cdesc, struct safexcel_token *atoken, u32 length)   in safexcel_skcipher_token() (argument)
    134  safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv, struct safexcel_command_desc *cdesc)   in safexcel_aead_iv() (argument)
    160  safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv, struct safexcel_command_desc *cdesc, struct safexcel_token *atoken, enum safexcel_cipher_direction direction, u32 cryptlen, u32 assoclen, u32 digestsize)   in safexcel_aead_token() (argument)
    505  safexcel_context_control(struct safexcel_cipher_ctx *ctx, struct crypto_async_request *async, struct safexcel_cipher_req *sreq, struct safexcel_command_desc *cdesc)   in safexcel_context_control() (argument)
    684  struct safexcel_command_desc *cdesc;   in safexcel_send_req() (local)
    [all...]
  safexcel.c
    824  int ret, nreq = 0, cdesc = 0, rdesc = 0, commands, results;   in safexcel_dequeue() (local)
    862  cdesc += commands;   in safexcel_dequeue()
    894  writel((cdesc * priv->config.cd_offset),   in safexcel_dequeue()
    960  struct safexcel_command_desc *cdesc;   in safexcel_complete() (local)
    964  cdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].cdr);   in safexcel_complete()
    965  if (IS_ERR(cdesc)) {   in safexcel_complete()
    970  } while (!cdesc->last_seg);   in safexcel_complete()
    977  struct safexcel_command_desc *cdesc;   in safexcel_invalidate_cache() (local)
    983  cdesc = safexcel_add_cdesc(priv, ring, true, true, 0, 0, 0, ctxr_dma,   in safexcel_invalidate_cache()
    985  if (IS_ERR(cdesc))   in safexcel_invalidate_cache()
    [all...]
  safexcel_hash.c
     75  static void safexcel_hash_token(struct safexcel_command_desc *cdesc,   in safexcel_hash_token() (argument)
     80  (struct safexcel_token *)cdesc->control_data.token;   in safexcel_hash_token()
    110  struct safexcel_command_desc *cdesc)   in safexcel_context_control()
    115  cdesc->control_data.control0 = ctx->alg;   in safexcel_context_control()
    116  cdesc->control_data.control1 = 0;   in safexcel_context_control()
    130  cdesc->control_data.control0 |=   in safexcel_context_control()
    137  cdesc->control_data.control0 |=   in safexcel_context_control()
    146  cdesc->control_data.control0 |= req->digest |   in safexcel_context_control()
    152  cdesc->control_data.control0 |= req->digest |   in safexcel_context_control()
    188  cdesc   in safexcel_context_control()
    108  safexcel_context_control(struct safexcel_ahash_ctx *ctx, struct safexcel_ahash_req *req, struct safexcel_command_desc *cdesc)   in safexcel_context_control() (argument)
    318  struct safexcel_command_desc *cdesc, *first_cdesc = NULL;   in safexcel_ahash_send_req() (local)
    [all...]
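Note: a detail visible in the safexcel_ring.c hits of both trees is that the command-descriptor ring is walked with a per-entry byte stride (cdr->offset) rather than pointer arithmetic on the descriptor type, and the ring helpers report failure through ERR_PTR values checked with IS_ERR(). A small, self-contained sketch of that stride-based walk with invented names (my_desc, my_ring); the kernel steps with (void *)cdesc + cdr->offset, a GNU extension, where this sketch casts through char * instead.

    #include <stddef.h>
    #include <stdint.h>

    struct my_desc {
        uint32_t ctrl;
        uint32_t addr_lo;
        uint32_t addr_hi;
    };

    /* Entries sit "offset" bytes apart; the stride may exceed
     * sizeof(struct my_desc), e.g. for alignment or extra per-entry data. */
    struct my_ring {
        void *base;     /* first descriptor            */
        size_t offset;  /* byte stride between entries */
        size_t count;   /* number of entries           */
    };

    static void my_ring_init(struct my_ring *ring)
    {
        struct my_desc *desc = ring->base;
        size_t i;

        for (i = 0; i < ring->count; i++) {
            desc->ctrl = 0;
            desc->addr_lo = 0;
            desc->addr_hi = 0;
            /* advance by the ring stride, not by sizeof(*desc) */
            desc = (struct my_desc *)((char *)desc + ring->offset);
        }
    }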
/kernel/linux/linux-5.10/drivers/net/ethernet/amazon/ena/
  ena_eth_com.c
     11  struct ena_eth_io_rx_cdesc_base *cdesc;   in ena_com_get_next_rx_cdesc() (local)
     18  cdesc = (struct ena_eth_io_rx_cdesc_base *)(io_cq->cdesc_addr.virt_addr   in ena_com_get_next_rx_cdesc()
     21  desc_phase = (READ_ONCE(cdesc->status) &   in ena_com_get_next_rx_cdesc()
     33  return cdesc;   in ena_com_get_next_rx_cdesc()
    228  struct ena_eth_io_rx_cdesc_base *cdesc;   in ena_com_cdesc_rx_pkt_get() (local)
    233  cdesc = ena_com_get_next_rx_cdesc(io_cq);   in ena_com_cdesc_rx_pkt_get()
    234  if (!cdesc)   in ena_com_cdesc_rx_pkt_get()
    239  last = (READ_ONCE(cdesc->status) &   in ena_com_cdesc_rx_pkt_get()
    336  struct ena_eth_io_rx_cdesc_base *cdesc)   in ena_com_rx_set_flags()
    338  ena_rx_ctx->l3_proto = cdesc   in ena_com_rx_set_flags()
    335  ena_com_rx_set_flags(struct ena_com_rx_ctx *ena_rx_ctx, struct ena_eth_io_rx_cdesc_base *cdesc)   in ena_com_rx_set_flags() (argument)
    515  struct ena_eth_io_rx_cdesc_base *cdesc = NULL;   in ena_com_rx_pkt() (local)
    602  struct ena_eth_io_rx_cdesc_base *cdesc;   in ena_com_cq_empty() (local)
    [all...]
  ena_eth_com.h
    221  struct ena_eth_io_tx_cdesc *cdesc;   in ena_com_tx_comp_req_id_get() (local)
    227  cdesc = (struct ena_eth_io_tx_cdesc *)   in ena_com_tx_comp_req_id_get()
    235  cdesc_phase = READ_ONCE(cdesc->flags) & ENA_ETH_IO_TX_CDESC_PHASE_MASK;   in ena_com_tx_comp_req_id_get()
    241  *req_id = READ_ONCE(cdesc->req_id);   in ena_com_tx_comp_req_id_get()
    243  pr_err("Invalid req id %d\n", cdesc->req_id);   in ena_com_tx_comp_req_id_get()
/kernel/linux/linux-6.6/drivers/net/ethernet/amazon/ena/
  ena_eth_com.c
     11  struct ena_eth_io_rx_cdesc_base *cdesc;   in ena_com_get_next_rx_cdesc() (local)
     18  cdesc = (struct ena_eth_io_rx_cdesc_base *)(io_cq->cdesc_addr.virt_addr   in ena_com_get_next_rx_cdesc()
     21  desc_phase = (READ_ONCE(cdesc->status) &   in ena_com_get_next_rx_cdesc()
     33  return cdesc;   in ena_com_get_next_rx_cdesc()
    239  struct ena_eth_io_rx_cdesc_base *cdesc;   in ena_com_cdesc_rx_pkt_get() (local)
    244  cdesc = ena_com_get_next_rx_cdesc(io_cq);   in ena_com_cdesc_rx_pkt_get()
    245  if (!cdesc)   in ena_com_cdesc_rx_pkt_get()
    250  last = (READ_ONCE(cdesc->status) &   in ena_com_cdesc_rx_pkt_get()
    349  struct ena_eth_io_rx_cdesc_base *cdesc)   in ena_com_rx_set_flags()
    351  ena_rx_ctx->l3_proto = cdesc   in ena_com_rx_set_flags()
    347  ena_com_rx_set_flags(struct ena_com_io_cq *io_cq, struct ena_com_rx_ctx *ena_rx_ctx, struct ena_eth_io_rx_cdesc_base *cdesc)   in ena_com_rx_set_flags() (argument)
    544  struct ena_eth_io_rx_cdesc_base *cdesc = NULL;   in ena_com_rx_pkt() (local)
    639  struct ena_eth_io_rx_cdesc_base *cdesc;   in ena_com_cq_empty() (local)
    [all...]
  ena_eth_com.h
    229  struct ena_eth_io_tx_cdesc *cdesc;   in ena_com_tx_comp_req_id_get() (local)
    235  cdesc = (struct ena_eth_io_tx_cdesc *)   in ena_com_tx_comp_req_id_get()
    243  cdesc_phase = READ_ONCE(cdesc->flags) & ENA_ETH_IO_TX_CDESC_PHASE_MASK;   in ena_com_tx_comp_req_id_get()
    249  *req_id = READ_ONCE(cdesc->req_id);   in ena_com_tx_comp_req_id_get()
    252  "Invalid req id %d\n", cdesc->req_id);   in ena_com_tx_comp_req_id_get()
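Note: both ENA trees poll completion descriptors with a phase bit. The device writes each completion carrying the current phase flag, the driver reads the status with READ_ONCE() so the compiler cannot cache it, compares the phase against the one it expects, and flips its expected phase each time the completion queue wraps. A minimal userspace-style sketch of that handshake with invented names (my_cdesc, my_cq); a kernel version would additionally need a read barrier before consuming the rest of the descriptor.

    #include <stddef.h>
    #include <stdint.h>

    #define MY_CDESC_PHASE_MASK 0x1u

    /* Hypothetical completion descriptor: the device writes status last,
     * with the low bit carrying the current phase. */
    struct my_cdesc {
        uint16_t req_id;
        uint16_t status;
    };

    struct my_cq {
        struct my_cdesc *ring;
        uint32_t mask;          /* ring size - 1 (power of two)           */
        uint32_t head;          /* next index to consume                  */
        uint8_t expected_phase; /* starts at the device's first-pass phase */
    };

    /* Return the next completion if the device has produced one, else NULL. */
    static struct my_cdesc *my_cq_next(struct my_cq *cq)
    {
        struct my_cdesc *cdesc = &cq->ring[cq->head & cq->mask];
        uint8_t phase = (uint8_t)(cdesc->status & MY_CDESC_PHASE_MASK);

        if (phase != cq->expected_phase)
            return NULL;            /* device has not written it yet */

        cq->head++;
        if ((cq->head & cq->mask) == 0)
            cq->expected_phase ^= 1;    /* wrapped: phase flips */
        return cdesc;
    }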
/kernel/linux/linux-5.10/arch/loongarch/include/asm/
  cacheflush.h
     12  static inline bool cache_present(struct cache_desc *cdesc)   in cache_present() (argument)
     14  return cdesc->flags & CACHE_PRESENT;   in cache_present()
     17  static inline bool cache_private(struct cache_desc *cdesc)   in cache_private() (argument)
     19  return cdesc->flags & CACHE_PRIVATE;   in cache_private()
     22  static inline bool cache_inclusive(struct cache_desc *cdesc)   in cache_inclusive() (argument)
     24  return cdesc->flags & CACHE_INCLUSIVE;   in cache_inclusive()
/kernel/linux/linux-6.6/arch/loongarch/include/asm/
  cacheflush.h
     12  static inline bool cache_present(struct cache_desc *cdesc)   in cache_present() (argument)
     14  return cdesc->flags & CACHE_PRESENT;   in cache_present()
     17  static inline bool cache_private(struct cache_desc *cdesc)   in cache_private() (argument)
     19  return cdesc->flags & CACHE_PRIVATE;   in cache_private()
     22  static inline bool cache_inclusive(struct cache_desc *cdesc)   in cache_inclusive() (argument)
     24  return cdesc->flags & CACHE_INCLUSIVE;   in cache_inclusive()
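Note: the cacheflush.h hits are one-line predicates that test individual bits of cache_desc->flags. A generic sketch of the same flag-predicate pattern with made-up masks; DESC_PRESENT and friends are illustrative only, the real CACHE_PRESENT/CACHE_PRIVATE/CACHE_INCLUSIVE masks live in the LoongArch headers.

    #include <stdbool.h>

    /* Hypothetical flag bits for a descriptor's flags word. */
    #define DESC_PRESENT   (1u << 0)
    #define DESC_PRIVATE   (1u << 1)
    #define DESC_INCLUSIVE (1u << 2)

    struct desc {
        unsigned int flags;
    };

    /* Returning "flags & MASK" from a bool function relies on the implicit
     * conversion to true/false; writing !!(...) makes that explicit. */
    static inline bool desc_present(const struct desc *d)
    {
        return !!(d->flags & DESC_PRESENT);
    }

    static inline bool desc_private(const struct desc *d)
    {
        return !!(d->flags & DESC_PRIVATE);
    }

    static inline bool desc_inclusive(const struct desc *d)
    {
        return !!(d->flags & DESC_INCLUSIVE);
    }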
/kernel/linux/linux-5.10/drivers/usb/core/
  of.c
     60  struct usb_config_descriptor *cdesc;   in usb_of_has_combined_node() (local)
     69  cdesc = &udev->config->desc;   in usb_of_has_combined_node()
     70  if (cdesc->bNumInterfaces == 1)   in usb_of_has_combined_node()
/kernel/linux/linux-6.6/drivers/usb/core/
  of.c
     59  struct usb_config_descriptor *cdesc;   in usb_of_has_combined_node() (local)
     68  cdesc = &udev->config->desc;   in usb_of_has_combined_node()
     69  if (cdesc->bNumInterfaces == 1)   in usb_of_has_combined_node()
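Note: usb_of_has_combined_node() reads the device's configuration descriptor and, per the bNumInterfaces == 1 hit, special-cases single-interface devices. For reference, a hedged sketch of the standard 9-byte configuration descriptor layout and that check, using USB-spec field names rather than the kernel's struct usb_device plumbing; wTotalLength is little-endian on the wire.

    #include <stdbool.h>
    #include <stdint.h>

    /* Standard USB configuration descriptor (USB 2.0, section 9.6.3). */
    struct usb_config_descriptor {
        uint8_t  bLength;
        uint8_t  bDescriptorType;     /* 0x02 for CONFIGURATION */
        uint16_t wTotalLength;        /* little-endian on the wire */
        uint8_t  bNumInterfaces;
        uint8_t  bConfigurationValue;
        uint8_t  iConfiguration;
        uint8_t  bmAttributes;
        uint8_t  bMaxPower;
    } __attribute__((packed));

    /* True when the configuration exposes exactly one interface, the case
     * where a combined device/interface firmware node makes sense. */
    static bool config_has_single_interface(const struct usb_config_descriptor *cdesc)
    {
        return cdesc->bNumInterfaces == 1;
    }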
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlxfw/
  mlxfw_mfa2.c
    189  const struct mlxfw_mfa2_tlv_component_descriptor *cdesc;   in mlxfw_mfa2_file_comp_validate() (local)
    211  cdesc = mlxfw_mfa2_tlv_component_descriptor_get(mfa2_file, tlv);   in mlxfw_mfa2_file_comp_validate()
    212  if (!cdesc) {   in mlxfw_mfa2_file_comp_validate()
    217  pr_debug(" -- Component type %d\n", be16_to_cpu(cdesc->identifier));   in mlxfw_mfa2_file_comp_validate()
    219  ((u64) be32_to_cpu(cdesc->cb_offset_h) << 32)   in mlxfw_mfa2_file_comp_validate()
    220  | be32_to_cpu(cdesc->cb_offset_l), be32_to_cpu(cdesc->size));   in mlxfw_mfa2_file_comp_validate()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlxfw/
  mlxfw_mfa2.c
    189  const struct mlxfw_mfa2_tlv_component_descriptor *cdesc;   in mlxfw_mfa2_file_comp_validate() (local)
    211  cdesc = mlxfw_mfa2_tlv_component_descriptor_get(mfa2_file, tlv);   in mlxfw_mfa2_file_comp_validate()
    212  if (!cdesc) {   in mlxfw_mfa2_file_comp_validate()
    217  pr_debug(" -- Component type %d\n", be16_to_cpu(cdesc->identifier));   in mlxfw_mfa2_file_comp_validate()
    219  ((u64) be32_to_cpu(cdesc->cb_offset_h) << 32)   in mlxfw_mfa2_file_comp_validate()
    220  | be32_to_cpu(cdesc->cb_offset_l), be32_to_cpu(cdesc->size));   in mlxfw_mfa2_file_comp_validate()
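Note: the mlxfw_mfa2.c hits show a 64-bit component offset stored on the wire as two big-endian 32-bit halves (cb_offset_h/cb_offset_l) and reassembled with be32_to_cpu() and a shift. A small userspace equivalent of that reassembly, using ntohl() in place of the kernel's be32_to_cpu(); the struct wire_offset layout is illustrative only.

    #include <arpa/inet.h>  /* ntohl() */
    #include <stdint.h>

    /* Hypothetical on-the-wire layout: a 64-bit offset split into
     * big-endian high and low 32-bit words. */
    struct wire_offset {
        uint32_t offset_h;  /* big-endian high word */
        uint32_t offset_l;  /* big-endian low word  */
    };

    /* Convert each half to host order, then shift the high word into
     * place and OR in the low word. */
    static uint64_t wire_offset_to_host(const struct wire_offset *wo)
    {
        return ((uint64_t)ntohl(wo->offset_h) << 32) | ntohl(wo->offset_l);
    }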
/kernel/linux/linux-5.10/arch/loongarch/kernel/
  cacheinfo.c
     66  struct cache_desc *cd, *cdesc = current_cpu_data.cache_leaves;   in populate_cache_leaves() (local)
     69  cd = cdesc + i;   in populate_cache_leaves()
/kernel/linux/linux-6.6/arch/loongarch/kernel/
  cacheinfo.c
     67  struct cache_desc *cd, *cdesc = current_cpu_data.cache_leaves;   in populate_cache_leaves() (local)
     70  cd = cdesc + i;   in populate_cache_leaves()
/kernel/linux/linux-5.10/drivers/net/ethernet/altera/
  altera_sgdma.c
    172  struct sgdma_descrip __iomem *cdesc = &descbase[0];   in sgdma_tx_buffer() (local)
    179  sgdma_setup_descrip(cdesc, /* current descriptor */   in sgdma_tx_buffer()
    189  sgdma_async_write(priv, cdesc);   in sgdma_tx_buffer()
    341  struct sgdma_descrip __iomem *cdesc = &descbase[0];   in sgdma_async_read() (local)
    352  sgdma_setup_descrip(cdesc, /* current descriptor */   in sgdma_async_read()
    367  csrwr32(lower_32_bits(sgdma_rxphysaddr(priv, cdesc)),   in sgdma_async_read()
/kernel/linux/linux-6.6/drivers/net/ethernet/altera/
  altera_sgdma.c
    172  struct sgdma_descrip __iomem *cdesc = &descbase[0];   in sgdma_tx_buffer() (local)
    179  sgdma_setup_descrip(cdesc, /* current descriptor */   in sgdma_tx_buffer()
    189  sgdma_async_write(priv, cdesc);   in sgdma_tx_buffer()
    341  struct sgdma_descrip __iomem *cdesc = &descbase[0];   in sgdma_async_read() (local)
    352  sgdma_setup_descrip(cdesc, /* current descriptor */   in sgdma_async_read()
    367  csrwr32(lower_32_bits(sgdma_rxphysaddr(priv, cdesc)),   in sgdma_async_read()
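Note: the altera_sgdma.c hits program a descriptor's bus address into a 32-bit CSR via lower_32_bits(), and the safexcel_ring.c hits further up store a 64-bit token address as separate atok_lo/atok_hi words. Both come down to splitting a 64-bit address into 32-bit halves for 32-bit registers. A small sketch of that split with local helpers equivalent to the kernel's lower_32_bits()/upper_32_bits(); the struct dma_desc layout here is made up.

    #include <stdint.h>

    /* Equivalents of the kernel's lower_32_bits()/upper_32_bits(). */
    static inline uint32_t lo32(uint64_t v) { return (uint32_t)v; }
    static inline uint32_t hi32(uint64_t v) { return (uint32_t)(v >> 32); }

    /* Hypothetical descriptor with separate low/high address words, as a
     * device with 32-bit registers would expose them. */
    struct dma_desc {
        uint32_t addr_lo;
        uint32_t addr_hi;
        uint32_t len;
        uint32_t ctrl;
    };

    static void dma_desc_set_addr(struct dma_desc *d, uint64_t bus_addr, uint32_t len)
    {
        d->addr_lo = lo32(bus_addr);  /* device reads the low word...   */
        d->addr_hi = hi32(bus_addr);  /* ...and the high word separately */
        d->len = len;
        d->ctrl = 0;
    }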