
Searched refs:eseg (Results 1 - 18 of 18) sorted by relevance

/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ipsec_rxtx.h
82 static inline bool mlx5e_ipsec_eseg_meta(struct mlx5_wqe_eth_seg *eseg) in mlx5e_ipsec_eseg_meta() argument
84 return eseg->flow_table_metadata & cpu_to_be32(MLX5_ETH_WQE_FT_META_IPSEC); in mlx5e_ipsec_eseg_meta()
88 struct mlx5_wqe_eth_seg *eseg);
126 struct mlx5_wqe_eth_seg *eseg) in mlx5e_ipsec_txwqe_build_eseg_csum()
130 if (!mlx5e_ipsec_eseg_meta(eseg)) in mlx5e_ipsec_txwqe_build_eseg_csum()
133 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM; in mlx5e_ipsec_txwqe_build_eseg_csum()
136 eseg->cs_flags |= MLX5_ETH_WQE_L3_INNER_CSUM; in mlx5e_ipsec_txwqe_build_eseg_csum()
138 eseg->cs_flags |= MLX5_ETH_WQE_L4_INNER_CSUM; in mlx5e_ipsec_txwqe_build_eseg_csum()
140 eseg->cs_flags |= MLX5_ETH_WQE_L4_CSUM; in mlx5e_ipsec_txwqe_build_eseg_csum()
153 static inline bool mlx5e_ipsec_eseg_meta(struct mlx5_wqe_eth_seg *eseg) in mlx5e_ipsec_eseg_meta() argument
125 mlx5e_ipsec_txwqe_build_eseg_csum(struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg) mlx5e_ipsec_txwqe_build_eseg_csum() argument
164 mlx5e_ipsec_txwqe_build_eseg_csum(struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg) mlx5e_ipsec_txwqe_build_eseg_csum() argument
[all...]
ipsec_rxtx.c
76 struct mlx5_wqe_eth_seg *eseg, u8 mode, in mlx5e_ipsec_set_swp()
93 eseg->swp_outer_l3_offset = skb_network_offset(skb) / 2; in mlx5e_ipsec_set_swp()
95 eseg->swp_flags |= MLX5_ETH_WQE_SWP_OUTER_L3_IPV6; in mlx5e_ipsec_set_swp()
99 eseg->swp_inner_l3_offset = skb_inner_network_offset(skb) / 2; in mlx5e_ipsec_set_swp()
101 eseg->swp_flags |= MLX5_ETH_WQE_SWP_INNER_L3_IPV6; in mlx5e_ipsec_set_swp()
105 eseg->swp_flags |= MLX5_ETH_WQE_SWP_INNER_L4_UDP; in mlx5e_ipsec_set_swp()
109 eseg->swp_inner_l4_offset = skb_inner_transport_offset(skb) / 2; in mlx5e_ipsec_set_swp()
124 eseg->swp_flags |= MLX5_ETH_WQE_SWP_OUTER_L4_UDP; in mlx5e_ipsec_set_swp()
128 eseg->swp_outer_l4_offset = skb_inner_transport_offset(skb) / 2; in mlx5e_ipsec_set_swp()
137 eseg in mlx5e_ipsec_set_swp()
75 mlx5e_ipsec_set_swp(struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, u8 mode, struct xfrm_offload *xo) mlx5e_ipsec_set_swp() argument
219 mlx5e_ipsec_tx_build_eseg(struct mlx5e_priv *priv, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg) mlx5e_ipsec_tx_build_eseg() argument
[all...]
en_accel.h
55 mlx5e_tx_tunnel_accel(struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, u16 ihs) in mlx5e_tx_tunnel_accel() argument
88 mlx5e_set_eseg_swp(skb, eseg, &swp_spec); in mlx5e_tx_tunnel_accel()
90 mlx5e_eseg_swp_offsets_add_vlan(eseg); in mlx5e_tx_tunnel_accel()
164 /* Part of the eseg touched by TX offloads */
169 struct mlx5_wqe_eth_seg *eseg, u16 ihs) in mlx5e_accel_tx_eseg()
173 mlx5e_ipsec_tx_build_eseg(priv, skb, eseg); in mlx5e_accel_tx_eseg()
178 mlx5e_macsec_tx_build_eseg(priv->macsec, skb, eseg); in mlx5e_accel_tx_eseg()
183 mlx5e_tx_tunnel_accel(skb, eseg, ihs); in mlx5e_accel_tx_eseg()
167 mlx5e_accel_tx_eseg(struct mlx5e_priv *priv, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, u16 ihs) mlx5e_accel_tx_eseg() argument
macsec.h
23 struct mlx5_wqe_eth_seg *eseg);
macsec.c
1664 struct mlx5_wqe_eth_seg *eseg) in mlx5e_macsec_tx_build_eseg()
1674 eseg->flow_table_metadata = cpu_to_be32(MLX5_ETH_WQE_FT_META_MACSEC | fs_id << 2); in mlx5e_macsec_tx_build_eseg()
1662 mlx5e_macsec_tx_build_eseg(struct mlx5e_macsec *macsec, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg) mlx5e_macsec_tx_build_eseg() argument
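The macsec.c hit above folds the flow-steering id (fs_id) into the eseg flow-table metadata word together with a MACsec marker bit, and the ipsec_rxtx.h hits test the same word for the IPsec marker. Below is a minimal standalone sketch of that packing; the bit positions and type names are placeholders for this demo, not the real MLX5_ETH_WQE_FT_META_* constants from the mlx5 headers.

/* Illustrative sketch of the flow_table_metadata packing seen in the macsec.c
 * and ipsec_rxtx.h hits above. Bit positions here are placeholders, not the
 * real MLX5_ETH_WQE_FT_META_* values. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>  /* htonl()/ntohl() stand in for cpu_to_be32()/be32_to_cpu() */

#define DEMO_FT_META_IPSEC  (1u << 0)
#define DEMO_FT_META_MACSEC (1u << 1)

struct demo_eth_seg {
	uint32_t flow_table_metadata;  /* kept in big-endian, as on the wire */
};

static void demo_macsec_build_eseg(struct demo_eth_seg *eseg, uint32_t fs_id)
{
	/* mark the packet for the MACsec flow table and carry fs_id above the marker bits */
	eseg->flow_table_metadata = htonl(DEMO_FT_META_MACSEC | (fs_id << 2));
}

static bool demo_ipsec_eseg_meta(const struct demo_eth_seg *eseg)
{
	/* mirrors the flow_table_metadata & FT_META_IPSEC test in ipsec_rxtx.h */
	return ntohl(eseg->flow_table_metadata) & DEMO_FT_META_IPSEC;
}

int main(void)
{
	struct demo_eth_seg eseg = {0};

	demo_macsec_build_eseg(&eseg, 5);
	printf("metadata = 0x%08x, ipsec marked: %d\n",
	       ntohl(eseg.flow_table_metadata), demo_ipsec_eseg_meta(&eseg));
	return 0;
}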
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/
en_tx.c
121 struct mlx5_wqe_eth_seg *eseg) in mlx5e_txwqe_build_eseg_csum()
123 if (unlikely(mlx5e_ipsec_txwqe_build_eseg_csum(sq, skb, eseg))) in mlx5e_txwqe_build_eseg_csum()
127 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM; in mlx5e_txwqe_build_eseg_csum()
129 eseg->cs_flags |= MLX5_ETH_WQE_L3_INNER_CSUM | in mlx5e_txwqe_build_eseg_csum()
133 eseg->cs_flags |= MLX5_ETH_WQE_L4_CSUM; in mlx5e_txwqe_build_eseg_csum()
138 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM | MLX5_ETH_WQE_L4_CSUM; in mlx5e_txwqe_build_eseg_csum()
375 struct mlx5_wqe_eth_seg *eseg, bool xmit_more) in mlx5e_txwqe_complete()
399 u8 metadata_index = be32_to_cpu(eseg->flow_table_metadata); in mlx5e_txwqe_complete()
426 struct mlx5_wqe_eth_seg *eseg; in mlx5e_sq_xmit_wqe() local
439 eseg in mlx5e_sq_xmit_wqe()
119 mlx5e_txwqe_build_eseg_csum(struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5e_accel_tx_state *accel, struct mlx5_wqe_eth_seg *eseg) mlx5e_txwqe_build_eseg_csum() argument
371 mlx5e_txwqe_complete(struct mlx5e_txqsq *sq, struct sk_buff *skb, const struct mlx5e_tx_attr *attr, const struct mlx5e_tx_wqe_attr *wqe_attr, u8 num_dma, struct mlx5e_tx_wqe_info *wi, struct mlx5_wqe_ctrl_seg *cseg, struct mlx5_wqe_eth_seg *eseg, bool xmit_more) mlx5e_txwqe_complete() argument
512 mlx5e_tx_mpwqe_same_eseg(struct mlx5e_txqsq *sq, struct mlx5_wqe_eth_seg *eseg) mlx5e_tx_mpwqe_same_eseg() argument
520 mlx5e_tx_mpwqe_session_start(struct mlx5e_txqsq *sq, struct mlx5_wqe_eth_seg *eseg) mlx5e_tx_mpwqe_session_start() argument
599 mlx5e_sq_xmit_mpwqe(struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, bool xmit_more) mlx5e_sq_xmit_mpwqe() argument
655 mlx5e_cqe_ts_id_eseg(struct mlx5e_ptpsq *ptpsq, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg) mlx5e_cqe_ts_id_eseg() argument
663 mlx5e_txwqe_build_eseg(struct mlx5e_priv *priv, struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5e_accel_tx_state *accel, struct mlx5_wqe_eth_seg *eseg, u16 ihs) mlx5e_txwqe_build_eseg() argument
710 struct mlx5_wqe_eth_seg eseg = {}; mlx5e_xmit() local
982 struct mlx5_wqe_eth_seg *eseg; mlx5i_sq_xmit() local
[all...]
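The en_tx.c hits above show the shape of mlx5e_txwqe_build_eseg_csum(): the IPsec helper gets first claim on the checksum flags; otherwise L3 checksum offload is requested and either the inner L3/L4 flags (for encapsulated traffic) or the plain L4 flag is added. Here is a minimal standalone sketch of that decision tree; the flag values and the simplified "skb" are assumptions for the demo only, not the kernel code.

/* Illustrative sketch of the cs_flags selection seen in the en_tx.c hits above.
 * The flag values and the simplified "skb" are local stand-ins for the demo. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define DEMO_L3_CSUM        (1u << 0)  /* stands in for MLX5_ETH_WQE_L3_CSUM */
#define DEMO_L4_CSUM        (1u << 1)  /* stands in for MLX5_ETH_WQE_L4_CSUM */
#define DEMO_L3_INNER_CSUM  (1u << 2)  /* stands in for MLX5_ETH_WQE_L3_INNER_CSUM */
#define DEMO_L4_INNER_CSUM  (1u << 3)  /* stands in for MLX5_ETH_WQE_L4_INNER_CSUM */

struct demo_skb {
	bool csum_partial;   /* skb->ip_summed == CHECKSUM_PARTIAL */
	bool encapsulated;   /* skb->encapsulation */
};

struct demo_eth_seg {
	uint8_t cs_flags;
};

/* Returns true if an offload path (e.g. crypto) already filled cs_flags. */
static bool demo_ipsec_csum(struct demo_eth_seg *eseg)
{
	(void)eseg;
	return false;  /* no IPsec state in this toy example */
}

static void demo_build_eseg_csum(const struct demo_skb *skb, struct demo_eth_seg *eseg)
{
	if (demo_ipsec_csum(eseg))
		return;

	if (!skb->csum_partial)
		return;  /* nothing for the NIC to compute */

	eseg->cs_flags = DEMO_L3_CSUM;
	if (skb->encapsulated)
		eseg->cs_flags |= DEMO_L3_INNER_CSUM | DEMO_L4_INNER_CSUM;
	else
		eseg->cs_flags |= DEMO_L4_CSUM;
}

int main(void)
{
	struct demo_skb skb = { .csum_partial = true, .encapsulated = false };
	struct demo_eth_seg eseg = {0};

	demo_build_eseg_csum(&skb, &eseg);
	printf("cs_flags = 0x%x\n", eseg.cs_flags);
	return 0;
}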
en_main.c
1888 struct mlx5_wqe_eth_seg *eseg = &wqe->eth; in mlx5e_open_xdpsq() local
1896 eseg->inline_hdr.sz = cpu_to_be16(inline_hdr_sz); in mlx5e_open_xdpsq()
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
en_tx.c
152 struct mlx5_wqe_eth_seg *eseg) in ipsec_txwqe_build_eseg_csum()
154 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM; in ipsec_txwqe_build_eseg_csum()
156 eseg->cs_flags |= MLX5_ETH_WQE_L3_INNER_CSUM; in ipsec_txwqe_build_eseg_csum()
166 struct mlx5_wqe_eth_seg *eseg) in mlx5e_txwqe_build_eseg_csum()
169 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM; in mlx5e_txwqe_build_eseg_csum()
171 eseg->cs_flags |= MLX5_ETH_WQE_L3_INNER_CSUM | in mlx5e_txwqe_build_eseg_csum()
175 eseg->cs_flags |= MLX5_ETH_WQE_L4_CSUM; in mlx5e_txwqe_build_eseg_csum()
180 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM | MLX5_ETH_WQE_L4_CSUM; in mlx5e_txwqe_build_eseg_csum()
183 } else if (unlikely(eseg->flow_table_metadata & cpu_to_be32(MLX5_ETH_WQE_FT_META_IPSEC))) { in mlx5e_txwqe_build_eseg_csum()
184 ipsec_txwqe_build_eseg_csum(sq, skb, eseg); in mlx5e_txwqe_build_eseg_csum()
151 ipsec_txwqe_build_eseg_csum(struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg) ipsec_txwqe_build_eseg_csum() argument
164 mlx5e_txwqe_build_eseg_csum(struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5e_accel_tx_state *accel, struct mlx5_wqe_eth_seg *eseg) mlx5e_txwqe_build_eseg_csum() argument
416 struct mlx5_wqe_eth_seg *eseg; mlx5e_sq_xmit_wqe() local
472 mlx5e_tx_mpwqe_same_eseg(struct mlx5e_txqsq *sq, struct mlx5_wqe_eth_seg *eseg) mlx5e_tx_mpwqe_same_eseg() argument
480 mlx5e_tx_mpwqe_session_start(struct mlx5e_txqsq *sq, struct mlx5_wqe_eth_seg *eseg) mlx5e_tx_mpwqe_session_start() argument
559 mlx5e_sq_xmit_mpwqe(struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, bool xmit_more) mlx5e_sq_xmit_mpwqe() argument
616 mlx5e_txwqe_build_eseg(struct mlx5e_priv *priv, struct mlx5e_txqsq *sq, struct sk_buff *skb, struct mlx5e_accel_tx_state *accel, struct mlx5_wqe_eth_seg *eseg, u16 ihs) mlx5e_txwqe_build_eseg() argument
648 struct mlx5_wqe_eth_seg eseg = {}; mlx5e_xmit() local
931 struct mlx5_wqe_eth_seg *eseg; mlx5i_sq_xmit() local
[all...]
en_main.c
1520 struct mlx5_wqe_eth_seg *eseg = &wqe->eth; in mlx5e_open_xdpsq() local
1529 eseg->inline_hdr.sz = cpu_to_be16(inline_hdr_sz); in mlx5e_open_xdpsq()
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h
369 static inline void mlx5e_eseg_swp_offsets_add_vlan(struct mlx5_wqe_eth_seg *eseg) in mlx5e_eseg_swp_offsets_add_vlan() argument
372 eseg->swp_outer_l3_offset += VLAN_HLEN / 2; in mlx5e_eseg_swp_offsets_add_vlan()
373 eseg->swp_outer_l4_offset += VLAN_HLEN / 2; in mlx5e_eseg_swp_offsets_add_vlan()
374 eseg->swp_inner_l3_offset += VLAN_HLEN / 2; in mlx5e_eseg_swp_offsets_add_vlan()
375 eseg->swp_inner_l4_offset += VLAN_HLEN / 2; in mlx5e_eseg_swp_offsets_add_vlan()
379 mlx5e_set_eseg_swp(struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, in mlx5e_set_eseg_swp() argument
383 eseg->swp_outer_l3_offset = skb_network_offset(skb) / 2; in mlx5e_set_eseg_swp()
385 eseg->swp_flags |= MLX5_ETH_WQE_SWP_OUTER_L3_IPV6; in mlx5e_set_eseg_swp()
387 eseg->swp_outer_l4_offset = skb_transport_offset(skb) / 2; in mlx5e_set_eseg_swp()
389 eseg in mlx5e_set_eseg_swp()
[all...]
xdp.c
320 struct mlx5_wqe_eth_seg *eseg = &wqe->eth; in mlx5e_xmit_xdp_frame() local
344 memcpy(eseg->inline_hdr.start, xdptxd->data, MLX5E_XDP_MIN_INLINE); in mlx5e_xmit_xdp_frame()
345 eseg->inline_hdr.sz = cpu_to_be16(MLX5E_XDP_MIN_INLINE); in mlx5e_xmit_xdp_frame()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h
421 static inline void mlx5e_eseg_swp_offsets_add_vlan(struct mlx5_wqe_eth_seg *eseg) in mlx5e_eseg_swp_offsets_add_vlan() argument
424 eseg->swp_outer_l3_offset += VLAN_HLEN / 2; in mlx5e_eseg_swp_offsets_add_vlan()
425 eseg->swp_outer_l4_offset += VLAN_HLEN / 2; in mlx5e_eseg_swp_offsets_add_vlan()
426 eseg->swp_inner_l3_offset += VLAN_HLEN / 2; in mlx5e_eseg_swp_offsets_add_vlan()
427 eseg->swp_inner_l4_offset += VLAN_HLEN / 2; in mlx5e_eseg_swp_offsets_add_vlan()
431 mlx5e_set_eseg_swp(struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, in mlx5e_set_eseg_swp() argument
435 eseg->swp_outer_l3_offset = skb_network_offset(skb) / 2; in mlx5e_set_eseg_swp()
437 eseg->swp_flags |= MLX5_ETH_WQE_SWP_OUTER_L3_IPV6; in mlx5e_set_eseg_swp()
439 eseg->swp_outer_l4_offset = skb_transport_offset(skb) / 2; in mlx5e_set_eseg_swp()
441 eseg in mlx5e_set_eseg_swp()
[all...]
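The txrx.h hits show two details of the software-parser (SWP) fields in the eseg: each skb header offset is stored in 2-byte units (the byte offset divided by 2), and prepending a VLAN tag shifts every stored offset by VLAN_HLEN / 2. A small hedged sketch of that arithmetic follows, with toy types standing in for the kernel structures.

/* Illustrative sketch of the SWP offset handling seen in the txrx.h hits above:
 * offsets are stored in 2-byte words, and an inserted VLAN tag shifts them all.
 * The types below are local stand-ins, not the kernel headers. */
#include <stdint.h>
#include <stdio.h>

#define DEMO_VLAN_HLEN 4  /* 802.1Q tag length, as in <linux/if_vlan.h> */

struct demo_eth_seg {
	uint8_t swp_outer_l3_offset;  /* all offsets in 2-byte units */
	uint8_t swp_outer_l4_offset;
	uint8_t swp_inner_l3_offset;
	uint8_t swp_inner_l4_offset;
};

static void demo_set_swp(struct demo_eth_seg *eseg,
			 unsigned int net_off, unsigned int trans_off)
{
	/* skb byte offsets are halved before being written into the eseg */
	eseg->swp_outer_l3_offset = net_off / 2;
	eseg->swp_outer_l4_offset = trans_off / 2;
}

static void demo_swp_offsets_add_vlan(struct demo_eth_seg *eseg)
{
	/* a freshly inserted VLAN tag pushes every header 4 bytes deeper */
	eseg->swp_outer_l3_offset += DEMO_VLAN_HLEN / 2;
	eseg->swp_outer_l4_offset += DEMO_VLAN_HLEN / 2;
	eseg->swp_inner_l3_offset += DEMO_VLAN_HLEN / 2;
	eseg->swp_inner_l4_offset += DEMO_VLAN_HLEN / 2;
}

int main(void)
{
	struct demo_eth_seg eseg = {0};

	demo_set_swp(&eseg, 14, 34);       /* e.g. Ethernet header, then IPv4 */
	demo_swp_offsets_add_vlan(&eseg);  /* account for an added 802.1Q tag */
	printf("outer L3 offset: %u words\n", eseg.swp_outer_l3_offset);
	return 0;
}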
xdp.c
490 struct mlx5_wqe_eth_seg *eseg; in mlx5e_xmit_xdp_frame() local
545 eseg = &wqe->eth; in mlx5e_xmit_xdp_frame()
550 memcpy(eseg->inline_hdr.start, xdptxd->data, sizeof(eseg->inline_hdr.start)); in mlx5e_xmit_xdp_frame()
551 memcpy(dseg, xdptxd->data + sizeof(eseg->inline_hdr.start), in mlx5e_xmit_xdp_frame()
552 inline_hdr_sz - sizeof(eseg->inline_hdr.start)); in mlx5e_xmit_xdp_frame()
572 memset(eseg, 0, sizeof(*eseg) - sizeof(eseg->trailer)); in mlx5e_xmit_xdp_frame()
574 eseg in mlx5e_xmit_xdp_frame()
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
en_accel.h
54 mlx5e_tx_tunnel_accel(struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, u16 ihs) in mlx5e_tx_tunnel_accel() argument
87 mlx5e_set_eseg_swp(skb, eseg, &swp_spec); in mlx5e_tx_tunnel_accel()
89 mlx5e_eseg_swp_offsets_add_vlan(eseg); in mlx5e_tx_tunnel_accel()
163 /* Part of the eseg touched by TX offloads */
168 struct mlx5_wqe_eth_seg *eseg, u16 ihs) in mlx5e_accel_tx_eseg()
172 mlx5e_ipsec_tx_build_eseg(priv, skb, eseg); in mlx5e_accel_tx_eseg()
177 mlx5e_tx_tunnel_accel(skb, eseg, ihs); in mlx5e_accel_tx_eseg()
166 mlx5e_accel_tx_eseg(struct mlx5e_priv *priv, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, u16 ihs) mlx5e_accel_tx_eseg() argument
ipsec_rxtx.c
138 struct mlx5_wqe_eth_seg *eseg, u8 mode, in mlx5e_ipsec_set_swp()
167 mlx5e_set_eseg_swp(skb, eseg, &swp_spec); in mlx5e_ipsec_set_swp()
273 struct mlx5_wqe_eth_seg *eseg) in mlx5e_ipsec_tx_build_eseg()
294 mlx5e_ipsec_set_swp(skb, eseg, x->props.mode, xo); in mlx5e_ipsec_tx_build_eseg()
301 eseg->flow_table_metadata |= cpu_to_be32(MLX5_ETH_WQE_FT_META_IPSEC); in mlx5e_ipsec_tx_build_eseg()
302 eseg->trailer |= cpu_to_be32(MLX5_ETH_WQE_INSERT_TRAILER); in mlx5e_ipsec_tx_build_eseg()
305 eseg->trailer |= (l3_proto == IPPROTO_ESP) ? in mlx5e_ipsec_tx_build_eseg()
309 eseg->trailer |= (l3_proto == IPPROTO_ESP) ? in mlx5e_ipsec_tx_build_eseg()
137 mlx5e_ipsec_set_swp(struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg, u8 mode, struct xfrm_offload *xo) mlx5e_ipsec_set_swp() argument
272 mlx5e_ipsec_tx_build_eseg(struct mlx5e_priv *priv, struct sk_buff *skb, struct mlx5_wqe_eth_seg *eseg) mlx5e_ipsec_tx_build_eseg() argument
ipsec_rxtx.h
91 struct mlx5_wqe_eth_seg *eseg);
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
wr.c
107 struct mlx5_wqe_eth_seg *eseg = *seg; in set_eth_seg() local
109 memset(eseg, 0, sizeof(struct mlx5_wqe_eth_seg)); in set_eth_seg()
112 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM | in set_eth_seg()
122 eseg->mss = cpu_to_be16(ud_wr->mss); in set_eth_seg()
123 eseg->inline_hdr.sz = cpu_to_be16(left); in set_eth_seg()
129 copysz = min_t(u64, *cur_edge - (void *)eseg->inline_hdr.start, in set_eth_seg()
131 memcpy(eseg->inline_hdr.start, pdata, copysz); in set_eth_seg()
133 sizeof(eseg->inline_hdr.start) + copysz, 16); in set_eth_seg()
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/
wr.c
57 struct mlx5_wqe_eth_seg *eseg = *seg; in set_eth_seg() local
59 memset(eseg, 0, sizeof(struct mlx5_wqe_eth_seg)); in set_eth_seg()
62 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM | in set_eth_seg()
72 eseg->mss = cpu_to_be16(ud_wr->mss); in set_eth_seg()
73 eseg->inline_hdr.sz = cpu_to_be16(left); in set_eth_seg()
79 copysz = min_t(u64, *cur_edge - (void *)eseg->inline_hdr.start, in set_eth_seg()
81 memcpy(eseg->inline_hdr.data, pdata, copysz); in set_eth_seg()
83 sizeof(eseg->inline_hdr.start) + copysz, 16); in set_eth_seg()
