
Searched refs:wrlen (Results 1 - 16 of 16) sorted by relevance

/kernel/linux/linux-5.10/drivers/net/ethernet/chelsio/inline_crypto/chtls/
chtls_hw.c
47 unsigned int wrlen; in __set_tcb_field() local
49 wrlen = roundup(sizeof(*req) + sizeof(*sc), 16); in __set_tcb_field()
52 req = (struct cpl_set_tcb_field *)__skb_put(skb, wrlen); in __set_tcb_field()
68 unsigned int wrlen; in chtls_set_tcb_field() local
71 wrlen = roundup(sizeof(*req) + sizeof(*sc), 16); in chtls_set_tcb_field()
73 skb = alloc_skb(wrlen, GFP_ATOMIC); in chtls_set_tcb_field()
77 credits_needed = DIV_ROUND_UP(wrlen, 16); in chtls_set_tcb_field()
96 unsigned int wrlen; in chtls_set_tcb_field_rpl_skb() local
98 wrlen = sizeof(struct cpl_set_tcb_field) + sizeof(struct ulptx_idata); in chtls_set_tcb_field_rpl_skb()
99 wrlen in chtls_set_tcb_field_rpl_skb()
138 unsigned int wrlen; chtls_set_quiesce_ctrl() local
363 int wrlen, klen, len; chtls_setkey() local
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/chelsio/inline_crypto/chtls/
chtls_hw.c
47 unsigned int wrlen; in __set_tcb_field() local
49 wrlen = roundup(sizeof(*req) + sizeof(*sc), 16); in __set_tcb_field()
52 req = (struct cpl_set_tcb_field *)__skb_put(skb, wrlen); in __set_tcb_field()
68 unsigned int wrlen; in chtls_set_tcb_field() local
71 wrlen = roundup(sizeof(*req) + sizeof(*sc), 16); in chtls_set_tcb_field()
73 skb = alloc_skb(wrlen, GFP_ATOMIC); in chtls_set_tcb_field()
77 credits_needed = DIV_ROUND_UP(wrlen, 16); in chtls_set_tcb_field()
96 unsigned int wrlen; in chtls_set_tcb_field_rpl_skb() local
98 wrlen = sizeof(struct cpl_set_tcb_field) + sizeof(struct ulptx_idata); in chtls_set_tcb_field_rpl_skb()
99 wrlen in chtls_set_tcb_field_rpl_skb()
138 unsigned int wrlen; chtls_set_quiesce_ctrl() local
363 int wrlen, klen, len; chtls_setkey() local
[all...]
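
Both kernel trees size this work request the same way: the CPL header plus the trailing immediate data is rounded up to a 16-byte boundary, and the TX channel credits it consumes are counted in 16-byte units. A minimal userspace sketch of that arithmetic follows; the two stub structs are placeholders for the real cpl_set_tcb_field and ulptx_idata layouts from the Chelsio firmware headers.

#include <stdio.h>

/* kernel-style helpers, redefined so the sketch builds in userspace */
#define roundup(x, y)      ((((x) + (y) - 1) / (y)) * (y))
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* placeholder layouts (sizes are illustrative only) */
struct cpl_set_tcb_field_stub { unsigned char bytes[24]; };
struct ulptx_idata_stub       { unsigned char bytes[8];  };

int main(void)
{
	/* CPL plus trailing immediate data, padded to a 16-byte boundary,
	 * as in __set_tcb_field() / chtls_set_tcb_field() */
	unsigned int wrlen = roundup(sizeof(struct cpl_set_tcb_field_stub) +
				     sizeof(struct ulptx_idata_stub), 16);

	/* TX credits are charged in 16-byte units */
	unsigned int credits_needed = DIV_ROUND_UP(wrlen, 16);

	printf("wrlen=%u credits=%u\n", wrlen, credits_needed);
	return 0;
}
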
/kernel/linux/linux-5.10/drivers/scsi/cxgbi/
libcxgbi.h
366 static inline struct sk_buff *alloc_wr(int wrlen, int dlen, gfp_t gfp) in alloc_wr() argument
368 struct sk_buff *skb = alloc_skb(wrlen + dlen, gfp); in alloc_wr()
371 __skb_put(skb, wrlen); in alloc_wr()
372 memset(skb->head, 0, wrlen + dlen); in alloc_wr()
374 pr_info("alloc cpl wr skb %u+%u, OOM.\n", wrlen, dlen); in alloc_wr()
/kernel/linux/linux-6.6/drivers/scsi/cxgbi/
libcxgbi.h
365 static inline struct sk_buff *alloc_wr(int wrlen, int dlen, gfp_t gfp) in alloc_wr() argument
367 struct sk_buff *skb = alloc_skb(wrlen + dlen, gfp); in alloc_wr()
370 __skb_put(skb, wrlen); in alloc_wr()
371 memset(skb->head, 0, wrlen + dlen); in alloc_wr()
373 pr_info("alloc cpl wr skb %u+%u, OOM.\n", wrlen, dlen); in alloc_wr()
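
alloc_wr() is a small sizing helper: allocate one skb large enough for the work-request header (wrlen) plus payload (dlen), zero the whole buffer, and reserve the header space up front with __skb_put(). A hedged userspace analogue is sketched below, with malloc() standing in for alloc_skb() and a plain struct standing in for sk_buff.

#include <stdlib.h>
#include <string.h>

/* crude stand-in for struct sk_buff: head is the WR header, data the payload */
struct wr_buf {
	unsigned char *head;
	unsigned char *data;
	size_t len;
};

static struct wr_buf *alloc_wr_sketch(size_t wrlen, size_t dlen)
{
	struct wr_buf *b = malloc(sizeof(*b));

	if (!b)
		return NULL;
	b->head = malloc(wrlen + dlen);
	if (!b->head) {
		free(b);
		return NULL;			/* the driver logs an OOM message here */
	}
	memset(b->head, 0, wrlen + dlen);	/* libcxgbi zeroes header and payload */
	b->data = b->head + wrlen;		/* __skb_put(skb, wrlen) analogue */
	b->len = wrlen + dlen;
	return b;
}

int main(void)
{
	struct wr_buf *b = alloc_wr_sketch(32, 128);

	if (b) {
		free(b->head);
		free(b);
	}
	return 0;
}
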
/kernel/linux/linux-5.10/drivers/infiniband/hw/cxgb4/
cm.c
649 u32 wrlen = roundup(sizeof(struct cpl_close_con_req), 16); in send_halfclose() local
655 cxgb_mk_close_con_req(skb, wrlen, ep->hwtid, ep->txq_idx, in send_halfclose()
665 int wrlen = roundup(sizeof(*req), 16); in read_tcb() local
672 req = (struct cpl_get_tcb *) skb_put(skb, wrlen); in read_tcb()
673 memset(req, 0, wrlen); in read_tcb()
689 u32 wrlen = roundup(sizeof(struct cpl_abort_req), 16); in send_abort_req() local
696 cxgb_mk_abort_req(req_skb, wrlen, ep->hwtid, ep->txq_idx, in send_abort_req()
726 int win, sizev4, sizev6, wrlen; in send_connect() local
762 wrlen = (ep->com.remote_addr.ss_family == AF_INET) ? in send_connect()
768 skb = get_skb(NULL, wrlen, GFP_KERNEL); in send_connect()
943 int mpalen, wrlen, ret; send_mpa_req() local
1047 int mpalen, wrlen; send_mpa_reject() local
1127 int mpalen, wrlen; send_mpa_reply() local
1416 u32 wrlen = roundup(sizeof(struct cpl_rx_data_ack), 16); update_rx_credits() local
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/cxgb4/
cm.c
649 u32 wrlen = roundup(sizeof(struct cpl_close_con_req), 16); in send_halfclose() local
655 cxgb_mk_close_con_req(skb, wrlen, ep->hwtid, ep->txq_idx, in send_halfclose()
665 int wrlen = roundup(sizeof(*req), 16); in read_tcb() local
672 req = (struct cpl_get_tcb *) skb_put(skb, wrlen); in read_tcb()
673 memset(req, 0, wrlen); in read_tcb()
689 u32 wrlen = roundup(sizeof(struct cpl_abort_req), 16); in send_abort_req() local
696 cxgb_mk_abort_req(req_skb, wrlen, ep->hwtid, ep->txq_idx, in send_abort_req()
726 int win, sizev4, sizev6, wrlen; in send_connect() local
762 wrlen = (ep->com.remote_addr.ss_family == AF_INET) ? in send_connect()
768 skb = get_skb(NULL, wrlen, GFP_KERNEL); in send_connect()
943 int mpalen, wrlen, ret; send_mpa_req() local
1047 int mpalen, wrlen; send_mpa_reject() local
1127 int mpalen, wrlen; send_mpa_reply() local
1416 u32 wrlen = roundup(sizeof(struct cpl_rx_data_ack), 16); update_rx_credits() local
[all...]
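
The cm.c hits all follow one convention: take sizeof() the CPL message, round it up to 16 bytes, reserve that much in the skb, and zero it before filling it in. The sketch below illustrates the convention with made-up struct sizes; the real cpl_close_con_req and cpl_get_tcb layouts come from the t4 message headers.

#include <stdio.h>
#include <string.h>

#define roundup(x, y) ((((x) + (y) - 1) / (y)) * (y))

/* placeholder sizes; the real CPL layouts live in the t4 message headers */
struct cpl_close_con_req_stub { unsigned char b[16]; };
struct cpl_get_tcb_stub       { unsigned char b[20]; };

int main(void)
{
	unsigned char wr[64];

	/* every control message is padded to a 16-byte multiple */
	unsigned int close_wrlen = roundup(sizeof(struct cpl_close_con_req_stub), 16);
	unsigned int tcb_wrlen   = roundup(sizeof(struct cpl_get_tcb_stub), 16);

	/* read_tcb() builds the request in place and zeroes the padded length */
	memset(wr, 0, tcb_wrlen);

	printf("close=%u get_tcb=%u\n", close_wrlen, tcb_wrlen);
	return 0;
}
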
/kernel/linux/linux-5.10/drivers/net/ethernet/chelsio/cxgb4/
cxgb4_uld.h
75 #define INIT_ULPTX_WR(w, wrlen, atomic, tid) do { \
78 (w)->wr.wr_mid = htonl(FW_WR_LEN16_V(DIV_ROUND_UP(wrlen, 16)) | \
sge.c
2170 u32 wrlen; in ethofld_calc_tx_flits() local
2172 wrlen = sizeof(struct fw_eth_tx_eo_wr) + sizeof(struct cpl_tx_pkt_core); in ethofld_calc_tx_flits()
2175 wrlen += sizeof(struct cpl_tx_pkt_lso_core); in ethofld_calc_tx_flits()
2177 wrlen += roundup(hdr_len, 16); in ethofld_calc_tx_flits()
2180 flits = DIV_ROUND_UP(wrlen, 8); in ethofld_calc_tx_flits()
2196 u32 hdr_len, u32 wrlen) in write_eo_wr()
2207 wrlen16 = DIV_ROUND_UP(wrlen, 16); in write_eo_wr()
2261 u32 wrlen, wrlen16, hdr_len, data_len; in ethofld_hard_xmit() local
2298 wrlen = flits * 8; in ethofld_hard_xmit()
2299 wrlen16 = DIV_ROUND_UP(wrlen, 16); in ethofld_hard_xmit()
2194 write_eo_wr(struct adapter *adap, struct sge_eosw_txq *eosw_txq, struct sk_buff *skb, struct fw_eth_tx_eo_wr *wr, u32 hdr_len, u32 wrlen) write_eo_wr() argument
[all...]
cxgb4_filter.c
1746 unsigned int wrlen; in cxgb4_del_hash_filter() local
1771 wrlen = roundup(sizeof(*wr) + (sizeof(*req) + sizeof(*aligner)) in cxgb4_del_hash_filter()
1773 skb = alloc_skb(wrlen, GFP_KERNEL); in cxgb4_del_hash_filter()
1779 req = (struct cpl_set_tcb_field *)__skb_put(skb, wrlen); in cxgb4_del_hash_filter()
1780 INIT_ULPTX_WR(req, wrlen, 0, 0); in cxgb4_del_hash_filter()
/kernel/linux/linux-6.6/drivers/net/ethernet/chelsio/cxgb4/
cxgb4_uld.h
75 #define INIT_ULPTX_WR(w, wrlen, atomic, tid) do { \
78 (w)->wr.wr_mid = htonl(FW_WR_LEN16_V(DIV_ROUND_UP(wrlen, 16)) | \
sge.c
2176 u32 wrlen; in ethofld_calc_tx_flits() local
2178 wrlen = sizeof(struct fw_eth_tx_eo_wr) + sizeof(struct cpl_tx_pkt_core); in ethofld_calc_tx_flits()
2181 wrlen += sizeof(struct cpl_tx_pkt_lso_core); in ethofld_calc_tx_flits()
2183 wrlen += roundup(hdr_len, 16); in ethofld_calc_tx_flits()
2186 flits = DIV_ROUND_UP(wrlen, 8); in ethofld_calc_tx_flits()
2202 u32 hdr_len, u32 wrlen) in write_eo_wr()
2213 wrlen16 = DIV_ROUND_UP(wrlen, 16); in write_eo_wr()
2267 u32 wrlen, wrlen16, hdr_len, data_len; in ethofld_hard_xmit() local
2304 wrlen = flits * 8; in ethofld_hard_xmit()
2305 wrlen16 = DIV_ROUND_UP(wrlen, 16); in ethofld_hard_xmit()
2200 write_eo_wr(struct adapter *adap, struct sge_eosw_txq *eosw_txq, struct sk_buff *skb, struct fw_eth_tx_eo_wr *wr, u32 hdr_len, u32 wrlen) write_eo_wr() argument
[all...]
cxgb4_filter.c
1746 unsigned int wrlen; in cxgb4_del_hash_filter() local
1771 wrlen = roundup(sizeof(*wr) + (sizeof(*req) + sizeof(*aligner)) in cxgb4_del_hash_filter()
1773 skb = alloc_skb(wrlen, GFP_KERNEL); in cxgb4_del_hash_filter()
1779 req = (struct cpl_set_tcb_field *)__skb_put(skb, wrlen); in cxgb4_del_hash_filter()
1780 INIT_ULPTX_WR(req, wrlen, 0, 0); in cxgb4_del_hash_filter()
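
The cxgb4 hits show the same length expressed in three units: bytes (wrlen), 8-byte flits for the TX descriptor ring, and 16-byte LEN16 credits for the FW_WR_LEN16 field that INIT_ULPTX_WR encodes. The sketch below walks through the conversions with illustrative header sizes in place of the real fw_eth_tx_eo_wr and cpl_tx_pkt_core structs.

#include <stdio.h>

#define roundup(x, y)      ((((x) + (y) - 1) / (y)) * (y))
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	/* illustrative sizes only; the real structs are firmware-defined */
	unsigned int eo_wr_size       = 32;	/* fw_eth_tx_eo_wr stand-in */
	unsigned int tx_pkt_core_size = 16;	/* cpl_tx_pkt_core stand-in */
	unsigned int hdr_len          = 54;	/* e.g. Ethernet + IP + TCP */

	/* ethofld_calc_tx_flits(): bytes -> 8-byte flits */
	unsigned int wrlen = eo_wr_size + tx_pkt_core_size + roundup(hdr_len, 16);
	unsigned int flits = DIV_ROUND_UP(wrlen, 8);

	/* ethofld_hard_xmit() / write_eo_wr(): the same request expressed as
	 * 16-byte LEN16 credits, the unit INIT_ULPTX_WR also encodes */
	wrlen = flits * 8;
	unsigned int wrlen16 = DIV_ROUND_UP(wrlen, 16);

	printf("wrlen=%u flits=%u len16=%u\n", wrlen, flits, wrlen16);
	return 0;
}
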
/kernel/linux/linux-5.10/drivers/scsi/cxgbi/cxgb3i/
cxgb3i.c
332 static unsigned int wrlen __read_mostly;
348 wrlen = wr_len * 8; in init_wr_tab()
411 if (wrs_needed > 1 && len + sizeof(struct tx_data_wr) <= wrlen) in push_tx_frames()
/kernel/linux/linux-6.6/drivers/scsi/cxgbi/cxgb3i/
cxgb3i.c
333 static unsigned int wrlen __read_mostly;
349 wrlen = wr_len * 8; in init_wr_tab()
412 if (wrs_needed > 1 && len + sizeof(struct tx_data_wr) <= wrlen) in push_tx_frames()
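
In cxgb3i the variable is a one-off: init_wr_tab() converts the adapter's maximum work-request size (wr_len, apparently reported in 8-byte units) into bytes, and push_tx_frames() compares pending payload plus the TX_DATA header against it. A small, hedged sketch of that comparison, with a placeholder tx_data_wr size and an example limit:

#include <stdbool.h>
#include <stdio.h>

/* placeholder for struct tx_data_wr from the cxgb3 headers */
struct tx_data_wr_stub { unsigned char b[16]; };

/* maximum work-request size in bytes, derived once at init time */
static unsigned int wrlen;

static void init_wr_tab_sketch(unsigned int wr_len)
{
	/* the hit above converts the adapter-reported wr_len to bytes */
	wrlen = wr_len * 8;
}

int main(void)
{
	unsigned int len = 900;		/* pending payload bytes (example) */

	init_wr_tab_sketch(128);	/* e.g. 128 units -> 1024 bytes */

	/* the comparison push_tx_frames() makes when deciding how many
	 * work requests the pending data needs */
	bool fits = len + sizeof(struct tx_data_wr_stub) <= wrlen;

	printf("wrlen=%u fits_in_one_wr=%d\n", wrlen, fits);
	return 0;
}
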
/kernel/linux/linux-5.10/drivers/crypto/chelsio/
chcr_algo.c
2409 int aadmax, int wrlen, in chcr_aead_need_fallback()
2417 (wrlen > SGE_MAX_WR_LEN)) in chcr_aead_need_fallback()
2408 chcr_aead_need_fallback(struct aead_request *req, int dst_nents, int aadmax, int wrlen, unsigned short op_type) chcr_aead_need_fallback() argument
/kernel/linux/linux-6.6/drivers/crypto/chelsio/
chcr_algo.c
2404 int aadmax, int wrlen, in chcr_aead_need_fallback()
2412 (wrlen > SGE_MAX_WR_LEN)) in chcr_aead_need_fallback()
2403 chcr_aead_need_fallback(struct aead_request *req, int dst_nents, int aadmax, int wrlen, unsigned short op_type) chcr_aead_need_fallback() argument
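
In the crypto driver wrlen acts as a guard: if the assembled AEAD work request would exceed what a single SGE work request can carry (SGE_MAX_WR_LEN), chcr_aead_need_fallback() routes the operation to a software fallback. Below is a trimmed-down sketch of just that size check; the limit value is only an illustrative stand-in, and the real function also checks scatterlist counts, AAD size and operation type.

#include <stdbool.h>
#include <stdio.h>

/* illustrative stand-in; the real SGE_MAX_WR_LEN comes from the cxgb4 headers */
#define SGE_MAX_WR_LEN_STUB 512

/* simplified: chcr_aead_need_fallback() also checks scatterlist counts,
 * AAD size and the operation type */
static bool wr_exceeds_sge_limit(int wrlen)
{
	return wrlen > SGE_MAX_WR_LEN_STUB;
}

int main(void)
{
	printf("need_fallback=%d\n", wr_exceeds_sge_limit(600));
	return 0;
}
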

Completed in 54 milliseconds