/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/engine/sec2/ |
H A D | ga102.c |
   43  for (i = 0; i < ARRAY_SIZE(msg.queue_info); i++) {  in ga102_sec2_initmsg()
   44  if (msg.queue_info[i].id == NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ) {  in ga102_sec2_initmsg()
   45  nvkm_falcon_msgq_init(sec2->msgq, msg.queue_info[i].index,  in ga102_sec2_initmsg()
   46  msg.queue_info[i].offset,  in ga102_sec2_initmsg()
   47  msg.queue_info[i].size);  in ga102_sec2_initmsg()
   49  nvkm_falcon_cmdq_init(sec2->cmdq, msg.queue_info[i].index,  in ga102_sec2_initmsg()
   50  msg.queue_info[i].offset,  in ga102_sec2_initmsg()
   51  msg.queue_info[i].size);  in ga102_sec2_initmsg()
|
H A D | gp102.c |
  135  for (i = 0; i < ARRAY_SIZE(msg.queue_info); i++) {  in gp102_sec2_initmsg()
  136  if (msg.queue_info[i].id == NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ) {  in gp102_sec2_initmsg()
  138  msg.queue_info[i].index,  in gp102_sec2_initmsg()
  139  msg.queue_info[i].offset,  in gp102_sec2_initmsg()
  140  msg.queue_info[i].size);  in gp102_sec2_initmsg()
  143  msg.queue_info[i].index,  in gp102_sec2_initmsg()
  144  msg.queue_info[i].offset,  in gp102_sec2_initmsg()
  145  msg.queue_info[i].size);  in gp102_sec2_initmsg()
|
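The SEC2 results above all show the same pattern: the falcon's init message carries a small queue_info[] table, and the driver walks it, routing the entry tagged NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ to nvkm_falcon_msgq_init() and the others to nvkm_falcon_cmdq_init() (the gm20b PMU results further down do the same job with fixed indices 0, 1 and 4 instead of searching by id). A minimal user-space sketch of that dispatch follows; the struct layout, field widths and the id constant values are assumptions, and the init functions are stand-in stubs, not the nouveau API.

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

/* Stand-ins for the nvfw init-message queue descriptor (assumed layout). */
#define NV_SEC2_INIT_MSG_QUEUE_ID_CMDQ 0
#define NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ 1

struct queue_info {
	uint32_t offset;
	uint16_t size;
	uint8_t  index;
	uint8_t  id;
};

/* Stubs standing in for nvkm_falcon_msgq_init()/nvkm_falcon_cmdq_init(). */
static void msgq_init(uint8_t index, uint32_t offset, uint16_t size)
{
	printf("msgq: index=%u offset=0x%x size=%u\n",
	       (unsigned)index, (unsigned)offset, (unsigned)size);
}

static void cmdq_init(uint8_t index, uint32_t offset, uint16_t size)
{
	printf("cmdq: index=%u offset=0x%x size=%u\n",
	       (unsigned)index, (unsigned)offset, (unsigned)size);
}

int main(void)
{
	/* An init message advertising one command queue and one message queue. */
	struct queue_info msg_queue_info[2] = {
		{ .offset = 0x1000, .size = 0x100, .index = 0, .id = NV_SEC2_INIT_MSG_QUEUE_ID_CMDQ },
		{ .offset = 0x1100, .size = 0x100, .index = 1, .id = NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ },
	};

	/* Same dispatch shape as ga102_sec2_initmsg()/gp102_sec2_initmsg():
	 * the MSGQ entry configures the message queue, the rest the command queue. */
	for (size_t i = 0; i < ARRAY_SIZE(msg_queue_info); i++) {
		if (msg_queue_info[i].id == NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ)
			msgq_init(msg_queue_info[i].index, msg_queue_info[i].offset, msg_queue_info[i].size);
		else
			cmdq_init(msg_queue_info[i].index, msg_queue_info[i].offset, msg_queue_info[i].size);
	}
	return 0;
}
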
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/ |
H A D | gm20b.c |
  183  nvkm_falcon_cmdq_init(pmu->hpq, msg.queue_info[0].index,  in gm20b_pmu_initmsg()
  184  msg.queue_info[0].offset,  in gm20b_pmu_initmsg()
  185  msg.queue_info[0].size);  in gm20b_pmu_initmsg()
  186  nvkm_falcon_cmdq_init(pmu->lpq, msg.queue_info[1].index,  in gm20b_pmu_initmsg()
  187  msg.queue_info[1].offset,  in gm20b_pmu_initmsg()
  188  msg.queue_info[1].size);  in gm20b_pmu_initmsg()
  189  nvkm_falcon_msgq_init(pmu->msgq, msg.queue_info[4].index,  in gm20b_pmu_initmsg()
  190  msg.queue_info[4].offset,  in gm20b_pmu_initmsg()
  191  msg.queue_info[4].size);  in gm20b_pmu_initmsg()
|
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/ |
H A D | gm20b.c |
  172  nvkm_falcon_cmdq_init(pmu->hpq, msg.queue_info[0].index,  in gm20b_pmu_initmsg()
  173  msg.queue_info[0].offset,  in gm20b_pmu_initmsg()
  174  msg.queue_info[0].size);  in gm20b_pmu_initmsg()
  175  nvkm_falcon_cmdq_init(pmu->lpq, msg.queue_info[1].index,  in gm20b_pmu_initmsg()
  176  msg.queue_info[1].offset,  in gm20b_pmu_initmsg()
  177  msg.queue_info[1].size);  in gm20b_pmu_initmsg()
  178  nvkm_falcon_msgq_init(pmu->msgq, msg.queue_info[4].index,  in gm20b_pmu_initmsg()
  179  msg.queue_info[4].offset,  in gm20b_pmu_initmsg()
  180  msg.queue_info[4].size);  in gm20b_pmu_initmsg()
|
/kernel/linux/linux-6.6/drivers/net/wireless/intel/iwlwifi/mvm/ |
H A D | sta.c |
  316  sta_id = mvm->queue_info[queue].ra_sta_id;  in iwl_mvm_invalidate_sta_queue()
  390  if (WARN_ON(mvm->queue_info[queue].tid_bitmap == 0))  in iwl_mvm_disable_txq()
  393  mvm->queue_info[queue].tid_bitmap &= ~BIT(tid);  in iwl_mvm_disable_txq()
  395  cmd.action = mvm->queue_info[queue].tid_bitmap ?  in iwl_mvm_disable_txq()
  398  mvm->queue_info[queue].status = IWL_MVM_QUEUE_FREE;  in iwl_mvm_disable_txq()
  403  mvm->queue_info[queue].tid_bitmap);  in iwl_mvm_disable_txq()
  409  cmd.sta_id = mvm->queue_info[queue].ra_sta_id;  in iwl_mvm_disable_txq()
  410  cmd.tid = mvm->queue_info[queue].txq_tid;  in iwl_mvm_disable_txq()
  413  WARN(mvm->queue_info[queue].tid_bitmap,  in iwl_mvm_disable_txq()
  415  queue, mvm->queue_info[queu  in iwl_mvm_disable_txq()
[all...] |
H A D | tx.c |
 1074  unsigned long queue_tid_bitmap = mvm->queue_info[txq_id].tid_bitmap;  in iwl_mvm_txq_should_update()
 1082  if (time_before(mvm->queue_info[txq_id].last_frame_time[tid] +  in iwl_mvm_txq_should_update()
 1226  mvm->queue_info[txq_id].last_frame_time[tid] = jiffies;  in iwl_mvm_tx_mpdu()
 1238  if (unlikely(mvm->queue_info[txq_id].status ==  in iwl_mvm_tx_mpdu()
 2333  struct iwl_flush_queue_info *queue_info = &rsp->queues[i];  in iwl_mvm_flush_sta_tids() local
 2334  int tid = le16_to_cpu(queue_info->tid);  in iwl_mvm_flush_sta_tids()
 2335  int read_before = le16_to_cpu(queue_info->read_before_flush);  in iwl_mvm_flush_sta_tids()
 2336  int read_after = le16_to_cpu(queue_info->read_after_flush);  in iwl_mvm_flush_sta_tids()
 2337  int queue_num = le16_to_cpu(queue_info->queue_num);  in iwl_mvm_flush_sta_tids()
|
/kernel/linux/linux-5.10/drivers/net/wireless/intel/iwlwifi/mvm/ |
H A D | sta.c |
  329  sta_id = mvm->queue_info[queue].ra_sta_id;  in iwl_mvm_invalidate_sta_queue()
  381  if (WARN_ON(mvm->queue_info[queue].tid_bitmap == 0))  in iwl_mvm_disable_txq()
  384  mvm->queue_info[queue].tid_bitmap &= ~BIT(tid);  in iwl_mvm_disable_txq()
  386  cmd.action = mvm->queue_info[queue].tid_bitmap ?  in iwl_mvm_disable_txq()
  389  mvm->queue_info[queue].status = IWL_MVM_QUEUE_FREE;  in iwl_mvm_disable_txq()
  394  mvm->queue_info[queue].tid_bitmap);  in iwl_mvm_disable_txq()
  400  cmd.sta_id = mvm->queue_info[queue].ra_sta_id;  in iwl_mvm_disable_txq()
  401  cmd.tid = mvm->queue_info[queue].txq_tid;  in iwl_mvm_disable_txq()
  404  WARN(mvm->queue_info[queue].tid_bitmap,  in iwl_mvm_disable_txq()
  406  queue, mvm->queue_info[queu  in iwl_mvm_disable_txq()
[all...] |
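Both iwlwifi trees above use mvm->queue_info[] as per-TX-queue bookkeeping: disabling a TID clears its bit in tid_bitmap, and only when the bitmap goes empty is the queue marked IWL_MVM_QUEUE_FREE. A small sketch of that bitmap bookkeeping follows, with stand-in types rather than the iwlwifi structures.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define BIT(n) (1u << (n))

/* Stand-in for the per-queue state iwlwifi keeps in mvm->queue_info[]. */
enum queue_status { QUEUE_FREE, QUEUE_READY };

struct queue_state {
	uint32_t tid_bitmap;          /* which TIDs currently use this queue */
	enum queue_status status;
	uint8_t  ra_sta_id;           /* station the queue is bound to */
};

/* Remove one TID from a queue; free the queue once no TID is left,
 * mirroring the tid_bitmap &= ~BIT(tid) / status = FREE sequence above. */
static bool disable_tid(struct queue_state *q, int tid)
{
	if (q->tid_bitmap == 0)
		return false;                 /* nothing mapped: caller bug */

	q->tid_bitmap &= ~BIT(tid);
	if (!q->tid_bitmap)
		q->status = QUEUE_FREE;       /* last user gone, queue can be reused */
	return true;
}

int main(void)
{
	struct queue_state q = { .tid_bitmap = BIT(0) | BIT(5), .status = QUEUE_READY, .ra_sta_id = 3 };

	disable_tid(&q, 0);
	printf("after tid0: bitmap=0x%x status=%d\n", (unsigned)q.tid_bitmap, (int)q.status);
	disable_tid(&q, 5);
	printf("after tid5: bitmap=0x%x status=%d\n", (unsigned)q.tid_bitmap, (int)q.status);
	return 0;
}
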
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/engine/sec2/ |
H A D | gp102.c |
  146  for (i = 0; i < ARRAY_SIZE(msg.queue_info); i++) {  in gp102_sec2_initmsg()
  147  if (msg.queue_info[i].id == NV_SEC2_INIT_MSG_QUEUE_ID_MSGQ) {  in gp102_sec2_initmsg()
  149  msg.queue_info[i].index,  in gp102_sec2_initmsg()
  150  msg.queue_info[i].offset,  in gp102_sec2_initmsg()
  151  msg.queue_info[i].size);  in gp102_sec2_initmsg()
  154  msg.queue_info[i].index,  in gp102_sec2_initmsg()
  155  msg.queue_info[i].offset,  in gp102_sec2_initmsg()
  156  msg.queue_info[i].size);  in gp102_sec2_initmsg()
|
/kernel/linux/linux-5.10/drivers/net/ethernet/huawei/hinic/ |
H A D | hinic_tx.c |
  272  static int offload_tso(struct hinic_sq_task *task, u32 *queue_info,  in offload_tso() argument
  346  hinic_set_tso_inner_l4(task, queue_info, l4_offload, l4_len, offset,  in offload_tso()
  352  static int offload_csum(struct hinic_sq_task *task, u32 *queue_info,  in offload_csum() argument
  432  hinic_set_cs_inner_l4(task, queue_info, l4_offload, l4_len, offset);  in offload_csum()
  437  static void offload_vlan(struct hinic_sq_task *task, u32 *queue_info,  in offload_vlan() argument
  443  *queue_info |= HINIC_SQ_CTRL_SET(vlan_pri, QUEUE_INFO_PRI);  in offload_vlan()
  447  u32 *queue_info)  in hinic_tx_offload()
  453  enabled = offload_tso(task, queue_info, skb);  in hinic_tx_offload()
  457  enabled = offload_csum(task, queue_info, skb);  in hinic_tx_offload()
  466  offload_vlan(task, queue_info, vlan_ta  in hinic_tx_offload()
  446  hinic_tx_offload(struct sk_buff *skb, struct hinic_sq_task *task, u32 *queue_info)  hinic_tx_offload() argument
[all...] |
H A D | hinic_hw_qp.c |
  491  ctrl->queue_info = HINIC_SQ_CTRL_SET(HINIC_MSS_DEFAULT,  in sq_prepare_ctrl()
  538  void hinic_set_cs_inner_l4(struct hinic_sq_task *task, u32 *queue_info,  in hinic_set_cs_inner_l4() argument
  554  *queue_info |= HINIC_SQ_CTRL_SET(offset, QUEUE_INFO_PLDOFF) |  in hinic_set_cs_inner_l4()
  558  *queue_info = HINIC_SQ_CTRL_CLEAR(*queue_info, QUEUE_INFO_MSS);  in hinic_set_cs_inner_l4()
  559  *queue_info |= HINIC_SQ_CTRL_SET(mss, QUEUE_INFO_MSS);  in hinic_set_cs_inner_l4()
  562  void hinic_set_tso_inner_l4(struct hinic_sq_task *task, u32 *queue_info,  in hinic_set_tso_inner_l4() argument
  579  *queue_info |= HINIC_SQ_CTRL_SET(offset, QUEUE_INFO_PLDOFF) |  in hinic_set_tso_inner_l4()
  585  *queue_info = HINIC_SQ_CTRL_CLEAR(*queue_info, QUEUE_INFO_MS  in hinic_set_tso_inner_l4()
[all...]
H A D | hinic_hw_qp.h |
  168  u32 *queue_info,
  173  u32 *queue_info,
|
H A D | hinic_hw_wqe.h | 390 u32 queue_info; member
|
/kernel/linux/linux-6.6/drivers/net/ethernet/huawei/hinic/ |
H A D | hinic_tx.c |
  273  static int offload_tso(struct hinic_sq_task *task, u32 *queue_info,  in offload_tso() argument
  347  hinic_set_tso_inner_l4(task, queue_info, l4_offload, l4_len, offset,  in offload_tso()
  353  static int offload_csum(struct hinic_sq_task *task, u32 *queue_info,  in offload_csum() argument
  434  hinic_set_cs_inner_l4(task, queue_info, l4_offload, l4_len, offset);  in offload_csum()
  439  static void offload_vlan(struct hinic_sq_task *task, u32 *queue_info,  in offload_vlan() argument
  445  *queue_info |= HINIC_SQ_CTRL_SET(vlan_pri, QUEUE_INFO_PRI);  in offload_vlan()
  449  u32 *queue_info)  in hinic_tx_offload()
  455  enabled = offload_tso(task, queue_info, skb);  in hinic_tx_offload()
  459  enabled = offload_csum(task, queue_info, skb);  in hinic_tx_offload()
  468  offload_vlan(task, queue_info, vlan_ta  in hinic_tx_offload()
  448  hinic_tx_offload(struct sk_buff *skb, struct hinic_sq_task *task, u32 *queue_info)  hinic_tx_offload() argument
[all...] |
H A D | hinic_hw_qp.c |
  489  ctrl->queue_info = HINIC_SQ_CTRL_SET(HINIC_MSS_DEFAULT,  in sq_prepare_ctrl()
  536  void hinic_set_cs_inner_l4(struct hinic_sq_task *task, u32 *queue_info,  in hinic_set_cs_inner_l4() argument
  552  *queue_info |= HINIC_SQ_CTRL_SET(offset, QUEUE_INFO_PLDOFF) |  in hinic_set_cs_inner_l4()
  556  *queue_info = HINIC_SQ_CTRL_CLEAR(*queue_info, QUEUE_INFO_MSS);  in hinic_set_cs_inner_l4()
  557  *queue_info |= HINIC_SQ_CTRL_SET(mss, QUEUE_INFO_MSS);  in hinic_set_cs_inner_l4()
  560  void hinic_set_tso_inner_l4(struct hinic_sq_task *task, u32 *queue_info,  in hinic_set_tso_inner_l4() argument
  577  *queue_info |= HINIC_SQ_CTRL_SET(offset, QUEUE_INFO_PLDOFF) |  in hinic_set_tso_inner_l4()
  583  *queue_info = HINIC_SQ_CTRL_CLEAR(*queue_info, QUEUE_INFO_MS  in hinic_set_tso_inner_l4()
[all...]
H A D | hinic_hw_qp.h |
  168  u32 *queue_info,
  173  u32 *queue_info,
|
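The hinic hits show queue_info being built up as a packed 32-bit control word: HINIC_SQ_CTRL_SET() ors a field (MSS, payload offset, VLAN priority) into place, and HINIC_SQ_CTRL_CLEAR() masks it back out before the field is rewritten. Below is a generic shift-and-mask sketch of that SET/CLEAR idiom; the field positions and widths are made up for illustration and are not the hinic register layout.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical field layout, for the sketch only: bits [13:0] MSS, [21:14] payload offset. */
#define QUEUE_INFO_MSS_SHIFT      0
#define QUEUE_INFO_MSS_MASK       0x3FFFu
#define QUEUE_INFO_PLDOFF_SHIFT   14
#define QUEUE_INFO_PLDOFF_MASK    0xFFu

/* Same shape as HINIC_SQ_CTRL_SET()/HINIC_SQ_CTRL_CLEAR(): place a value into a
 * field, or strip a field out of an already-built control word. */
#define SQ_CTRL_SET(val, member) \
	(((uint32_t)(val) & QUEUE_INFO_##member##_MASK) << QUEUE_INFO_##member##_SHIFT)
#define SQ_CTRL_CLEAR(word, member) \
	((word) & ~(QUEUE_INFO_##member##_MASK << QUEUE_INFO_##member##_SHIFT))

int main(void)
{
	uint32_t queue_info = 0;

	/* Initial control word with a default MSS, as in sq_prepare_ctrl(). */
	queue_info |= SQ_CTRL_SET(1460, MSS);

	/* Offload path: add the payload offset, then replace the MSS,
	 * mirroring the hinic_set_cs_inner_l4()/hinic_set_tso_inner_l4() sequence. */
	queue_info |= SQ_CTRL_SET(20, PLDOFF);
	queue_info = SQ_CTRL_CLEAR(queue_info, MSS);
	queue_info |= SQ_CTRL_SET(1400, MSS);

	printf("queue_info = 0x%08x\n", (unsigned)queue_info);
	return 0;
}
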
/kernel/linux/linux-5.10/drivers/net/wireless/ath/ath5k/ |
H A D | qcu.c |
  135  * @queue_info: The &struct ath5k_txq_info to fill
  139  struct ath5k_txq_info *queue_info)  in ath5k_hw_get_tx_queueprops()
  141  memcpy(queue_info, &ah->ah_txq[queue], sizeof(struct ath5k_txq_info));  in ath5k_hw_get_tx_queueprops()
  198  * @queue_info: The &struct ath5k_txq_info to use
  204  struct ath5k_txq_info *queue_info)  in ath5k_hw_setup_tx_queue()
  228  queue = queue_info->tqi_subtype;  in ath5k_hw_setup_tx_queue()
  250  if (queue_info != NULL) {  in ath5k_hw_setup_tx_queue()
  251  queue_info->tqi_type = queue_type;  in ath5k_hw_setup_tx_queue()
  252  ret = ath5k_hw_set_tx_queueprops(ah, queue, queue_info);  in ath5k_hw_setup_tx_queue()
  138  ath5k_hw_get_tx_queueprops(struct ath5k_hw *ah, int queue, struct ath5k_txq_info *queue_info)  ath5k_hw_get_tx_queueprops() argument
  203  ath5k_hw_setup_tx_queue(struct ath5k_hw *ah, enum ath5k_tx_queue queue_type, struct ath5k_txq_info *queue_info)  ath5k_hw_setup_tx_queue() argument
|
H A D | ath5k.h |
 1563  struct ath5k_txq_info *queue_info);
 1565  const struct ath5k_txq_info *queue_info);
 1568  struct ath5k_txq_info *queue_info);
|
/kernel/linux/linux-6.6/drivers/net/wireless/ath/ath5k/ |
H A D | qcu.c |
  135  * @queue_info: The &struct ath5k_txq_info to fill
  139  struct ath5k_txq_info *queue_info)  in ath5k_hw_get_tx_queueprops()
  141  memcpy(queue_info, &ah->ah_txq[queue], sizeof(struct ath5k_txq_info));  in ath5k_hw_get_tx_queueprops()
  198  * @queue_info: The &struct ath5k_txq_info to use
  204  struct ath5k_txq_info *queue_info)  in ath5k_hw_setup_tx_queue()
  228  queue = queue_info->tqi_subtype;  in ath5k_hw_setup_tx_queue()
  250  if (queue_info != NULL) {  in ath5k_hw_setup_tx_queue()
  251  queue_info->tqi_type = queue_type;  in ath5k_hw_setup_tx_queue()
  252  ret = ath5k_hw_set_tx_queueprops(ah, queue, queue_info);  in ath5k_hw_setup_tx_queue()
  138  ath5k_hw_get_tx_queueprops(struct ath5k_hw *ah, int queue, struct ath5k_txq_info *queue_info)  ath5k_hw_get_tx_queueprops() argument
  203  ath5k_hw_setup_tx_queue(struct ath5k_hw *ah, enum ath5k_tx_queue queue_type, struct ath5k_txq_info *queue_info)  ath5k_hw_setup_tx_queue() argument
|
H A D | ath5k.h |
 1559  struct ath5k_txq_info *queue_info);
 1561  const struct ath5k_txq_info *queue_info);
 1564  struct ath5k_txq_info *queue_info);
|
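In ath5k the queue properties live in a cached per-device table: ath5k_hw_get_tx_queueprops() is essentially a memcpy out of ah->ah_txq[], and ath5k_hw_setup_tx_queue() picks a queue number, stamps the requested type into the caller's queue_info and writes it back through ath5k_hw_set_tx_queueprops(). A compact user-space sketch of that cached-properties pattern follows, using stand-in structs and a simplified queue-selection rule, not the ath5k definitions.

#include <string.h>
#include <stdio.h>

#define NUM_TX_QUEUES 10

/* Stand-in for struct ath5k_txq_info: just enough fields for the sketch. */
struct txq_info {
	int type;
	int subtype;
	int cw_min;
};

static struct txq_info txq_cache[NUM_TX_QUEUES];   /* plays the role of ah->ah_txq[] */

/* Read back the cached properties of a queue, like ath5k_hw_get_tx_queueprops(). */
static void get_tx_queueprops(int queue, struct txq_info *queue_info)
{
	memcpy(queue_info, &txq_cache[queue], sizeof(*queue_info));
}

/* Update the cache for a queue, like ath5k_hw_set_tx_queueprops() (no validation here). */
static int set_tx_queueprops(int queue, const struct txq_info *queue_info)
{
	txq_cache[queue] = *queue_info;
	return 0;
}

/* Pick a queue, record the type and store the caller's properties,
 * loosely following ath5k_hw_setup_tx_queue(). */
static int setup_tx_queue(int queue_type, struct txq_info *queue_info)
{
	int queue = queue_info ? queue_info->subtype : 0;   /* data queues select by subtype */

	if (queue_info != NULL) {
		queue_info->type = queue_type;
		if (set_tx_queueprops(queue, queue_info))
			return -1;
	}
	return queue;
}

int main(void)
{
	struct txq_info qi = { .subtype = 2, .cw_min = 15 };
	struct txq_info check;

	int q = setup_tx_queue(/*queue_type=*/1, &qi);
	get_tx_queueprops(q, &check);
	printf("queue %d: type=%d cw_min=%d\n", q, check.type, check.cw_min);
	return 0;
}
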
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/include/nvfw/ |
H A D | sec2.h |
   31  } queue_info[2];  member
   52  } queue_info[2];  member
|
H A D | pmu.h | 40 } queue_info[5]; member
|
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/include/nvfw/ |
H A D | sec2.h | 30 } queue_info[2]; member
|
H A D | pmu.h | 40 } queue_info[5]; member
|
/kernel/linux/linux-6.6/drivers/net/ethernet/hisilicon/hns3/hns3pf/ |
H A D | hclge_mbx.c |
  516  struct hclge_mbx_vf_queue_info *queue_info;  in hclge_get_vf_queue_info() local
  520  queue_info = (struct hclge_mbx_vf_queue_info *)resp_msg->data;  in hclge_get_vf_queue_info()
  521  queue_info->num_tqps = cpu_to_le16(vport->alloc_tqps);  in hclge_get_vf_queue_info()
  522  queue_info->rss_size = cpu_to_le16(vport->nic.kinfo.rss_size);  in hclge_get_vf_queue_info()
  523  queue_info->rx_buf_len = cpu_to_le16(hdev->rx_buf_len);  in hclge_get_vf_queue_info()
|
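The hclge hit shows the PF answering a VF mailbox query by casting the response data area to struct hclge_mbx_vf_queue_info and filling it with little-endian fields (num_tqps, rss_size, rx_buf_len). Below is a minimal sketch of that cast-and-fill pattern over a raw response buffer; the struct is a stand-in, not the hclge wire format, and glibc's htole16() from <endian.h> is assumed in place of the kernel's cpu_to_le16().

#include <stdint.h>
#include <stdio.h>
#include <endian.h>     /* htole16(); assumed available (glibc) */

/* Stand-in for struct hclge_mbx_vf_queue_info: three little-endian u16 fields. */
struct vf_queue_info {
	uint16_t num_tqps;
	uint16_t rss_size;
	uint16_t rx_buf_len;
} __attribute__((packed));

/* Fill a raw mailbox response buffer, mirroring hclge_get_vf_queue_info():
 * overlay the wire struct on the data area and store CPU values as little endian. */
static void fill_vf_queue_info(uint8_t *resp_data, uint16_t tqps, uint16_t rss, uint16_t buf_len)
{
	struct vf_queue_info *queue_info = (struct vf_queue_info *)resp_data;

	queue_info->num_tqps   = htole16(tqps);
	queue_info->rss_size   = htole16(rss);
	queue_info->rx_buf_len = htole16(buf_len);
}

int main(void)
{
	uint8_t resp[16] = { 0 };

	fill_vf_queue_info(resp, 8, 4, 2048);
	printf("resp bytes: %02x %02x %02x %02x %02x %02x\n",
	       resp[0], resp[1], resp[2], resp[3], resp[4], resp[5]);
	return 0;
}
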
/kernel/linux/linux-6.6/drivers/net/ethernet/intel/ice/ |
H A D | ice_lag.c |
  343  qbuf->queue_info[count].q_handle = cpu_to_le16(qid);  in ice_lag_qbuf_recfg()
  344  qbuf->queue_info[count].tc = tc;  in ice_lag_qbuf_recfg()
  345  qbuf->queue_info[count].q_teid = cpu_to_le32(q_ctx->q_teid);  in ice_lag_qbuf_recfg()
  477  qbuf_size = struct_size(qbuf, queue_info, numq);  in ice_lag_move_vf_node_tc()
  836  qbuf_size = struct_size(qbuf, queue_info, numq);  in ice_lag_reclaim_vf_tc()
 1864  qbuf_size = struct_size(qbuf, queue_info, numq);  in ice_lag_move_vf_nodes_tc_sync()
|
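The ice_lag hits use the kernel's struct_size() helper: queue_info is a flexible array member at the end of the queue buffer, so the allocation size is computed as the fixed header plus numq trailing elements. A user-space sketch of that sizing idiom follows, with a local struct_size-style macro (the real kernel macro in <linux/overflow.h> also guards against overflow) and an assumed, simplified buffer layout.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for the kernel's struct_size(p, member, count);
 * sizeof() does not evaluate its operand, so using an unassigned pointer is fine. */
#define struct_size(p, member, count) \
	(sizeof(*(p)) + sizeof((p)->member[0]) * (size_t)(count))

/* Stand-in per-queue entry and buffer: fixed header followed by a flexible
 * queue_info[] array, shaped loosely like the ice LAG reconfiguration buffer. */
struct queue_entry {
	uint16_t q_handle;
	uint8_t  tc;
	uint32_t q_teid;
};

struct queue_buf {
	uint32_t src_parent_teid;
	uint32_t dst_parent_teid;
	uint16_t num_qs;
	struct queue_entry queue_info[];   /* flexible array member */
};

int main(void)
{
	uint16_t numq = 4;
	struct queue_buf *qbuf;
	size_t qbuf_size = struct_size(qbuf, queue_info, numq);

	qbuf = calloc(1, qbuf_size);
	if (!qbuf)
		return 1;

	qbuf->num_qs = numq;
	for (uint16_t i = 0; i < numq; i++) {
		qbuf->queue_info[i].q_handle = i;          /* one entry per queue being moved */
		qbuf->queue_info[i].tc = 0;
		qbuf->queue_info[i].q_teid = 0x1000u + i;
	}

	printf("allocated %zu bytes for %u queues\n", qbuf_size, (unsigned)numq);
	free(qbuf);
	return 0;
}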