
Searched refs:eqe (Results 1 - 25 of 100) sorted by relevance


/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx4/
eq.c
123 struct mlx4_eqe *eqe = get_eqe(eq, eq->cons_index, eqe_factor, size); in next_eqe_sw() local
124 return !!(eqe->owner & 0x80) ^ !!(eq->cons_index & eq->nent) ? NULL : eqe; in next_eqe_sw()
129 struct mlx4_eqe *eqe = in next_slave_event_eqe() local
131 return (!!(eqe->owner & 0x80) ^ in next_slave_event_eqe()
133 eqe : NULL; in next_slave_event_eqe()
146 struct mlx4_eqe *eqe; in mlx4_gen_slave_eqe() local
150 for (eqe = next_slave_event_eqe(slave_eq); eqe; in mlx4_gen_slave_eqe()
151 eqe in mlx4_gen_slave_eqe()
194 slave_event(struct mlx4_dev *dev, u8 slave, struct mlx4_eqe *eqe) slave_event() argument
223 mlx4_slave_event(struct mlx4_dev *dev, int slave, struct mlx4_eqe *eqe) mlx4_slave_event() argument
255 struct mlx4_eqe eqe; mlx4_gen_pkey_eqe() local
275 struct mlx4_eqe eqe; mlx4_gen_guid_change_eqe() local
293 struct mlx4_eqe eqe; mlx4_gen_port_state_change_eqe() local
434 struct mlx4_eqe eqe; mlx4_gen_slaves_port_mgt_ev() local
497 struct mlx4_eqe *eqe; mlx4_eq_int() local
[all...]
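The next_eqe_sw() hits above show mlx4's ownership test: the high bit of the EQE owner byte is XORed against the wrap parity of the consumer index, so an entry is handed to software only once hardware has written it on the current pass around the ring. Below is a minimal userspace sketch of that check; all demo_* names are illustrative, not the driver's real structures.

```c
/*
 * Minimal userspace model of the mlx4-style ownership test seen in the
 * next_eqe_sw() hits above.  demo_* names are illustrative only.
 */
#include <stdint.h>
#include <stdio.h>

struct demo_eqe {
	/* real EQEs carry the event payload first; only owner matters here */
	uint8_t owner;   /* bit 7 toggled by hardware on every queue wrap */
};

struct demo_eq {
	struct demo_eqe *entries;
	uint32_t nent;        /* power of two */
	uint32_t cons_index;  /* free-running consumer index */
};

/* Return the next software-owned entry, or NULL if HW still owns it. */
static struct demo_eqe *demo_next_eqe_sw(struct demo_eq *eq)
{
	struct demo_eqe *eqe = &eq->entries[eq->cons_index & (eq->nent - 1)];

	/* Entry is valid only if its owner bit matches the wrap parity. */
	return !!(eqe->owner & 0x80) ^ !!(eq->cons_index & eq->nent) ? NULL : eqe;
}

int main(void)
{
	struct demo_eqe ring[4] = {{ .owner = 0 }};   /* pass 0: HW leaves bit clear */
	struct demo_eq eq = { .entries = ring, .nent = 4, .cons_index = 0 };

	printf("entry 0 ready on first pass: %s\n",
	       demo_next_eqe_sw(&eq) ? "yes" : "no");

	eq.cons_index = 4;   /* one full wrap later the expected parity flips */
	printf("same entry after a wrap:    %s\n",
	       demo_next_eqe_sw(&eq) ? "yes" : "no");
	return 0;
}
```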
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx4/
eq.c
123 struct mlx4_eqe *eqe = get_eqe(eq, eq->cons_index, eqe_factor, size); in next_eqe_sw() local
124 return !!(eqe->owner & 0x80) ^ !!(eq->cons_index & eq->nent) ? NULL : eqe; in next_eqe_sw()
129 struct mlx4_eqe *eqe = in next_slave_event_eqe() local
131 return (!!(eqe->owner & 0x80) ^ in next_slave_event_eqe()
133 eqe : NULL; in next_slave_event_eqe()
146 struct mlx4_eqe *eqe; in mlx4_gen_slave_eqe() local
150 for (eqe = next_slave_event_eqe(slave_eq); eqe; in mlx4_gen_slave_eqe()
151 eqe in mlx4_gen_slave_eqe()
194 slave_event(struct mlx4_dev *dev, u8 slave, struct mlx4_eqe *eqe) slave_event() argument
223 mlx4_slave_event(struct mlx4_dev *dev, int slave, struct mlx4_eqe *eqe) mlx4_slave_event() argument
255 struct mlx4_eqe eqe; mlx4_gen_pkey_eqe() local
275 struct mlx4_eqe eqe; mlx4_gen_guid_change_eqe() local
293 struct mlx4_eqe eqe; mlx4_gen_port_state_change_eqe() local
434 struct mlx4_eqe eqe; mlx4_gen_slaves_port_mgt_ev() local
497 struct mlx4_eqe *eqe; mlx4_eq_int() local
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/hw/mthca/
mthca_eq.c
236 struct mthca_eqe *eqe; in next_eqe_sw() local
237 eqe = get_eqe(eq, eq->cons_index); in next_eqe_sw()
238 return (MTHCA_EQ_ENTRY_OWNER_HW & eqe->owner) ? NULL : eqe; in next_eqe_sw()
241 static inline void set_eqe_hw(struct mthca_eqe *eqe) in set_eqe_hw() argument
243 eqe->owner = MTHCA_EQ_ENTRY_OWNER_HW; in set_eqe_hw()
262 struct mthca_eqe *eqe; in mthca_eq_int() local
267 while ((eqe = next_eqe_sw(eq))) { in mthca_eq_int()
274 switch (eqe->type) { in mthca_eq_int()
276 disarm_cqn = be32_to_cpu(eqe in mthca_eq_int()
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mthca/
mthca_eq.c
236 struct mthca_eqe *eqe; in next_eqe_sw() local
237 eqe = get_eqe(eq, eq->cons_index); in next_eqe_sw()
238 return (MTHCA_EQ_ENTRY_OWNER_HW & eqe->owner) ? NULL : eqe; in next_eqe_sw()
241 static inline void set_eqe_hw(struct mthca_eqe *eqe) in set_eqe_hw() argument
243 eqe->owner = MTHCA_EQ_ENTRY_OWNER_HW; in set_eqe_hw()
262 struct mthca_eqe *eqe; in mthca_eq_int() local
267 while ((eqe = next_eqe_sw(eq))) { in mthca_eq_int()
274 switch (eqe->type) { in mthca_eq_int()
276 disarm_cqn = be32_to_cpu(eqe in mthca_eq_int()
[all...]
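The mthca hits show a different ownership scheme: an entry is consumable while the MTHCA_EQ_ENTRY_OWNER_HW flag is clear, and after consuming it software sets the flag again with set_eqe_hw() to hand the slot back to hardware. A sketch of that poll/consume/hand-back loop shape follows; the demo_* names and entry layout are placeholders, not the driver's.

```c
/* Sketch of the mthca-style explicit ownership handoff shown above. */
#include <stdint.h>
#include <stdio.h>

#define DEMO_EQ_ENTRY_OWNER_HW 0x80

struct demo_eqe {
	uint8_t owner;
	uint8_t type;
};

struct demo_eq {
	struct demo_eqe *entries;
	uint32_t nent;           /* power of two */
	uint32_t cons_index;
};

static struct demo_eqe *demo_next_eqe_sw(struct demo_eq *eq)
{
	struct demo_eqe *eqe = &eq->entries[eq->cons_index & (eq->nent - 1)];

	return (DEMO_EQ_ENTRY_OWNER_HW & eqe->owner) ? NULL : eqe;
}

/* Hand the consumed slot back to hardware. */
static void demo_set_eqe_hw(struct demo_eqe *eqe)
{
	eqe->owner = DEMO_EQ_ENTRY_OWNER_HW;
}

int main(void)
{
	struct demo_eqe ring[4] = {
		{ .owner = 0, .type = 1 },            /* written by "HW" */
		{ .owner = DEMO_EQ_ENTRY_OWNER_HW },  /* still HW-owned */
	};
	struct demo_eq eq = { .entries = ring, .nent = 4, .cons_index = 0 };
	struct demo_eqe *eqe;

	/* Same shape as the mthca_eq_int() loop: poll, consume, hand back. */
	while ((eqe = demo_next_eqe_sw(&eq))) {
		printf("consumed event type %u at index %u\n",
		       eqe->type, eq.cons_index);
		demo_set_eqe_hw(eqe);
		++eq.cons_index;
	}
	return 0;
}
```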
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
events.c
136 /* handles all FW events, type == eqe->type */
142 struct mlx5_eqe *eqe = data; in any_notifier() local
144 mlx5_core_dbg(events->dev, "Async eqe type %s, subtype (%d)\n", in any_notifier()
145 eqe_type_str(eqe->type), eqe->sub_type); in any_notifier()
154 struct mlx5_eqe *eqe = data; in temp_warn() local
158 value_lsb = be64_to_cpu(eqe->data.temp_warning.sensor_warning_lsb); in temp_warn()
159 value_msb = be64_to_cpu(eqe->data.temp_warning.sensor_warning_msb); in temp_warn()
216 struct mlx5_eqe *eqe = data; in port_module() local
224 module_event_eqe = &eqe in port_module()
307 struct mlx5_eqe *eqe = data; pcie_core() local
330 struct mlx5_eqe *eqe = data; forward_event() local
[all...]
eq.c
133 struct mlx5_eqe *eqe; in mlx5_eq_comp_int() local
137 eqe = next_eqe_sw(eq); in mlx5_eq_comp_int()
138 if (!eqe) in mlx5_eq_comp_int()
148 /* Assume (eqe->type) is always MLX5_EVENT_TYPE_COMP */ in mlx5_eq_comp_int()
149 cqn = be32_to_cpu(eqe->data.comp.cqn) & 0xffffff; in mlx5_eq_comp_int()
154 cq->comp(cq, eqe); in mlx5_eq_comp_int()
163 } while ((++num_eqes < MLX5_EQ_POLLING_BUDGET) && (eqe = next_eqe_sw(eq))); in mlx5_eq_comp_int()
223 struct mlx5_eqe *eqe; in mlx5_eq_async_int() local
232 eqe = next_eqe_sw(eq); in mlx5_eq_async_int()
233 if (!eqe) in mlx5_eq_async_int()
269 struct mlx5_eqe *eqe; init_eq_buf() local
515 struct mlx5_eqe *eqe; cq_err_event_notifier() local
762 struct mlx5_eqe *eqe; mlx5_eq_get_eqe() local
[all...]
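mlx5_eq_comp_int() above polls completion EQEs under a budget (MLX5_EQ_POLLING_BUDGET), taking the CQ number from the low 24 bits of eqe->data.comp.cqn and dispatching each entry to the CQ's comp() callback. The sketch below models only that loop shape; the budget value and all demo_* helpers are assumptions for illustration, not the driver's code.

```c
/* Simplified model of a budgeted completion-EQ polling loop. */
#include <stdint.h>
#include <stdio.h>

#define DEMO_EQ_POLLING_BUDGET 8   /* illustrative budget value */

struct demo_eqe {
	uint32_t cqn;     /* completion queue number, 24 valid bits */
	int valid;        /* stands in for the owner-bit check */
};

struct demo_eq {
	struct demo_eqe *entries;
	uint32_t nent;
	uint32_t cons_index;
};

static struct demo_eqe *demo_next_eqe_sw(struct demo_eq *eq)
{
	struct demo_eqe *eqe = &eq->entries[eq->cons_index % eq->nent];

	return eqe->valid ? eqe : NULL;
}

static void demo_cq_comp(uint32_t cqn)
{
	printf("completion on CQ %u\n", cqn);
}

static void demo_eq_comp_int(struct demo_eq *eq)
{
	struct demo_eqe *eqe;
	int num_eqes = 0;

	eqe = demo_next_eqe_sw(eq);
	if (!eqe)
		return;

	do {
		uint32_t cqn = eqe->cqn & 0xffffff;  /* low 24 bits, as above */

		demo_cq_comp(cqn);
		eqe->valid = 0;
		++eq->cons_index;
		/* Stop after the budget even if more entries are pending. */
	} while ((++num_eqes < DEMO_EQ_POLLING_BUDGET) &&
		 (eqe = demo_next_eqe_sw(eq)));
}

int main(void)
{
	struct demo_eqe ring[4] = {
		{ .cqn = 1, .valid = 1 },
		{ .cqn = 2, .valid = 1 },
	};
	struct demo_eq eq = { .entries = ring, .nent = 4, .cons_index = 0 };

	demo_eq_comp_int(&eq);
	return 0;
}
```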
fw_reset.c
369 static void mlx5_sync_reset_events_handle(struct mlx5_fw_reset *fw_reset, struct mlx5_eqe *eqe) in mlx5_sync_reset_events_handle() argument
374 sync_fw_update_eqe = &eqe->data.sync_fw_update; in mlx5_sync_reset_events_handle()
392 struct mlx5_eqe *eqe = data; in fw_reset_event_notifier() local
394 switch (eqe->sub_type) { in fw_reset_event_notifier()
399 mlx5_sync_reset_events_handle(fw_reset, eqe); in fw_reset_event_notifier()
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlxsw/
pci_hw.h
231 MLXSW_ITEM32(pci, eqe, event_type, 0x0C, 24, 8);
238 MLXSW_ITEM32(pci, eqe, event_sub_type, 0x0C, 16, 8);
243 MLXSW_ITEM32(pci, eqe, cqn, 0x0C, 8, 7);
248 MLXSW_ITEM32(pci, eqe, owner, 0x0C, 0, 1);
253 MLXSW_ITEM32(pci, eqe, cmd_token, 0x00, 16, 16);
258 MLXSW_ITEM32(pci, eqe, cmd_status, 0x00, 0, 8);
263 MLXSW_ITEM32(pci, eqe, cmd_out_param_h, 0x04, 0, 32);
268 MLXSW_ITEM32(pci, eqe, cmd_out_param_l, 0x08, 0, 32);
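The pci_hw.h hits describe the mlxsw EQE as a set of fields, each given by a byte offset of a 32-bit descriptor word, a bit shift and a bit width, declared through MLXSW_ITEM32(). The sketch below shows what such an accessor boils down to; demo_eqe_get_field() is illustrative, not the macro's actual expansion, and it assumes big-endian descriptor words as the mlxsw item helpers use.

```c
/* Illustrative field extraction from a big-endian EQE descriptor word. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <arpa/inet.h>   /* ntohl(), htonl() */

static uint32_t demo_eqe_get_field(const uint8_t *eqe, unsigned int byte_off,
				   unsigned int shift, unsigned int width)
{
	uint32_t word;

	memcpy(&word, eqe + byte_off, sizeof(word));
	word = ntohl(word);                       /* descriptor words are BE */
	return (word >> shift) & ((1u << width) - 1);
}

int main(void)
{
	uint8_t eqe[16] = { 0 };

	/* Pretend HW wrote event_type=2, cqn=5, owner=1 into word 0x0C. */
	uint32_t w = htonl((2u << 24) | (5u << 8) | 1u);
	memcpy(eqe + 0x0C, &w, sizeof(w));

	/* Offsets/shifts/widths mirror the eqe definitions in pci_hw.h. */
	printf("event_type=%u cqn=%u owner=%u\n",
	       demo_eqe_get_field(eqe, 0x0C, 24, 8),
	       demo_eqe_get_field(eqe, 0x0C, 8, 7),
	       demo_eqe_get_field(eqe, 0x0C, 0, 1));
	return 0;
}
```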
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/
events.c
143 /* handles all FW events, type == eqe->type */
149 struct mlx5_eqe *eqe = data; in any_notifier() local
151 mlx5_core_dbg(events->dev, "Async eqe type %s, subtype (%d)\n", in any_notifier()
152 eqe_type_str(eqe->type), eqe->sub_type); in any_notifier()
161 struct mlx5_eqe *eqe = data; in temp_warn() local
165 value_lsb = be64_to_cpu(eqe->data.temp_warning.sensor_warning_lsb); in temp_warn()
166 value_msb = be64_to_cpu(eqe->data.temp_warning.sensor_warning_msb); in temp_warn()
223 struct mlx5_eqe *eqe = data; in port_module() local
231 module_event_eqe = &eqe in port_module()
314 struct mlx5_eqe *eqe = data; pcie_core() local
337 struct mlx5_eqe *eqe = data; forward_event() local
[all...]
eq.c
115 struct mlx5_eqe *eqe; in mlx5_eq_comp_int() local
119 eqe = next_eqe_sw(eq); in mlx5_eq_comp_int()
120 if (!eqe) in mlx5_eq_comp_int()
130 /* Assume (eqe->type) is always MLX5_EVENT_TYPE_COMP */ in mlx5_eq_comp_int()
131 cqn = be32_to_cpu(eqe->data.comp.cqn) & 0xffffff; in mlx5_eq_comp_int()
136 cq->comp(cq, eqe); in mlx5_eq_comp_int()
145 } while ((++num_eqes < MLX5_EQ_POLLING_BUDGET) && (eqe = next_eqe_sw(eq))); in mlx5_eq_comp_int()
207 struct mlx5_eqe *eqe; in mlx5_eq_async_int() local
218 eqe = next_eqe_sw(eq); in mlx5_eq_async_int()
219 if (!eqe) in mlx5_eq_async_int()
255 struct mlx5_eqe *eqe; init_eq_buf() local
506 struct mlx5_eqe *eqe; cq_err_event_notifier() local
790 struct mlx5_eqe *eqe; mlx5_eq_get_eqe() local
[all...]
fw_reset.c
607 static void mlx5_sync_reset_events_handle(struct mlx5_fw_reset *fw_reset, struct mlx5_eqe *eqe) in mlx5_sync_reset_events_handle() argument
612 sync_fw_update_eqe = &eqe->data.sync_fw_update; in mlx5_sync_reset_events_handle()
633 struct mlx5_eqe *eqe = data; in fw_reset_event_notifier() local
638 switch (eqe->sub_type) { in fw_reset_event_notifier()
643 mlx5_sync_reset_events_handle(fw_reset, eqe); in fw_reset_event_notifier()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlxsw/
pci_hw.h
377 MLXSW_ITEM32(pci, eqe, event_type, 0x0C, 24, 8);
384 MLXSW_ITEM32(pci, eqe, event_sub_type, 0x0C, 16, 8);
389 MLXSW_ITEM32(pci, eqe, cqn, 0x0C, 8, 7);
394 MLXSW_ITEM32(pci, eqe, owner, 0x0C, 0, 1);
399 MLXSW_ITEM32(pci, eqe, cmd_token, 0x00, 16, 16);
404 MLXSW_ITEM32(pci, eqe, cmd_status, 0x00, 0, 8);
409 MLXSW_ITEM32(pci, eqe, cmd_out_param_h, 0x04, 0, 32);
414 MLXSW_ITEM32(pci, eqe, cmd_out_param_l, 0x08, 0, 32);
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/fpga/
core.c
164 static int fpga_err_event(struct notifier_block *nb, unsigned long event, void *eqe) in fpga_err_event() argument
168 return mlx5_fpga_event(fdev, event, eqe); in fpga_err_event()
171 static int fpga_qp_err_event(struct notifier_block *nb, unsigned long event, void *eqe) in fpga_qp_err_event() argument
175 return mlx5_fpga_event(fdev, event, eqe); in fpga_qp_err_event()
335 unsigned long event, void *eqe) in mlx5_fpga_event()
337 void *data = ((struct mlx5_eqe *)eqe)->data.raw; in mlx5_fpga_event()
334 mlx5_fpga_event(struct mlx5_fpga_device *fdev, unsigned long event, void *eqe) mlx5_fpga_event() argument
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/fpga/
core.c
163 static int fpga_err_event(struct notifier_block *nb, unsigned long event, void *eqe) in fpga_err_event() argument
167 return mlx5_fpga_event(fdev, event, eqe); in fpga_err_event()
170 static int fpga_qp_err_event(struct notifier_block *nb, unsigned long event, void *eqe) in fpga_qp_err_event() argument
174 return mlx5_fpga_event(fdev, event, eqe); in fpga_qp_err_event()
334 unsigned long event, void *eqe) in mlx5_fpga_event()
336 void *data = ((struct mlx5_eqe *)eqe)->data.raw; in mlx5_fpga_event()
333 mlx5_fpga_event(struct mlx5_fpga_device *fdev, unsigned long event, void *eqe) mlx5_fpga_event() argument
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/lib/
eq.h
57 struct mlx5_eqe *eqe = get_eqe(eq, eq->cons_index & (eq->nent - 1)); in next_eqe_sw() local
59 return ((eqe->owner & 1) ^ !!(eq->cons_index & eq->nent)) ? NULL : eqe; in next_eqe_sw()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/lib/
eq.h
63 struct mlx5_eqe *eqe = get_eqe(eq, eq->cons_index & eq->fbc.sz_m1); in next_eqe_sw() local
65 return (eqe->owner ^ (eq->cons_index >> eq->fbc.log_sz)) & 1 ? NULL : eqe; in next_eqe_sw()
/kernel/linux/linux-5.10/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_main.c
440 struct pvrdma_eqe *eqe; in pvrdma_intr1_handler() local
442 eqe = get_eqe(dev, head); in pvrdma_intr1_handler()
444 switch (eqe->type) { in pvrdma_intr1_handler()
453 pvrdma_qp_event(dev, eqe->info, eqe->type); in pvrdma_intr1_handler()
457 pvrdma_cq_event(dev, eqe->info, eqe->type); in pvrdma_intr1_handler()
462 pvrdma_srq_event(dev, eqe->info, eqe->type); in pvrdma_intr1_handler()
472 pvrdma_dev_event(dev, eqe in pvrdma_intr1_handler()
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_main.c
413 struct pvrdma_eqe *eqe; in pvrdma_intr1_handler() local
415 eqe = get_eqe(dev, head); in pvrdma_intr1_handler()
417 switch (eqe->type) { in pvrdma_intr1_handler()
426 pvrdma_qp_event(dev, eqe->info, eqe->type); in pvrdma_intr1_handler()
430 pvrdma_cq_event(dev, eqe->info, eqe->type); in pvrdma_intr1_handler()
435 pvrdma_srq_event(dev, eqe->info, eqe->type); in pvrdma_intr1_handler()
445 pvrdma_dev_event(dev, eqe in pvrdma_intr1_handler()
[all...]
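pvrdma_intr1_handler() above walks EQEs and switches on eqe->type to route each event, together with its eqe->info object identifier, to the QP, CQ, SRQ or device handler. A sketch of that dispatch pattern follows; the type values and handler bodies are placeholders, not the pvrdma driver's.

```c
/* Illustrative per-type EQE dispatch, modeled on the switch above. */
#include <stdint.h>
#include <stdio.h>

enum demo_event_type {
	DEMO_EVENT_QP   = 1,
	DEMO_EVENT_CQ   = 2,
	DEMO_EVENT_PORT = 3,
};

struct demo_eqe {
	uint32_t type;
	uint32_t info;    /* QPN / CQN / port number */
};

static void demo_qp_event(uint32_t qpn, uint32_t type)
{
	printf("QP %u event %u\n", qpn, type);
}

static void demo_cq_event(uint32_t cqn, uint32_t type)
{
	printf("CQ %u event %u\n", cqn, type);
}

static void demo_dispatch(const struct demo_eqe *eqe)
{
	switch (eqe->type) {
	case DEMO_EVENT_QP:
		demo_qp_event(eqe->info, eqe->type);
		break;
	case DEMO_EVENT_CQ:
		demo_cq_event(eqe->info, eqe->type);
		break;
	default:
		printf("unhandled event type %u (info %u)\n",
		       eqe->type, eqe->info);
		break;
	}
}

int main(void)
{
	struct demo_eqe events[] = {
		{ .type = DEMO_EVENT_QP,   .info = 7 },
		{ .type = DEMO_EVENT_CQ,   .info = 3 },
		{ .type = DEMO_EVENT_PORT, .info = 1 },
	};

	for (unsigned int i = 0; i < sizeof(events) / sizeof(events[0]); i++)
		demo_dispatch(&events[i]);
	return 0;
}
```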
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
qpc.c
99 struct mlx5_eqe *eqe; in rsc_event_notifier() local
104 eqe = data; in rsc_event_notifier()
105 rsn = be32_to_cpu(eqe->data.dct.dctn) & 0xffffff; in rsc_event_notifier()
116 eqe = data; in rsc_event_notifier()
117 rsn = be32_to_cpu(eqe->data.qp_srq.qp_srq_n) & 0xffffff; in rsc_event_notifier()
118 rsn |= (eqe->data.qp_srq.type << MLX5_USER_INDEX_LEN); in rsc_event_notifier()
/kernel/linux/linux-6.6/drivers/infiniband/hw/efa/
efa_main.c
72 static void efa_process_comp_eqe(struct efa_dev *dev, struct efa_admin_eqe *eqe) in efa_process_comp_eqe() argument
74 u16 cqn = eqe->u.comp_event.cqn; in efa_process_comp_eqe()
89 static void efa_process_eqe(struct efa_com_eq *eeq, struct efa_admin_eqe *eqe) in efa_process_eqe() argument
93 if (likely(EFA_GET(&eqe->common, EFA_ADMIN_EQE_EVENT_TYPE) == in efa_process_eqe()
95 efa_process_comp_eqe(dev, eqe); in efa_process_eqe()
99 EFA_GET(&eqe->common, in efa_process_eqe()
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/
mad.c
63 #define GET_BLK_PTR_FROM_EQE(eqe) be32_to_cpu(eqe->event.port_mgmt_change.params.tbl_change_info.block_ptr)
64 #define GET_MASK_FROM_EQE(eqe) be32_to_cpu(eqe->event.port_mgmt_change.params.tbl_change_info.tbl_entries_mask)
1118 struct mlx4_eqe *eqe) in propagate_pkey_ev()
1120 __propagate_pkey_ev(dev, port_num, GET_BLK_PTR_FROM_EQE(eqe), in propagate_pkey_ev()
1121 GET_MASK_FROM_EQE(eqe)); in propagate_pkey_ev()
1179 struct mlx4_eqe *eqe = &(ew->ib_eqe); in handle_port_mgmt_change_event() local
1180 u8 port = eqe->event.port_mgmt_change.port; in handle_port_mgmt_change_event()
1185 switch (eqe in handle_port_mgmt_change_event()
1117 propagate_pkey_ev(struct mlx4_ib_dev *dev, int port_num, struct mlx4_eqe *eqe) propagate_pkey_ev() argument
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx4/
mad.c
63 #define GET_BLK_PTR_FROM_EQE(eqe) be32_to_cpu(eqe->event.port_mgmt_change.params.tbl_change_info.block_ptr)
64 #define GET_MASK_FROM_EQE(eqe) be32_to_cpu(eqe->event.port_mgmt_change.params.tbl_change_info.tbl_entries_mask)
1120 struct mlx4_eqe *eqe) in propagate_pkey_ev()
1122 __propagate_pkey_ev(dev, port_num, GET_BLK_PTR_FROM_EQE(eqe), in propagate_pkey_ev()
1123 GET_MASK_FROM_EQE(eqe)); in propagate_pkey_ev()
1181 struct mlx4_eqe *eqe = &(ew->ib_eqe); in handle_port_mgmt_change_event() local
1182 u32 port = eqe->event.port_mgmt_change.port; in handle_port_mgmt_change_event()
1187 switch (eqe in handle_port_mgmt_change_event()
1119 propagate_pkey_ev(struct mlx4_ib_dev *dev, int port_num, struct mlx4_eqe *eqe) propagate_pkey_ev() argument
[all...]
/kernel/linux/linux-5.10/include/linux/mlx5/
cq.h
49 void (*comp)(struct mlx5_core_cq *cq, struct mlx5_eqe *eqe);
57 void (*comp)(struct mlx5_core_cq *cq, struct mlx5_eqe *eqe);
/kernel/linux/linux-6.6/include/linux/mlx5/
cq.h
49 void (*comp)(struct mlx5_core_cq *cq, struct mlx5_eqe *eqe);
57 void (*comp)(struct mlx5_core_cq *cq, struct mlx5_eqe *eqe);
/kernel/linux/linux-6.6/drivers/infiniband/hw/erdma/
erdma_eq.c
24 u64 *eqe = get_queue_entry(eq->qbuf, eq->ci, eq->depth, EQE_SHIFT); in get_next_valid_eqe() local
25 u32 owner = FIELD_GET(ERDMA_CEQE_HDR_O_MASK, READ_ONCE(*eqe)); in get_next_valid_eqe()
27 return owner ^ !!(eq->ci & eq->depth) ? eqe : NULL; in get_next_valid_eqe()
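The erdma hit reads the owner flag out of a 64-bit EQE header with FIELD_GET() and XORs it with the queue's wrap parity, much like the mlx4/mlx5 checks earlier in these results. Below is a sketch of that check; the mask position is only an example, and demo_field_get() mimics FIELD_GET() for a constant mask (using a GCC/Clang builtin).

```c
/* Userspace model of the FIELD_GET-based owner/parity check above. */
#include <stdint.h>
#include <stdio.h>

#define DEMO_CEQE_HDR_O_MASK (1ull << 31)   /* example owner-bit position */

static uint64_t demo_field_get(uint64_t mask, uint64_t val)
{
	return (val & mask) >> __builtin_ctzll(mask);
}

struct demo_eq {
	uint64_t *qbuf;
	uint32_t depth;   /* power of two */
	uint32_t ci;      /* free-running consumer index */
};

static uint64_t *demo_next_valid_eqe(struct demo_eq *eq)
{
	uint64_t *eqe = &eq->qbuf[eq->ci & (eq->depth - 1)];
	uint32_t owner = demo_field_get(DEMO_CEQE_HDR_O_MASK, *eqe);

	/* The entry is new only while its owner bit differs from the wrap
	 * parity of ci, i.e. it was written on the current pass. */
	return owner ^ !!(eq->ci & eq->depth) ? eqe : NULL;
}

int main(void)
{
	uint64_t ring[4] = { DEMO_CEQE_HDR_O_MASK, 0, 0, 0 };
	struct demo_eq eq = { .qbuf = ring, .depth = 4, .ci = 0 };

	printf("entry 0 valid on first pass: %s\n",
	       demo_next_valid_eqe(&eq) ? "yes" : "no");

	eq.ci = 4;   /* after a full wrap the expected owner value flips */
	printf("same entry after a wrap:     %s\n",
	       demo_next_valid_eqe(&eq) ? "yes" : "no");
	return 0;
}
```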

Completed in 22 milliseconds
