Lines Matching refs:event

961  * queue or arm its CQ for event generation), no further harm is expected.
1271 struct devx_event *event;
1285 event = xa_load(&dev->devx_event_table.event_xa,
1287 WARN_ON(!event);
1289 xa_val_level2 = xa_load(&event->object_ids, sub->xa_key_level2);
1291 xa_erase(&event->object_ids,
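
The cleanup hits at 1271-1292 show the shape of the devx event table: a two-level xarray, where the first level (event_xa) is keyed by the subscription's level-1 key (the event type) and each entry's object_ids xarray is keyed by the level-2 key (the object id). A minimal sketch of that lookup-then-erase pattern follows; the struct layouts are trimmed to the fields visible in the hits, obj_sub_list is an assumed name for the per-object subscription list, and locking is left to the caller.

/*
 * Kernel-side sketch only: mirrors the two-level lookup and the
 * "erase level 2 once its last subscription is gone" step above.
 */
#include <linux/xarray.h>
#include <linux/list.h>

struct devx_event {
	struct xarray object_ids;		/* level 2: keyed by object id */
	struct list_head unaffiliated_list;	/* subs not tied to one object */
};

struct devx_obj_event {
	struct list_head obj_sub_list;		/* assumed field name */
};

static void cleanup_subscription_sketch(struct xarray *event_xa,
					unsigned long key_level1,
					unsigned long key_level2)
{
	struct devx_event *event;
	struct devx_obj_event *obj_event;

	event = xa_load(event_xa, key_level1);	/* level 1: event type */
	if (WARN_ON(!event))
		return;

	obj_event = xa_load(&event->object_ids, key_level2);
	if (obj_event && list_empty(&obj_event->obj_sub_list))
		xa_erase(&event->object_ids, key_level2);
}

The same two-level lookup recurs at 1345-1355 (the completion path, where level 1 is MLX5_EVENT_TYPE_COMP) and in the dealloc path at 1754-1768.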
1345 struct devx_event *event;
1351 event = xa_load(&table->event_xa, MLX5_EVENT_TYPE_COMP);
1352 if (!event)
1355 obj_event = xa_load(&event->object_ids, obj_id);
1754 struct devx_event *event;
1761 event = xa_load(&devx_event_table->event_xa, key_level1);
1762 WARN_ON(!event);
1764 xa_val_level2 = xa_load(&event->object_ids,
1767 xa_erase(&event->object_ids,
1780 struct devx_event *event;
1783 event = xa_load(&devx_event_table->event_xa, key_level1);
1784 if (!event) {
1785 event = kzalloc(sizeof(*event), GFP_KERNEL);
1786 if (!event)
1789 INIT_LIST_HEAD(&event->unaffiliated_list);
1790 xa_init(&event->object_ids);
1794 event,
1797 kfree(event);
1805 obj_event = xa_load(&event->object_ids, key_level2);
1812 err = xa_insert(&event->object_ids,
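
Lines 1780-1815 are the allocate-on-first-use side of that table: when no level-1 entry exists for the key, one is allocated, its unaffiliated list and level-2 xarray are initialized, and it is published with xa_insert, which fails if another path inserted first or if the xarray could not allocate, in which case the fresh entry is freed. A hedged sketch of just that step, reusing struct devx_event from the sketch above; the function name and return convention are invented.

/*
 * Sketch of the level-1 allocate-and-insert step from 1780-1815.
 * The racing-insert case is simplified: the real code can fall back
 * to the winner's entry, here we simply return the error.
 */
#include <linux/xarray.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/err.h>

static struct devx_event *get_or_create_event_sketch(struct xarray *event_xa,
						      unsigned long key_level1)
{
	struct devx_event *event;
	int err;

	event = xa_load(event_xa, key_level1);
	if (event)
		return event;

	event = kzalloc(sizeof(*event), GFP_KERNEL);
	if (!event)
		return ERR_PTR(-ENOMEM);

	INIT_LIST_HEAD(&event->unaffiliated_list);
	xa_init(&event->object_ids);

	err = xa_insert(event_xa, key_level1, event, GFP_KERNEL);
	if (err) {
		kfree(event);	/* lost the race or xarray allocation failed */
		return ERR_PTR(err);
	}
	return event;
}

Level 2 follows the same load-then-insert pattern (1805-1815), and the new subscription is then linked onto either event->unaffiliated_list or the per-object list, which is what the hits at 2027-2047 show.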
2027 struct devx_event *event;
2037 event = xa_load(&devx_event_table->event_xa,
2039 WARN_ON(!event);
2043 &event->unaffiliated_list);
2047 obj_event = xa_load(&event->object_ids, obj_id);
2345 struct devx_event *event;
2364 event = xa_load(&table->event_xa, event_type | (obj_type << 16));
2365 if (!event) {
2371 dispatch_event_fd(&event->unaffiliated_list, data);
2377 obj_event = xa_load(&event->object_ids, obj_id);
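
The notifier hits at 2345-2377 show the delivery order: the level-1 key is composed from the event type and the object type, unaffiliated subscribers are dispatched first, and only then is the per-object entry resolved by obj_id. A sketch of that order follows; dispatch_event_fd() is named in the listing but its prototype here is an assumption, struct devx_event/devx_obj_event are reused from the first sketch, and the RCU/locking around the lists is omitted.

/*
 * Sketch of the dispatch order at 2345-2377 (notifier context).
 * dispatch_event_fd() is assumed to walk a subscription list and queue
 * the event data on each subscriber's fd.
 */
#include <linux/xarray.h>
#include <linux/notifier.h>
#include <linux/types.h>

static void dispatch_event_fd(struct list_head *fd_list, const void *data);

static int event_notifier_sketch(struct xarray *event_xa,
				 u16 event_type, u16 obj_type,
				 u32 obj_id, const void *data)
{
	struct devx_event *event;
	struct devx_obj_event *obj_event;

	event = xa_load(event_xa, event_type | (obj_type << 16));
	if (!event)
		return NOTIFY_DONE;	/* nobody subscribed to this type */

	/* unaffiliated subscribers receive every event of this type */
	dispatch_event_fd(&event->unaffiliated_list, data);

	/* then subscribers bound to this specific object, if any */
	obj_event = xa_load(&event->object_ids, obj_id);
	if (obj_event)
		dispatch_event_fd(&obj_event->obj_sub_list, data);

	return NOTIFY_OK;
}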
2410 struct devx_event *event;
2418 event = entry;
2420 sub, tmp, &event->unaffiliated_list, xa_list)
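
Lines 2410-2420 are the teardown walk over the whole table: xa_for_each visits every level-1 entry, and the unaffiliated subscriptions are walked with the _safe list iterator because entries are unlinked while iterating (the xa_list member in the listing is that linkage). A compact sketch, with the subscription type reduced to its list linkage and the real per-subscription cleanup replaced by a plain unlink-and-free.

/*
 * Sketch of the teardown walk at 2410-2420; reuses struct devx_event
 * from the first sketch.  The real cleanup does more per subscription
 * than the unlink-and-free shown here.
 */
#include <linux/xarray.h>
#include <linux/list.h>
#include <linux/slab.h>

struct devx_event_subscription_sketch {
	struct list_head xa_list;	/* member name taken from the listing */
};

static void event_table_destroy_sketch(struct xarray *event_xa)
{
	struct devx_event_subscription_sketch *sub, *tmp;
	struct devx_event *event;
	unsigned long key;
	void *entry;

	xa_for_each(event_xa, key, entry) {
		event = entry;

		list_for_each_entry_safe(sub, tmp,
					 &event->unaffiliated_list, xa_list) {
			list_del(&sub->xa_list);
			kfree(sub);
		}
	}
}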
2436 struct devx_async_data *event;
2462 event = list_entry(ev_queue->event_list.next,
2464 eventsz = event->cmd_out_len +
2475 if (copy_to_user(buf, &event->hdr, eventsz))
2480 atomic_sub(event->cmd_out_len, &ev_queue->bytes_in_use);
2481 kvfree(event);
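
Lines 2436-2481 are the read() side of the async command queue: the oldest entry is taken off event_list, its size (the command output plus the fixed header) is checked against the user buffer, the data starting at event->hdr is copied out, the in-flight byte accounting is decremented, and the entry is freed with kvfree. A sketch of that sequence with the sleep-until-data and locking logic omitted; the header layout is a stand-in, since the real type is not visible in the listing.

/*
 * Sketch of the dequeue/copy/free sequence at 2436-2481.  The real
 * read() sleeps on an empty list and holds a spinlock around the
 * dequeue; neither is shown here.
 */
#include <linux/list.h>
#include <linux/uaccess.h>
#include <linux/slab.h>
#include <linux/atomic.h>
#include <linux/types.h>

struct async_cmd_hdr_sketch {		/* stand-in for the real header type */
	__u64 wr_id;
	__u8 out_data[];		/* command output follows the header */
};

struct devx_async_data_sketch {
	struct list_head list;		/* linkage on ev_queue->event_list */
	u16 cmd_out_len;
	struct async_cmd_hdr_sketch hdr;	/* must stay the last field */
};

static ssize_t async_cmd_read_sketch(struct list_head *event_list,
				     atomic_t *bytes_in_use,
				     char __user *buf, size_t count)
{
	struct devx_async_data_sketch *event;
	size_t eventsz;
	ssize_t ret;

	/* caller guarantees the list is non-empty */
	event = list_first_entry(event_list,
				 struct devx_async_data_sketch, list);
	eventsz = event->cmd_out_len + sizeof(event->hdr);

	if (eventsz > count)
		return -ENOSPC;		/* user buffer cannot hold this event */

	list_del(&event->list);		/* dequeue before copying out */

	ret = copy_to_user(buf, &event->hdr, eventsz) ? -EFAULT : (ssize_t)eventsz;

	atomic_sub(event->cmd_out_len, bytes_in_use);
	kvfree(event);			/* matches the kv allocation on submit */
	return ret;
}

The event-channel read at 2517-2586 follows the same dequeue/copy/free shape, except the entry comes from ev_file->event_list, only event->hdr is handed back to userspace, and it is freed with kfree rather than kvfree.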
2517 struct devx_async_event_data *event;
2560 event = list_first_entry(&ev_file->event_list,
2564 event_data = &event->hdr;
2575 list_del(&event->list);
2586 kfree(event);