Lines matching refs:urb
26 static struct urb *udl_get_urb_locked(struct udl_device *udl, long timeout);
121 void udl_urb_completion(struct urb *urb)
123 struct urb_node *unode = urb->context;
128 if (urb->status) {
129 if (!(urb->status == -ENOENT ||
130 urb->status == -ECONNRESET ||
131 urb->status == -EPROTO ||
132 urb->status == -ESHUTDOWN)) {
134 __func__, urb->status);
138 urb->transfer_buffer_length = udl->urbs.size; /* reset to actual */
152 struct urb *urb;
159 urb = udl_get_urb_locked(udl, MAX_SCHEDULE_TIMEOUT);
162 if (WARN_ON(!urb))
164 unode = urb->context;
166 usb_free_coherent(urb->dev, udl->urbs.size,
167 urb->transfer_buffer, urb->transfer_dma);
168 usb_free_urb(urb);
178 struct urb *urb;
199 urb = usb_alloc_urb(0, GFP_KERNEL);
200 if (!urb) {
204 unode->urb = urb;
207 &urb->transfer_dma);
210 usb_free_urb(urb);
219 /* urb->transfer_buffer_length set to actual before submit */
220 usb_fill_bulk_urb(urb, udev, usb_sndbulkpipe(udev, 1),
222 urb->transfer_flags |= URB_NO_TRANSFER_DMA_MAP;
235 static struct urb *udl_get_urb_locked(struct udl_device *udl, long timeout)
246 DRM_INFO("wait for urb interrupted: available: %d\n",
258 return unode->urb;
262 struct urb *udl_get_urb(struct drm_device *dev)
265 struct urb *urb;
268 urb = udl_get_urb_locked(udl, GET_URB_TIMEOUT);
270 return urb;
273 int udl_submit_urb(struct drm_device *dev, struct urb *urb, size_t len)
282 urb->transfer_buffer_length = len; /* set to actual payload len */
283 ret = usb_submit_urb(urb, GFP_ATOMIC);
286 udl_urb_completion(urb); /* because no one else will */
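
The matches above outline the driver's URB pool: URBs are pre-allocated with coherent transfer buffers (lines 199-222), handed out by udl_get_urb()/udl_get_urb_locked() with a timeout (lines 235-270), submitted with the actual payload length (lines 282-283), and returned to the pool by udl_urb_completion() (line 121), which also runs on submit failure (line 286). A minimal caller-side sketch follows; it is not taken from the listing. udl_get_urb(), udl_submit_urb() and udl_urb_completion() appear in the matches, while example_send_command() and its payload handling are illustrative assumptions only.

#include <linux/string.h>
#include <linux/usb.h>
#include <drm/drm_device.h>

#include "udl_drv.h"	/* declares udl_get_urb() and udl_submit_urb() */

/* Illustrative helper, not part of the driver. */
static int example_send_command(struct drm_device *dev,
				const void *cmd, size_t len)
{
	/* Waits up to GET_URB_TIMEOUT for a free pool URB (line 268). */
	struct urb *urb = udl_get_urb(dev);

	if (!urb)
		return -ENOMEM;

	/*
	 * transfer_buffer was allocated coherently at udl->urbs.size bytes
	 * (freed via usb_free_coherent() at lines 166-167), so len must not
	 * exceed that size.
	 */
	memcpy(urb->transfer_buffer, cmd, len);

	/*
	 * udl_submit_urb() sets transfer_buffer_length to the actual payload
	 * length and submits with GFP_ATOMIC (lines 282-283); on failure it
	 * calls udl_urb_completion() itself (line 286), so the URB is
	 * returned to the pool either way.
	 */
	return udl_submit_urb(dev, urb, len);
}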