Lines Matching defs:cdev

29 static void verify_start(struct ccw_device *cdev);
34 static void verify_done(struct ccw_device *cdev, int rc)
36 struct subchannel *sch = to_subchannel(cdev->dev.parent);
37 struct ccw_dev_id *id = &cdev->private->dev_id;
38 int mpath = cdev->private->flags.mpath;
39 int pgroup = cdev->private->flags.pgroup;
52 ccw_device_verify_done(cdev, rc);
58 static void nop_build_cp(struct ccw_device *cdev)
60 struct ccw_request *req = &cdev->private->req;
61 struct ccw1 *cp = cdev->private->dma_area->iccws;
73 static void nop_do(struct ccw_device *cdev)
75 struct subchannel *sch = to_subchannel(cdev->dev.parent);
76 struct ccw_request *req = &cdev->private->req;
79 ~cdev->private->path_noirq_mask);
82 nop_build_cp(cdev);
83 ccw_request_start(cdev);
87 verify_done(cdev, sch->vpm ? 0 : -EACCES);
93 static enum io_status nop_filter(struct ccw_device *cdev, void *data,
105 static void nop_callback(struct ccw_device *cdev, void *data, int rc)
107 struct subchannel *sch = to_subchannel(cdev->dev.parent);
108 struct ccw_request *req = &cdev->private->req;
115 cdev->private->path_noirq_mask |= req->lpm;
118 cdev->private->path_notoper_mask |= req->lpm;
125 nop_do(cdev);
129 verify_done(cdev, rc);
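
The nop_* fragments above belong to the path-probing step: a single NO-OP CCW is started on one logical path at a time, and the interrupt callback records which paths answered and which did not (path_noirq_mask, path_notoper_mask). A minimal standalone sketch of that shape follows; struct ccw1_sketch, the 0x03 command code and lpm_adjust_sketch() are local stand-ins assumed for illustration, not kernel definitions.

#include <stdint.h>
#include <stdio.h>

/* Illustration-only mirror of the s390 channel command word layout
 * (an assumption; not taken from a kernel header). */
struct ccw1_sketch {
	uint8_t  cmd_code;	/* channel command; 0x03 is assumed NO-OP */
	uint8_t  flags;
	uint16_t count;
	uint32_t cda;		/* data address, unused for NO-OP */
};

/* Shift the current path bit right until it hits an allowed path, the
 * same idea the listing's lpm_adjust() calls suggest. */
static int lpm_adjust_sketch(int lpm, int mask)
{
	while (lpm && !(lpm & mask))
		lpm >>= 1;
	return lpm;
}

int main(void)
{
	struct ccw1_sketch cp = { .cmd_code = 0x03, .count = 0 };
	int pam = 0xc1;		/* paths available: paths 0, 1 and 7 */
	int noirq = 0x40;	/* one path already failed to answer */
	int lpm = 0x80;		/* start probing from the first path */

	while ((lpm = lpm_adjust_sketch(lpm, pam & ~noirq)) != 0) {
		printf("probe path mask 0x%02x with cmd 0x%02x\n",
		       lpm, cp.cmd_code);
		lpm >>= 1;	/* move on to the next path */
	}
	return 0;
}
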
135 static void spid_build_cp(struct ccw_device *cdev, u8 fn)
137 struct ccw_request *req = &cdev->private->req;
138 struct ccw1 *cp = cdev->private->dma_area->iccws;
140 struct pgid *pgid = &cdev->private->dma_area->pgid[i];
150 static void pgid_wipeout_callback(struct ccw_device *cdev, void *data, int rc)
154 verify_done(cdev, rc);
161 cdev->private->flags.pgid_unknown = 0;
162 verify_start(cdev);
168 static void pgid_wipeout_start(struct ccw_device *cdev)
170 struct subchannel *sch = to_subchannel(cdev->dev.parent);
171 struct ccw_dev_id *id = &cdev->private->dev_id;
172 struct ccw_request *req = &cdev->private->req;
176 id->ssid, id->devno, cdev->private->pgid_valid_mask,
177 cdev->private->path_noirq_mask);
186 if (cdev->private->flags.mpath)
188 spid_build_cp(cdev, fn);
189 ccw_request_start(cdev);
195 static void spid_do(struct ccw_device *cdev)
197 struct subchannel *sch = to_subchannel(cdev->dev.parent);
198 struct ccw_request *req = &cdev->private->req;
202 req->lpm = lpm_adjust(req->lpm, cdev->private->pgid_todo_mask);
210 if (cdev->private->flags.mpath)
212 spid_build_cp(cdev, fn);
213 ccw_request_start(cdev);
217 if (cdev->private->flags.pgid_unknown) {
219 pgid_wipeout_start(cdev);
222 verify_done(cdev, sch->vpm ? 0 : -EACCES);
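
spid_do() and pgid_wipeout_start() both compose a SET PGID function byte before calling spid_build_cp(): pick establish or disband, then OR in the multipath modifier if the mpath flag is set. The sketch below shows only that composition; the FN_* values mirror the commonly used encoding but are assumptions defined locally, not taken from a kernel header.

#include <stdint.h>
#include <stdio.h>

/* Assumed SET PGID function-byte encoding; declared locally for this
 * sketch rather than taken from a kernel header. */
#define FN_ESTABLISH   0x00
#define FN_DISBAND     0x20
#define FN_MULTI_PATH  0x80

/* Compose the function byte the way spid_do() and pgid_wipeout_start()
 * suggest: pick the operation, then OR in multipath if enabled. */
static uint8_t spid_fn_sketch(int establish, int mpath)
{
	uint8_t fn = establish ? FN_ESTABLISH : FN_DISBAND;

	if (mpath)
		fn |= FN_MULTI_PATH;
	return fn;
}

int main(void)
{
	printf("establish, multipath: 0x%02x\n", spid_fn_sketch(1, 1));
	printf("disband, single path: 0x%02x\n", spid_fn_sketch(0, 0));
	return 0;
}
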
228 static void spid_callback(struct ccw_device *cdev, void *data, int rc)
230 struct subchannel *sch = to_subchannel(cdev->dev.parent);
231 struct ccw_request *req = &cdev->private->req;
238 cdev->private->flags.pgid_unknown = 1;
239 cdev->private->path_noirq_mask |= req->lpm;
242 cdev->private->path_notoper_mask |= req->lpm;
245 if (cdev->private->flags.mpath) {
247 cdev->private->flags.mpath = 0;
251 cdev->private->flags.pgroup = 0;
257 spid_do(cdev);
261 verify_start(cdev);
264 verify_done(cdev, rc);
267 static void spid_start(struct ccw_device *cdev)
269 struct ccw_request *req = &cdev->private->req;
278 spid_do(cdev);
301 static void pgid_analyze(struct ccw_device *cdev, struct pgid **p,
304 struct pgid *pgid = &cdev->private->dma_area->pgid[0];
313 if ((cdev->private->pgid_valid_mask & lpm) == 0)
333 static u8 pgid_to_donepm(struct ccw_device *cdev)
335 struct subchannel *sch = to_subchannel(cdev->dev.parent);
344 if ((cdev->private->pgid_valid_mask & lpm) == 0)
346 pgid = &cdev->private->dma_area->pgid[i];
354 if (cdev->private->flags.mpath) {
367 static void pgid_fill(struct ccw_device *cdev, struct pgid *pgid)
372 memcpy(&cdev->private->dma_area->pgid[i], pgid,
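
pgid_analyze(), pgid_to_donepm() and pgid_fill() post-process the per-path SENSE PGID buffers: check whether the answers agree, derive the mask of paths that already carry the wanted PGID, and copy the chosen PGID into every slot. The sketch below reproduces only that bookkeeping with a hypothetical fixed-size PGID record; the field layout and the NPATHS / 0x80-is-first-path convention are assumptions for illustration.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NPATHS 8

/* Hypothetical fixed-size path-group ID record (layout is an assumption). */
struct pgid_sketch {
	uint8_t data[11];
};

/* Derive a "done" mask: paths whose sensed PGID is valid and already equal
 * to the reference one need no further SET PGID, mirroring pgid_to_donepm(). */
static uint8_t donepm_sketch(const struct pgid_sketch *per_path,
			     const struct pgid_sketch *ref, uint8_t valid_mask)
{
	uint8_t donepm = 0;
	int i;

	for (i = 0; i < NPATHS; i++) {
		uint8_t lpm = 0x80 >> i;

		if (!(valid_mask & lpm))
			continue;	/* no usable answer on this path */
		if (memcmp(&per_path[i], ref, sizeof(*ref)) == 0)
			donepm |= lpm;
	}
	return donepm;
}

/* Copy the chosen PGID into every per-path slot, as pgid_fill() does. */
static void fill_sketch(struct pgid_sketch *per_path,
			const struct pgid_sketch *ref)
{
	int i;

	for (i = 0; i < NPATHS; i++)
		per_path[i] = *ref;
}

int main(void)
{
	struct pgid_sketch slots[NPATHS] = { { { 0 } } };
	struct pgid_sketch ref = { { 1, 2, 3 } };

	slots[0] = ref;		/* path 0 already carries the wanted PGID */
	printf("donepm = 0x%02x\n", donepm_sketch(slots, &ref, 0x81));
	fill_sketch(slots, &ref);
	return 0;
}
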
379 static void snid_done(struct ccw_device *cdev, int rc)
381 struct ccw_dev_id *id = &cdev->private->dev_id;
382 struct subchannel *sch = to_subchannel(cdev->dev.parent);
391 pgid_analyze(cdev, &pgid, &mismatch, &reserved, &reset);
392 if (reserved == cdev->private->pgid_valid_mask)
397 donepm = pgid_to_donepm(cdev);
399 cdev->private->pgid_reset_mask |= reset;
400 cdev->private->pgid_todo_mask &=
401 ~(donepm | cdev->private->path_noirq_mask);
402 pgid_fill(cdev, pgid);
407 id->devno, rc, cdev->private->pgid_valid_mask, sch->vpm,
408 cdev->private->pgid_todo_mask, mismatch, reserved, reset);
411 if (cdev->private->flags.pgid_unknown) {
412 pgid_wipeout_start(cdev);
416 if (cdev->private->pgid_todo_mask == 0) {
417 verify_done(cdev, sch->vpm == 0 ? -EACCES : 0);
421 spid_start(cdev);
425 cdev->private->flags.pgroup = 0;
426 cdev->private->flags.mpath = 0;
427 verify_start(cdev);
430 verify_done(cdev, rc);
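
snid_done() is the decision point once the SENSE PGID round is finished: unknown or stale PGID state triggers a wipe-out pass, an empty todo mask ends verification (failing with -EACCES if no path is usable), otherwise SET PGID is started, and when grouping is unsupported the pgroup/mpath flags are cleared and verification restarts. A compact sketch of that branching follows; the state fields and the use of -EOPNOTSUPP for "grouping not supported" are assumptions chosen for illustration.

#include <stdio.h>
#include <errno.h>

struct snid_state {
	int pgid_unknown;		/* conflicting or unreadable PGID data */
	unsigned char todo_mask;	/* paths that still need SET PGID */
	unsigned char vpm;		/* paths verified as usable */
	int pgroup, mpath;
};

static void done_sketch(int rc)          { printf("verify done, rc=%d\n", rc); }
static void wipeout_sketch(void)         { printf("wipe out stale PGIDs\n"); }
static void spid_start_sketch(void)      { printf("start SET PGID\n"); }
static void verify_restart_sketch(void)  { printf("re-verify without grouping\n"); }

/* Branch the way the snid_done() fragments suggest; rc is the overall
 * outcome of the SENSE PGID round. */
static void snid_done_sketch(struct snid_state *st, int rc)
{
	switch (rc) {
	case 0:
		if (st->pgid_unknown) {
			wipeout_sketch();
			return;
		}
		if (st->todo_mask == 0) {
			done_sketch(st->vpm == 0 ? -EACCES : 0);
			return;
		}
		spid_start_sketch();
		break;
	case -EOPNOTSUPP:
		st->pgroup = 0;
		st->mpath = 0;
		verify_restart_sketch();
		break;
	default:
		done_sketch(rc);
	}
}

int main(void)
{
	struct snid_state st = { .todo_mask = 0x80, .vpm = 0x80, .pgroup = 1 };

	snid_done_sketch(&st, 0);		/* paths left to group */
	snid_done_sketch(&st, -EOPNOTSUPP);	/* grouping unsupported */
	return 0;
}
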
437 static void snid_build_cp(struct ccw_device *cdev)
439 struct ccw_request *req = &cdev->private->req;
440 struct ccw1 *cp = cdev->private->dma_area->iccws;
445 cp->cda = (u32)virt_to_phys(&cdev->private->dma_area->pgid[i]);
454 static void snid_do(struct ccw_device *cdev)
456 struct subchannel *sch = to_subchannel(cdev->dev.parent);
457 struct ccw_request *req = &cdev->private->req;
461 ~cdev->private->path_noirq_mask);
464 snid_build_cp(cdev);
465 ccw_request_start(cdev);
469 if (cdev->private->pgid_valid_mask)
471 else if (cdev->private->path_noirq_mask)
475 snid_done(cdev, ret);
481 static void snid_callback(struct ccw_device *cdev, void *data, int rc)
483 struct ccw_request *req = &cdev->private->req;
487 cdev->private->pgid_valid_mask |= req->lpm;
490 cdev->private->flags.pgid_unknown = 1;
491 cdev->private->path_noirq_mask |= req->lpm;
494 cdev->private->path_notoper_mask |= req->lpm;
501 snid_do(cdev);
505 snid_done(cdev, rc);
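
snid_build_cp(), snid_do() and snid_callback() issue SENSE PGID one path at a time: each request points its data address at that path's slot in the pgid array, and the callback folds the outcome into pgid_valid_mask, path_noirq_mask or path_notoper_mask. The sketch below shows that per-path wiring; the 0x34 command code, the CCW mirror struct and pathmask_to_pos_sketch() are assumptions for illustration.

#include <stdint.h>
#include <stdio.h>

#define NPATHS 8

/* Illustration-only mirrors of the CCW and the per-path PGID buffer. */
struct ccw1_sketch {
	uint8_t  cmd_code;
	uint8_t  flags;
	uint16_t count;
	uint32_t cda;		/* data address of this path's PGID slot */
};

struct pgid_sketch {
	uint8_t data[11];
};

/* Map a single-bit path mask (0x80 = first path) to its slot index. */
static int pathmask_to_pos_sketch(unsigned int lpm)
{
	int pos = 0;

	while (lpm != 0x80 && pos < 7) {
		lpm <<= 1;
		pos++;
	}
	return pos;
}

/* Build one SENSE PGID CCW for the currently probed path, pointing its
 * data address at that path's slot, as snid_build_cp() suggests; the
 * truncating cast mirrors the (u32) cast visible in the listing. */
static void snid_build_sketch(struct ccw1_sketch *cp,
			      struct pgid_sketch *pgid, unsigned int lpm)
{
	int i = pathmask_to_pos_sketch(lpm);

	cp->cmd_code = 0x34;		/* assumed SENSE PGID command */
	cp->count = sizeof(pgid[i]);
	cp->cda = (uint32_t)(uintptr_t)&pgid[i];
}

int main(void)
{
	struct ccw1_sketch cp = { 0 };
	struct pgid_sketch pgid[NPATHS];
	uint8_t valid = 0;

	snid_build_sketch(&cp, pgid, 0x40);	/* probe the second path */
	valid |= 0x40;	/* callback: this path delivered a PGID */
	printf("cmd 0x%02x, count %u, valid 0x%02x\n",
	       cp.cmd_code, (unsigned)cp.count, valid);
	return 0;
}
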
511 static void verify_start(struct ccw_device *cdev)
513 struct subchannel *sch = to_subchannel(cdev->dev.parent);
514 struct ccw_request *req = &cdev->private->req;
515 struct ccw_dev_id *devid = &cdev->private->dev_id;
521 memset(cdev->private->dma_area->pgid, 0,
522 sizeof(cdev->private->dma_area->pgid));
523 cdev->private->pgid_valid_mask = 0;
524 cdev->private->pgid_todo_mask = sch->schib.pmcw.pam;
525 cdev->private->path_notoper_mask = 0;
533 if (cdev->private->flags.pgroup) {
537 snid_do(cdev);
543 nop_do(cdev);
549 * @cdev: ccw device
551 * Perform an I/O on each available channel path to @cdev to determine which
557 void ccw_device_verify_start(struct ccw_device *cdev)
560 CIO_HEX_EVENT(4, &cdev->private->dev_id, sizeof(cdev->private->dev_id));
565 cdev->private->flags.pgroup = cdev->private->options.pgroup;
566 cdev->private->flags.mpath = cdev->private->options.mpath;
567 cdev->private->flags.doverify = 0;
568 cdev->private->path_noirq_mask = 0;
569 verify_start(cdev);
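
ccw_device_verify_start() is the external entry point: it latches the configured pgroup/mpath options into working flags, clears the per-path bookkeeping, and hands off to verify_start(), which picks SENSE PGID or plain NO-OP probing; completion is always reported through verify_done(). The sketch below models only that flag-latching and dispatch; every name in it is a local placeholder, not a kernel symbol.

#include <stdio.h>

/* Placeholder state mirroring the flags the listing shows being reset. */
struct verify_state {
	int pgroup, mpath;		/* working copies used during verification */
	int opt_pgroup, opt_mpath;	/* configured device options */
	unsigned char path_noirq_mask;
};

static void snid_do_sketch(struct verify_state *st)
{
	printf("sense PGIDs (pgroup=%d mpath=%d)\n", st->pgroup, st->mpath);
}

static void nop_do_sketch(struct verify_state *st)
{
	printf("probe paths with NO-OPs (pgroup=%d)\n", st->pgroup);
}

/* Latch options into working flags, clear per-path state, then dispatch,
 * mirroring ccw_device_verify_start() handing off to verify_start(). */
static void verify_start_sketch(struct verify_state *st)
{
	st->pgroup = st->opt_pgroup;
	st->mpath = st->opt_mpath;
	st->path_noirq_mask = 0;

	if (st->pgroup)
		snid_do_sketch(st);	/* grouped devices: sense PGIDs first */
	else
		nop_do_sketch(st);	/* otherwise: plain path probing */
}

int main(void)
{
	struct verify_state st = { .opt_pgroup = 1, .opt_mpath = 1 };

	verify_start_sketch(&st);
	return 0;
}
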
575 static void disband_callback(struct ccw_device *cdev, void *data, int rc)
577 struct subchannel *sch = to_subchannel(cdev->dev.parent);
578 struct ccw_dev_id *id = &cdev->private->dev_id;
583 cdev->private->flags.mpath = 0;
591 ccw_device_disband_done(cdev, rc);
596 * @cdev: ccw device
598 * Execute a SET PGID channel program on @cdev to disband a previously
602 void ccw_device_disband_start(struct ccw_device *cdev)
604 struct subchannel *sch = to_subchannel(cdev->dev.parent);
605 struct ccw_request *req = &cdev->private->req;
609 CIO_HEX_EVENT(4, &cdev->private->dev_id, sizeof(cdev->private->dev_id));
618 if (cdev->private->flags.mpath)
620 spid_build_cp(cdev, fn);
621 ccw_request_start(cdev);
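
ccw_device_disband_start() reuses the SET PGID channel program with the disband function code, and on success disband_callback() clears the working mpath flag before reporting completion through ccw_device_disband_done(). A minimal sketch of that sequencing, with locally defined placeholder names and assumed FN_* values:

#include <stdio.h>

#define FN_DISBAND     0x20	/* assumed SET PGID "disband" code */
#define FN_MULTI_PATH  0x80	/* assumed multipath modifier */

struct disband_state {
	int mpath;		/* working multipath flag */
};

/* Stand-in for ccw_device_disband_done(): report the final result. */
static void disband_done_sketch(struct disband_state *st, int rc)
{
	printf("disband finished, rc=%d, mpath=%d\n", rc, st->mpath);
}

/* Build and "start" the disband request, then run the success path of the
 * callback, mirroring ccw_device_disband_start() / disband_callback(). */
static void disband_start_sketch(struct disband_state *st)
{
	unsigned char fn = FN_DISBAND;

	if (st->mpath)
		fn |= FN_MULTI_PATH;
	printf("SET PGID with fn=0x%02x\n", fn);

	/* callback on success: the path group is gone, drop multipath mode */
	st->mpath = 0;
	disband_done_sketch(st, 0);
}

int main(void)
{
	struct disband_state st = { .mpath = 1 };

	disband_start_sketch(&st);
	return 0;
}
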
629 static void stlck_build_cp(struct ccw_device *cdev, void *buf1, void *buf2)
631 struct ccw_request *req = &cdev->private->req;
632 struct ccw1 *cp = cdev->private->dma_area->iccws;
645 static void stlck_callback(struct ccw_device *cdev, void *data, int rc)
655 * @cdev: ccw device
660 * Execute a channel program on @cdev to release an existing PGID reservation.
662 static void ccw_device_stlck_start(struct ccw_device *cdev, void *data,
665 struct subchannel *sch = to_subchannel(cdev->dev.parent);
666 struct ccw_request *req = &cdev->private->req;
669 CIO_HEX_EVENT(4, &cdev->private->dev_id, sizeof(cdev->private->dev_id));
677 stlck_build_cp(cdev, buf1, buf2);
678 ccw_request_start(cdev);
684 int ccw_device_stlck(struct ccw_device *cdev)
686 struct subchannel *sch = to_subchannel(cdev->dev.parent);
692 if (cdev->drv) {
693 if (!cdev->private->options.force)
706 cdev->private->state = DEV_STATE_STEAL_LOCK;
707 ccw_device_stlck_start(cdev, &data, &buffer[0], &buffer[32]);
713 ccw_request_cancel(cdev);
721 cdev->private->state = DEV_STATE_BOXED;
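
ccw_device_stlck() wraps the reservation-release (steal lock) channel program in a synchronous call: driver-bound devices are refused unless the force option is set, the device is switched into a dedicated steal-lock state for the I/O, and afterwards it is left boxed so normal recognition can take over. The sketch below imitates that guard-and-state shape with a stubbed I/O step; the state names and the -EINVAL return are assumptions for illustration.

#include <stdio.h>
#include <errno.h>

enum dev_state_sketch { DEV_BOXED, DEV_STEAL_LOCK };

struct stlck_state {
	int driver_bound;		/* a driver owns the device */
	int force;			/* "force" option set by the admin */
	enum dev_state_sketch state;
};

/* Stand-in for building the release channel program, starting it and
 * waiting for the interrupt-driven completion. */
static int stlck_io_sketch(struct stlck_state *st)
{
	printf("steal-lock channel program running in state %d\n", st->state);
	return 0;	/* pretend the reservation was released */
}

/* Mirror the guard-and-wait shape of ccw_device_stlck(): bound devices
 * need the force option; afterwards the device is left boxed so that
 * normal recognition can take over again. */
static int stlck_sketch(struct stlck_state *st)
{
	int rc;

	if (st->driver_bound && !st->force)
		return -EINVAL;	/* assumed error code for "not allowed" */

	st->state = DEV_STEAL_LOCK;
	rc = stlck_io_sketch(st);
	st->state = DEV_BOXED;
	return rc;
}

int main(void)
{
	struct stlck_state st = { .driver_bound = 1, .force = 0, .state = DEV_BOXED };

	printf("without force: rc=%d\n", stlck_sketch(&st));
	st.force = 1;
	printf("with force:    rc=%d\n", stlck_sketch(&st));
	return 0;
}
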