Lines matching defs:cdev — drivers/s390/cio/device_pgid.c (Linux s390 CCW device path-group ID handling). Each entry below is the source line number followed by the matching line.
28 static void verify_start(struct ccw_device *cdev);
33 static void verify_done(struct ccw_device *cdev, int rc)
35 struct subchannel *sch = to_subchannel(cdev->dev.parent);
36 struct ccw_dev_id *id = &cdev->private->dev_id;
37 int mpath = cdev->private->flags.mpath;
38 int pgroup = cdev->private->flags.pgroup;
51 ccw_device_verify_done(cdev, rc);
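The verify_done() fragments above reduce the outcome of path verification to a single return code: a nonzero verified-path mask (sch->vpm) means success, an empty one becomes -EACCES. As a rough, standalone illustration of the 8-bit path-mask convention used throughout this file (one bit per channel path, 0x80 being the first), here is a toy sketch; verify_result() and the sample mask are made up for the example and are not kernel code.

    #include <errno.h>
    #include <stdio.h>

    /* Toy model: one bit per channel path, bit 0x80 = path 0, bit 0x01 = path 7. */
    static int verify_result(unsigned char vpm)
    {
            /* Mirrors the "sch->vpm ? 0 : -EACCES" convention seen above. */
            return vpm ? 0 : -EACCES;
    }

    int main(void)
    {
            unsigned char vpm = 0x80 | 0x20;        /* paths 0 and 2 verified */
            int i;

            for (i = 0; i < 8; i++)
                    if (vpm & (0x80 >> i))
                            printf("path %d verified\n", i);
            printf("rc = %d\n", verify_result(vpm));
            return 0;
    }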
57 static void nop_build_cp(struct ccw_device *cdev)
59 struct ccw_request *req = &cdev->private->req;
60 struct ccw1 *cp = cdev->private->dma_area->iccws;
72 static void nop_do(struct ccw_device *cdev)
74 struct subchannel *sch = to_subchannel(cdev->dev.parent);
75 struct ccw_request *req = &cdev->private->req;
78 ~cdev->private->path_noirq_mask);
81 nop_build_cp(cdev);
82 ccw_request_start(cdev);
86 verify_done(cdev, sch->vpm ? 0 : -EACCES);
92 static enum io_status nop_filter(struct ccw_device *cdev, void *data,
104 static void nop_callback(struct ccw_device *cdev, void *data, int rc)
106 struct subchannel *sch = to_subchannel(cdev->dev.parent);
107 struct ccw_request *req = &cdev->private->req;
114 cdev->private->path_noirq_mask |= req->lpm;
117 cdev->private->path_notoper_mask |= req->lpm;
124 nop_do(cdev);
128 verify_done(cdev, rc);
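The nop_do()/nop_callback() fragments above walk the available paths one by one, issue a NO-OP on each, and sort the outcomes into verified, no-interrupt and not-operational masks. Below is a standalone sketch of that bookkeeping pattern; lpm_next() and probe_path() are simplified stand-ins (the real code drives an asynchronous ccw_request), and the canned results are invented.

    #include <stdio.h>

    /* Shift the single-bit path mask right until it overlaps the allowed mask
     * (an lpm_adjust()-style helper); returns 0 when no path is left.
     */
    static unsigned char lpm_next(unsigned char lpm, unsigned char mask)
    {
            while (lpm && !(lpm & mask))
                    lpm >>= 1;
            return lpm;
    }

    enum probe { PROBE_OK, PROBE_NO_IRQ, PROBE_NOT_OPER };

    /* Stand-in for "issue a NO-OP CCW on this path"; results are canned. */
    static enum probe probe_path(int pos)
    {
            static const enum probe result[8] = {
                    PROBE_OK, PROBE_NO_IRQ, PROBE_OK, PROBE_NOT_OPER,
                    PROBE_OK, PROBE_OK, PROBE_OK, PROBE_OK,
            };
            return result[pos];
    }

    int main(void)
    {
            unsigned char pam = 0xe8;       /* paths physically available */
            unsigned char vpm = 0, noirq = 0, notoper = 0;
            unsigned char lpm;

            for (lpm = lpm_next(0x80, pam); lpm; lpm = lpm_next(lpm >> 1, pam)) {
                    int pos = 0;            /* convert single bit to position */

                    while ((0x80 >> pos) != lpm)
                            pos++;

                    switch (probe_path(pos)) {
                    case PROBE_OK:
                            vpm |= lpm;
                            break;
                    case PROBE_NO_IRQ:
                            noirq |= lpm;   /* no interrupt from this path */
                            break;
                    case PROBE_NOT_OPER:
                            notoper |= lpm;
                            break;
                    }
            }
            printf("vpm=0x%02x noirq=0x%02x notoper=0x%02x\n", vpm, noirq, notoper);
            return 0;
    }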
134 static void spid_build_cp(struct ccw_device *cdev, u8 fn)
136 struct ccw_request *req = &cdev->private->req;
137 struct ccw1 *cp = cdev->private->dma_area->iccws;
139 struct pgid *pgid = &cdev->private->dma_area->pgid[i];
149 static void pgid_wipeout_callback(struct ccw_device *cdev, void *data, int rc)
153 verify_done(cdev, rc);
160 cdev->private->flags.pgid_unknown = 0;
161 verify_start(cdev);
167 static void pgid_wipeout_start(struct ccw_device *cdev)
169 struct subchannel *sch = to_subchannel(cdev->dev.parent);
170 struct ccw_dev_id *id = &cdev->private->dev_id;
171 struct ccw_request *req = &cdev->private->req;
175 id->ssid, id->devno, cdev->private->pgid_valid_mask,
176 cdev->private->path_noirq_mask);
185 if (cdev->private->flags.mpath)
187 spid_build_cp(cdev, fn);
188 ccw_request_start(cdev);
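spid_build_cp() and pgid_wipeout_start() above prepare a single SET PGID CCW whose data is a pgid block carrying a function byte, with a multi-path bit OR'ed in when flags.mpath is set. The sketch below models that channel-program layout with simplified types; the structs only loosely follow ccw1/pgid, and the opcode and function-code values are placeholders rather than the architected ones.

    #include <stdint.h>
    #include <string.h>

    /* Simplified channel-command word: opcode, flags, byte count, data address. */
    struct toy_ccw1 {
            uint8_t  cmd_code;
            uint8_t  flags;
            uint16_t count;
            uint32_t cda;
    };

    /* Simplified path-group ID block: function byte followed by the identifier. */
    struct toy_pgid {
            uint8_t fc;
            uint8_t id[10];
    };

    #define FN_ESTABLISH  0x00      /* placeholder values */
    #define FN_DISBAND    0x20
    #define FN_MULTI_PATH 0x80
    #define OP_SET_PGID   0x5b      /* placeholder opcode */

    static void build_spid(struct toy_ccw1 *cp, struct toy_pgid *pgid,
                           uint8_t fn, int mpath)
    {
            if (mpath)                      /* mirrors the flags.mpath test above */
                    fn |= FN_MULTI_PATH;
            pgid->fc = fn;                  /* function byte travels in the data */

            memset(cp, 0, sizeof(*cp));
            cp->cmd_code = OP_SET_PGID;
            cp->count = sizeof(*pgid);
            cp->cda = (uint32_t)(uintptr_t)pgid;    /* 31-bit address in real CCWs */
    }

    int main(void)
    {
            struct toy_ccw1 cp;
            struct toy_pgid pgid = { 0 };

            build_spid(&cp, &pgid, FN_DISBAND, 1);  /* wipeout-style request */
            return cp.count == sizeof(pgid) ? 0 : 1;
    }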
194 static void spid_do(struct ccw_device *cdev)
196 struct subchannel *sch = to_subchannel(cdev->dev.parent);
197 struct ccw_request *req = &cdev->private->req;
201 req->lpm = lpm_adjust(req->lpm, cdev->private->pgid_todo_mask);
209 if (cdev->private->flags.mpath)
211 spid_build_cp(cdev, fn);
212 ccw_request_start(cdev);
216 if (cdev->private->flags.pgid_unknown) {
218 pgid_wipeout_start(cdev);
221 verify_done(cdev, sch->vpm ? 0 : -EACCES);
227 static void spid_callback(struct ccw_device *cdev, void *data, int rc)
229 struct subchannel *sch = to_subchannel(cdev->dev.parent);
230 struct ccw_request *req = &cdev->private->req;
237 cdev->private->flags.pgid_unknown = 1;
238 cdev->private->path_noirq_mask |= req->lpm;
241 cdev->private->path_notoper_mask |= req->lpm;
244 if (cdev->private->flags.mpath) {
246 cdev->private->flags.mpath = 0;
250 cdev->private->flags.pgroup = 0;
256 spid_do(cdev);
260 verify_start(cdev);
263 verify_done(cdev, rc);
266 static void spid_start(struct ccw_device *cdev)
268 struct ccw_request *req = &cdev->private->req;
277 spid_do(cdev);
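spid_callback() above degrades gracefully when SET PGID is rejected: drop the multi-path attempt first, then give up on pathgrouping altogether, and restart verification with the relaxed flags. A condensed standalone sketch of that decision ladder (names and return codes are simplified stand-ins for the asynchronous callback flow):

    #include <stdio.h>

    struct toy_flags {
            int mpath;      /* want a multi-path group */
            int pgroup;     /* want pathgrouping at all */
    };

    /* After a rejected SET PGID, relax one capability and re-run verification:
     * first drop multi-path, then drop pathgrouping altogether (simplified).
     */
    static int on_spid_reject(struct toy_flags *f)
    {
            if (f->mpath)
                    f->mpath = 0;
            else if (f->pgroup)
                    f->pgroup = 0;
            else
                    return -1;      /* nothing left to relax */
            return 0;               /* caller restarts verification */
    }

    int main(void)
    {
            struct toy_flags f = { .mpath = 1, .pgroup = 1 };

            while (on_spid_reject(&f) == 0)
                    printf("restart verification: mpath=%d pgroup=%d\n",
                           f.mpath, f.pgroup);
            return 0;
    }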
300 static void pgid_analyze(struct ccw_device *cdev, struct pgid **p,
303 struct pgid *pgid = &cdev->private->dma_area->pgid[0];
312 if ((cdev->private->pgid_valid_mask & lpm) == 0)
332 static u8 pgid_to_donepm(struct ccw_device *cdev)
334 struct subchannel *sch = to_subchannel(cdev->dev.parent);
343 if ((cdev->private->pgid_valid_mask & lpm) == 0)
345 pgid = &cdev->private->dma_area->pgid[i];
353 if (cdev->private->flags.mpath) {
366 static void pgid_fill(struct ccw_device *cdev, struct pgid *pgid)
371 memcpy(&cdev->private->dma_area->pgid[i], pgid,
378 static void snid_done(struct ccw_device *cdev, int rc)
380 struct ccw_dev_id *id = &cdev->private->dev_id;
381 struct subchannel *sch = to_subchannel(cdev->dev.parent);
390 pgid_analyze(cdev, &pgid, &mismatch, &reserved, &reset);
391 if (reserved == cdev->private->pgid_valid_mask)
396 donepm = pgid_to_donepm(cdev);
398 cdev->private->pgid_reset_mask |= reset;
399 cdev->private->pgid_todo_mask &=
400 ~(donepm | cdev->private->path_noirq_mask);
401 pgid_fill(cdev, pgid);
406 id->devno, rc, cdev->private->pgid_valid_mask, sch->vpm,
407 cdev->private->pgid_todo_mask, mismatch, reserved, reset);
410 if (cdev->private->flags.pgid_unknown) {
411 pgid_wipeout_start(cdev);
415 if (cdev->private->pgid_todo_mask == 0) {
416 verify_done(cdev, sch->vpm == 0 ? -EACCES : 0);
420 spid_start(cdev);
424 cdev->private->flags.pgroup = 0;
425 cdev->private->flags.mpath = 0;
426 verify_start(cdev);
429 verify_done(cdev, rc);
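snid_done() above condenses the sensed PGIDs into masks: donepm covers paths whose PGID already matches, and pgid_todo_mask is then trimmed by both donepm and the paths that never answered (path_noirq_mask). A standalone sketch of that mask arithmetic with invented sample values:

    #include <stdio.h>

    int main(void)
    {
            unsigned char pam = 0xf0;       /* paths to bring into the group    */
            unsigned char donepm = 0x90;    /* PGID already correct here        */
            unsigned char noirq = 0x20;     /* no answer, cannot be set up now  */
            unsigned char todo = pam;

            /* Mirrors: pgid_todo_mask &= ~(donepm | path_noirq_mask); */
            todo &= (unsigned char)~(donepm | noirq);

            if (todo == 0)
                    printf("verification complete\n");
            else
                    printf("still need SET PGID on mask 0x%02x\n", todo);
            return 0;
    }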
436 static void snid_build_cp(struct ccw_device *cdev)
438 struct ccw_request *req = &cdev->private->req;
439 struct ccw1 *cp = cdev->private->dma_area->iccws;
444 cp->cda = (u32) (addr_t) &cdev->private->dma_area->pgid[i];
453 static void snid_do(struct ccw_device *cdev)
455 struct subchannel *sch = to_subchannel(cdev->dev.parent);
456 struct ccw_request *req = &cdev->private->req;
460 ~cdev->private->path_noirq_mask);
463 snid_build_cp(cdev);
464 ccw_request_start(cdev);
468 if (cdev->private->pgid_valid_mask)
470 else if (cdev->private->path_noirq_mask)
474 snid_done(cdev, ret);
480 static void snid_callback(struct ccw_device *cdev, void *data, int rc)
482 struct ccw_request *req = &cdev->private->req;
486 cdev->private->pgid_valid_mask |= req->lpm;
489 cdev->private->flags.pgid_unknown = 1;
490 cdev->private->path_noirq_mask |= req->lpm;
493 cdev->private->path_notoper_mask |= req->lpm;
500 snid_do(cdev);
504 snid_done(cdev, rc);
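snid_build_cp() above points each SENSE PGID request at dma_area->pgid[i], so every path gets its own result slot for pgid_analyze() to compare later. Below is a small standalone sketch of the single-bit-mask-to-slot-index conversion that makes this work (a pathmask_to_pos-style helper; the name and the 8-slot array here are illustrative):

    #include <stdio.h>

    /* Map a single-bit path mask (0x80..0x01) to an array index (0..7). */
    static int pathmask_to_pos(unsigned char lpm)
    {
            int pos = 0;

            while (lpm && (0x80 >> pos) != lpm)
                    pos++;
            return pos;
    }

    int main(void)
    {
            unsigned char sensed[8][11] = { { 0 } };   /* one PGID slot per path */
            unsigned char lpm = 0x20;                  /* currently probed path  */

            /* The sensed data for this path lands in its own slot. */
            sensed[pathmask_to_pos(lpm)][0] = 0xff;    /* fake payload byte */
            printf("stored result in slot %d\n", pathmask_to_pos(lpm));
            return 0;
    }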
510 static void verify_start(struct ccw_device *cdev)
512 struct subchannel *sch = to_subchannel(cdev->dev.parent);
513 struct ccw_request *req = &cdev->private->req;
514 struct ccw_dev_id *devid = &cdev->private->dev_id;
520 memset(cdev->private->dma_area->pgid, 0,
521 sizeof(cdev->private->dma_area->pgid));
522 cdev->private->pgid_valid_mask = 0;
523 cdev->private->pgid_todo_mask = sch->schib.pmcw.pam;
524 cdev->private->path_notoper_mask = 0;
532 if (cdev->private->flags.pgroup) {
536 snid_do(cdev);
542 nop_do(cdev);
548 * @cdev: ccw device
550 * Perform an I/O on each available channel path to @cdev to determine which
556 void ccw_device_verify_start(struct ccw_device *cdev)
559 CIO_HEX_EVENT(4, &cdev->private->dev_id, sizeof(cdev->private->dev_id));
564 cdev->private->flags.pgroup = cdev->private->options.pgroup;
565 cdev->private->flags.mpath = cdev->private->options.mpath;
566 cdev->private->flags.doverify = 0;
567 cdev->private->path_noirq_mask = 0;
568 verify_start(cdev);
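verify_start()/ccw_device_verify_start() above reset the bookkeeping (clear the stored PGIDs, seed the to-do mask from the physical availability mask, clear the valid and not-operational masks) and then start either the SNID flow or the plain NOP flow depending on the pgroup option. A condensed sketch of that reset-and-dispatch step; the structure layout is invented for illustration:

    #include <string.h>
    #include <stdio.h>

    struct toy_state {
            unsigned char pgid[8][11];      /* per-path sensed PGIDs            */
            unsigned char pgid_valid_mask;
            unsigned char pgid_todo_mask;
            unsigned char path_notoper_mask;
            int pgroup;                     /* pathgrouping requested?          */
    };

    static void start_snid(struct toy_state *s)
    {
            printf("SNID on todo mask 0x%02x\n", s->pgid_todo_mask);
    }

    static void start_nop(struct toy_state *s)
    {
            printf("NOP on todo mask 0x%02x\n", s->pgid_todo_mask);
    }

    /* Reset verification state from the available-path mask, then dispatch. */
    static void verify_begin(struct toy_state *s, unsigned char pam)
    {
            memset(s->pgid, 0, sizeof(s->pgid));
            s->pgid_valid_mask = 0;
            s->pgid_todo_mask = pam;        /* every available path is pending  */
            s->path_notoper_mask = 0;
            if (s->pgroup)
                    start_snid(s);          /* sense existing path-group IDs    */
            else
                    start_nop(s);           /* just probe basic path operability */
    }

    int main(void)
    {
            struct toy_state s = { .pgroup = 1 };

            verify_begin(&s, 0xc0);
            return 0;
    }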
574 static void disband_callback(struct ccw_device *cdev, void *data, int rc)
576 struct subchannel *sch = to_subchannel(cdev->dev.parent);
577 struct ccw_dev_id *id = &cdev->private->dev_id;
582 cdev->private->flags.mpath = 0;
590 ccw_device_disband_done(cdev, rc);
595 * @cdev: ccw device
597 * Execute a SET PGID channel program on @cdev to disband a previously
601 void ccw_device_disband_start(struct ccw_device *cdev)
603 struct subchannel *sch = to_subchannel(cdev->dev.parent);
604 struct ccw_request *req = &cdev->private->req;
608 CIO_HEX_EVENT(4, &cdev->private->dev_id, sizeof(cdev->private->dev_id));
617 if (cdev->private->flags.mpath)
619 spid_build_cp(cdev, fn);
620 ccw_request_start(cdev);
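ccw_device_disband_start()/disband_callback() above reuse the SET PGID program with a disband function code, again OR'ing in the multi-path bit when flags.mpath is set, and clear mpath once the group is dissolved. A minimal sketch of that flag handling, with the same placeholder function-code values as the SET PGID sketch above:

    #include <stdio.h>

    #define FN_DISBAND    0x20      /* placeholder values */
    #define FN_MULTI_PATH 0x80

    struct toy_flags { int mpath; };

    static unsigned char disband_fn(const struct toy_flags *f)
    {
            return f->mpath ? (FN_DISBAND | FN_MULTI_PATH) : FN_DISBAND;
    }

    int main(void)
    {
            struct toy_flags f = { .mpath = 1 };

            printf("SET PGID fn=0x%02x\n", disband_fn(&f));
            f.mpath = 0;    /* disband done: group no longer multi-path */
            return 0;
    }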
628 static void stlck_build_cp(struct ccw_device *cdev, void *buf1, void *buf2)
630 struct ccw_request *req = &cdev->private->req;
631 struct ccw1 *cp = cdev->private->dma_area->iccws;
644 static void stlck_callback(struct ccw_device *cdev, void *data, int rc)
654 * @cdev: ccw device
659 * Execute a channel program on @cdev to release an existing PGID reservation.
661 static void ccw_device_stlck_start(struct ccw_device *cdev, void *data,
664 struct subchannel *sch = to_subchannel(cdev->dev.parent);
665 struct ccw_request *req = &cdev->private->req;
668 CIO_HEX_EVENT(4, &cdev->private->dev_id, sizeof(cdev->private->dev_id));
676 stlck_build_cp(cdev, buf1, buf2);
677 ccw_request_start(cdev);
683 int ccw_device_stlck(struct ccw_device *cdev)
685 struct subchannel *sch = to_subchannel(cdev->dev.parent);
691 if (cdev->drv) {
692 if (!cdev->private->options.force)
705 cdev->private->state = DEV_STATE_STEAL_LOCK;
706 ccw_device_stlck_start(cdev, &data, &buffer[0], &buffer[32]);
712 ccw_request_cancel(cdev);
720 cdev->private->state = DEV_STATE_BOXED;
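ccw_device_stlck() above only runs the "steal lock" (unconditional reserve release) channel program against a device with a bound driver if the force option was set, switches the device into a steal-lock state while the request is in flight, and cancels the request if interrupted. A standalone sketch of that guard and state handling; the state names, helpers and return codes here are simplified stand-ins:

    #include <errno.h>
    #include <stdio.h>

    enum toy_state { STATE_ONLINE, STATE_STEAL_LOCK, STATE_BOXED };

    struct toy_dev {
            int has_driver;         /* a driver is bound to the device          */
            int force;              /* user allowed breaking the reservation    */
            enum toy_state state;
    };

    /* Guard + state handling around a steal-lock request (heavily simplified). */
    static int steal_lock(struct toy_dev *dev)
    {
            if (dev->has_driver && !dev->force)
                    return -EINVAL;         /* refuse without the force option  */

            dev->state = STATE_STEAL_LOCK;  /* request in flight */
            /* ... build and start the unlock channel program, wait for it,
             *     and cancel the request if the wait is interrupted ...
             */
            dev->state = STATE_ONLINE;      /* toy outcome: reservation released */
            return 0;
    }

    int main(void)
    {
            struct toy_dev dev = { .has_driver = 1, .force = 0 };

            printf("rc=%d\n", steal_lock(&dev));    /* refused: force not set */
            dev.force = 1;
            printf("rc=%d\n", steal_lock(&dev));    /* 0 */
            return 0;
    }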