Lines Matching refs:cdev
(References to struct ccw_device *cdev in the s390 common I/O device code, one entry per source line: line number, line content, and the enclosing function; "local" and "argument" mark whether cdev is declared as a local variable or as a parameter on that line.)
63 struct ccw_device *cdev = to_ccwdev(dev); in ccw_bus_match() local
70 found = ccw_device_id_match(ids, &cdev->id); in ccw_bus_match()
74 cdev->id.driver_info = found->driver_info; in ccw_bus_match()
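The ccw_bus_match() lines above compare a driver's ccw_device_id table against cdev->id and copy the matching entry's driver_info into the device. A minimal sketch of such a table, assuming made-up example device types; CCW_DEVICE_DEVTYPE() and MODULE_DEVICE_TABLE(ccw, ...) are the usual way to declare it:

    /* Illustrative ccw_device_id table of the kind ccw_bus_match()
     * matches against cdev->id; the 3990/3390 control-unit and device
     * types are example values only. */
    #include <linux/mod_devicetable.h>
    #include <linux/module.h>
    #include <asm/ccwdev.h>

    static struct ccw_device_id sample_ids[] = {
            { CCW_DEVICE_DEVTYPE(0x3990, 0, 0x3390, 0), .driver_info = 0x1 },
            { /* end of list */ },
    };
    MODULE_DEVICE_TABLE(ccw, sample_ids);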
106 const struct ccw_device *cdev = to_ccwdev(dev); in ccw_uevent() local
107 const struct ccw_device_id *id = &(cdev->id); in ccw_uevent()
200 struct ccw_device *cdev = to_ccwdev(dev); in devtype_show() local
201 struct ccw_device_id *id = &(cdev->id); in devtype_show()
213 struct ccw_device *cdev = to_ccwdev(dev); in cutype_show() local
214 struct ccw_device_id *id = &(cdev->id); in cutype_show()
223 struct ccw_device *cdev = to_ccwdev(dev); in modalias_show() local
224 struct ccw_device_id *id = &(cdev->id); in modalias_show()
235 struct ccw_device *cdev = to_ccwdev(dev); in online_show() local
237 return sprintf(buf, cdev->online ? "1\n" : "0\n"); in online_show()
240 int ccw_device_is_orphan(struct ccw_device *cdev) in ccw_device_is_orphan() argument
242 return sch_is_pseudo_sch(to_subchannel(cdev->dev.parent)); in ccw_device_is_orphan()
245 static void ccw_device_unregister(struct ccw_device *cdev) in ccw_device_unregister() argument
247 mutex_lock(&cdev->reg_mutex); in ccw_device_unregister()
248 if (device_is_registered(&cdev->dev)) { in ccw_device_unregister()
250 device_del(&cdev->dev); in ccw_device_unregister()
252 mutex_unlock(&cdev->reg_mutex); in ccw_device_unregister()
254 if (cdev->private->flags.initialized) { in ccw_device_unregister()
255 cdev->private->flags.initialized = 0; in ccw_device_unregister()
257 put_device(&cdev->dev); in ccw_device_unregister()
274 int ccw_device_set_offline(struct ccw_device *cdev) in ccw_device_set_offline() argument
279 if (!cdev) in ccw_device_set_offline()
281 if (!cdev->online || !cdev->drv) in ccw_device_set_offline()
284 if (cdev->drv->set_offline) { in ccw_device_set_offline()
285 ret = cdev->drv->set_offline(cdev); in ccw_device_set_offline()
289 spin_lock_irq(cdev->ccwlock); in ccw_device_set_offline()
290 sch = to_subchannel(cdev->dev.parent); in ccw_device_set_offline()
291 cdev->online = 0; in ccw_device_set_offline()
293 while (!dev_fsm_final_state(cdev) && in ccw_device_set_offline()
294 cdev->private->state != DEV_STATE_DISCONNECTED) { in ccw_device_set_offline()
295 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_offline()
296 wait_event(cdev->private->wait_q, (dev_fsm_final_state(cdev) || in ccw_device_set_offline()
297 cdev->private->state == DEV_STATE_DISCONNECTED)); in ccw_device_set_offline()
298 spin_lock_irq(cdev->ccwlock); in ccw_device_set_offline()
301 ret = ccw_device_offline(cdev); in ccw_device_set_offline()
305 "0.%x.%04x\n", ret, cdev->private->dev_id.ssid, in ccw_device_set_offline()
306 cdev->private->dev_id.devno); in ccw_device_set_offline()
309 state = cdev->private->state; in ccw_device_set_offline()
310 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_offline()
312 spin_lock_irq(cdev->ccwlock); in ccw_device_set_offline()
313 cdev->private->state = state; in ccw_device_set_offline()
315 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_offline()
316 wait_event(cdev->private->wait_q, (dev_fsm_final_state(cdev) || in ccw_device_set_offline()
317 cdev->private->state == DEV_STATE_DISCONNECTED)); in ccw_device_set_offline()
319 if (cdev->private->state == DEV_STATE_BOXED) { in ccw_device_set_offline()
321 dev_name(&cdev->dev)); in ccw_device_set_offline()
322 } else if (cdev->private->state == DEV_STATE_NOT_OPER) { in ccw_device_set_offline()
324 dev_name(&cdev->dev)); in ccw_device_set_offline()
327 put_device(&cdev->dev); in ccw_device_set_offline()
331 cdev->private->state = DEV_STATE_OFFLINE; in ccw_device_set_offline()
332 dev_fsm_event(cdev, DEV_EVENT_NOTOPER); in ccw_device_set_offline()
333 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_offline()
335 put_device(&cdev->dev); in ccw_device_set_offline()
351 int ccw_device_set_online(struct ccw_device *cdev) in ccw_device_set_online() argument
356 if (!cdev) in ccw_device_set_online()
358 if (cdev->online || !cdev->drv) in ccw_device_set_online()
361 if (!get_device(&cdev->dev)) in ccw_device_set_online()
364 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
365 ret = ccw_device_online(cdev); in ccw_device_set_online()
367 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
370 ret, cdev->private->dev_id.ssid, in ccw_device_set_online()
371 cdev->private->dev_id.devno); in ccw_device_set_online()
373 put_device(&cdev->dev); in ccw_device_set_online()
377 while (!dev_fsm_final_state(cdev)) { in ccw_device_set_online()
378 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
379 wait_event(cdev->private->wait_q, dev_fsm_final_state(cdev)); in ccw_device_set_online()
380 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
383 if ((cdev->private->state != DEV_STATE_ONLINE) && in ccw_device_set_online()
384 (cdev->private->state != DEV_STATE_W4SENSE)) { in ccw_device_set_online()
385 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
387 if (cdev->private->state == DEV_STATE_BOXED) { in ccw_device_set_online()
389 dev_name(&cdev->dev)); in ccw_device_set_online()
390 } else if (cdev->private->state == DEV_STATE_NOT_OPER) { in ccw_device_set_online()
392 dev_name(&cdev->dev)); in ccw_device_set_online()
395 put_device(&cdev->dev); in ccw_device_set_online()
398 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
399 if (cdev->drv->set_online) in ccw_device_set_online()
400 ret = cdev->drv->set_online(cdev); in ccw_device_set_online()
404 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
405 cdev->online = 1; in ccw_device_set_online()
406 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
410 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
412 while (!dev_fsm_final_state(cdev) && in ccw_device_set_online()
413 cdev->private->state != DEV_STATE_DISCONNECTED) { in ccw_device_set_online()
414 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
415 wait_event(cdev->private->wait_q, (dev_fsm_final_state(cdev) || in ccw_device_set_online()
416 cdev->private->state == DEV_STATE_DISCONNECTED)); in ccw_device_set_online()
417 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
419 ret2 = ccw_device_offline(cdev); in ccw_device_set_online()
422 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
423 wait_event(cdev->private->wait_q, (dev_fsm_final_state(cdev) || in ccw_device_set_online()
424 cdev->private->state == DEV_STATE_DISCONNECTED)); in ccw_device_set_online()
426 put_device(&cdev->dev); in ccw_device_set_online()
432 ret2, cdev->private->dev_id.ssid, in ccw_device_set_online()
433 cdev->private->dev_id.devno); in ccw_device_set_online()
434 cdev->private->state = DEV_STATE_OFFLINE; in ccw_device_set_online()
435 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
437 put_device(&cdev->dev); in ccw_device_set_online()
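ccw_device_set_offline() and ccw_device_set_online() first call the driver's set_offline()/set_online() callbacks (when present) and then drive the device FSM under cdev->ccwlock, waiting on cdev->private->wait_q until a final or DISCONNECTED state is reached; a non-zero return from the callback aborts the transition. In practice these paths are entered when userspace writes the device's sysfs "online" attribute (online_store() below). A minimal sketch of the driver side, with hypothetical "sample" names and the includes from the earlier sketch:

    /* Hypothetical callbacks invoked by ccw_device_set_online() and
     * ccw_device_set_offline(); they are hooked into a struct ccw_driver
     * (see the sketch after ccw_device_shutdown() further below). */
    static int sample_set_online(struct ccw_device *cdev)
    {
            dev_info(&cdev->dev, "device going online\n");
            /* enable the device, set up per-device state, start I/O ... */
            return 0;       /* non-zero makes ccw_device_set_online() fail */
    }

    static int sample_set_offline(struct ccw_device *cdev)
    {
            /* quiesce outstanding I/O before the common code drives the
             * FSM to the offline state */
            return 0;
    }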
441 static int online_store_handle_offline(struct ccw_device *cdev) in online_store_handle_offline() argument
443 if (cdev->private->state == DEV_STATE_DISCONNECTED) { in online_store_handle_offline()
444 spin_lock_irq(cdev->ccwlock); in online_store_handle_offline()
445 ccw_device_sched_todo(cdev, CDEV_TODO_UNREG_EVAL); in online_store_handle_offline()
446 spin_unlock_irq(cdev->ccwlock); in online_store_handle_offline()
449 if (cdev->drv && cdev->drv->set_offline) in online_store_handle_offline()
450 return ccw_device_set_offline(cdev); in online_store_handle_offline()
454 static int online_store_recog_and_online(struct ccw_device *cdev) in online_store_recog_and_online() argument
457 if (cdev->private->state == DEV_STATE_BOXED) { in online_store_recog_and_online()
458 spin_lock_irq(cdev->ccwlock); in online_store_recog_and_online()
459 ccw_device_recognition(cdev); in online_store_recog_and_online()
460 spin_unlock_irq(cdev->ccwlock); in online_store_recog_and_online()
461 wait_event(cdev->private->wait_q, in online_store_recog_and_online()
462 cdev->private->flags.recog_done); in online_store_recog_and_online()
463 if (cdev->private->state != DEV_STATE_OFFLINE) in online_store_recog_and_online()
467 if (cdev->drv && cdev->drv->set_online) in online_store_recog_and_online()
468 return ccw_device_set_online(cdev); in online_store_recog_and_online()
472 static int online_store_handle_online(struct ccw_device *cdev, int force) in online_store_handle_online() argument
476 ret = online_store_recog_and_online(cdev); in online_store_handle_online()
479 if (force && cdev->private->state == DEV_STATE_BOXED) { in online_store_handle_online()
480 ret = ccw_device_stlck(cdev); in online_store_handle_online()
483 if (cdev->id.cu_type == 0) in online_store_handle_online()
484 cdev->private->state = DEV_STATE_NOT_OPER; in online_store_handle_online()
485 ret = online_store_recog_and_online(cdev); in online_store_handle_online()
495 struct ccw_device *cdev = to_ccwdev(dev); in online_store() local
500 if (atomic_cmpxchg(&cdev->private->onoff, 0, 1) != 0) in online_store()
503 if (!dev_fsm_final_state(cdev) && in online_store()
504 cdev->private->state != DEV_STATE_DISCONNECTED) { in online_store()
509 if (work_pending(&cdev->private->todo_work)) { in online_store()
527 ret = online_store_handle_offline(cdev); in online_store()
530 ret = online_store_handle_online(cdev, force); in online_store()
538 atomic_set(&cdev->private->onoff, 0); in online_store()
545 struct ccw_device *cdev = to_ccwdev(dev); in available_show() local
548 if (ccw_device_is_orphan(cdev)) in available_show()
550 switch (cdev->private->state) { in available_show()
632 struct ccw_device *cdev = to_ccwdev(dev); in match_dev_id() local
635 return ccw_dev_id_is_equal(&cdev->private->dev_id, dev_id); in match_dev_id()
658 static void ccw_device_do_unbind_bind(struct ccw_device *cdev) in ccw_device_do_unbind_bind() argument
662 mutex_lock(&cdev->reg_mutex); in ccw_device_do_unbind_bind()
663 if (device_is_registered(&cdev->dev)) { in ccw_device_do_unbind_bind()
664 device_release_driver(&cdev->dev); in ccw_device_do_unbind_bind()
665 ret = device_attach(&cdev->dev); in ccw_device_do_unbind_bind()
668 mutex_unlock(&cdev->reg_mutex); in ccw_device_do_unbind_bind()
674 struct ccw_device *cdev; in ccw_device_release() local
676 cdev = to_ccwdev(dev); in ccw_device_release()
677 cio_gp_dma_free(cdev->private->dma_pool, cdev->private->dma_area, in ccw_device_release()
678 sizeof(*cdev->private->dma_area)); in ccw_device_release()
679 cio_gp_dma_destroy(cdev->private->dma_pool, &cdev->dev); in ccw_device_release()
681 put_device(cdev->dev.parent); in ccw_device_release()
682 kfree(cdev->private); in ccw_device_release()
683 kfree(cdev); in ccw_device_release()
688 struct ccw_device *cdev; in io_subchannel_allocate_dev() local
692 cdev = kzalloc(sizeof(*cdev), GFP_KERNEL); in io_subchannel_allocate_dev()
693 if (!cdev) { in io_subchannel_allocate_dev()
697 cdev->private = kzalloc(sizeof(struct ccw_device_private), in io_subchannel_allocate_dev()
699 if (!cdev->private) { in io_subchannel_allocate_dev()
704 cdev->dev.dma_mask = sch->dev.dma_mask; in io_subchannel_allocate_dev()
705 ret = dma_set_coherent_mask(&cdev->dev, sch->dev.coherent_dma_mask); in io_subchannel_allocate_dev()
709 dma_pool = cio_gp_dma_create(&cdev->dev, 1); in io_subchannel_allocate_dev()
714 cdev->private->dma_pool = dma_pool; in io_subchannel_allocate_dev()
715 cdev->private->dma_area = cio_gp_dma_zalloc(dma_pool, &cdev->dev, in io_subchannel_allocate_dev()
716 sizeof(*cdev->private->dma_area)); in io_subchannel_allocate_dev()
717 if (!cdev->private->dma_area) { in io_subchannel_allocate_dev()
721 return cdev; in io_subchannel_allocate_dev()
723 cio_gp_dma_destroy(dma_pool, &cdev->dev); in io_subchannel_allocate_dev()
726 kfree(cdev->private); in io_subchannel_allocate_dev()
728 kfree(cdev); in io_subchannel_allocate_dev()
736 struct ccw_device *cdev) in io_subchannel_initialize_dev() argument
738 struct ccw_device_private *priv = cdev->private; in io_subchannel_initialize_dev()
741 priv->cdev = cdev; in io_subchannel_initialize_dev()
751 mutex_init(&cdev->reg_mutex); in io_subchannel_initialize_dev()
754 cdev->ccwlock = &sch->lock; in io_subchannel_initialize_dev()
755 cdev->dev.parent = &sch->dev; in io_subchannel_initialize_dev()
756 cdev->dev.release = ccw_device_release; in io_subchannel_initialize_dev()
757 cdev->dev.bus = &ccw_bus_type; in io_subchannel_initialize_dev()
758 cdev->dev.groups = ccwdev_attr_groups; in io_subchannel_initialize_dev()
760 device_initialize(&cdev->dev); in io_subchannel_initialize_dev()
761 ret = dev_set_name(&cdev->dev, "0.%x.%04x", cdev->private->dev_id.ssid, in io_subchannel_initialize_dev()
762 cdev->private->dev_id.devno); in io_subchannel_initialize_dev()
771 sch_set_cdev(sch, cdev); in io_subchannel_initialize_dev()
777 put_device(&cdev->dev); in io_subchannel_initialize_dev()
783 struct ccw_device *cdev; in io_subchannel_create_ccwdev() local
786 cdev = io_subchannel_allocate_dev(sch); in io_subchannel_create_ccwdev()
787 if (!IS_ERR(cdev)) { in io_subchannel_create_ccwdev()
788 ret = io_subchannel_initialize_dev(sch, cdev); in io_subchannel_create_ccwdev()
790 cdev = ERR_PTR(ret); in io_subchannel_create_ccwdev()
792 return cdev; in io_subchannel_create_ccwdev()
799 struct ccw_device *cdev; in sch_create_and_recog_new_device() local
802 cdev = io_subchannel_create_ccwdev(sch); in sch_create_and_recog_new_device()
803 if (IS_ERR(cdev)) { in sch_create_and_recog_new_device()
809 io_subchannel_recog(cdev, sch); in sch_create_and_recog_new_device()
815 static void io_subchannel_register(struct ccw_device *cdev) in io_subchannel_register() argument
821 sch = to_subchannel(cdev->dev.parent); in io_subchannel_register()
837 mutex_lock(&cdev->reg_mutex); in io_subchannel_register()
838 if (device_is_registered(&cdev->dev)) { in io_subchannel_register()
839 if (!cdev->drv) { in io_subchannel_register()
840 ret = device_reprobe(&cdev->dev); in io_subchannel_register()
845 cdev->private->dev_id.ssid, in io_subchannel_register()
846 cdev->private->dev_id.devno); in io_subchannel_register()
852 ret = device_add(&cdev->dev); in io_subchannel_register()
855 cdev->private->dev_id.ssid, in io_subchannel_register()
856 cdev->private->dev_id.devno, ret); in io_subchannel_register()
860 mutex_unlock(&cdev->reg_mutex); in io_subchannel_register()
862 put_device(&cdev->dev); in io_subchannel_register()
866 cdev->private->flags.recog_done = 1; in io_subchannel_register()
867 mutex_unlock(&cdev->reg_mutex); in io_subchannel_register()
868 wake_up(&cdev->private->wait_q); in io_subchannel_register()
878 io_subchannel_recog_done(struct ccw_device *cdev) in io_subchannel_recog_done() argument
881 cdev->private->flags.recog_done = 1; in io_subchannel_recog_done()
884 switch (cdev->private->state) { in io_subchannel_recog_done()
888 cdev->private->flags.recog_done = 1; in io_subchannel_recog_done()
890 ccw_device_sched_todo(cdev, CDEV_TODO_UNREG); in io_subchannel_recog_done()
899 ccw_device_sched_todo(cdev, CDEV_TODO_REGISTER); in io_subchannel_recog_done()
904 static void io_subchannel_recog(struct ccw_device *cdev, struct subchannel *sch) in io_subchannel_recog() argument
911 ccw_device_recognition(cdev); in io_subchannel_recog()
915 static int ccw_device_move_to_sch(struct ccw_device *cdev, in ccw_device_move_to_sch() argument
921 old_sch = to_subchannel(cdev->dev.parent); in ccw_device_move_to_sch()
941 rc = device_move(&cdev->dev, &sch->dev, DPM_ORDER_PARENT_BEFORE_DEV); in ccw_device_move_to_sch()
945 cdev->private->dev_id.ssid, in ccw_device_move_to_sch()
946 cdev->private->dev_id.devno, sch->schid.ssid, in ccw_device_move_to_sch()
969 cdev->ccwlock = &sch->lock; in ccw_device_move_to_sch()
971 sch_set_cdev(sch, cdev); in ccw_device_move_to_sch()
978 static int ccw_device_move_to_orph(struct ccw_device *cdev) in ccw_device_move_to_orph() argument
980 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_move_to_orph()
983 return ccw_device_move_to_sch(cdev, css->pseudo_subchannel); in ccw_device_move_to_orph()
988 struct ccw_device *cdev; in io_subchannel_irq() local
990 cdev = sch_get_cdev(sch); in io_subchannel_irq()
994 if (cdev) in io_subchannel_irq()
995 dev_fsm_event(cdev, DEV_EVENT_INTERRUPT); in io_subchannel_irq()
1031 struct ccw_device *cdev; in io_subchannel_probe() local
1046 cdev = sch_get_cdev(sch); in io_subchannel_probe()
1047 rc = device_add(&cdev->dev); in io_subchannel_probe()
1050 put_device(&cdev->dev); in io_subchannel_probe()
1092 struct ccw_device *cdev; in io_subchannel_remove() local
1094 cdev = sch_get_cdev(sch); in io_subchannel_remove()
1095 if (!cdev) in io_subchannel_remove()
1098 ccw_device_unregister(cdev); in io_subchannel_remove()
1112 struct ccw_device *cdev; in io_subchannel_verify() local
1114 cdev = sch_get_cdev(sch); in io_subchannel_verify()
1115 if (cdev) in io_subchannel_verify()
1116 dev_fsm_event(cdev, DEV_EVENT_VERIFY); in io_subchannel_verify()
1123 struct ccw_device *cdev; in io_subchannel_terminate_path() local
1125 cdev = sch_get_cdev(sch); in io_subchannel_terminate_path()
1126 if (!cdev) in io_subchannel_terminate_path()
1133 if (cdev->private->state == DEV_STATE_ONLINE) { in io_subchannel_terminate_path()
1134 ccw_device_kill_io(cdev); in io_subchannel_terminate_path()
1141 dev_fsm_event(cdev, DEV_EVENT_VERIFY); in io_subchannel_terminate_path()
1145 dev_fsm_event(cdev, DEV_EVENT_NOTOPER); in io_subchannel_terminate_path()
1151 struct ccw_device *cdev = sch_get_cdev(sch); in io_subchannel_chp_event() local
1162 if (cdev) in io_subchannel_chp_event()
1163 cdev->private->path_gone_mask |= mask; in io_subchannel_chp_event()
1169 if (cdev) in io_subchannel_chp_event()
1170 cdev->private->path_new_mask |= mask; in io_subchannel_chp_event()
1176 if (cdev) in io_subchannel_chp_event()
1177 cdev->private->path_gone_mask |= mask; in io_subchannel_chp_event()
1184 if (cdev) in io_subchannel_chp_event()
1185 cdev->private->path_new_mask |= mask; in io_subchannel_chp_event()
1197 if (cdev && cdev->drv && cdev->drv->path_event) in io_subchannel_chp_event()
1198 cdev->drv->path_event(cdev, path_event); in io_subchannel_chp_event()
1206 struct ccw_device *cdev; in io_subchannel_quiesce() local
1210 cdev = sch_get_cdev(sch); in io_subchannel_quiesce()
1218 if (cdev->handler) in io_subchannel_quiesce()
1219 cdev->handler(cdev, cdev->private->intparm, ERR_PTR(-EIO)); in io_subchannel_quiesce()
1221 cdev->private->state = DEV_STATE_QUIESCE; in io_subchannel_quiesce()
1222 cdev->private->iretry = 255; in io_subchannel_quiesce()
1223 ret = ccw_device_cancel_halt_clear(cdev); in io_subchannel_quiesce()
1225 ccw_device_set_timeout(cdev, HZ/10); in io_subchannel_quiesce()
1227 wait_event(cdev->private->wait_q, in io_subchannel_quiesce()
1228 cdev->private->state != DEV_STATE_QUIESCE); in io_subchannel_quiesce()
1242 static int device_is_disconnected(struct ccw_device *cdev) in device_is_disconnected() argument
1244 if (!cdev) in device_is_disconnected()
1246 return (cdev->private->state == DEV_STATE_DISCONNECTED || in device_is_disconnected()
1247 cdev->private->state == DEV_STATE_DISCONNECTED_SENSE_ID); in device_is_disconnected()
1252 struct ccw_device *cdev = to_ccwdev(dev); in recovery_check() local
1256 spin_lock_irq(cdev->ccwlock); in recovery_check()
1257 switch (cdev->private->state) { in recovery_check()
1259 sch = to_subchannel(cdev->dev.parent); in recovery_check()
1265 cdev->private->dev_id.ssid, in recovery_check()
1266 cdev->private->dev_id.devno); in recovery_check()
1267 dev_fsm_event(cdev, DEV_EVENT_VERIFY); in recovery_check()
1274 spin_unlock_irq(cdev->ccwlock); in recovery_check()
1323 struct ccw_device *cdev = to_ccwdev(dev); in purge_fn() local
1324 struct ccw_dev_id *id = &cdev->private->dev_id; in purge_fn()
1325 struct subchannel *sch = to_subchannel(cdev->dev.parent); in purge_fn()
1327 spin_lock_irq(cdev->ccwlock); in purge_fn()
1329 (cdev->private->state == DEV_STATE_OFFLINE) && in purge_fn()
1330 (atomic_cmpxchg(&cdev->private->onoff, 0, 1) == 0)) { in purge_fn()
1333 ccw_device_sched_todo(cdev, CDEV_TODO_UNREG); in purge_fn()
1335 atomic_set(&cdev->private->onoff, 0); in purge_fn()
1337 spin_unlock_irq(cdev->ccwlock); in purge_fn()
1357 void ccw_device_set_disconnected(struct ccw_device *cdev) in ccw_device_set_disconnected() argument
1359 if (!cdev) in ccw_device_set_disconnected()
1361 ccw_device_set_timeout(cdev, 0); in ccw_device_set_disconnected()
1362 cdev->private->flags.fake_irb = 0; in ccw_device_set_disconnected()
1363 cdev->private->state = DEV_STATE_DISCONNECTED; in ccw_device_set_disconnected()
1364 if (cdev->online) in ccw_device_set_disconnected()
1368 void ccw_device_set_notoper(struct ccw_device *cdev) in ccw_device_set_notoper() argument
1370 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_set_notoper()
1374 ccw_device_set_timeout(cdev, 0); in ccw_device_set_notoper()
1376 cdev->private->state = DEV_STATE_NOT_OPER; in ccw_device_set_notoper()
1394 struct ccw_device *cdev; in sch_get_action() local
1396 cdev = sch_get_cdev(sch); in sch_get_action()
1399 if (!cdev) in sch_get_action()
1401 if (ccw_device_notify(cdev, CIO_GONE) != NOTIFY_OK) in sch_get_action()
1406 if (!cdev) in sch_get_action()
1408 if (sch->schib.pmcw.dev != cdev->private->dev_id.devno) { in sch_get_action()
1409 if (ccw_device_notify(cdev, CIO_GONE) != NOTIFY_OK) in sch_get_action()
1414 if (ccw_device_notify(cdev, CIO_NO_PATH) != NOTIFY_OK) in sch_get_action()
1418 if (device_is_disconnected(cdev)) in sch_get_action()
1420 if (cdev->online) in sch_get_action()
1422 if (cdev->private->state == DEV_STATE_NOT_OPER) in sch_get_action()
1440 struct ccw_device *cdev; in io_subchannel_sch_event() local
1450 cdev = sch_get_cdev(sch); in io_subchannel_sch_event()
1451 if (cdev && work_pending(&cdev->private->todo_work)) in io_subchannel_sch_event()
1461 ccw_device_trigger_reprobe(cdev); in io_subchannel_sch_event()
1470 ccw_device_set_disconnected(cdev); in io_subchannel_sch_event()
1475 ccw_device_set_disconnected(cdev); in io_subchannel_sch_event()
1480 if (!cdev) in io_subchannel_sch_event()
1482 if (cdev->private->state == DEV_STATE_SENSE_ID) { in io_subchannel_sch_event()
1488 dev_fsm_event(cdev, DEV_EVENT_NOTOPER); in io_subchannel_sch_event()
1490 ccw_device_set_notoper(cdev); in io_subchannel_sch_event()
1507 rc = ccw_device_move_to_orph(cdev); in io_subchannel_sch_event()
1517 ccw_device_unregister(cdev); in io_subchannel_sch_event()
1533 cdev = get_ccwdev_by_dev_id(&dev_id); in io_subchannel_sch_event()
1534 if (!cdev) { in io_subchannel_sch_event()
1538 rc = ccw_device_move_to_sch(cdev, sch); in io_subchannel_sch_event()
1541 put_device(&cdev->dev); in io_subchannel_sch_event()
1545 ccw_device_trigger_reprobe(cdev); in io_subchannel_sch_event()
1548 put_device(&cdev->dev); in io_subchannel_sch_event()
1561 static void ccw_device_set_int_class(struct ccw_device *cdev) in ccw_device_set_int_class() argument
1563 struct ccw_driver *cdrv = cdev->drv; in ccw_device_set_int_class()
1568 cdev->private->int_class = cdrv->int_class; in ccw_device_set_int_class()
1570 cdev->private->int_class = IRQIO_CIO; in ccw_device_set_int_class()
1574 int __init ccw_device_enable_console(struct ccw_device *cdev) in ccw_device_enable_console() argument
1576 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_enable_console()
1579 if (!cdev->drv || !cdev->handler) in ccw_device_enable_console()
1587 io_subchannel_recog(cdev, sch); in ccw_device_enable_console()
1589 spin_lock_irq(cdev->ccwlock); in ccw_device_enable_console()
1590 while (!dev_fsm_final_state(cdev)) in ccw_device_enable_console()
1591 ccw_device_wait_idle(cdev); in ccw_device_enable_console()
1594 get_device(&cdev->dev); in ccw_device_enable_console()
1595 rc = ccw_device_online(cdev); in ccw_device_enable_console()
1599 while (!dev_fsm_final_state(cdev)) in ccw_device_enable_console()
1600 ccw_device_wait_idle(cdev); in ccw_device_enable_console()
1602 if (cdev->private->state == DEV_STATE_ONLINE) in ccw_device_enable_console()
1603 cdev->online = 1; in ccw_device_enable_console()
1607 spin_unlock_irq(cdev->ccwlock); in ccw_device_enable_console()
1609 put_device(&cdev->dev); in ccw_device_enable_console()
1616 struct ccw_device *cdev; in ccw_device_create_console() local
1632 cdev = io_subchannel_create_ccwdev(sch); in ccw_device_create_console()
1633 if (IS_ERR(cdev)) { in ccw_device_create_console()
1639 return cdev; in ccw_device_create_console()
1641 cdev->drv = drv; in ccw_device_create_console()
1642 ccw_device_set_int_class(cdev); in ccw_device_create_console()
1643 return cdev; in ccw_device_create_console()
1652 void __init ccw_device_destroy_console(struct ccw_device *cdev) in ccw_device_destroy_console() argument
1654 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_destroy_console()
1661 put_device(&cdev->dev); in ccw_device_destroy_console()
1673 void ccw_device_wait_idle(struct ccw_device *cdev) in ccw_device_wait_idle() argument
1675 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_wait_idle()
1720 struct ccw_device *cdev = to_ccwdev(dev); in ccw_device_probe() local
1724 cdev->drv = cdrv; /* to let the driver call _set_online */ in ccw_device_probe()
1725 ccw_device_set_int_class(cdev); in ccw_device_probe()
1726 ret = cdrv->probe ? cdrv->probe(cdev) : -ENODEV; in ccw_device_probe()
1728 cdev->drv = NULL; in ccw_device_probe()
1729 cdev->private->int_class = IRQIO_CIO; in ccw_device_probe()
1738 struct ccw_device *cdev = to_ccwdev(dev); in ccw_device_remove() local
1739 struct ccw_driver *cdrv = cdev->drv; in ccw_device_remove()
1744 cdrv->remove(cdev); in ccw_device_remove()
1746 spin_lock_irq(cdev->ccwlock); in ccw_device_remove()
1747 if (cdev->online) { in ccw_device_remove()
1748 cdev->online = 0; in ccw_device_remove()
1749 ret = ccw_device_offline(cdev); in ccw_device_remove()
1750 spin_unlock_irq(cdev->ccwlock); in ccw_device_remove()
1752 wait_event(cdev->private->wait_q, in ccw_device_remove()
1753 dev_fsm_final_state(cdev)); in ccw_device_remove()
1757 ret, cdev->private->dev_id.ssid, in ccw_device_remove()
1758 cdev->private->dev_id.devno); in ccw_device_remove()
1760 put_device(&cdev->dev); in ccw_device_remove()
1761 spin_lock_irq(cdev->ccwlock); in ccw_device_remove()
1763 ccw_device_set_timeout(cdev, 0); in ccw_device_remove()
1764 cdev->drv = NULL; in ccw_device_remove()
1765 cdev->private->int_class = IRQIO_CIO; in ccw_device_remove()
1766 sch = to_subchannel(cdev->dev.parent); in ccw_device_remove()
1767 spin_unlock_irq(cdev->ccwlock); in ccw_device_remove()
1769 __disable_cmf(cdev); in ccw_device_remove()
1774 struct ccw_device *cdev; in ccw_device_shutdown() local
1776 cdev = to_ccwdev(dev); in ccw_device_shutdown()
1777 if (cdev->drv && cdev->drv->shutdown) in ccw_device_shutdown()
1778 cdev->drv->shutdown(cdev); in ccw_device_shutdown()
1779 __disable_cmf(cdev); in ccw_device_shutdown()
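ccw_device_probe() sets cdev->drv before calling the driver's probe() callback, ccw_device_remove() calls remove() and then forces a still-online device offline, and ccw_device_shutdown() runs an optional shutdown() callback before disabling channel measurement. Tying the earlier sketches together, a hypothetical registration might look like this (illustrative names, not a real in-tree driver):

    /* Hypothetical driver using the sample_ids table and callbacks from
     * the sketches above; registration hands it to ccw_bus_match(). */
    static int sample_probe(struct ccw_device *cdev)
    {
            /* cdev->drv is already set here; cdev->id.driver_info holds
             * the value copied from the matching sample_ids entry */
            return 0;
    }

    static void sample_remove(struct ccw_device *cdev)
    {
            /* free driver state; the bus code afterwards takes the
             * device offline if it is still online */
    }

    static struct ccw_driver sample_driver = {
            .driver = {
                    .name  = "sample_ccw",
                    .owner = THIS_MODULE,
            },
            .ids         = sample_ids,
            .probe       = sample_probe,
            .remove      = sample_remove,
            .set_online  = sample_set_online,
            .set_offline = sample_set_offline,
    };

    static int __init sample_init(void)
    {
            return ccw_driver_register(&sample_driver);
    }
    module_init(sample_init);

    static void __exit sample_exit(void)
    {
            ccw_driver_unregister(&sample_driver);
    }
    module_exit(sample_exit);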
1822 struct ccw_device *cdev; in ccw_device_todo() local
1827 cdev = priv->cdev; in ccw_device_todo()
1828 sch = to_subchannel(cdev->dev.parent); in ccw_device_todo()
1830 spin_lock_irq(cdev->ccwlock); in ccw_device_todo()
1835 spin_unlock_irq(cdev->ccwlock); in ccw_device_todo()
1839 cmf_reenable(cdev); in ccw_device_todo()
1842 ccw_device_do_unbind_bind(cdev); in ccw_device_todo()
1845 io_subchannel_register(cdev); in ccw_device_todo()
1855 ccw_device_unregister(cdev); in ccw_device_todo()
1861 put_device(&cdev->dev); in ccw_device_todo()
1873 void ccw_device_sched_todo(struct ccw_device *cdev, enum cdev_todo todo) in ccw_device_sched_todo() argument
1876 cdev->private->dev_id.ssid, cdev->private->dev_id.devno, in ccw_device_sched_todo()
1878 if (cdev->private->todo >= todo) in ccw_device_sched_todo()
1880 cdev->private->todo = todo; in ccw_device_sched_todo()
1882 if (!get_device(&cdev->dev)) in ccw_device_sched_todo()
1884 if (!queue_work(cio_work_q, &cdev->private->todo_work)) { in ccw_device_sched_todo()
1886 put_device(&cdev->dev); in ccw_device_sched_todo()
1897 int ccw_device_siosl(struct ccw_device *cdev) in ccw_device_siosl() argument
1899 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_siosl()
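ccw_device_siosl() is exported for drivers that want the channel subsystem to collect diagnostic data for the device's subchannel. A minimal, hypothetical error-path use:

    /* Hypothetical error path: request channel-subsystem logging for the
     * device's subchannel when an unexplained error is seen. */
    static void sample_report_error(struct ccw_device *cdev)
    {
            if (ccw_device_siosl(cdev))
                    dev_warn(&cdev->dev,
                             "could not trigger subchannel logging\n");
    }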