Lines Matching refs:cdev
63 struct ccw_device *cdev = to_ccwdev(dev); in ccw_bus_match() local
70 found = ccw_device_id_match(ids, &cdev->id); in ccw_bus_match()
74 cdev->id.driver_info = found->driver_info; in ccw_bus_match()
106 const struct ccw_device *cdev = to_ccwdev(dev); in ccw_uevent() local
107 const struct ccw_device_id *id = &(cdev->id); in ccw_uevent()
200 struct ccw_device *cdev = to_ccwdev(dev); in devtype_show() local
201 struct ccw_device_id *id = &(cdev->id); in devtype_show()
212 struct ccw_device *cdev = to_ccwdev(dev); in cutype_show() local
213 struct ccw_device_id *id = &(cdev->id); in cutype_show()
221 struct ccw_device *cdev = to_ccwdev(dev); in modalias_show() local
222 struct ccw_device_id *id = &(cdev->id); in modalias_show()
233 struct ccw_device *cdev = to_ccwdev(dev); in online_show() local
235 return sysfs_emit(buf, cdev->online ? "1\n" : "0\n"); in online_show()
238 int ccw_device_is_orphan(struct ccw_device *cdev) in ccw_device_is_orphan() argument
240 return sch_is_pseudo_sch(to_subchannel(cdev->dev.parent)); in ccw_device_is_orphan()
243 static void ccw_device_unregister(struct ccw_device *cdev) in ccw_device_unregister() argument
245 mutex_lock(&cdev->reg_mutex); in ccw_device_unregister()
246 if (device_is_registered(&cdev->dev)) { in ccw_device_unregister()
248 device_del(&cdev->dev); in ccw_device_unregister()
250 mutex_unlock(&cdev->reg_mutex); in ccw_device_unregister()
252 if (cdev->private->flags.initialized) { in ccw_device_unregister()
253 cdev->private->flags.initialized = 0; in ccw_device_unregister()
255 put_device(&cdev->dev); in ccw_device_unregister()
272 int ccw_device_set_offline(struct ccw_device *cdev) in ccw_device_set_offline() argument
277 if (!cdev) in ccw_device_set_offline()
279 if (!cdev->online || !cdev->drv) in ccw_device_set_offline()
282 if (cdev->drv->set_offline) { in ccw_device_set_offline()
283 ret = cdev->drv->set_offline(cdev); in ccw_device_set_offline()
287 spin_lock_irq(cdev->ccwlock); in ccw_device_set_offline()
288 sch = to_subchannel(cdev->dev.parent); in ccw_device_set_offline()
289 cdev->online = 0; in ccw_device_set_offline()
291 while (!dev_fsm_final_state(cdev) && in ccw_device_set_offline()
292 cdev->private->state != DEV_STATE_DISCONNECTED) { in ccw_device_set_offline()
293 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_offline()
294 wait_event(cdev->private->wait_q, (dev_fsm_final_state(cdev) || in ccw_device_set_offline()
295 cdev->private->state == DEV_STATE_DISCONNECTED)); in ccw_device_set_offline()
296 spin_lock_irq(cdev->ccwlock); in ccw_device_set_offline()
299 ret = ccw_device_offline(cdev); in ccw_device_set_offline()
303 "0.%x.%04x\n", ret, cdev->private->dev_id.ssid, in ccw_device_set_offline()
304 cdev->private->dev_id.devno); in ccw_device_set_offline()
307 state = cdev->private->state; in ccw_device_set_offline()
308 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_offline()
310 spin_lock_irq(cdev->ccwlock); in ccw_device_set_offline()
311 cdev->private->state = state; in ccw_device_set_offline()
313 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_offline()
314 wait_event(cdev->private->wait_q, (dev_fsm_final_state(cdev) || in ccw_device_set_offline()
315 cdev->private->state == DEV_STATE_DISCONNECTED)); in ccw_device_set_offline()
317 if (cdev->private->state == DEV_STATE_BOXED) { in ccw_device_set_offline()
319 dev_name(&cdev->dev)); in ccw_device_set_offline()
320 } else if (cdev->private->state == DEV_STATE_NOT_OPER) { in ccw_device_set_offline()
322 dev_name(&cdev->dev)); in ccw_device_set_offline()
325 put_device(&cdev->dev); in ccw_device_set_offline()
329 cdev->private->state = DEV_STATE_OFFLINE; in ccw_device_set_offline()
330 dev_fsm_event(cdev, DEV_EVENT_NOTOPER); in ccw_device_set_offline()
331 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_offline()
333 put_device(&cdev->dev); in ccw_device_set_offline()
349 int ccw_device_set_online(struct ccw_device *cdev) in ccw_device_set_online() argument
354 if (!cdev) in ccw_device_set_online()
356 if (cdev->online || !cdev->drv) in ccw_device_set_online()
359 if (!get_device(&cdev->dev)) in ccw_device_set_online()
362 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
363 ret = ccw_device_online(cdev); in ccw_device_set_online()
365 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
368 ret, cdev->private->dev_id.ssid, in ccw_device_set_online()
369 cdev->private->dev_id.devno); in ccw_device_set_online()
371 put_device(&cdev->dev); in ccw_device_set_online()
375 while (!dev_fsm_final_state(cdev)) { in ccw_device_set_online()
376 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
377 wait_event(cdev->private->wait_q, dev_fsm_final_state(cdev)); in ccw_device_set_online()
378 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
381 if ((cdev->private->state != DEV_STATE_ONLINE) && in ccw_device_set_online()
382 (cdev->private->state != DEV_STATE_W4SENSE)) { in ccw_device_set_online()
383 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
385 if (cdev->private->state == DEV_STATE_BOXED) { in ccw_device_set_online()
387 dev_name(&cdev->dev)); in ccw_device_set_online()
388 } else if (cdev->private->state == DEV_STATE_NOT_OPER) { in ccw_device_set_online()
390 dev_name(&cdev->dev)); in ccw_device_set_online()
393 put_device(&cdev->dev); in ccw_device_set_online()
396 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
397 if (cdev->drv->set_online) in ccw_device_set_online()
398 ret = cdev->drv->set_online(cdev); in ccw_device_set_online()
402 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
403 cdev->online = 1; in ccw_device_set_online()
404 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
408 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
410 while (!dev_fsm_final_state(cdev) && in ccw_device_set_online()
411 cdev->private->state != DEV_STATE_DISCONNECTED) { in ccw_device_set_online()
412 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
413 wait_event(cdev->private->wait_q, (dev_fsm_final_state(cdev) || in ccw_device_set_online()
414 cdev->private->state == DEV_STATE_DISCONNECTED)); in ccw_device_set_online()
415 spin_lock_irq(cdev->ccwlock); in ccw_device_set_online()
417 ret2 = ccw_device_offline(cdev); in ccw_device_set_online()
420 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
421 wait_event(cdev->private->wait_q, (dev_fsm_final_state(cdev) || in ccw_device_set_online()
422 cdev->private->state == DEV_STATE_DISCONNECTED)); in ccw_device_set_online()
424 put_device(&cdev->dev); in ccw_device_set_online()
430 ret2, cdev->private->dev_id.ssid, in ccw_device_set_online()
431 cdev->private->dev_id.devno); in ccw_device_set_online()
432 cdev->private->state = DEV_STATE_OFFLINE; in ccw_device_set_online()
433 spin_unlock_irq(cdev->ccwlock); in ccw_device_set_online()
435 put_device(&cdev->dev); in ccw_device_set_online()
439 static int online_store_handle_offline(struct ccw_device *cdev) in online_store_handle_offline() argument
441 if (cdev->private->state == DEV_STATE_DISCONNECTED) { in online_store_handle_offline()
442 spin_lock_irq(cdev->ccwlock); in online_store_handle_offline()
443 ccw_device_sched_todo(cdev, CDEV_TODO_UNREG_EVAL); in online_store_handle_offline()
444 spin_unlock_irq(cdev->ccwlock); in online_store_handle_offline()
447 if (cdev->drv && cdev->drv->set_offline) in online_store_handle_offline()
448 return ccw_device_set_offline(cdev); in online_store_handle_offline()
452 static int online_store_recog_and_online(struct ccw_device *cdev) in online_store_recog_and_online() argument
455 if (cdev->private->state == DEV_STATE_BOXED) { in online_store_recog_and_online()
456 spin_lock_irq(cdev->ccwlock); in online_store_recog_and_online()
457 ccw_device_recognition(cdev); in online_store_recog_and_online()
458 spin_unlock_irq(cdev->ccwlock); in online_store_recog_and_online()
459 wait_event(cdev->private->wait_q, in online_store_recog_and_online()
460 cdev->private->flags.recog_done); in online_store_recog_and_online()
461 if (cdev->private->state != DEV_STATE_OFFLINE) in online_store_recog_and_online()
465 if (cdev->drv && cdev->drv->set_online) in online_store_recog_and_online()
466 return ccw_device_set_online(cdev); in online_store_recog_and_online()
470 static int online_store_handle_online(struct ccw_device *cdev, int force) in online_store_handle_online() argument
474 ret = online_store_recog_and_online(cdev); in online_store_handle_online()
477 if (force && cdev->private->state == DEV_STATE_BOXED) { in online_store_handle_online()
478 ret = ccw_device_stlck(cdev); in online_store_handle_online()
481 if (cdev->id.cu_type == 0) in online_store_handle_online()
482 cdev->private->state = DEV_STATE_NOT_OPER; in online_store_handle_online()
483 ret = online_store_recog_and_online(cdev); in online_store_handle_online()
493 struct ccw_device *cdev = to_ccwdev(dev); in online_store() local
498 if (atomic_cmpxchg(&cdev->private->onoff, 0, 1) != 0) in online_store()
501 if (!dev_fsm_final_state(cdev) && in online_store()
502 cdev->private->state != DEV_STATE_DISCONNECTED) { in online_store()
507 if (work_pending(&cdev->private->todo_work)) { in online_store()
525 ret = online_store_handle_offline(cdev); in online_store()
528 ret = online_store_handle_online(cdev, force); in online_store()
536 atomic_set(&cdev->private->onoff, 0); in online_store()
543 struct ccw_device *cdev = to_ccwdev(dev); in available_show() local
546 if (ccw_device_is_orphan(cdev)) in available_show()
548 switch (cdev->private->state) { in available_show()
630 struct ccw_device *cdev = to_ccwdev(dev); in match_dev_id() local
633 return ccw_dev_id_is_equal(&cdev->private->dev_id, dev_id); in match_dev_id()
656 static void ccw_device_do_unbind_bind(struct ccw_device *cdev) in ccw_device_do_unbind_bind() argument
660 mutex_lock(&cdev->reg_mutex); in ccw_device_do_unbind_bind()
661 if (device_is_registered(&cdev->dev)) { in ccw_device_do_unbind_bind()
662 device_release_driver(&cdev->dev); in ccw_device_do_unbind_bind()
663 ret = device_attach(&cdev->dev); in ccw_device_do_unbind_bind()
666 mutex_unlock(&cdev->reg_mutex); in ccw_device_do_unbind_bind()
672 struct ccw_device *cdev; in ccw_device_release() local
674 cdev = to_ccwdev(dev); in ccw_device_release()
675 cio_gp_dma_free(cdev->private->dma_pool, cdev->private->dma_area, in ccw_device_release()
676 sizeof(*cdev->private->dma_area)); in ccw_device_release()
677 cio_gp_dma_destroy(cdev->private->dma_pool, &cdev->dev); in ccw_device_release()
679 put_device(cdev->dev.parent); in ccw_device_release()
680 kfree(cdev->private); in ccw_device_release()
681 kfree(cdev); in ccw_device_release()
686 struct ccw_device *cdev; in io_subchannel_allocate_dev() local
690 cdev = kzalloc(sizeof(*cdev), GFP_KERNEL); in io_subchannel_allocate_dev()
691 if (!cdev) { in io_subchannel_allocate_dev()
695 cdev->private = kzalloc(sizeof(struct ccw_device_private), in io_subchannel_allocate_dev()
697 if (!cdev->private) { in io_subchannel_allocate_dev()
702 cdev->dev.dma_mask = sch->dev.dma_mask; in io_subchannel_allocate_dev()
703 ret = dma_set_coherent_mask(&cdev->dev, sch->dev.coherent_dma_mask); in io_subchannel_allocate_dev()
707 dma_pool = cio_gp_dma_create(&cdev->dev, 1); in io_subchannel_allocate_dev()
712 cdev->private->dma_pool = dma_pool; in io_subchannel_allocate_dev()
713 cdev->private->dma_area = cio_gp_dma_zalloc(dma_pool, &cdev->dev, in io_subchannel_allocate_dev()
714 sizeof(*cdev->private->dma_area)); in io_subchannel_allocate_dev()
715 if (!cdev->private->dma_area) { in io_subchannel_allocate_dev()
719 return cdev; in io_subchannel_allocate_dev()
721 cio_gp_dma_destroy(dma_pool, &cdev->dev); in io_subchannel_allocate_dev()
724 kfree(cdev->private); in io_subchannel_allocate_dev()
726 kfree(cdev); in io_subchannel_allocate_dev()
734 struct ccw_device *cdev) in io_subchannel_initialize_dev() argument
736 struct ccw_device_private *priv = cdev->private; in io_subchannel_initialize_dev()
739 priv->cdev = cdev; in io_subchannel_initialize_dev()
749 mutex_init(&cdev->reg_mutex); in io_subchannel_initialize_dev()
752 cdev->ccwlock = &sch->lock; in io_subchannel_initialize_dev()
753 cdev->dev.parent = &sch->dev; in io_subchannel_initialize_dev()
754 cdev->dev.release = ccw_device_release; in io_subchannel_initialize_dev()
755 cdev->dev.bus = &ccw_bus_type; in io_subchannel_initialize_dev()
756 cdev->dev.groups = ccwdev_attr_groups; in io_subchannel_initialize_dev()
758 device_initialize(&cdev->dev); in io_subchannel_initialize_dev()
759 ret = dev_set_name(&cdev->dev, "0.%x.%04x", cdev->private->dev_id.ssid, in io_subchannel_initialize_dev()
760 cdev->private->dev_id.devno); in io_subchannel_initialize_dev()
769 sch_set_cdev(sch, cdev); in io_subchannel_initialize_dev()
775 put_device(&cdev->dev); in io_subchannel_initialize_dev()
781 struct ccw_device *cdev; in io_subchannel_create_ccwdev() local
784 cdev = io_subchannel_allocate_dev(sch); in io_subchannel_create_ccwdev()
785 if (!IS_ERR(cdev)) { in io_subchannel_create_ccwdev()
786 ret = io_subchannel_initialize_dev(sch, cdev); in io_subchannel_create_ccwdev()
788 cdev = ERR_PTR(ret); in io_subchannel_create_ccwdev()
790 return cdev; in io_subchannel_create_ccwdev()
797 struct ccw_device *cdev; in sch_create_and_recog_new_device() local
800 cdev = io_subchannel_create_ccwdev(sch); in sch_create_and_recog_new_device()
801 if (IS_ERR(cdev)) { in sch_create_and_recog_new_device()
807 io_subchannel_recog(cdev, sch); in sch_create_and_recog_new_device()
813 static void io_subchannel_register(struct ccw_device *cdev) in io_subchannel_register() argument
819 sch = to_subchannel(cdev->dev.parent); in io_subchannel_register()
835 mutex_lock(&cdev->reg_mutex); in io_subchannel_register()
836 if (device_is_registered(&cdev->dev)) { in io_subchannel_register()
837 if (!cdev->drv) { in io_subchannel_register()
838 ret = device_reprobe(&cdev->dev); in io_subchannel_register()
843 cdev->private->dev_id.ssid, in io_subchannel_register()
844 cdev->private->dev_id.devno); in io_subchannel_register()
850 ret = device_add(&cdev->dev); in io_subchannel_register()
853 cdev->private->dev_id.ssid, in io_subchannel_register()
854 cdev->private->dev_id.devno, ret); in io_subchannel_register()
858 mutex_unlock(&cdev->reg_mutex); in io_subchannel_register()
860 put_device(&cdev->dev); in io_subchannel_register()
864 cdev->private->flags.recog_done = 1; in io_subchannel_register()
865 mutex_unlock(&cdev->reg_mutex); in io_subchannel_register()
866 wake_up(&cdev->private->wait_q); in io_subchannel_register()
876 io_subchannel_recog_done(struct ccw_device *cdev) in io_subchannel_recog_done() argument
879 cdev->private->flags.recog_done = 1; in io_subchannel_recog_done()
882 switch (cdev->private->state) { in io_subchannel_recog_done()
886 cdev->private->flags.recog_done = 1; in io_subchannel_recog_done()
888 ccw_device_sched_todo(cdev, CDEV_TODO_UNREG); in io_subchannel_recog_done()
897 ccw_device_sched_todo(cdev, CDEV_TODO_REGISTER); in io_subchannel_recog_done()
902 static void io_subchannel_recog(struct ccw_device *cdev, struct subchannel *sch) in io_subchannel_recog() argument
909 ccw_device_recognition(cdev); in io_subchannel_recog()
913 static int ccw_device_move_to_sch(struct ccw_device *cdev, in ccw_device_move_to_sch() argument
919 old_sch = to_subchannel(cdev->dev.parent); in ccw_device_move_to_sch()
939 rc = device_move(&cdev->dev, &sch->dev, DPM_ORDER_PARENT_BEFORE_DEV); in ccw_device_move_to_sch()
943 cdev->private->dev_id.ssid, in ccw_device_move_to_sch()
944 cdev->private->dev_id.devno, sch->schid.ssid, in ccw_device_move_to_sch()
967 cdev->ccwlock = &sch->lock; in ccw_device_move_to_sch()
969 sch_set_cdev(sch, cdev); in ccw_device_move_to_sch()
976 static int ccw_device_move_to_orph(struct ccw_device *cdev) in ccw_device_move_to_orph() argument
978 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_move_to_orph()
981 return ccw_device_move_to_sch(cdev, css->pseudo_subchannel); in ccw_device_move_to_orph()
986 struct ccw_device *cdev; in io_subchannel_irq() local
988 cdev = sch_get_cdev(sch); in io_subchannel_irq()
992 if (cdev) in io_subchannel_irq()
993 dev_fsm_event(cdev, DEV_EVENT_INTERRUPT); in io_subchannel_irq()
1029 struct ccw_device *cdev; in io_subchannel_probe() local
1044 cdev = sch_get_cdev(sch); in io_subchannel_probe()
1045 rc = device_add(&cdev->dev); in io_subchannel_probe()
1048 put_device(&cdev->dev); in io_subchannel_probe()
1090 struct ccw_device *cdev; in io_subchannel_remove() local
1092 cdev = sch_get_cdev(sch); in io_subchannel_remove()
1093 if (!cdev) in io_subchannel_remove()
1096 ccw_device_unregister(cdev); in io_subchannel_remove()
1110 struct ccw_device *cdev; in io_subchannel_verify() local
1112 cdev = sch_get_cdev(sch); in io_subchannel_verify()
1113 if (cdev) in io_subchannel_verify()
1114 dev_fsm_event(cdev, DEV_EVENT_VERIFY); in io_subchannel_verify()
1121 struct ccw_device *cdev; in io_subchannel_terminate_path() local
1123 cdev = sch_get_cdev(sch); in io_subchannel_terminate_path()
1124 if (!cdev) in io_subchannel_terminate_path()
1131 if (cdev->private->state == DEV_STATE_ONLINE) { in io_subchannel_terminate_path()
1132 ccw_device_kill_io(cdev); in io_subchannel_terminate_path()
1139 dev_fsm_event(cdev, DEV_EVENT_VERIFY); in io_subchannel_terminate_path()
1143 dev_fsm_event(cdev, DEV_EVENT_NOTOPER); in io_subchannel_terminate_path()
1149 struct ccw_device *cdev = sch_get_cdev(sch); in io_subchannel_chp_event() local
1160 if (cdev) in io_subchannel_chp_event()
1161 cdev->private->path_gone_mask |= mask; in io_subchannel_chp_event()
1167 if (cdev) in io_subchannel_chp_event()
1168 cdev->private->path_new_mask |= mask; in io_subchannel_chp_event()
1174 if (cdev) in io_subchannel_chp_event()
1175 cdev->private->path_gone_mask |= mask; in io_subchannel_chp_event()
1182 if (cdev) in io_subchannel_chp_event()
1183 cdev->private->path_new_mask |= mask; in io_subchannel_chp_event()
1195 if (cdev && cdev->drv && cdev->drv->path_event) in io_subchannel_chp_event()
1196 cdev->drv->path_event(cdev, path_event); in io_subchannel_chp_event()
1204 struct ccw_device *cdev; in io_subchannel_quiesce() local
1208 cdev = sch_get_cdev(sch); in io_subchannel_quiesce()
1216 if (cdev->handler) in io_subchannel_quiesce()
1217 cdev->handler(cdev, cdev->private->intparm, ERR_PTR(-EIO)); in io_subchannel_quiesce()
1219 cdev->private->state = DEV_STATE_QUIESCE; in io_subchannel_quiesce()
1220 cdev->private->iretry = 255; in io_subchannel_quiesce()
1221 ret = ccw_device_cancel_halt_clear(cdev); in io_subchannel_quiesce()
1223 ccw_device_set_timeout(cdev, HZ/10); in io_subchannel_quiesce()
1225 wait_event(cdev->private->wait_q, in io_subchannel_quiesce()
1226 cdev->private->state != DEV_STATE_QUIESCE); in io_subchannel_quiesce()
1240 static int device_is_disconnected(struct ccw_device *cdev) in device_is_disconnected() argument
1242 if (!cdev) in device_is_disconnected()
1244 return (cdev->private->state == DEV_STATE_DISCONNECTED || in device_is_disconnected()
1245 cdev->private->state == DEV_STATE_DISCONNECTED_SENSE_ID); in device_is_disconnected()
1250 struct ccw_device *cdev = to_ccwdev(dev); in recovery_check() local
1254 spin_lock_irq(cdev->ccwlock); in recovery_check()
1255 switch (cdev->private->state) { in recovery_check()
1257 sch = to_subchannel(cdev->dev.parent); in recovery_check()
1263 cdev->private->dev_id.ssid, in recovery_check()
1264 cdev->private->dev_id.devno); in recovery_check()
1265 dev_fsm_event(cdev, DEV_EVENT_VERIFY); in recovery_check()
1272 spin_unlock_irq(cdev->ccwlock); in recovery_check()
1321 struct ccw_device *cdev = to_ccwdev(dev); in purge_fn() local
1322 struct ccw_dev_id *id = &cdev->private->dev_id; in purge_fn()
1323 struct subchannel *sch = to_subchannel(cdev->dev.parent); in purge_fn()
1325 spin_lock_irq(cdev->ccwlock); in purge_fn()
1327 (cdev->private->state == DEV_STATE_OFFLINE) && in purge_fn()
1328 (atomic_cmpxchg(&cdev->private->onoff, 0, 1) == 0)) { in purge_fn()
1331 ccw_device_sched_todo(cdev, CDEV_TODO_UNREG); in purge_fn()
1333 atomic_set(&cdev->private->onoff, 0); in purge_fn()
1335 spin_unlock_irq(cdev->ccwlock); in purge_fn()
1355 void ccw_device_set_disconnected(struct ccw_device *cdev) in ccw_device_set_disconnected() argument
1357 if (!cdev) in ccw_device_set_disconnected()
1359 ccw_device_set_timeout(cdev, 0); in ccw_device_set_disconnected()
1360 cdev->private->flags.fake_irb = 0; in ccw_device_set_disconnected()
1361 cdev->private->state = DEV_STATE_DISCONNECTED; in ccw_device_set_disconnected()
1362 if (cdev->online) in ccw_device_set_disconnected()
1366 void ccw_device_set_notoper(struct ccw_device *cdev) in ccw_device_set_notoper() argument
1368 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_set_notoper()
1372 ccw_device_set_timeout(cdev, 0); in ccw_device_set_notoper()
1374 cdev->private->state = DEV_STATE_NOT_OPER; in ccw_device_set_notoper()
1393 struct ccw_device *cdev; in sch_get_action() local
1396 cdev = sch_get_cdev(sch); in sch_get_action()
1401 if (!cdev) in sch_get_action()
1403 if (ccw_device_notify(cdev, CIO_GONE) != NOTIFY_OK) in sch_get_action()
1410 if (!cdev) in sch_get_action()
1412 if (ccw_device_notify(cdev, CIO_GONE) != NOTIFY_OK) in sch_get_action()
1418 if (!cdev) in sch_get_action()
1420 if (sch->schib.pmcw.dev != cdev->private->dev_id.devno) { in sch_get_action()
1421 if (ccw_device_notify(cdev, CIO_GONE) != NOTIFY_OK) in sch_get_action()
1426 if (ccw_device_notify(cdev, CIO_NO_PATH) != NOTIFY_OK) in sch_get_action()
1430 if (device_is_disconnected(cdev)) in sch_get_action()
1432 if (cdev->online) in sch_get_action()
1434 if (cdev->private->state == DEV_STATE_NOT_OPER) in sch_get_action()
1452 struct ccw_device *cdev; in io_subchannel_sch_event() local
1462 cdev = sch_get_cdev(sch); in io_subchannel_sch_event()
1463 if (cdev && work_pending(&cdev->private->todo_work)) in io_subchannel_sch_event()
1473 ccw_device_trigger_reprobe(cdev); in io_subchannel_sch_event()
1482 ccw_device_set_disconnected(cdev); in io_subchannel_sch_event()
1488 ccw_device_set_disconnected(cdev); in io_subchannel_sch_event()
1493 if (!cdev) in io_subchannel_sch_event()
1495 if (cdev->private->state == DEV_STATE_SENSE_ID) { in io_subchannel_sch_event()
1501 dev_fsm_event(cdev, DEV_EVENT_NOTOPER); in io_subchannel_sch_event()
1503 ccw_device_set_notoper(cdev); in io_subchannel_sch_event()
1521 rc = ccw_device_move_to_orph(cdev); in io_subchannel_sch_event()
1531 ccw_device_unregister(cdev); in io_subchannel_sch_event()
1547 cdev = get_ccwdev_by_dev_id(&dev_id); in io_subchannel_sch_event()
1548 if (!cdev) { in io_subchannel_sch_event()
1552 rc = ccw_device_move_to_sch(cdev, sch); in io_subchannel_sch_event()
1555 put_device(&cdev->dev); in io_subchannel_sch_event()
1559 ccw_device_trigger_reprobe(cdev); in io_subchannel_sch_event()
1562 put_device(&cdev->dev); in io_subchannel_sch_event()
1575 static void ccw_device_set_int_class(struct ccw_device *cdev) in ccw_device_set_int_class() argument
1577 struct ccw_driver *cdrv = cdev->drv; in ccw_device_set_int_class()
1582 cdev->private->int_class = cdrv->int_class; in ccw_device_set_int_class()
1584 cdev->private->int_class = IRQIO_CIO; in ccw_device_set_int_class()
1588 int __init ccw_device_enable_console(struct ccw_device *cdev) in ccw_device_enable_console() argument
1590 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_enable_console()
1593 if (!cdev->drv || !cdev->handler) in ccw_device_enable_console()
1601 io_subchannel_recog(cdev, sch); in ccw_device_enable_console()
1603 spin_lock_irq(cdev->ccwlock); in ccw_device_enable_console()
1604 while (!dev_fsm_final_state(cdev)) in ccw_device_enable_console()
1605 ccw_device_wait_idle(cdev); in ccw_device_enable_console()
1608 get_device(&cdev->dev); in ccw_device_enable_console()
1609 rc = ccw_device_online(cdev); in ccw_device_enable_console()
1613 while (!dev_fsm_final_state(cdev)) in ccw_device_enable_console()
1614 ccw_device_wait_idle(cdev); in ccw_device_enable_console()
1616 if (cdev->private->state == DEV_STATE_ONLINE) in ccw_device_enable_console()
1617 cdev->online = 1; in ccw_device_enable_console()
1621 spin_unlock_irq(cdev->ccwlock); in ccw_device_enable_console()
1623 put_device(&cdev->dev); in ccw_device_enable_console()
1630 struct ccw_device *cdev; in ccw_device_create_console() local
1646 cdev = io_subchannel_create_ccwdev(sch); in ccw_device_create_console()
1647 if (IS_ERR(cdev)) { in ccw_device_create_console()
1653 return cdev; in ccw_device_create_console()
1655 cdev->drv = drv; in ccw_device_create_console()
1656 ccw_device_set_int_class(cdev); in ccw_device_create_console()
1657 return cdev; in ccw_device_create_console()
1666 void __init ccw_device_destroy_console(struct ccw_device *cdev) in ccw_device_destroy_console() argument
1668 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_destroy_console()
1675 put_device(&cdev->dev); in ccw_device_destroy_console()
1687 void ccw_device_wait_idle(struct ccw_device *cdev) in ccw_device_wait_idle() argument
1689 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_wait_idle()
1734 struct ccw_device *cdev = to_ccwdev(dev); in ccw_device_probe() local
1738 cdev->drv = cdrv; /* to let the driver call _set_online */ in ccw_device_probe()
1739 ccw_device_set_int_class(cdev); in ccw_device_probe()
1740 ret = cdrv->probe ? cdrv->probe(cdev) : -ENODEV; in ccw_device_probe()
1742 cdev->drv = NULL; in ccw_device_probe()
1743 cdev->private->int_class = IRQIO_CIO; in ccw_device_probe()
1752 struct ccw_device *cdev = to_ccwdev(dev); in ccw_device_remove() local
1753 struct ccw_driver *cdrv = cdev->drv; in ccw_device_remove()
1758 cdrv->remove(cdev); in ccw_device_remove()
1760 spin_lock_irq(cdev->ccwlock); in ccw_device_remove()
1761 if (cdev->online) { in ccw_device_remove()
1762 cdev->online = 0; in ccw_device_remove()
1763 ret = ccw_device_offline(cdev); in ccw_device_remove()
1764 spin_unlock_irq(cdev->ccwlock); in ccw_device_remove()
1766 wait_event(cdev->private->wait_q, in ccw_device_remove()
1767 dev_fsm_final_state(cdev)); in ccw_device_remove()
1771 ret, cdev->private->dev_id.ssid, in ccw_device_remove()
1772 cdev->private->dev_id.devno); in ccw_device_remove()
1774 put_device(&cdev->dev); in ccw_device_remove()
1775 spin_lock_irq(cdev->ccwlock); in ccw_device_remove()
1777 ccw_device_set_timeout(cdev, 0); in ccw_device_remove()
1778 cdev->drv = NULL; in ccw_device_remove()
1779 cdev->private->int_class = IRQIO_CIO; in ccw_device_remove()
1780 sch = to_subchannel(cdev->dev.parent); in ccw_device_remove()
1781 spin_unlock_irq(cdev->ccwlock); in ccw_device_remove()
1783 __disable_cmf(cdev); in ccw_device_remove()
1788 struct ccw_device *cdev; in ccw_device_shutdown() local
1790 cdev = to_ccwdev(dev); in ccw_device_shutdown()
1791 if (cdev->drv && cdev->drv->shutdown) in ccw_device_shutdown()
1792 cdev->drv->shutdown(cdev); in ccw_device_shutdown()
1793 __disable_cmf(cdev); in ccw_device_shutdown()
1836 struct ccw_device *cdev; in ccw_device_todo() local
1841 cdev = priv->cdev; in ccw_device_todo()
1842 sch = to_subchannel(cdev->dev.parent); in ccw_device_todo()
1844 spin_lock_irq(cdev->ccwlock); in ccw_device_todo()
1849 spin_unlock_irq(cdev->ccwlock); in ccw_device_todo()
1853 cmf_reenable(cdev); in ccw_device_todo()
1856 ccw_device_do_unbind_bind(cdev); in ccw_device_todo()
1859 io_subchannel_register(cdev); in ccw_device_todo()
1869 ccw_device_unregister(cdev); in ccw_device_todo()
1875 put_device(&cdev->dev); in ccw_device_todo()
1887 void ccw_device_sched_todo(struct ccw_device *cdev, enum cdev_todo todo) in ccw_device_sched_todo() argument
1890 cdev->private->dev_id.ssid, cdev->private->dev_id.devno, in ccw_device_sched_todo()
1892 if (cdev->private->todo >= todo) in ccw_device_sched_todo()
1894 cdev->private->todo = todo; in ccw_device_sched_todo()
1896 if (!get_device(&cdev->dev)) in ccw_device_sched_todo()
1898 if (!queue_work(cio_work_q, &cdev->private->todo_work)) { in ccw_device_sched_todo()
1900 put_device(&cdev->dev); in ccw_device_sched_todo()
1911 int ccw_device_siosl(struct ccw_device *cdev) in ccw_device_siosl() argument
1913 struct subchannel *sch = to_subchannel(cdev->dev.parent); in ccw_device_siosl()
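
For orientation, the probe/remove/set_online/set_offline hooks reached through cdev->drv in the listing above are supplied by individual ccw device drivers. The sketch below shows how such a driver is typically wired up against the struct ccw_driver / ccw_driver_register() API; the "sample_ccw" name, the sample_* helpers and the 0x3990/0x3390 id entry are hypothetical placeholders for illustration, not taken from the referenced file. Writing 1 to a device's sysfs online attribute lands in online_store() above, which calls ccw_device_set_online() and, from there, the driver's set_online callback.

/*
 * Minimal sketch of a ccw device driver, illustrating the cdev->drv
 * callbacks (probe/remove/set_online/set_offline) invoked by the bus
 * code referenced in the listing.  The driver name "sample_ccw", the
 * sample_* functions and the device/control-unit types are placeholders.
 */
#include <linux/module.h>
#include <asm/ccwdev.h>

static struct ccw_device_id sample_ids[] = {
	{ CCW_DEVICE_DEVTYPE(0x3990, 0, 0x3390, 0) },	/* hypothetical match */
	{ /* end of list */ },
};
MODULE_DEVICE_TABLE(ccw, sample_ids);

static int sample_probe(struct ccw_device *cdev)
{
	/* Called from ccw_device_probe() once ccw_bus_match() has matched an id. */
	dev_info(&cdev->dev, "bound\n");
	return 0;
}

static void sample_remove(struct ccw_device *cdev)
{
	/* Called from ccw_device_remove() before the device is unbound. */
	dev_info(&cdev->dev, "unbound\n");
}

static int sample_set_online(struct ccw_device *cdev)
{
	/* Reached via ccw_device_set_online(), e.g. from online_store(). */
	return 0;
}

static int sample_set_offline(struct ccw_device *cdev)
{
	/* Reached via ccw_device_set_offline(). */
	return 0;
}

static struct ccw_driver sample_driver = {
	.driver = {
		.name	= "sample_ccw",
		.owner	= THIS_MODULE,
	},
	.ids		= sample_ids,
	.probe		= sample_probe,
	.remove		= sample_remove,
	.set_online	= sample_set_online,
	.set_offline	= sample_set_offline,
};

static int __init sample_init(void)
{
	return ccw_driver_register(&sample_driver);
}

static void __exit sample_exit(void)
{
	ccw_driver_unregister(&sample_driver);
}

module_init(sample_init);
module_exit(sample_exit);
MODULE_LICENSE("GPL");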