Searched refs: io_work (Results 1 – 10 of 10), sorted by relevance

/drivers/scsi/qedf/
qedf_main.c
2239 struct qedf_io_work *io_work; in qedf_process_completions() local
2300 if (!io_work) { in qedf_process_completions()
2307 INIT_WORK(&io_work->work, qedf_fp_io_handler); in qedf_process_completions()
2312 io_work->qedf = fp->qedf; in qedf_process_completions()
2315 queue_work_on(cpu, qedf_io_wq, &io_work->work); in qedf_process_completions()
2701 struct qedf_io_work *io_work = in qedf_fp_io_handler() local
2709 comp_type = (io_work->cqe.cqe_data >> in qedf_fp_io_handler()
2713 io_work->fp) in qedf_fp_io_handler()
2714 fc_exch_recv(io_work->qedf->lport, io_work->fp); in qedf_fp_io_handler()
2716 qedf_process_cqe(io_work->qedf, &io_work->cqe); in qedf_fp_io_handler()
[all …]
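
Read together, the qedf_main.c hits show the handoff: qedf_process_completions() sets up a struct qedf_io_work for each fast-path CQE, points its work item at qedf_fp_io_handler(), and queues it on a chosen CPU via qedf_io_wq; the handler then recovers the wrapper with container_of() and either hands an unsolicited frame to libfc or processes the copied CQE. A minimal sketch of the handler side follows; the struct layout is reconstructed from the accesses visible above, and the completion-type decode and the final free are assumptions rather than the driver's exact code.

/* Sketch of the consumer side seen in qedf_fp_io_handler(); struct layout is
 * reconstructed from the hits above, and the free at the end is assumed.
 * struct fcoe_cqe and struct qedf_ctx come from the driver's own headers. */
#include <linux/workqueue.h>
#include <linux/mempool.h>
#include <scsi/libfc.h>

struct qedf_io_work {
        struct work_struct work;        /* embedded work item */
        struct fcoe_cqe cqe;            /* private copy of the completion entry */
        struct qedf_ctx *qedf;          /* owning adapter context */
        struct fc_frame *fp;            /* unsolicited frame, if any */
};

static void qedf_fp_io_handler(struct work_struct *work)
{
        struct qedf_io_work *io_work =
                container_of(work, struct qedf_io_work, work);

        /* The real handler first decodes a completion type out of
         * io_work->cqe.cqe_data (line 2709 above); simplified here to
         * "unsolicited frame present or not". */
        if (io_work->fp)
                fc_exch_recv(io_work->qedf->lport, io_work->fp);
        else
                qedf_process_cqe(io_work->qedf, &io_work->cqe);

        /* Assumed: return the wrapper to the pool it was carved from. */
        mempool_free(io_work, io_work->qedf->io_mempool);
}

Deferring to a work item here keeps the hardware completion path short; the heavier libfc exchange handling runs later in process context on the CPU the work was queued to.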
qedf_io.c
2497 struct qedf_io_work *io_work; in qedf_process_unsol_compl() local
2561 io_work = mempool_alloc(qedf->io_mempool, GFP_ATOMIC); in qedf_process_unsol_compl()
2562 if (!io_work) { in qedf_process_unsol_compl()
2568 memset(io_work, 0, sizeof(struct qedf_io_work)); in qedf_process_unsol_compl()
2570 INIT_WORK(&io_work->work, qedf_fp_io_handler); in qedf_process_unsol_compl()
2573 memcpy(&io_work->cqe, cqe, sizeof(struct fcoe_cqe)); in qedf_process_unsol_compl()
2575 io_work->qedf = qedf; in qedf_process_unsol_compl()
2576 io_work->fp = fp; in qedf_process_unsol_compl()
2578 queue_work_on(smp_processor_id(), qedf_io_wq, &io_work->work); in qedf_process_unsol_compl()
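
The qedf_io.c hits show the producer side for unsolicited frames: the wrapper is carved out of a mempool with GFP_ATOMIC (this runs in completion context), the CQE is copied into it, and the work is queued on the current CPU. A condensed sketch follows; the wrapper function name qedf_queue_unsol_work() is hypothetical, and the statistics and error reporting of the real qedf_process_unsol_compl() are elided.

/* Sketch of the queueing side seen in qedf_process_unsol_compl(); the types
 * come from the driver's headers and the sketch above, error paths elided. */
#include <linux/workqueue.h>
#include <linux/mempool.h>
#include <linux/smp.h>
#include <linux/string.h>
#include <linux/errno.h>

extern struct workqueue_struct *qedf_io_wq;     /* module-global I/O workqueue */
void qedf_fp_io_handler(struct work_struct *work);      /* handler sketched above */

static int qedf_queue_unsol_work(struct qedf_ctx *qedf, struct fcoe_cqe *cqe,
                                 struct fc_frame *fp)
{
        struct qedf_io_work *io_work;

        /* Atomic context: no sleeping allocation; fall back to the mempool
         * reserve if the slab is exhausted. */
        io_work = mempool_alloc(qedf->io_mempool, GFP_ATOMIC);
        if (!io_work)
                return -ENOMEM;

        memset(io_work, 0, sizeof(struct qedf_io_work));
        INIT_WORK(&io_work->work, qedf_fp_io_handler);

        /* Keep a private copy of the CQE; the ring slot is recycled as soon
         * as this function returns. */
        memcpy(&io_work->cqe, cqe, sizeof(struct fcoe_cqe));
        io_work->qedf = qedf;
        io_work->fp = fp;

        /* Stay on the CPU that took the completion for cache locality. */
        queue_work_on(smp_processor_id(), qedf_io_wq, &io_work->work);
        return 0;
}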
/drivers/nvme/target/
tcp.c
151 struct work_struct io_work; member
601 queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &cmd->queue->io_work); in nvmet_tcp_queue_response()
1397 container_of(w, struct nvmet_tcp_queue, io_work); in nvmet_tcp_io_work()
1423 queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &queue->io_work); in nvmet_tcp_io_work()
1572 cancel_work_sync(&queue->io_work); in nvmet_tcp_release_queue_work()
1580 cancel_work_sync(&queue->io_work); in nvmet_tcp_release_queue_work()
1603 &queue->io_work); in nvmet_tcp_data_ready()
1624 queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &queue->io_work); in nvmet_tcp_write_space()
1705 queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &queue->io_work); in nvmet_tcp_set_queue_sock()
1882 INIT_WORK(&queue->io_work, nvmet_tcp_io_work); in nvmet_tcp_alloc_queue()
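
In the NVMe-oF TCP target the work item is not allocated per event: io_work is embedded in the long-lived struct nvmet_tcp_queue, initialised once in nvmet_tcp_alloc_queue(), kicked from the socket callbacks and from response queueing, and drained with cancel_work_sync() when the queue is released. A reduced sketch of that lifecycle follows; nvmet_tcp_process_queue() and the two setup/teardown wrappers are hypothetical names, and queue_cpu() is the driver's own CPU-selection helper (visible in the hits), whose definition is elided.

/* Sketch of the per-queue io_work lifecycle from drivers/nvme/target/tcp.c;
 * the socket receive/transmit machinery is collapsed into one helper. */
#include <linux/workqueue.h>

extern struct workqueue_struct *nvmet_tcp_wq;

struct nvmet_tcp_queue {
        struct work_struct io_work;     /* one long-lived work item per queue */
        /* ... socket, command arrays, budget state elided ... */
};

static void nvmet_tcp_io_work(struct work_struct *w)
{
        struct nvmet_tcp_queue *queue =
                container_of(w, struct nvmet_tcp_queue, io_work);
        bool pending;

        /* Receive PDUs and transmit responses up to a budget; the helper
         * name is hypothetical, the real loop lives inline in the driver. */
        pending = nvmet_tcp_process_queue(queue);

        /* Self-requeue instead of looping forever so other work bound to the
         * same CPU gets a turn (line 1423 above). */
        if (pending)
                queue_work_on(queue_cpu(queue), nvmet_tcp_wq, &queue->io_work);
}

static void nvmet_tcp_setup_queue_work(struct nvmet_tcp_queue *queue)
{
        /* Done once in nvmet_tcp_alloc_queue() (line 1882 above). */
        INIT_WORK(&queue->io_work, nvmet_tcp_io_work);
}

static void nvmet_tcp_stop_queue_work(struct nvmet_tcp_queue *queue)
{
        /* Teardown must wait for a running handler before freeing the queue;
         * nvmet_tcp_release_queue_work() cancels around the socket shutdown. */
        cancel_work_sync(&queue->io_work);
}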
/drivers/s390/cio/
vfio_ccw_private.h
126 struct work_struct io_work; member
vfio_ccw_drv.c
89 private = container_of(work, struct vfio_ccw_private, io_work); in vfio_ccw_sch_io_todo()
vfio_ccw_fsm.c
367 queue_work(vfio_ccw_work_q, &private->io_work); in fsm_irq()
vfio_ccw_ops.c
55 INIT_WORK(&private->io_work, vfio_ccw_sch_io_todo); in vfio_ccw_mdev_init_dev()
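
The vfio_ccw hits show the simplest shape of the pattern: one work item embedded in the per-device private structure, initialised when the mediated device is set up, queued from the interrupt FSM, and resolved back to its owner with container_of() in the bottom half. A stripped-down sketch follows; the function signatures are simplified, the init wrapper name is hypothetical, and the interrupt-response copying done by the real vfio_ccw_sch_io_todo() is elided.

/* Sketch of the vfio_ccw io_work plumbing; the real bottom half copies IRB
 * data to the mdev I/O region and notifies userspace, elided here. */
#include <linux/workqueue.h>

extern struct workqueue_struct *vfio_ccw_work_q;

struct vfio_ccw_private {
        struct work_struct io_work;
        /* ... subchannel, I/O regions, FSM state elided ... */
};

/* Bottom half: runs in process context after fsm_irq() queued it. */
static void vfio_ccw_sch_io_todo(struct work_struct *work)
{
        struct vfio_ccw_private *private =
                container_of(work, struct vfio_ccw_private, io_work);

        /* ... update the I/O region from the interrupt response, signal
         * the user of the mediated device ... */
}

/* Interrupt-side FSM action (signature simplified): defer the heavy
 * lifting to the workqueue. */
static void fsm_irq(struct vfio_ccw_private *private)
{
        queue_work(vfio_ccw_work_q, &private->io_work);
}

/* Done once at device init (vfio_ccw_mdev_init_dev() above): */
static void vfio_ccw_init_io_work(struct vfio_ccw_private *private)
{
        INIT_WORK(&private->io_work, vfio_ccw_sch_io_todo);
}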
/drivers/nvme/host/
tcp.c
143 struct work_struct io_work; member
426 queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work); in nvme_tcp_queue_request()
791 queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work); in nvme_tcp_handle_r2t()
1072 queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work); in nvme_tcp_data_ready()
1084 queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work); in nvme_tcp_write_space()
1375 container_of(w, struct nvme_tcp_queue, io_work); in nvme_tcp_io_work()
1407 queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work); in nvme_tcp_io_work()
1774 INIT_WORK(&queue->io_work, nvme_tcp_io_work); in nvme_tcp_alloc_queue()
1925 cancel_work_sync(&queue->io_work); in __nvme_tcp_stop_queue()
2751 queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work); in nvme_tcp_commit_rqs()
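
The host-side NVMe/TCP driver mirrors the target: each struct nvme_tcp_queue embeds an io_work that is always queued on the queue's dedicated CPU (queue->io_cpu), whether the trigger is a new request, an R2T PDU from the controller, a socket data_ready or write_space callback, ->commit_rqs, or the handler itself requeueing after its budget runs out; __nvme_tcp_stop_queue() then quiesces it with cancel_work_sync(). A short sketch of the producer side follows; the two wrapper function names are hypothetical, the calls inside them come from the hits above.

/* Sketch of how drivers/nvme/host/tcp.c funnels every I/O trigger into one
 * per-queue work item pinned to queue->io_cpu. */
#include <linux/workqueue.h>

extern struct workqueue_struct *nvme_tcp_wq;

struct nvme_tcp_queue {
        int io_cpu;                     /* chosen when the queue is created */
        struct work_struct io_work;     /* INIT_WORK(..., nvme_tcp_io_work) */
        /* ... socket, send/recv state elided ... */
};

/* New requests, R2T handling, sk_data_ready, sk_write_space and ->commit_rqs
 * (the hits above) all end in this one call, so all socket I/O for the queue
 * runs on the same CPU: */
static inline void nvme_tcp_kick_io_work(struct nvme_tcp_queue *queue)
{
        queue_work_on(queue->io_cpu, nvme_tcp_wq, &queue->io_work);
}

/* Quiesce before tearing the queue down (__nvme_tcp_stop_queue() above);
 * cancel_work_sync() waits for a running nvme_tcp_io_work() to finish. */
static void nvme_tcp_quiesce_io_work(struct nvme_tcp_queue *queue)
{
        cancel_work_sync(&queue->io_work);
}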
/drivers/memstick/core/
ms_block.h
160 struct work_struct io_work; member
ms_block.c
1504 queue_work(msb->io_queue, &msb->io_work); in msb_cache_flush_timer()
1878 struct msb_data *msb = container_of(work, struct msb_data, io_work); in msb_io_work()
2005 queue_work(msb->io_queue, &msb->io_work); in msb_queue_rq()
2063 queue_work(msb->io_queue, &msb->io_work); in msb_start()
2123 INIT_WORK(&msb->io_work, msb_io_work); in msb_init_disk()
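
ms_block uses the same embedded-work idea for its block I/O path, but on a private workqueue: msb_io_work() is the single consumer, and it is kicked both from the block-layer ->queue_rq path and from a cache-flush timer, so request processing and deferred flushing serialize on one work item. A rough sketch follows; the two small wrapper functions are hypothetical names, and the flash-translation and cache logic inside the real msb_io_work() is elided.

/* Sketch of the ms_block io_work plumbing from drivers/memstick/core. */
#include <linux/workqueue.h>

struct msb_data {
        struct workqueue_struct *io_queue;      /* private workqueue for this card */
        struct work_struct io_work;
        /* ... request queue, cache buffers, FTL state elided ... */
};

static void msb_io_work(struct work_struct *work)
{
        struct msb_data *msb = container_of(work, struct msb_data, io_work);

        /* ... fetch the next block request, translate and issue memstick
         * transfers, flush the write cache when idle ... */
}

/* ->queue_rq, msb_start() and the cache-flush timer all just poke the
 * worker (the hits above): */
static void msb_kick_io_work(struct msb_data *msb)
{
        queue_work(msb->io_queue, &msb->io_work);
}

/* Done once when the disk is set up (msb_init_disk() above): */
static void msb_setup_io_work(struct msb_data *msb)
{
        INIT_WORK(&msb->io_work, msb_io_work);
}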

Completed in 36 milliseconds