Searched refs:blk_mq_hw_ctx (Results 1 – 25 of 54) sorted by relevance

/linux-6.3-rc2/block/
blk-mq.h
26 struct blk_mq_hw_ctx *hctxs[HCTX_MAX_TYPES];
44 struct request *blk_mq_dequeue_from_ctx(struct blk_mq_hw_ctx *hctx,
68 void blk_mq_try_issue_list_directly(struct blk_mq_hw_ctx *hctx,
125 extern void blk_mq_hctx_kobj_init(struct blk_mq_hw_ctx *hctx);
164 struct blk_mq_hw_ctx *hctx;
179 static inline bool blk_mq_hctx_stopped(struct blk_mq_hw_ctx *hctx) in blk_mq_hctx_stopped()
184 static inline bool blk_mq_hw_queue_mapped(struct blk_mq_hw_ctx *hctx) in blk_mq_hw_queue_mapped()
246 static inline int __blk_mq_active_requests(struct blk_mq_hw_ctx *hctx) in __blk_mq_active_requests()
252 static inline void __blk_mq_put_driver_tag(struct blk_mq_hw_ctx *hctx, in __blk_mq_put_driver_tag()
276 struct blk_mq_hw_ctx *hctx = rq->mq_hctx; in blk_mq_get_driver_tag()
[all …]
blk-mq-debugfs.h
9 struct blk_mq_hw_ctx;
25 struct blk_mq_hw_ctx *hctx);
26 void blk_mq_debugfs_unregister_hctx(struct blk_mq_hw_ctx *hctx);
33 struct blk_mq_hw_ctx *hctx);
34 void blk_mq_debugfs_unregister_sched_hctx(struct blk_mq_hw_ctx *hctx);
44 struct blk_mq_hw_ctx *hctx) in blk_mq_debugfs_register_hctx()
48 static inline void blk_mq_debugfs_unregister_hctx(struct blk_mq_hw_ctx *hctx) in blk_mq_debugfs_unregister_hctx()
69 struct blk_mq_hw_ctx *hctx) in blk_mq_debugfs_register_sched_hctx()
73 static inline void blk_mq_debugfs_unregister_sched_hctx(struct blk_mq_hw_ctx *hctx) in blk_mq_debugfs_unregister_sched_hctx()
blk-mq-sysfs.c
36 struct blk_mq_hw_ctx *hctx = container_of(kobj, struct blk_mq_hw_ctx, in blk_mq_hw_sysfs_release()
48 ssize_t (*show)(struct blk_mq_hw_ctx *, char *);
55 struct blk_mq_hw_ctx *hctx; in blk_mq_hw_sysfs_show()
60 hctx = container_of(kobj, struct blk_mq_hw_ctx, kobj); in blk_mq_hw_sysfs_show()
146 static void blk_mq_unregister_hctx(struct blk_mq_hw_ctx *hctx) in blk_mq_unregister_hctx()
160 static int blk_mq_register_hctx(struct blk_mq_hw_ctx *hctx) in blk_mq_register_hctx()
189 void blk_mq_hctx_kobj_init(struct blk_mq_hw_ctx *hctx) in blk_mq_hctx_kobj_init()
224 struct blk_mq_hw_ctx *hctx; in blk_mq_sysfs_register()
261 struct blk_mq_hw_ctx *hctx; in blk_mq_sysfs_unregister()
277 struct blk_mq_hw_ctx *hctx; in blk_mq_sysfs_unregister_hctxs()
[all …]
blk-mq-sched.h
17 void blk_mq_sched_mark_restart_hctx(struct blk_mq_hw_ctx *hctx);
18 void __blk_mq_sched_restart(struct blk_mq_hw_ctx *hctx);
22 void blk_mq_sched_insert_requests(struct blk_mq_hw_ctx *hctx,
26 void blk_mq_sched_dispatch_requests(struct blk_mq_hw_ctx *hctx);
32 static inline void blk_mq_sched_restart(struct blk_mq_hw_ctx *hctx) in blk_mq_sched_restart()
77 static inline bool blk_mq_sched_has_work(struct blk_mq_hw_ctx *hctx) in blk_mq_sched_has_work()
87 static inline bool blk_mq_sched_needs_restart(struct blk_mq_hw_ctx *hctx) in blk_mq_sched_needs_restart()
blk-mq-tag.h
23 extern int blk_mq_tag_update_depth(struct blk_mq_hw_ctx *hctx,
37 struct blk_mq_hw_ctx *hctx) in bt_wait_ptr()
50 extern void __blk_mq_tag_busy(struct blk_mq_hw_ctx *);
51 extern void __blk_mq_tag_idle(struct blk_mq_hw_ctx *);
53 static inline void blk_mq_tag_busy(struct blk_mq_hw_ctx *hctx) in blk_mq_tag_busy()
59 static inline void blk_mq_tag_idle(struct blk_mq_hw_ctx *hctx) in blk_mq_tag_idle()
blk-mq-sched.c
24 void blk_mq_sched_mark_restart_hctx(struct blk_mq_hw_ctx *hctx) in blk_mq_sched_mark_restart_hctx()
33 void __blk_mq_sched_restart(struct blk_mq_hw_ctx *hctx) in __blk_mq_sched_restart()
60 struct blk_mq_hw_ctx *hctx = in blk_mq_dispatch_hctx_list()
217 static int blk_mq_do_dispatch_ctx(struct blk_mq_hw_ctx *hctx) in blk_mq_do_dispatch_ctx()
349 struct blk_mq_hw_ctx *hctx; in blk_mq_sched_bio_merge()
413 struct blk_mq_hw_ctx *hctx = rq->mq_hctx; in blk_mq_sched_insert_request()
498 struct blk_mq_hw_ctx *hctx, in blk_mq_sched_alloc_map_and_rqs()
523 struct blk_mq_hw_ctx *hctx; in blk_mq_sched_tags_teardown()
561 struct blk_mq_hw_ctx *hctx; in blk_mq_init_sched()
626 struct blk_mq_hw_ctx *hctx; in blk_mq_sched_free_rqs()
[all …]
blk-mq-debugfs.c
202 struct blk_mq_hw_ctx *hctx = data; in hctx_state_show()
230 struct blk_mq_hw_ctx *hctx = data; in hctx_flags_show()
372 struct blk_mq_hw_ctx *hctx;
392 struct blk_mq_hw_ctx *hctx = data; in hctx_busy_show()
409 struct blk_mq_hw_ctx *hctx = data; in hctx_type_show()
418 struct blk_mq_hw_ctx *hctx = data; in hctx_ctx_map_show()
683 struct blk_mq_hw_ctx *hctx; in blk_mq_debugfs_register()
727 struct blk_mq_hw_ctx *hctx) in blk_mq_debugfs_register_hctx()
756 struct blk_mq_hw_ctx *hctx; in blk_mq_debugfs_register_hctxs()
765 struct blk_mq_hw_ctx *hctx; in blk_mq_debugfs_unregister_hctxs()
[all …]
elevator.h
23 struct blk_mq_hw_ctx;
28 int (*init_hctx)(struct blk_mq_hw_ctx *, unsigned int);
29 void (*exit_hctx)(struct blk_mq_hw_ctx *, unsigned int);
30 void (*depth_updated)(struct blk_mq_hw_ctx *);
40 void (*insert_requests)(struct blk_mq_hw_ctx *, struct list_head *, bool);
41 struct request *(*dispatch_request)(struct blk_mq_hw_ctx *);
42 bool (*has_work)(struct blk_mq_hw_ctx *);
blk-mq.c
347 struct blk_mq_hw_ctx *hctx; in blk_mq_wake_waiters()
1635 struct blk_mq_hw_ctx *hctx; in blk_mq_timeout_work()
1688 struct blk_mq_hw_ctx *hctx;
1722 struct blk_mq_hw_ctx *hctx;
1802 struct blk_mq_hw_ctx *hctx; in blk_mq_dispatch_wake()
2418 struct blk_mq_hw_ctx *hctx; in blk_mq_stop_hw_queues()
2436 struct blk_mq_hw_ctx *hctx; in blk_mq_start_hw_queues()
2456 struct blk_mq_hw_ctx *hctx; in blk_mq_start_stopped_hw_queues()
2466 struct blk_mq_hw_ctx *hctx; in blk_mq_run_work_fn()
3420 struct blk_mq_hw_ctx *hctx;
[all …]
kyber-iosched.c
455 static void kyber_depth_updated(struct blk_mq_hw_ctx *hctx) in kyber_depth_updated()
699 struct blk_mq_hw_ctx *hctx = READ_ONCE(wqe->private); in kyber_domain_wake()
709 struct blk_mq_hw_ctx *hctx) in kyber_get_domain_token()
757 struct blk_mq_hw_ctx *hctx) in kyber_dispatch_cur_domain()
850 static bool kyber_has_work(struct blk_mq_hw_ctx *hctx) in kyber_has_work()
914 struct blk_mq_hw_ctx *hctx = m->private; \
924 struct blk_mq_hw_ctx *hctx = m->private; \
933 struct blk_mq_hw_ctx *hctx = m->private; \
948 struct blk_mq_hw_ctx *hctx = data; \
972 struct blk_mq_hw_ctx *hctx = data; in kyber_cur_domain_show()
[all …]
blk-mq-tag.c
40 void __blk_mq_tag_busy(struct blk_mq_hw_ctx *hctx) in __blk_mq_tag_busy()
75 void __blk_mq_tag_idle(struct blk_mq_hw_ctx *hctx) in __blk_mq_tag_idle()
238 struct blk_mq_hw_ctx *hctx;
262 struct blk_mq_hw_ctx *hctx = iter_data->hctx; in bt_iter()
305 static void bt_for_each(struct blk_mq_hw_ctx *hctx, struct request_queue *q, in bt_for_each()
509 struct blk_mq_hw_ctx *hctx; in blk_mq_queue_tag_busy_iter()
594 int blk_mq_tag_update_depth(struct blk_mq_hw_ctx *hctx, in blk_mq_tag_update_depth()
mq-deadline.c
572 static struct request *dd_dispatch_request(struct blk_mq_hw_ctx *hctx) in dd_dispatch_request()
620 static void dd_depth_updated(struct blk_mq_hw_ctx *hctx) in dd_depth_updated()
632 static int dd_init_hctx(struct blk_mq_hw_ctx *hctx, unsigned int hctx_idx) in dd_init_hctx()
770 static void dd_insert_request(struct blk_mq_hw_ctx *hctx, struct request *rq, in dd_insert_request()
827 static void dd_insert_requests(struct blk_mq_hw_ctx *hctx, in dd_insert_requests()
850 static bool dd_has_write_work(struct blk_mq_hw_ctx *hctx) in dd_has_write_work()
915 static bool dd_has_work(struct blk_mq_hw_ctx *hctx) in dd_has_work()
blk-flush.c
362 struct blk_mq_hw_ctx *hctx = rq->mq_hctx; in mq_flush_data_end_io()
528 void blk_mq_hctx_set_fq_lock_class(struct blk_mq_hw_ctx *hctx, in blk_mq_hctx_set_fq_lock_class()
bsg-lib.c
272 static blk_status_t bsg_queue_rq(struct blk_mq_hw_ctx *hctx, in bsg_queue_rq()
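
The elevator.h, kyber-iosched.c, and mq-deadline.c hits above are the I/O-scheduler side of the interface: each elevator_mq_ops hook is handed the struct blk_mq_hw_ctx it operates on and typically parks per-queue state in hctx->sched_data. The fragment below is only a hedged sketch of that pattern; the myiosched_* names and the pending-list layout are hypothetical and not code from this tree.

/*
 * Hedged sketch only: per-hardware-queue scheduler state hung off
 * hctx->sched_data. The myiosched_* names are hypothetical.
 */
#include <linux/blk-mq.h>
#include <linux/slab.h>

struct myiosched_hctx_data {
	struct list_head pending;	/* requests not yet dispatched */
};

static int myiosched_init_hctx(struct blk_mq_hw_ctx *hctx, unsigned int hctx_idx)
{
	struct myiosched_hctx_data *mhd;

	mhd = kzalloc_node(sizeof(*mhd), GFP_KERNEL, hctx->numa_node);
	if (!mhd)
		return -ENOMEM;
	INIT_LIST_HEAD(&mhd->pending);
	hctx->sched_data = mhd;		/* matches the ->init_hctx() signature in elevator.h */
	return 0;
}

static void myiosched_exit_hctx(struct blk_mq_hw_ctx *hctx, unsigned int hctx_idx)
{
	kfree(hctx->sched_data);
	hctx->sched_data = NULL;
}

static bool myiosched_has_work(struct blk_mq_hw_ctx *hctx)
{
	struct myiosched_hctx_data *mhd = hctx->sched_data;

	/* called by blk-mq to decide whether the hw queue needs a dispatch run */
	return !list_empty_careful(&mhd->pending);
}
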
/linux-6.3-rc2/include/linux/
blk-mq.h
87 struct blk_mq_hw_ctx *mq_hctx;
295 struct blk_mq_hw_ctx { struct
550 blk_status_t (*queue_rq)(struct blk_mq_hw_ctx *,
560 void (*commit_rqs)(struct blk_mq_hw_ctx *);
600 int (*poll)(struct blk_mq_hw_ctx *, struct io_comp_batch *);
612 int (*init_hctx)(struct blk_mq_hw_ctx *, void *, unsigned int);
616 void (*exit_hctx)(struct blk_mq_hw_ctx *, unsigned int);
877 void blk_mq_stop_hw_queue(struct blk_mq_hw_ctx *hctx);
878 void blk_mq_start_hw_queue(struct blk_mq_hw_ctx *hctx);
889 void blk_mq_run_hw_queue(struct blk_mq_hw_ctx *hctx, bool async);
[all …]
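
The include/linux/blk-mq.h hits above are the public surface: struct blk_mq_hw_ctx itself plus the blk_mq_ops callbacks (queue_rq, commit_rqs, poll, init_hctx, exit_hctx) through which drivers such as virtio_blk, null_blk, and scsi_lib below receive the hardware context. The following is a minimal, hedged sketch of that wiring; the mydrv_* names are hypothetical and the request is completed inline only to keep the example short.

/*
 * Hedged sketch only: how a driver's blk_mq_ops callbacks receive the
 * hardware context. The mydrv_* names are hypothetical.
 */
#include <linux/blk-mq.h>

static blk_status_t mydrv_queue_rq(struct blk_mq_hw_ctx *hctx,
				   const struct blk_mq_queue_data *bd)
{
	struct request *rq = bd->rq;

	blk_mq_start_request(rq);
	/*
	 * hctx->driver_data was attached in ->init_hctx() below; a real
	 * driver would submit rq to hardware here.
	 */
	blk_mq_end_request(rq, BLK_STS_OK);	/* completed inline for brevity */
	return BLK_STS_OK;
}

static int mydrv_init_hctx(struct blk_mq_hw_ctx *hctx, void *driver_data,
			   unsigned int hctx_idx)
{
	/* per-hardware-queue driver state rides on the hardware context */
	hctx->driver_data = driver_data;
	return 0;
}

static const struct blk_mq_ops mydrv_mq_ops = {
	.queue_rq	= mydrv_queue_rq,
	.init_hctx	= mydrv_init_hctx,
};
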
/linux-6.3-rc2/drivers/block/rnbd/
rnbd-clt.h
105 struct blk_mq_hw_ctx *hctx;
rnbd-clt.c
1096 struct blk_mq_hw_ctx *hctx, in rnbd_clt_dev_kick_mq_queue()
1111 static blk_status_t rnbd_queue_rq(struct blk_mq_hw_ctx *hctx, in rnbd_queue_rq()
1158 static int rnbd_rdma_poll(struct blk_mq_hw_ctx *hctx, struct io_comp_batch *iob) in rnbd_rdma_poll()
1312 struct blk_mq_hw_ctx *hctx) in rnbd_init_hw_queue()
1322 struct blk_mq_hw_ctx *hctx; in rnbd_init_mq_hw_queues()
/linux-6.3-rc2/drivers/s390/block/
scm_blk.c
282 static blk_status_t scm_blk_request(struct blk_mq_hw_ctx *hctx, in scm_blk_request()
331 static int scm_blk_init_hctx(struct blk_mq_hw_ctx *hctx, void *data, in scm_blk_init_hctx()
345 static void scm_blk_exit_hctx(struct blk_mq_hw_ctx *hctx, unsigned int idx) in scm_blk_exit_hctx()
/linux-6.3-rc2/drivers/nvme/target/
loop.c
131 static blk_status_t nvme_loop_queue_rq(struct blk_mq_hw_ctx *hctx, in nvme_loop_queue_rq()
218 static int nvme_loop_init_hctx(struct blk_mq_hw_ctx *hctx, void *data, in nvme_loop_init_hctx()
238 static int nvme_loop_init_admin_hctx(struct blk_mq_hw_ctx *hctx, void *data, in nvme_loop_init_admin_hctx()
/linux-6.3-rc2/drivers/block/
virtio_blk.c
134 static inline struct virtio_blk_vq *get_virtio_blk_vq(struct blk_mq_hw_ctx *hctx) in get_virtio_blk_vq()
216 static int virtblk_map_data(struct blk_mq_hw_ctx *hctx, struct request *req, in virtblk_map_data()
394 static void virtio_commit_rqs(struct blk_mq_hw_ctx *hctx) in virtio_commit_rqs()
421 static blk_status_t virtblk_prep_rq(struct blk_mq_hw_ctx *hctx, in virtblk_prep_rq()
443 static blk_status_t virtio_queue_rq(struct blk_mq_hw_ctx *hctx, in virtio_queue_rq()
1207 static int virtblk_poll(struct blk_mq_hw_ctx *hctx, struct io_comp_batch *iob) in virtblk_poll()
z2ram.c
68 static blk_status_t z2_queue_rq(struct blk_mq_hw_ctx *hctx, in z2_queue_rq()
/linux-6.3-rc2/drivers/block/null_blk/
main.c
1604 static int null_poll(struct blk_mq_hw_ctx *hctx, struct io_comp_batch *iob) in null_poll()
1634 struct blk_mq_hw_ctx *hctx = rq->mq_hctx; in null_timeout_rq()
1660 static blk_status_t null_queue_rq(struct blk_mq_hw_ctx *hctx, in null_queue_rq()
1724 static void null_exit_hctx(struct blk_mq_hw_ctx *hctx, unsigned int hctx_idx) in null_exit_hctx()
1741 static int null_init_hctx(struct blk_mq_hw_ctx *hctx, void *driver_data, in null_init_hctx()
/linux-6.3-rc2/drivers/nvme/host/
apple.c
732 static blk_status_t apple_nvme_queue_rq(struct blk_mq_hw_ctx *hctx, in apple_nvme_queue_rq()
775 static int apple_nvme_init_hctx(struct blk_mq_hw_ctx *hctx, void *data, in apple_nvme_init_hctx()
936 static int apple_nvme_poll(struct blk_mq_hw_ctx *hctx, in apple_nvme_poll()
tcp.c
487 static int nvme_tcp_init_hctx(struct blk_mq_hw_ctx *hctx, void *data, in nvme_tcp_init_hctx()
497 static int nvme_tcp_init_admin_hctx(struct blk_mq_hw_ctx *hctx, void *data, in nvme_tcp_init_admin_hctx()
2396 static void nvme_tcp_commit_rqs(struct blk_mq_hw_ctx *hctx) in nvme_tcp_commit_rqs()
2404 static blk_status_t nvme_tcp_queue_rq(struct blk_mq_hw_ctx *hctx, in nvme_tcp_queue_rq()
2471 static int nvme_tcp_poll(struct blk_mq_hw_ctx *hctx, struct io_comp_batch *iob) in nvme_tcp_poll()
/linux-6.3-rc2/drivers/scsi/
scsi_lib.c
1703 static blk_status_t scsi_queue_rq(struct blk_mq_hw_ctx *hctx, in scsi_queue_rq()
1846 static int scsi_mq_poll(struct blk_mq_hw_ctx *hctx, struct io_comp_batch *iob) in scsi_mq_poll()
1856 static int scsi_init_hctx(struct blk_mq_hw_ctx *hctx, void *data, in scsi_init_hctx()
1932 static void scsi_commit_rqs(struct blk_mq_hw_ctx *hctx) in scsi_commit_rqs()
