
Searched refs:sqe (Results 1 – 25 of 98) sorted by relevance


/linux/io_uring/
fs.c
55 if (sqe->buf_index || sqe->splice_fd_in) in io_renameat_prep()
60 ren->old_dfd = READ_ONCE(sqe->fd); in io_renameat_prep()
63 ren->new_dfd = READ_ONCE(sqe->len); in io_renameat_prep()
109 if (sqe->off || sqe->len || sqe->buf_index || sqe->splice_fd_in) in io_unlinkat_prep()
114 un->dfd = READ_ONCE(sqe->fd); in io_unlinkat_prep()
159 if (sqe->off || sqe->rw_flags || sqe->buf_index || sqe->splice_fd_in) in io_mkdirat_prep()
164 mkd->dfd = READ_ONCE(sqe->fd); in io_mkdirat_prep()
165 mkd->mode = READ_ONCE(sqe->len); in io_mkdirat_prep()
203 if (sqe->len || sqe->rw_flags || sqe->buf_index || sqe->splice_fd_in) in io_symlinkat_prep()
208 sl->new_dfd = READ_ONCE(sqe->fd); in io_symlinkat_prep()
[all …]
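All of the io_uring prep handlers matched here follow the same convention: reject any SQE field the opcode does not define, then pull the fields it does use with READ_ONCE(), since the SQ ring is mapped into userspace and can change underneath the kernel. A minimal sketch of that shape, using a made-up opcode and command struct rather than any handler from the files above:

/* Hypothetical prep handler; io_frob and io_frobnicate_prep are invented. */
struct io_frob {
        struct file *file;      /* per io_uring convention, file comes first */
        int dfd;
        unsigned int mode;
};

static int io_frobnicate_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe)
{
        struct io_frob *frob = io_kiocb_to_cmd(req, struct io_frob);

        /* Fields this (hypothetical) opcode does not use must be zero. */
        if (sqe->off || sqe->addr || sqe->buf_index || sqe->splice_fd_in)
                return -EINVAL;

        /* Used fields are read once, straight out of the shared SQ ring. */
        frob->dfd = READ_ONCE(sqe->fd);
        frob->mode = READ_ONCE(sqe->len);

        req->flags |= REQ_F_FORCE_ASYNC;
        return 0;
}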
sync.c
29 if (unlikely(sqe->addr || sqe->buf_index || sqe->splice_fd_in)) in io_sfr_prep()
32 sync->off = READ_ONCE(sqe->off); in io_sfr_prep()
33 sync->len = READ_ONCE(sqe->len); in io_sfr_prep()
57 if (unlikely(sqe->addr || sqe->buf_index || sqe->splice_fd_in)) in io_fsync_prep()
60 sync->flags = READ_ONCE(sqe->fsync_flags); in io_fsync_prep()
64 sync->off = READ_ONCE(sqe->off); in io_fsync_prep()
65 sync->len = READ_ONCE(sqe->len); in io_fsync_prep()
89 if (sqe->buf_index || sqe->rw_flags || sqe->splice_fd_in) in io_fallocate_prep()
92 sync->off = READ_ONCE(sqe->off); in io_fallocate_prep()
93 sync->len = READ_ONCE(sqe->addr); in io_fallocate_prep()
[all …]
openclose.c
56 if (unlikely(sqe->buf_index)) in __io_openat_prep()
65 open->dfd = READ_ONCE(sqe->fd); in __io_openat_prep()
88 u64 mode = READ_ONCE(sqe->len); in io_openat_prep()
89 u64 flags = READ_ONCE(sqe->open_flags); in io_openat_prep()
92 return __io_openat_prep(req, sqe); in io_openat_prep()
103 len = READ_ONCE(sqe->len); in io_openat2_prep()
111 return __io_openat_prep(req, sqe); in io_openat2_prep()
211 if (sqe->off || sqe->addr || sqe->len || sqe->rw_flags || sqe->buf_index) in io_close_prep()
216 close->fd = READ_ONCE(sqe->fd); in io_close_prep()
268 if (sqe->off || sqe->addr || sqe->len || sqe->buf_index || in io_install_fixed_fd_prep()
[all …]
advise.c
31 int io_madvise_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe) in io_madvise_prep() argument
36 if (sqe->buf_index || sqe->splice_fd_in) in io_madvise_prep()
39 ma->addr = READ_ONCE(sqe->addr); in io_madvise_prep()
40 ma->len = READ_ONCE(sqe->off); in io_madvise_prep()
42 ma->len = READ_ONCE(sqe->len); in io_madvise_prep()
43 ma->advice = READ_ONCE(sqe->fadvise_advice); in io_madvise_prep()
83 if (sqe->buf_index || sqe->splice_fd_in) in io_fadvise_prep()
86 fa->offset = READ_ONCE(sqe->off); in io_fadvise_prep()
87 fa->len = READ_ONCE(sqe->addr); in io_fadvise_prep()
89 fa->len = READ_ONCE(sqe->len); in io_fadvise_prep()
[all …]
uring_cmd.c
43 ioucmd->sqe = NULL; in io_req_uring_cleanup()
70 cmd->sqe = NULL; in io_uring_try_cancel_uring_cmd()
179 const struct io_uring_sqe *sqe) in io_uring_cmd_prep_setup() argument
190 ioucmd->sqe = sqe; in io_uring_cmd_prep_setup()
195 ioucmd->sqe = req->async_data; in io_uring_cmd_prep_setup()
203 if (sqe->__pad1) in io_uring_cmd_prep()
257 if (ioucmd->sqe != (void *) cache) in io_uring_cmd()
295 level = READ_ONCE(cmd->sqe->level); in io_uring_cmd_getsockopt()
301 optlen = READ_ONCE(cmd->sqe->optlen); in io_uring_cmd_getsockopt()
325 level = READ_ONCE(cmd->sqe->level); in io_uring_cmd_setsockopt()
[all …]
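io_uring_cmd_prep_setup() above either leaves ioucmd->sqe pointing into the shared SQ ring or, when the request must outlive submission, redirects it to a stable copy in req->async_data. Either way, a driver's ->uring_cmd() handler reads its command-private payload through the io_uring_sqe_cmd() accessor. A rough sketch with an invented driver, struct and opcode (mydrv_cmd and MYDRV_CMD_ECHO are illustrative only):

#define MYDRV_CMD_ECHO  0x01            /* hypothetical command opcode */

struct mydrv_cmd {                      /* hypothetical cmd-area layout */
        __u32 value;
        __u32 pad;
};

static int mydrv_uring_cmd(struct io_uring_cmd *ioucmd, unsigned int issue_flags)
{
        /* Driver-defined bytes carried in the SQE's cmd area. */
        const struct mydrv_cmd *mc = io_uring_sqe_cmd(ioucmd->sqe);

        switch (ioucmd->cmd_op) {
        case MYDRV_CMD_ECHO:
                /* Synchronous completion: the return value is posted as cqe->res. */
                return READ_ONCE(mc->value);
        default:
                return -ENOTTY;
        }
}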
xattr.c
45 const struct io_uring_sqe *sqe) in __io_getxattr_prep() argument
56 name = u64_to_user_ptr(READ_ONCE(sqe->addr)); in __io_getxattr_prep()
58 ix->ctx.size = READ_ONCE(sqe->len); in __io_getxattr_prep()
59 ix->ctx.flags = READ_ONCE(sqe->xattr_flags); in __io_getxattr_prep()
84 return __io_getxattr_prep(req, sqe); in io_fgetxattr_prep()
93 ret = __io_getxattr_prep(req, sqe); in io_getxattr_prep()
149 const struct io_uring_sqe *sqe) in __io_setxattr_prep() argument
162 ix->ctx.size = READ_ONCE(sqe->len); in __io_setxattr_prep()
163 ix->ctx.flags = READ_ONCE(sqe->xattr_flags); in __io_setxattr_prep()
186 ret = __io_setxattr_prep(req, sqe); in io_setxattr_prep()
[all …]
truncate.c
23 int io_ftruncate_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe) in io_ftruncate_prep() argument
27 if (sqe->rw_flags || sqe->addr || sqe->len || sqe->buf_index || in io_ftruncate_prep()
28 sqe->splice_fd_in || sqe->addr3) in io_ftruncate_prep()
31 ft->len = READ_ONCE(sqe->off); in io_ftruncate_prep()
splice.c
27 const struct io_uring_sqe *sqe) in __io_splice_prep() argument
32 sp->len = READ_ONCE(sqe->len); in __io_splice_prep()
33 sp->flags = READ_ONCE(sqe->splice_flags); in __io_splice_prep()
36 sp->splice_fd_in = READ_ONCE(sqe->splice_fd_in); in __io_splice_prep()
41 int io_tee_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe) in io_tee_prep() argument
43 if (READ_ONCE(sqe->splice_off_in) || READ_ONCE(sqe->off)) in io_tee_prep()
45 return __io_splice_prep(req, sqe); in io_tee_prep()
79 int io_splice_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe) in io_splice_prep() argument
83 sp->off_in = READ_ONCE(sqe->splice_off_in); in io_splice_prep()
84 sp->off_out = READ_ONCE(sqe->off); in io_splice_prep()
[all …]
net.h
23 int io_shutdown_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
27 int io_sendmsg_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
32 int io_recvmsg_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
38 int io_accept_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
41 int io_socket_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
44 int io_connect_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
49 int io_send_zc_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
52 int io_bind_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
55 int io_listen_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
rw.h
15 int io_prep_read_fixed(struct io_kiocb *req, const struct io_uring_sqe *sqe);
16 int io_prep_write_fixed(struct io_kiocb *req, const struct io_uring_sqe *sqe);
17 int io_prep_readv(struct io_kiocb *req, const struct io_uring_sqe *sqe);
18 int io_prep_writev(struct io_kiocb *req, const struct io_uring_sqe *sqe);
19 int io_prep_read(struct io_kiocb *req, const struct io_uring_sqe *sqe);
20 int io_prep_write(struct io_kiocb *req, const struct io_uring_sqe *sqe);
26 int io_read_mshot_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
statx.c
23 int io_statx_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe) in io_statx_prep() argument
28 if (sqe->buf_index || sqe->splice_fd_in) in io_statx_prep()
33 sx->dfd = READ_ONCE(sqe->fd); in io_statx_prep()
34 sx->mask = READ_ONCE(sqe->len); in io_statx_prep()
35 path = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_statx_prep()
36 sx->buffer = u64_to_user_ptr(READ_ONCE(sqe->addr2)); in io_statx_prep()
37 sx->flags = READ_ONCE(sqe->statx_flags); in io_statx_prep()
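io_statx_prep() pulls dfd, mask, path, buffer and flags out of sqe->fd, len, addr, addr2 and statx_flags. For the userspace side of that mapping, here is a minimal sketch using liburing's io_uring_prep_statx() (assumes liburing is installed and glibc is new enough to expose struct statx):

/* Maps onto the fields io_statx_prep() reads:
 *   sqe->fd <- dfd, sqe->addr <- path, sqe->len <- mask,
 *   sqe->addr2 <- statxbuf, sqe->statx_flags <- flags
 */
#define _GNU_SOURCE
#include <fcntl.h>
#include <sys/stat.h>
#include <stdio.h>
#include <liburing.h>

int main(void)
{
        struct io_uring ring;
        struct io_uring_sqe *sqe;
        struct io_uring_cqe *cqe;
        struct statx stx;

        if (io_uring_queue_init(8, &ring, 0))
                return 1;
        sqe = io_uring_get_sqe(&ring);
        io_uring_prep_statx(sqe, AT_FDCWD, "/etc/hostname", 0, STATX_SIZE, &stx);
        io_uring_submit(&ring);
        if (!io_uring_wait_cqe(&ring, &cqe) && cqe->res == 0)
                printf("size=%llu\n", (unsigned long long)stx.stx_size);
        io_uring_cqe_seen(&ring, cqe);
        io_uring_queue_exit(&ring);
        return 0;
}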
epoll.c
24 int io_epoll_ctl_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe) in io_epoll_ctl_prep() argument
28 if (sqe->buf_index || sqe->splice_fd_in) in io_epoll_ctl_prep()
31 epoll->epfd = READ_ONCE(sqe->fd); in io_epoll_ctl_prep()
32 epoll->op = READ_ONCE(sqe->len); in io_epoll_ctl_prep()
33 epoll->fd = READ_ONCE(sqe->off); in io_epoll_ctl_prep()
38 ev = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_epoll_ctl_prep()
futex.c
164 int io_futex_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe) in io_futex_prep() argument
169 if (unlikely(sqe->len || sqe->futex_flags || sqe->buf_index || in io_futex_prep()
170 sqe->file_index)) in io_futex_prep()
173 iof->uaddr = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_futex_prep()
174 iof->futex_val = READ_ONCE(sqe->addr2); in io_futex_prep()
175 iof->futex_mask = READ_ONCE(sqe->addr3); in io_futex_prep()
176 flags = READ_ONCE(sqe->fd); in io_futex_prep()
214 if (unlikely(sqe->fd || sqe->buf_index || sqe->file_index || in io_futexv_prep()
215 sqe->addr2 || sqe->futex_flags || sqe->addr3)) in io_futexv_prep()
218 iof->uaddr = u64_to_user_ptr(READ_ONCE(sqe->addr)); in io_futexv_prep()
[all …]
net.c
96 if (unlikely(sqe->off || sqe->addr || sqe->rw_flags || in io_shutdown_prep()
97 sqe->buf_index || sqe->splice_fd_in)) in io_shutdown_prep()
416 } else if (sqe->addr2 || sqe->file_index) { in io_sendmsg_prep()
781 if (unlikely(sqe->file_index || sqe->addr2)) in io_recvmsg_prep()
1230 if (unlikely(READ_ONCE(sqe->__pad2[0]) || READ_ONCE(sqe->addr3))) in io_send_zc_prep()
1273 if (unlikely(sqe->addr2 || sqe->file_index)) in io_send_zc_prep()
1513 if (sqe->len || sqe->buf_index) in io_accept_prep()
1622 if (sqe->addr || sqe->rw_flags || sqe->buf_index) in io_socket_prep()
1677 if (sqe->len || sqe->buf_index || sqe->rw_flags || sqe->splice_fd_in) in io_connect_prep()
1740 if (sqe->len || sqe->buf_index || sqe->rw_flags || sqe->splice_fd_in) in io_bind_prep()
[all …]
fs.h
3 int io_renameat_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
7 int io_unlinkat_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
11 int io_mkdirat_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
15 int io_symlinkat_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
18 int io_linkat_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
fdinfo.c
95 struct io_uring_sqe *sqe; in io_uring_show_fdinfo() local
103 sqe = &ctx->sq_sqes[sq_idx << sq_shift]; in io_uring_show_fdinfo()
107 sq_idx, io_uring_get_opcode(sqe->opcode), sqe->fd, in io_uring_show_fdinfo()
108 sqe->flags, (unsigned long long) sqe->off, in io_uring_show_fdinfo()
109 (unsigned long long) sqe->addr, sqe->rw_flags, in io_uring_show_fdinfo()
110 sqe->buf_index, sqe->user_data); in io_uring_show_fdinfo()
112 u64 *sqeb = (void *) (sqe + 1); in io_uring_show_fdinfo()
xattr.h
5 int io_fsetxattr_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
8 int io_setxattr_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
11 int io_fgetxattr_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
14 int io_getxattr_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
openclose.h
6 int io_openat_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
10 int io_openat2_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
13 int io_close_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
16 int io_install_fixed_fd_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe);
/linux/drivers/crypto/hisilicon/zip/
zip_crypto.c
170 sqe->dw9 = val; in hisi_zip_fill_buf_type()
179 sqe->dw9 = val; in hisi_zip_fill_req_type()
184 sqe->dw26 = req->req_id; in hisi_zip_fill_tag()
193 sqe->dw7 = val; in hisi_zip_fill_sqe_type()
203 ops->fill_addr(sqe, req); in hisi_zip_fill_sqe()
204 ops->fill_buf_size(sqe, req); in hisi_zip_fill_sqe()
207 ops->fill_tag(sqe, req); in hisi_zip_fill_sqe()
269 return sqe->dw26; in hisi_zip_get_tag()
279 return sqe->produced; in hisi_zip_get_dstlen()
290 u32 tag = ops->get_tag(sqe); in hisi_zip_acomp_cb()
[all …]
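The zip driver fills its hardware SQE through a per-generation ops table (fill_addr, fill_buf_size, fill_tag, get_tag above), so one hisi_zip_fill_sqe() path can serve devices whose SQE dword layouts differ. A stripped-down sketch of that dispatch pattern with invented types; the real structures live in the driver:

#include <stdint.h>
#include <string.h>

struct hw_sqe  { uint32_t dw[32]; };    /* stand-in for the hardware SQE */
struct zip_req { uint32_t req_id; uint32_t src_len; uint64_t src, dst; };

struct zip_sqe_ops {
        void     (*fill_addr)(struct hw_sqe *sqe, struct zip_req *req);
        void     (*fill_buf_size)(struct hw_sqe *sqe, struct zip_req *req);
        void     (*fill_tag)(struct hw_sqe *sqe, struct zip_req *req);
        uint32_t (*get_tag)(struct hw_sqe *sqe);
};

static void fill_sqe(const struct zip_sqe_ops *ops, struct hw_sqe *sqe,
                     struct zip_req *req)
{
        memset(sqe, 0, sizeof(*sqe));
        ops->fill_addr(sqe, req);       /* DMA addresses of source/destination */
        ops->fill_buf_size(sqe, req);   /* input/output lengths */
        ops->fill_tag(sqe, req);        /* request id, read back via get_tag() */
}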
/linux/drivers/infiniband/sw/siw/
siw_qp.c
278 wqe->sqe.flags = 0; in siw_qp_mpa_rts()
279 wqe->sqe.num_sge = 1; in siw_qp_mpa_rts()
287 wqe->sqe.rkey = 1; in siw_qp_mpa_rts()
288 wqe->sqe.raddr = 0; in siw_qp_mpa_rts()
874 rreq->id = sqe->id; in siw_read_to_orq()
886 struct siw_sqe *sqe; in siw_activate_tx_from_sq() local
891 if (!sqe) in siw_activate_tx_from_sq()
898 memcpy(&wqe->sqe, sqe, sizeof(*sqe)); in siw_activate_tx_from_sq()
914 wqe->sqe.sge[0].laddr = (uintptr_t)&wqe->sqe.sge[1]; in siw_activate_tx_from_sq()
1006 wqe->sqe.flags = 0; in siw_activate_tx()
[all …]
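siw keeps the application's send queue as a ring of struct siw_sqe in shared memory and, as at line 898 above, copies the next valid entry into the QP's in-progress WQE before working on it. A rough sketch of that hand-off with a hypothetical helper name; the real siw_activate_tx_from_sq() additionally resolves SGEs, inline data and read-request bookkeeping:

static int activate_next_tx(struct siw_qp *qp, struct siw_wqe *wqe)
{
        struct siw_sqe *sqe = sq_get_next(qp);  /* next valid SQ slot, or NULL */

        if (!sqe)
                return 0;

        /* Work on a private copy: the ring is shared with the application. */
        memcpy(&wqe->sqe, sqe, sizeof(*sqe));
        wqe->wr_status = SIW_WR_QUEUED;

        smp_wmb();
        sqe->flags = 0;         /* hand the ring slot back to the producer */
        return 1;
}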
siw_qp_tx.c
52 struct siw_sge *sge = &wqe->sqe.sge[0]; in siw_try_1seg()
62 memcpy(paddr, &wqe->sqe.sge[1], bytes); in siw_try_1seg()
137 cpu_to_be64(wqe->sqe.sge[0].laddr); in siw_qp_prepare_tx()
244 cpu_to_be64(wqe->sqe.raddr); in siw_qp_prepare_tx()
649 sge = &wqe->sqe.sge[c_tx->sge_idx]; in siw_tx_hdt()
806 wqe->sqe.num_sge = 1; in siw_qp_sq_proc_tx()
834 wqe->sqe.sge[0].laddr = in siw_qp_sq_proc_tx()
835 (u64)(uintptr_t)&wqe->sqe.sge[1]; in siw_qp_sq_proc_tx()
857 wqe->sqe.id); in siw_qp_sq_proc_tx()
964 mem->stag = sqe->rkey; in siw_fastreg_mr()
[all …]
siw_verbs.c
646 void *kbuf = &sqe->sge[1]; in siw_copy_inline_sgl()
650 sqe->sge[0].lkey = 0; in siw_copy_inline_sgl()
681 struct siw_sqe sqe = {}; in siw_sq_flush_wr() local
688 sqe.opcode = SIW_OP_READ; in siw_sq_flush_wr()
713 sqe.id = wr->wr_id; in siw_sq_flush_wr()
820 if (sqe->flags) { in siw_post_send()
830 sqe->id = wr->wr_id; in siw_post_send()
856 sqe->num_sge = 1; in siw_post_send()
885 sqe->num_sge = 1; in siw_post_send()
905 sqe->num_sge = 1; in siw_post_send()
[all …]
/linux/include/net/libeth/
tx.h
96 static inline void libeth_tx_complete(struct libeth_sqe *sqe, in libeth_tx_complete() argument
99 switch (sqe->type) { in libeth_tx_complete()
105 dma_unmap_page(cp->dev, dma_unmap_addr(sqe, dma), in libeth_tx_complete()
106 dma_unmap_len(sqe, len), DMA_TO_DEVICE); in libeth_tx_complete()
112 switch (sqe->type) { in libeth_tx_complete()
114 cp->ss->packets += sqe->packets; in libeth_tx_complete()
115 cp->ss->bytes += sqe->bytes; in libeth_tx_complete()
117 napi_consume_skb(sqe->skb, cp->napi); in libeth_tx_complete()
120 kfree(sqe->raw); in libeth_tx_complete()
126 sqe->type = LIBETH_SQE_EMPTY; in libeth_tx_complete()
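libeth_tx_complete() above releases whatever a submission-queue element still holds (a DMA mapping, an skb, or nothing for a context entry), updates the per-poll stats, and marks the slot LIBETH_SQE_EMPTY. A sketch of how a driver's completion path might walk its SQE ring with it; mydrv_txq and its fields are invented, while the libeth_cq_pp fields match the dev/ss/napi uses visible above:

struct mydrv_txq {                      /* hypothetical driver Tx queue */
        struct device *dev;             /* device the buffers were mapped for */
        struct libeth_sqe *sqes;        /* ring of submission elements */
        u32 count;
        u32 next_to_clean;
};

static void mydrv_clean_tx(struct mydrv_txq *txq, u32 done_idx,
                           struct libeth_sq_napi_stats *ss)
{
        struct libeth_cq_pp cp = {
                .dev  = txq->dev,       /* used for dma_unmap_page()/single() */
                .ss   = ss,             /* packet/byte counters for this poll */
                .napi = true,           /* completion runs in NAPI context */
        };

        while (txq->next_to_clean != done_idx) {
                libeth_tx_complete(&txq->sqes[txq->next_to_clean], &cp);
                txq->next_to_clean = (txq->next_to_clean + 1) % txq->count;
        }
}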
/linux/tools/include/io_uring/
mini_liburing.h
236 memset(sqe, 0, sizeof(*sqe)); in io_uring_prep_cmd()
238 sqe->fd = sockfd; in io_uring_prep_cmd()
239 sqe->cmd_op = op; in io_uring_prep_cmd()
241 sqe->level = level; in io_uring_prep_cmd()
242 sqe->optname = optname; in io_uring_prep_cmd()
244 sqe->optlen = optlen; in io_uring_prep_cmd()
261 memset(sqe, 0, sizeof(*sqe)); in io_uring_prep_send()
263 sqe->fd = sockfd; in io_uring_prep_send()
265 sqe->len = len; in io_uring_prep_send()
266 sqe->msg_flags = (__u32)flags; in io_uring_prep_send()
[all …]
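io_uring_prep_cmd() here fills the level/optname/optlen SQE fields that io_uring_cmd_getsockopt() reads on the kernel side (uring_cmd.c above). The same flow sketched against full liburing, assuming liburing >= 2.5 and a kernel with SOCKET_URING_OP_GETSOCKOPT support:

#include <liburing.h>
#include <sys/socket.h>
#include <stdio.h>

int main(void)
{
        struct io_uring ring;
        struct io_uring_sqe *sqe;
        struct io_uring_cqe *cqe;
        int fd = socket(AF_INET, SOCK_STREAM, 0);
        int rcvbuf = 0;

        if (fd < 0 || io_uring_queue_init(4, &ring, 0))
                return 1;
        sqe = io_uring_get_sqe(&ring);
        /* Fetch SO_RCVBUF through the socket-command opcode. */
        io_uring_prep_cmd_sock(sqe, SOCKET_URING_OP_GETSOCKOPT, fd,
                               SOL_SOCKET, SO_RCVBUF, &rcvbuf, sizeof(rcvbuf));
        io_uring_submit(&ring);
        if (!io_uring_wait_cqe(&ring, &cqe) && cqe->res >= 0)
                printf("SO_RCVBUF=%d\n", rcvbuf);
        io_uring_cqe_seen(&ring, cqe);
        io_uring_queue_exit(&ring);
        return 0;
}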
/linux/drivers/net/ethernet/qlogic/qed/
qed_nvmetcp_fw_funcs.c
68 if (!task_params->sqe) in init_sqe()
71 memset(task_params->sqe, 0, sizeof(*task_params->sqe)); in init_sqe()
72 task_params->sqe->task_id = cpu_to_le16(task_params->itid); in init_sqe()
79 SET_FIELD(task_params->sqe->contlen_cdbsize, in init_sqe()
81 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_WQE_TYPE, in init_sqe()
94 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_NUM_SGES, num_sges); in init_sqe()
99 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_WQE_TYPE, in init_sqe()
101 SET_FIELD(task_params->sqe->contlen_cdbsize, in init_sqe()
106 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_WQE_TYPE, in init_sqe()
112 SET_FIELD(task_params->sqe->flags, NVMETCP_WQE_NUM_SGES, in init_sqe()
[all …]

Completed in 53 milliseconds
