Searched refs:req (Results 1 – 25 of 197) sorted by relevance

/arch/um/drivers/
mconsole_user.c
85 req->originlen = sizeof(req->origin); in mconsole_get_request()
86 req->len = recvfrom(fd, &req->request, sizeof(req->request), 0, in mconsole_get_request()
87 (struct sockaddr *) req->origin, &req->originlen); in mconsole_get_request()
88 if (req->len < 0) in mconsole_get_request()
91 req->originating_fd = fd; in mconsole_get_request()
97 memmove(req->request.data, &req->request, len); in mconsole_get_request()
102 req->request.len = len; in mconsole_get_request()
118 req->request.data[req->request.len] = '\0'; in mconsole_get_request()
119 req->cmd = mconsole_parse(req); in mconsole_get_request()
120 if (req->cmd == NULL) { in mconsole_get_request()
[all …]
mconsole_kern.c
69 list_del(&req->list); in mc_work_proc()
71 req->request.cmd->handler(&req->request); in mc_work_proc()
72 kfree(req); in mc_work_proc()
88 (*req.cmd->handler)(&req); in mconsole_interrupt()
94 new->request = req; in mconsole_interrupt()
122 len = req->len - (ptr - req->request.data); in mconsole_log()
231 if (!mconsole_get_request(req->originating_fd, req)) in mconsole_stop()
242 mconsole_sysrq(req); in mconsole_stop()
246 (*req->cmd->handler)(req); in mconsole_stop()
552 struct mc_request *req; member
[all …]
mconsole.h
51 void (*handler)(struct mc_request *req);
78 extern void mconsole_help(struct mc_request *req);
79 extern void mconsole_halt(struct mc_request *req);
80 extern void mconsole_reboot(struct mc_request *req);
83 extern void mconsole_sysrq(struct mc_request *req);
84 extern void mconsole_cad(struct mc_request *req);
85 extern void mconsole_stop(struct mc_request *req);
86 extern void mconsole_go(struct mc_request *req);
87 extern void mconsole_log(struct mc_request *req);
88 extern void mconsole_proc(struct mc_request *req);
[all …]
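In the mconsole hits above, req is a management-console request: mconsole_parse() resolves the received text to a command entry (req->cmd), and the request is then dispatched through that entry's handler pointer, as in the (*req->cmd->handler)(req) lines. A minimal sketch of that dispatch shape, using hypothetical names and an illustrative struct layout rather than the kernel's own struct mc_request:

```c
struct console_request;

/* A command table entry: name plus handler, mirroring the
 * "void (*handler)(struct mc_request *req);" hit in mconsole.h. */
struct console_command {
	const char *name;
	void (*handler)(struct console_request *req);
};

/* Illustrative request layout (not the kernel's struct mc_request). */
struct console_request {
	char data[128];
	int len;
	const struct console_command *cmd;	/* set by the parser */
};

static void dispatch_request(struct console_request *req)
{
	if (req->cmd && req->cmd->handler)
		(*req->cmd->handler)(req);
}
```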
/arch/arm/crypto/
nh-neon-core.S
15 KEY .req r0
16 MESSAGE .req r1
18 HASH .req r3
32 K0 .req q4
33 K1 .req q5
34 K2 .req q6
35 K3 .req q7
36 T0 .req q8
39 T1 .req q9
42 T2 .req q10
[all …]
aes-ce-glue.c
270 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_encrypt()
287 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_encrypt()
300 if (req->dst != req->src) in cts_cbc_encrypt()
328 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_decrypt()
345 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_decrypt()
358 if (req->dst != req->src) in cts_cbc_decrypt()
445 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_encrypt()
472 if (req->dst != req->src) in xts_encrypt()
473 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in xts_encrypt()
517 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_decrypt()
[all …]
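The cts_cbc_encrypt()/xts_encrypt() hits above, like the matching hits in aes-neonbs-glue.c, aes-riscv64-glue.c, aes-glue.c and aesni-intel_glue.c further down, share one shape: set up an skcipher subrequest that covers only the block-aligned prefix of req, process it, then use scatterwalk_ffwd() to fast-forward the source/destination scatterlists to the trailing partial block. A minimal sketch of that shape under those assumptions; the exact split differs per mode, and full_blocks_crypt()/last_block_crypt() are hypothetical stand-ins for a driver's real encryption helpers:

```c
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>

/* Hypothetical stand-ins for the driver's bulk and tail encryption paths. */
static int full_blocks_crypt(struct skcipher_request *subreq) { return 0; }
static int last_block_crypt(struct crypto_skcipher *tfm,
			    struct scatterlist *src, struct scatterlist *dst,
			    unsigned int tail, u8 *iv) { return 0; }

static int subrequest_crypt_sketch(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct scatterlist *src, *dst;
	struct skcipher_request subreq;
	int err;

	/* Clamp the work to the block-aligned prefix of the request. */
	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);
	skcipher_request_set_crypt(&subreq, req->src, req->dst,
				   req->cryptlen - tail, req->iv);

	err = full_blocks_crypt(&subreq);
	if (err || !tail)
		return err;

	/* Fast-forward both scatterlists past the processed bytes so the
	 * trailing partial block can be handled separately. */
	src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
	dst = src;
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);

	return last_block_crypt(tfm, src, dst, tail, req->iv);
}
```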
ghash-ce-core.S
15 SHASH .req q0
16 T1 .req q1
17 XL .req q2
18 XM .req q3
19 XH .req q4
20 IN1 .req q4
24 T1_L .req d2
25 T1_H .req d3
26 XL_L .req d4
43 t0q .req q5
[all …]
aes-neonbs-glue.c
111 return __ecb_crypt(req, aesbs_ecb_encrypt); in ecb_encrypt()
116 return __ecb_crypt(req, aesbs_ecb_decrypt); in ecb_decrypt()
265 if (req->cryptlen < AES_BLOCK_SIZE) in __xts_crypt()
271 skcipher_request_flags(req), in __xts_crypt()
273 skcipher_request_set_crypt(&subreq, req->src, req->dst, in __xts_crypt()
274 req->cryptlen - tail, req->iv); in __xts_crypt()
275 req = &subreq; in __xts_crypt()
306 scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE, in __xts_crypt()
309 scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0); in __xts_crypt()
311 crypto_xor(buf, req->iv, AES_BLOCK_SIZE); in __xts_crypt()
[all …]
/arch/arm/kernel/
iwmmxt.h
18 .macro wldrd, reg:req, base:req, offset:req
22 .macro wldrw, reg:req, base:req, offset:req
26 .macro wstrd, reg:req, base:req, offset:req
30 .macro wstrw, reg:req, base:req, offset:req
38 .macro tmrc, dest:req, control:req
42 .macro tmcr, control:req, src:req
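Note that many of the assembly hits are not the C variable req at all: in nh-neon-core.S, ghash-ce-core.S and the /arch/arm64/lib/ files below, .req is the GNU assembler directive that gives a register a symbolic name (e.g. KEY .req r0 makes KEY an alias for r0), and the reg:req forms in the iwmmxt.h macros above are the assembler's "required argument" qualifier, which makes omitting that macro argument an assembly-time error.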
/arch/riscv/crypto/
aes-riscv64-glue.c
244 skcipher_request_set_crypt(&subreq, req->src, req->dst, in riscv64_aes_cbc_cts_crypt()
250 if (req->dst != req->src) in riscv64_aes_cbc_cts_crypt()
253 req->cryptlen - cbc_len, req->iv); in riscv64_aes_cbc_cts_crypt()
370 aes_encrypt_zvkned(&ctx->ctx2, req->iv, req->iv); in riscv64_aes_xts_crypt()
389 skcipher_request_set_crypt(&subreq, req->src, req->dst, in riscv64_aes_xts_crypt()
391 req->iv); in riscv64_aes_xts_crypt()
392 req = &subreq; in riscv64_aes_xts_crypt()
422 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); in riscv64_aes_xts_crypt()
423 if (req->dst != req->src) in riscv64_aes_xts_crypt()
424 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in riscv64_aes_xts_crypt()
[all …]
/arch/arm64/crypto/
nh-neon-core.S
13 KEY .req x0
16 HASH .req x3
22 K0 .req v4
23 K1 .req v5
24 K2 .req v6
25 K3 .req v7
26 T0 .req v8
27 T1 .req v9
28 T2 .req v10
29 T3 .req v11
[all …]
aes-glue.c
283 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_encrypt()
299 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_encrypt()
312 if (req->dst != req->src) in cts_cbc_encrypt()
340 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_decrypt()
356 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_decrypt()
369 if (req->dst != req->src) in cts_cbc_decrypt()
567 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_encrypt()
594 if (req->dst != req->src) in xts_encrypt()
595 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in xts_encrypt()
639 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_decrypt()
[all …]
sm4-ce-ccm-glue.c
57 unsigned int l = req->iv[0] + 1; in ccm_format_input()
69 memcpy(info, req->iv, SM4_BLOCK_SIZE); in ccm_format_input()
75 if (req->assoclen) in ccm_format_input()
96 u32 assoclen = req->assoclen; in ccm_calculate_auth_mac()
112 scatterwalk_start(&walk, req->src); in ccm_calculate_auth_mac()
172 if (req->assoclen) in ccm_crypt()
173 ccm_calculate_auth_mac(req, mac); in ccm_crypt()
216 err = ccm_format_input(mac, req, req->cryptlen); in ccm_encrypt()
229 scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen, in ccm_encrypt()
245 err = ccm_format_input(mac, req, req->cryptlen - authsize); in ccm_decrypt()
[all …]
polyval-ce-core.S
38 M0 .req v0
39 M1 .req v1
40 M2 .req v2
41 M3 .req v3
42 M4 .req v4
43 M5 .req v5
44 M6 .req v6
45 M7 .req v7
54 PL .req v16
94 X .req \X
[all …]
aes-ce-ccm-glue.c
66 u32 l = req->iv[0] + 1; in ccm_init_mac()
93 if (req->assoclen) in ccm_init_mac()
141 u32 len = req->assoclen; in ccm_calculate_auth_mac()
176 u32 len = req->cryptlen; in ccm_encrypt()
179 err = ccm_init_mac(req, mac, len); in ccm_encrypt()
192 if (req->assoclen) in ccm_encrypt()
193 ccm_calculate_auth_mac(req, mac); in ccm_encrypt()
229 scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen, in ccm_encrypt()
246 err = ccm_init_mac(req, mac, len); in ccm_decrypt()
259 if (req->assoclen) in ccm_decrypt()
[all …]
sm4-ce-glue.c
208 struct scatterlist *src = req->src; in sm4_cbc_cts_crypt()
209 struct scatterlist *dst = req->dst; in sm4_cbc_cts_crypt()
216 if (req->cryptlen < SM4_BLOCK_SIZE) in sm4_cbc_cts_crypt()
231 req->iv); in sm4_cbc_cts_crypt()
238 if (req->dst != req->src) in sm4_cbc_cts_crypt()
246 req->iv); in sm4_cbc_cts_crypt()
348 skcipher_request_set_crypt(&subreq, req->src, req->dst, in sm4_xts_crypt()
388 if (req->dst != req->src) in sm4_xts_crypt()
392 req->iv); in sm4_xts_crypt()
416 return sm4_xts_crypt(req, true); in sm4_xts_encrypt()
[all …]
sm4-ce-gcm-glue.c
78 u32 assoclen = req->assoclen; in gcm_calculate_auth_mac()
82 scatterwalk_start(&walk, req->src); in gcm_calculate_auth_mac()
146 lengths.a = cpu_to_be64(req->assoclen * 8); in gcm_crypt()
149 memcpy(iv, req->iv, GCM_IV_SIZE); in gcm_crypt()
154 if (req->assoclen) in gcm_crypt()
155 gcm_calculate_auth_mac(req, ghash); in gcm_crypt()
193 static int gcm_encrypt(struct aead_request *req) in gcm_encrypt() argument
206 scatterwalk_map_and_copy(ghash, req->dst, req->assoclen + req->cryptlen, in gcm_encrypt()
212 static int gcm_decrypt(struct aead_request *req) in gcm_decrypt() argument
227 scatterwalk_map_and_copy(authtag, req->src, in gcm_decrypt()
[all …]
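A detail the CCM/GCM hits share (sm4-ce-ccm-glue.c, aes-ce-ccm-glue.c and sm4-ce-gcm-glue.c above, ghash-ce-glue.c and aes-gcm-p10-glue.c below) is where the authentication tag is placed on encryption: it is copied into the destination scatterlist at offset req->assoclen + req->cryptlen, directly behind the associated data and the ciphertext. A minimal sketch of that single step; the function name is hypothetical:

```c
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>

/* Copy the computed tag into the output scatterlist, directly behind the
 * associated data and the ciphertext (encrypt side). */
static void append_auth_tag_sketch(struct aead_request *req, u8 *tag)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);

	/* final argument 1 = copy from the buffer into the scatterlist */
	scatterwalk_map_and_copy(tag, req->dst,
				 req->assoclen + req->cryptlen,
				 authsize, 1);
}
```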
aes-neonbs-glue.c
126 return __ecb_crypt(req, aesbs_ecb_encrypt); in ecb_encrypt()
131 return __ecb_crypt(req, aesbs_ecb_decrypt); in ecb_decrypt()
293 if (req->cryptlen < AES_BLOCK_SIZE) in __xts_crypt()
303 skcipher_request_flags(req), in __xts_crypt()
305 skcipher_request_set_crypt(&subreq, req->src, req->dst, in __xts_crypt()
307 req->iv); in __xts_crypt()
308 req = &subreq; in __xts_crypt()
357 dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen); in __xts_crypt()
358 if (req->dst != req->src) in __xts_crypt()
359 dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen); in __xts_crypt()
[all …]
ghash-ce-glue.c
277 scatterwalk_start(&walk, req->src); in gcm_calculate_auth_mac()
308 lengths.b = cpu_to_be64(req->cryptlen * 8); in gcm_encrypt()
352 scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen, in gcm_encrypt()
381 scatterwalk_map_and_copy(otag, req->src, in gcm_decrypt()
382 req->assoclen + req->cryptlen - authsize, in gcm_decrypt()
428 memcpy(iv, req->iv, GCM_AES_IV_SIZE); in gcm_aes_encrypt()
429 return gcm_encrypt(req, iv, req->assoclen); in gcm_aes_encrypt()
436 memcpy(iv, req->iv, GCM_AES_IV_SIZE); in gcm_aes_decrypt()
437 return gcm_decrypt(req, iv, req->assoclen); in gcm_aes_decrypt()
470 gcm_encrypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE); in rfc4106_encrypt()
[all …]
/arch/powerpc/platforms/512x/
mpc512x_lpbfifo.c
75 req = lpbfifo.req; in mpc512x_lpbfifo_irq()
76 if (!req || req->dir == MPC512X_LPBFIFO_REQ_DIR_READ) { in mpc512x_lpbfifo_irq()
101 if (req->callback) in mpc512x_lpbfifo_irq()
102 req->callback(req); in mpc512x_lpbfifo_irq()
128 req = lpbfifo.req; in mpc512x_lpbfifo_callback()
129 if (!req) { in mpc512x_lpbfifo_callback()
152 req->callback(req); in mpc512x_lpbfifo_callback()
189 if (lpbfifo.req->size == 0 || !IS_ALIGNED(lpbfifo.req->size, 4)) in mpc512x_lpbfifo_kick()
249 lpbfifo.req->ram_virt_addr, lpbfifo.req->size, dir); in mpc512x_lpbfifo_kick()
338 if (lpbfifo.req) in mpc512x_lpbfifo_submit_locked()
[all …]
/arch/s390/pci/
pci_insn.c
28 u64 req; member
39 u64 req, u64 offset) in zpci_err_insn_req() argument
43 .req = req, .offset = offset}; in zpci_err_insn_req()
66 : CC_OUT(cc, cc), [req] "+d" (req), [fib] "+Q" (*fib) in __mpcifc()
69 *status = req >> 24 & 0xff; in __mpcifc()
79 cc = __mpcifc(req, fib, status); in zpci_mod_fc()
197 cc = __pcilg(data, req, offset, &status); in __zpci_load()
290 cc = __pcistg(data, req, offset, &status); in __zpci_store()
365 : CC_OUT(cc, cc), [req] "+d" (req), [exc] "+d" (exception) in __pcistb()
368 *status = req >> 24 & 0xff; in __pcistb()
[all …]
/arch/powerpc/crypto/
aes-spe-glue.c
334 subreq = *req; in ppc_xts_encrypt()
335 skcipher_request_set_crypt(&subreq, req->src, req->dst, in ppc_xts_encrypt()
336 req->cryptlen - tail, req->iv); in ppc_xts_encrypt()
337 req = &subreq; in ppc_xts_encrypt()
350 req->iv, NULL); in ppc_xts_encrypt()
373 subreq = *req; in ppc_xts_decrypt()
374 skcipher_request_set_crypt(&subreq, req->src, req->dst, in ppc_xts_decrypt()
375 offset, req->iv); in ppc_xts_decrypt()
376 req = &subreq; in ppc_xts_decrypt()
387 ppc_encrypt_ecb(req->iv, req->iv, ctx->key_twk, ctx->rounds, in ppc_xts_decrypt()
[all …]
aes-gcm-p10-glue.c
211 struct crypto_tfm *tfm = req->base.tfm; in p10_aes_gcm_crypt()
220 unsigned int cryptlen = req->cryptlen; in p10_aes_gcm_crypt()
235 if (req->src->length >= assoclen && req->src->length) { in p10_aes_gcm_crypt()
236 assoc = sg_virt(req->src); /* ppc64 is !HIGHMEM */ in p10_aes_gcm_crypt()
299 scatterwalk_map_and_copy(hash->Htable, req->dst, req->assoclen + cryptlen, in p10_aes_gcm_crypt()
302 scatterwalk_map_and_copy(otag, req->src, in p10_aes_gcm_crypt()
303 req->assoclen + cryptlen - auth_tag_len, in p10_aes_gcm_crypt()
345 p10_aes_gcm_crypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE, 1); in rfc4106_encrypt()
358 p10_aes_gcm_crypt(req, iv, req->assoclen - GCM_RFC4106_IV_SIZE, 0); in rfc4106_decrypt()
363 return p10_aes_gcm_crypt(req, req->iv, req->assoclen, 1); in p10_aes_gcm_encrypt()
[all …]
/arch/x86/crypto/
aesni-intel_glue.c
248 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_encrypt()
265 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_encrypt()
277 if (req->dst != req->src) in cts_cbc_encrypt()
304 struct scatterlist *src = req->src, *dst = req->dst; in cts_cbc_decrypt()
321 skcipher_request_set_crypt(&subreq, req->src, req->dst, in cts_cbc_decrypt()
333 if (req->dst != req->src) in cts_cbc_decrypt()
442 skcipher_request_set_crypt(&subreq, req->src, req->dst, in xts_crypt_slowpath()
466 if (req->dst != req->src) in xts_crypt_slowpath()
506 likely(req->src->length >= req->cryptlen && in xts_crypt()
507 req->dst->length >= req->cryptlen)) { in xts_crypt()
[all …]
/arch/arm64/lib/
strnlen.S
27 srcin .req x0
28 len .req x0
29 limit .req x1
32 src .req x2
33 data1 .req x3
34 data2 .req x4
38 tmp1 .req x8
39 tmp2 .req x9
40 tmp3 .req x10
41 tmp4 .req x11
[all …]
copy_template.S
25 src .req x1
27 tmp1 .req x3
29 tmp2 .req x4
31 dst .req x6
33 A_l .req x7
34 A_h .req x8
35 B_l .req x9
36 B_h .req x10
37 C_l .req x11
38 C_h .req x12
[all …]

Completed in 59 milliseconds
