Lines Matching refs:sg_in
49 struct scatterlist sg_in[3]; in tls_enc_record() local
75 sg_init_table(sg_in, ARRAY_SIZE(sg_in)); in tls_enc_record()
77 sg_set_buf(sg_in, aad, TLS_AAD_SPACE_SIZE); in tls_enc_record()
79 scatterwalk_get_sglist(in, sg_in + 1); in tls_enc_record()
104 aead_request_set_crypt(aead_req, sg_in, sg_out, len, iv); in tls_enc_record()
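The tls_enc_record() references above show the usual kernel AEAD pattern: a small fixed scatterlist whose first slot carries the TLS AAD and whose remaining slots are chained to the record payload, then handed to aead_request_set_crypt(). Below is a minimal sketch of that pattern, not the kernel's actual tls_enc_record(); the helper name, the single payload page, and the synchronous-tfm assumption are mine.

#include <crypto/aead.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

/* Hypothetical helper: encrypt one record held in a single page, with the
 * AAD in slot 0 of a small source scatterlist (encrypt in place, src == dst).
 * The payload buffer must leave room for the auth tag at its end. */
static int sketch_enc_record(struct crypto_aead *aead,
			     struct aead_request *aead_req,
			     void *aad, unsigned int aad_len,
			     struct page *payload, unsigned int payload_len,
			     u8 *iv)
{
	struct scatterlist sg_in[2];

	sg_init_table(sg_in, ARRAY_SIZE(sg_in));
	sg_set_buf(&sg_in[0], aad, aad_len);			/* AAD first */
	sg_set_page(&sg_in[1], payload,				/* then payload */
		    payload_len + crypto_aead_authsize(aead), 0);

	aead_request_set_tfm(aead_req, aead);
	aead_request_set_ad(aead_req, aad_len);
	aead_request_set_crypt(aead_req, sg_in, sg_in, payload_len, iv);

	/* Assumes a synchronous tfm; an async one would need a callback/wait. */
	return crypto_aead_encrypt(aead_req);
}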
132 struct crypto_aead *aead, struct scatterlist *sg_in, in tls_enc_records() argument
139 scatterwalk_start(&in, sg_in); in tls_enc_records()
216 static int fill_sg_in(struct scatterlist *sg_in, in fill_sg_in() argument
263 sg_set_page(sg_in + i, skb_frag_page(frag), in fill_sg_in()
269 sg_in[i].length += remaining; in fill_sg_in()
274 if (skb_to_sgvec(skb, &sg_in[i], tcp_payload_offset, payload_len) < 0) in fill_sg_in()
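The fill_sg_in() references above build the source list straight from the skb's pages, partly by hand with sg_set_page() and partly with skb_to_sgvec(). A minimal sketch of that mapping using only skb_to_sgvec() follows; the real helper also walks frags manually, takes page references, and tracks the TLS record sequence number, and the parameter names here are hypothetical.

#include <linux/scatterlist.h>
#include <linux/skbuff.h>

/* Hypothetical helper: map payload_len bytes of skb data, starting at
 * payload_offset, into a caller-provided scatterlist. Returns the number
 * of entries used, or a negative errno if the list is too small. */
static int sketch_fill_sg_in(struct scatterlist *sg_in, unsigned int nents,
			     struct sk_buff *skb,
			     int payload_offset, int payload_len)
{
	sg_init_table(sg_in, nents);
	return skb_to_sgvec(skb, sg_in, payload_offset, payload_len);
}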
300 struct scatterlist *sg_in, in tls_enc_skb() argument
341 if (tls_enc_records(aead_req, ctx->aead_send, sg_in, sg_out, aad, iv, in tls_enc_skb()
370 struct scatterlist *sg_in, sg_out[3]; in tls_sw_fallback() local
386 sg_in = kmalloc_array(sg_in_max_elements, sizeof(*sg_in), GFP_ATOMIC); in tls_sw_fallback()
387 if (!sg_in) in tls_sw_fallback()
390 sg_init_table(sg_in, sg_in_max_elements); in tls_sw_fallback()
393 if (fill_sg_in(sg_in, skb, ctx, &rcd_sn, &sync_size, &resync_sgs)) { in tls_sw_fallback()
400 nskb = tls_enc_skb(tls_ctx, sg_out, sg_in, skb, sync_size, rcd_sn); in tls_sw_fallback()
404 put_page(sg_page(&sg_in[--resync_sgs])); in tls_sw_fallback()
405 kfree(sg_in); in tls_sw_fallback()
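The tls_sw_fallback() references outline the surrounding lifecycle: the sg_in array is sized at runtime, allocated with kmalloc_array() in atomic context, and every page pinned while building it is released before the array is freed. Below is a minimal sketch of that allocate/use/release pattern; the element count, the elided build and encrypt steps, and the helper name are hypothetical.

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/skbuff.h>
#include <linux/slab.h>

static struct sk_buff *sketch_fallback(struct sk_buff *skb,
				       unsigned int sg_in_max_elements)
{
	struct scatterlist *sg_in;
	struct sk_buff *nskb = NULL;
	unsigned int pinned = 0;

	sg_in = kmalloc_array(sg_in_max_elements, sizeof(*sg_in), GFP_ATOMIC);
	if (!sg_in)
		return NULL;

	sg_init_table(sg_in, sg_in_max_elements);

	/* ... fill sg_in from skb, taking a reference on each page and
	 * counting it in 'pinned'; encrypt the result into nskb ... */

	while (pinned)
		put_page(sg_page(&sg_in[--pinned]));	/* drop page refs */
	kfree(sg_in);

	return nskb;
}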