Cross-reference hits for the identifier tls_ctx in linux-6.3-rc2, grouped by directory and file. The leading numbers are source line numbers within each file; "[all …]" marks a truncated hit list.

/linux-6.3-rc2/include/net/
tls.h
  392  const struct tls_context *tls_ctx)                in tls_sw_ctx_rx() argument
  398  const struct tls_context *tls_ctx)                in tls_sw_ctx_tx() argument
  437  return tls_offload_ctx_tx(tls_ctx)->driver_state;  in __tls_driver_ctx()
  439  return tls_offload_ctx_rx(tls_ctx)->driver_state;  in __tls_driver_ctx()
  453  struct tls_context *tls_ctx = tls_get_ctx(sk);    in tls_offload_rx_resync_request() local
  463  struct tls_context *tls_ctx = tls_get_ctx(sk);    in tls_offload_rx_resync_async_request_start() local
  475  struct tls_context *tls_ctx = tls_get_ctx(sk);    in tls_offload_rx_resync_async_request_end() local
  485  struct tls_context *tls_ctx = tls_get_ctx(sk);    in tls_offload_rx_resync_set_type() local
  487  tls_offload_ctx_rx(tls_ctx)->resync_type = type;  in tls_offload_rx_resync_set_type()
  493  struct tls_context *tls_ctx = tls_get_ctx(sk);    in tls_offload_tx_resync_pending() local
  [all …]
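The helpers listed here are the entry points the offload drivers further down build on: tls_get_ctx(sk) recovers the per-socket struct tls_context, tls_sw_ctx_rx()/tls_sw_ctx_tx() return the software contexts, and __tls_driver_ctx() returns the driver_state scratch area of the TX or RX offload context. As a rough sketch of how an RX-offload driver might use the resync helpers shown above (the foo_* names and the scenario are hypothetical; only the tls_* calls come from this header):

#include <net/tls.h>

/* Hypothetical driver hook: the NIC lost track of TLS record boundaries
 * and reports the TCP sequence number where it expects the next record
 * header to start.
 */
static void foo_rx_lost_sync(struct sock *sk, u32 next_record_tcp_seq)
{
        /* Hand the position to the TLS core; once the stream reaches it,
         * the core calls back into the driver (.tls_dev_resync in struct
         * tlsdev_ops) with the matching record sequence number.
         */
        tls_offload_rx_resync_request(sk, htonl(next_record_tcp_seq));
}

/* Hypothetical setup hook: choose the resync strategy for this socket. */
static void foo_rx_choose_resync(struct sock *sk)
{
        /* Driver-requested resync, as opposed to the core-driven
         * (CORE_NEXT_HIT) or asynchronous (DRIVER_REQ_ASYNC) variants.
         */
        tls_offload_rx_resync_set_type(sk, TLS_OFFLOAD_SYNC_TYPE_DRIVER_REQ);
}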
/linux-6.3-rc2/net/tls/
tls_sw.c
  191   struct tls_context *tls_ctx;    in tls_decrypt_done() local
  202   tls_ctx = tls_get_ctx(sk);      in tls_decrypt_done()
  203   ctx = tls_sw_ctx_rx(tls_ctx);   in tls_decrypt_done()
  435   struct tls_context *tls_ctx;    in tls_encrypt_done() local
  447   tls_ctx = tls_get_ctx(sk);      in tls_encrypt_done()
  448   prot = &tls_ctx->prot_info;     in tls_encrypt_done()
  449   ctx = tls_sw_ctx_tx(tls_ctx);   in tls_encrypt_done()
  524   tls_ctx->tx.rec_seq);           in tls_do_encryption()
  769   tls_fill_prepend(tls_ctx,       in tls_push_record()
  2383  kfree(tls_ctx->rx.iv);          in tls_sw_release_resources_rx()
  [all …]
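The tls_decrypt_done()/tls_encrypt_done() hits show the shape shared by the software-path completion callbacks: an async crypto completion only has the socket, so it re-derives the TLS state from it before touching any per-record bookkeeping. A minimal sketch of that pattern, with the real callback signature and all error handling omitted (foo_* is hypothetical):

#include <net/tls.h>

static void foo_record_done(struct sock *sk)
{
        struct tls_context *tls_ctx = tls_get_ctx(sk);  /* ULP data on the socket */
        struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
        struct tls_prot_info *prot = &tls_ctx->prot_info;

        /* ... per-record bookkeeping against ctx and prot goes here, e.g.
         * waking the reader or advancing sequence numbers; see
         * tls_decrypt_done() for the real thing ...
         */
        (void)ctx;
        (void)prot;
}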
tls_device.c
  168  if (!tls_ctx)                       in tls_icsk_clean_acked()
  171  ctx = tls_offload_ctx_tx(tls_ctx);  in tls_icsk_clean_acked()
  200  tls_ctx->sk_destruct(sk);           in tls_device_sk_destruct()
  202  if (tls_ctx->tx_conf == TLS_HW) {   in tls_device_sk_destruct()
  242  rcd_sn = tls_ctx->tx.rec_seq;       in tls_device_resync_tx()
  552  tls_ctx,                            in tls_push_data()
  576  mutex_lock(&tls_ctx->tx_lock);      in tls_device_sendmsg()
  590  mutex_unlock(&tls_ctx->tx_lock);    in tls_device_sendmsg()
  607  mutex_lock(&tls_ctx->tx_lock);      in tls_device_sendpage()
  805  if (tls_ctx->rx_conf != TLS_HW)     in tls_device_rx_resync_new_rec()
  [all …]
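The tls_device_sendmsg()/tls_device_sendpage() hits show the locking used by the device-offload (TLS_HW) transmit path: record construction is serialized on tls_ctx->tx_lock, taken before the socket lock. A sketch of that shape, with the actual tls_push_data()-style record building elided (foo_* is hypothetical):

#include <net/sock.h>
#include <net/tls.h>

static int foo_device_send(struct sock *sk, struct msghdr *msg, size_t size)
{
        struct tls_context *tls_ctx = tls_get_ctx(sk);
        int rc = 0;

        mutex_lock(&tls_ctx->tx_lock);  /* serializes record building */
        lock_sock(sk);

        /* ... split msg into TLS records on the offload record list and
         * push the resulting fragments down to TCP ...
         */

        release_sock(sk);
        mutex_unlock(&tls_ctx->tx_lock);
        return rc;
}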
tls_device_fallback.c
  305  struct tls_context *tls_ctx,                                        in fill_sg_out() argument
  313  &tls_cipher_size_desc[tls_ctx->crypto_send.info.cipher_type];       in fill_sg_out()
  322  static struct sk_buff *tls_enc_skb(struct tls_context *tls_ctx,     in tls_enc_skb() argument
  328  struct tls_offload_context_tx *ctx = tls_offload_ctx_tx(tls_ctx);   in tls_enc_skb()
  341  switch (tls_ctx->crypto_send.info.cipher_type) {                    in tls_enc_skb()
  343  salt = tls_ctx->crypto_send.aes_gcm_128.salt;                       in tls_enc_skb()
  346  salt = tls_ctx->crypto_send.aes_gcm_256.salt;                       in tls_enc_skb()
  369  fill_sg_out(sg_out, buf, tls_ctx, nskb, tcp_payload_offset,         in tls_enc_skb()
  374  &tls_ctx->prot_info) < 0)                                           in tls_enc_skb()
  398  struct tls_context *tls_ctx = tls_get_ctx(sk);                      in tls_sw_fallback() local
  [all …]
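The tls_enc_skb() hits show how the software fallback (used when a packet can no longer take the offload path) selects its crypto material: it switches on crypto_send.info.cipher_type and reads the matching member of the crypto_send union, just as it indexes tls_cipher_size_desc by the same cipher type. A reduced sketch of the salt selection alone (foo_* is hypothetical):

#include <linux/tls.h>
#include <net/tls.h>

static const u8 *foo_tx_salt(struct tls_context *tls_ctx)
{
        switch (tls_ctx->crypto_send.info.cipher_type) {
        case TLS_CIPHER_AES_GCM_128:
                return tls_ctx->crypto_send.aes_gcm_128.salt;
        case TLS_CIPHER_AES_GCM_256:
                return tls_ctx->crypto_send.aes_gcm_256.salt;
        default:
                return NULL;    /* cipher not handled by this sketch */
        }
}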
tls.h
   96  void tls_update_rx_zc_capable(struct tls_context *tls_ctx);
   98  void tls_sw_strparser_done(struct tls_context *tls_ctx);
  104  void tls_sw_cancel_work_tx(struct tls_context *tls_ctx);
  106  void tls_sw_free_ctx_tx(struct tls_context *tls_ctx);
  109  void tls_sw_free_ctx_rx(struct tls_context *tls_ctx);
  178  int tls_device_decrypted(struct sock *sk, struct tls_context *tls_ctx);
  202  tls_device_decrypted(struct sock *sk, struct tls_context *tls_ctx)          in tls_device_decrypted() argument
  220  static inline bool tls_is_pending_open_record(struct tls_context *tls_ctx)  in tls_is_pending_open_record() argument
  222  return tls_ctx->pending_open_record_frags;                                  in tls_is_pending_open_record()
/linux-6.3-rc2/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_tx.c
  110  __tls_driver_ctx(tls_ctx, TLS_OFFLOAD_CTX_DIR_TX);  in mlx5e_set_ktls_tx_priv_ctx()
  121  __tls_driver_ctx(tls_ctx, TLS_OFFLOAD_CTX_DIR_TX);  in mlx5e_get_ktls_tx_priv_ctx()
  459  struct tls_context *tls_ctx;                        in mlx5e_ktls_add_tx() local
  464  tls_ctx = tls_get_ctx(sk);                          in mlx5e_ktls_add_tx()
  494  priv_tx->tx_ctx = tls_offload_ctx_tx(tls_ctx);      in mlx5e_ktls_add_tx()
  496  mlx5e_set_ktls_tx_priv_ctx(tls_ctx, priv_tx);       in mlx5e_ktls_add_tx()
  514  priv_tx = mlx5e_get_ktls_tx_priv_ctx(tls_ctx);      in mlx5e_ktls_del_tx()
  834  struct tls_context *tls_ctx;                        in mlx5e_ktls_handle_tx_skb() local
  844  tls_ctx = tls_get_ctx(skb->sk);                     in mlx5e_ktls_handle_tx_skb()
  845  tls_netdev = rcu_dereference_bh(tls_ctx->netdev);   in mlx5e_ktls_handle_tx_skb()
  [all …]
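mlx5e_set_ktls_tx_priv_ctx() and mlx5e_get_ktls_tx_priv_ctx() illustrate the most common use of __tls_driver_ctx(): the driver_state area is only a few bytes, so the driver keeps a pointer to its real (separately allocated) per-connection object there. A generic sketch of that pattern; everything except the tls_* helpers is hypothetical:

#include <net/tls.h>

struct foo_ktls_tx_priv;        /* driver-private object, allocated elsewhere */

static void foo_set_tx_priv(struct tls_context *tls_ctx,
                            struct foo_ktls_tx_priv *priv)
{
        struct foo_ktls_tx_priv **ctx =
                __tls_driver_ctx(tls_ctx, TLS_OFFLOAD_CTX_DIR_TX);

        *ctx = priv;            /* stash the pointer in driver_state[] */
}

static struct foo_ktls_tx_priv *foo_get_tx_priv(struct tls_context *tls_ctx)
{
        struct foo_ktls_tx_priv **ctx =
                __tls_driver_ctx(tls_ctx, TLS_OFFLOAD_CTX_DIR_TX);

        return *ctx;
}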
ktls_rx.c
  229  mlx5e_set_ktls_rx_priv_ctx(struct tls_context *tls_ctx,                         in mlx5e_set_ktls_rx_priv_ctx() argument
  233  __tls_driver_ctx(tls_ctx, TLS_OFFLOAD_CTX_DIR_RX);                              in mlx5e_set_ktls_rx_priv_ctx()
  241  mlx5e_get_ktls_rx_priv_ctx(struct tls_context *tls_ctx)                         in mlx5e_get_ktls_rx_priv_ctx() argument
  244  __tls_driver_ctx(tls_ctx, TLS_OFFLOAD_CTX_DIR_RX);                              in mlx5e_get_ktls_rx_priv_ctx()
  613  struct tls_context *tls_ctx;                                                    in mlx5e_ktls_add_rx() local
  618  tls_ctx = tls_get_ctx(sk);                                                      in mlx5e_ktls_add_rx()
  654  mlx5e_set_ktls_rx_priv_ctx(tls_ctx, priv_rx);                                   in mlx5e_ktls_add_rx()
  665  tls_offload_ctx_rx(tls_ctx)->resync_async = &resync->core;                      in mlx5e_ktls_add_rx()
  685  void mlx5e_ktls_del_rx(struct net_device *netdev, struct tls_context *tls_ctx)  in mlx5e_ktls_del_rx() argument
  693  priv_rx = mlx5e_get_ktls_rx_priv_ctx(tls_ctx);                                  in mlx5e_ktls_del_rx()
  [all …]
ktls.c
  69  struct tls_context *tls_ctx,         in mlx5e_ktls_del() argument
  73  mlx5e_ktls_del_tx(netdev, tls_ctx);  in mlx5e_ktls_del()
  75  mlx5e_ktls_del_rx(netdev, tls_ctx);  in mlx5e_ktls_del()
ktls_utils.h
  24  void mlx5e_ktls_del_tx(struct net_device *netdev, struct tls_context *tls_ctx);
  27  void mlx5e_ktls_del_rx(struct net_device *netdev, struct tls_context *tls_ctx);
/linux-6.3-rc2/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
chcr_ktls.h
  83  chcr_get_ktls_tx_context(struct tls_context *tls_ctx)  in chcr_get_ktls_tx_context() argument
  87  return container_of(tls_offload_ctx_tx(tls_ctx),       in chcr_get_ktls_tx_context()
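chcr_get_ktls_tx_context() shows the other way of attaching driver data to a connection: the core allocates the TX offload context with spare room behind it (sized by TLS_DRIVER_STATE_SIZE_TX), so a driver can overlay a larger structure whose first member is the core context and recover it with container_of(). A sketch of the same idea, assuming the TLS_OFFLOAD_CONTEXT_SIZE_TX bound from include/net/tls.h; the foo_* structure is hypothetical:

#include <linux/build_bug.h>
#include <linux/container_of.h>
#include <net/tls.h>

struct foo_ktls_ofld_ctx_tx {
        struct tls_offload_context_tx base;     /* must remain the first member */
        u32 hw_tid;                             /* lives in the driver_state area */
};

static inline struct foo_ktls_ofld_ctx_tx *
foo_get_ktls_tx_context(struct tls_context *tls_ctx)
{
        /* The extra members must fit inside the space the core allocated. */
        BUILD_BUG_ON(sizeof(struct foo_ktls_ofld_ctx_tx) >
                     TLS_OFFLOAD_CONTEXT_SIZE_TX);
        return container_of(tls_offload_ctx_tx(tls_ctx),
                            struct foo_ktls_ofld_ctx_tx, base);
}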
chcr_ktls.c
  361   struct tls_context *tls_ctx,                       in chcr_ktls_dev_del() argument
  365   chcr_get_ktls_tx_context(tls_ctx);                 in chcr_ktls_dev_del()
  418   struct tls_context *tls_ctx = tls_get_ctx(sk);     in chcr_ktls_dev_add() local
  430   tx_ctx = chcr_get_ktls_tx_context(tls_ctx);        in chcr_ktls_dev_add()
  653   struct tls_context *tls_ctx;                       in chcr_ktls_cpl_act_open_rpl() local
  688   tls_ctx = tls_get_ctx(tx_info->sk);                in chcr_ktls_cpl_act_open_rpl()
  689   tx_ctx = chcr_get_ktls_tx_context(tls_ctx);        in chcr_ktls_cpl_act_open_rpl()
  1934  struct tls_context *tls_ctx;                       in chcr_ktls_xmit() local
  1946  tls_ctx = tls_get_ctx(skb->sk);                    in chcr_ktls_xmit()
  1947  tls_netdev = rcu_dereference_bh(tls_ctx->netdev);  in chcr_ktls_xmit()
  [all …]
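chcr_ktls_xmit() (like mlx5e_ktls_handle_tx_skb() earlier) shows the check an offload transmit routine performs before touching hardware state: confirm that the skb's socket is really TX-device-offloaded and that the connection is still bound to this netdev; tls_ctx->netdev is RCU-protected, hence rcu_dereference_bh() in the transmit path. A hedged sketch of that check, assuming tls_is_sk_tx_device_offloaded() is the helper this kernel version exposes (foo_* is hypothetical):

#include <linux/netdevice.h>
#include <net/tls.h>

static bool foo_skb_offloaded_on(struct sk_buff *skb, struct net_device *dev)
{
        struct tls_context *tls_ctx;
        struct net_device *tls_netdev;

        if (!skb->sk || !tls_is_sk_tx_device_offloaded(skb->sk))
                return false;

        tls_ctx = tls_get_ctx(skb->sk);
        tls_netdev = rcu_dereference_bh(tls_ctx->netdev);
        /* a NULL netdev means the offload is being torn down */
        return tls_netdev == dev;
}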
/linux-6.3-rc2/drivers/net/ethernet/fungible/funeth/
funeth_tx.c
  111  const struct fun_ktls_tx_ctx *tls_ctx;                      in fun_tls_tx() local
  120  tls_ctx = tls_driver_ctx(skb->sk, TLS_OFFLOAD_CTX_DIR_TX);  in fun_tls_tx()
  122  if (likely(tls_ctx->next_seq == seq)) {                     in fun_tls_tx()
  126  if (seq - tls_ctx->next_seq < U32_MAX / 4) {                in fun_tls_tx()
  128  tls_ctx->next_seq);                                         in fun_tls_tx()
  281  struct fun_ktls_tx_ctx *tls_ctx;                            in write_pkt_desc() local
  286  tls_ctx = tls_driver_ctx(skb->sk, TLS_OFFLOAD_CTX_DIR_TX);  in write_pkt_desc()
  287  tls->tlsid = tls_ctx->tlsid;                                in write_pkt_desc()
  288  tls_ctx->next_seq += tls_len;                               in write_pkt_desc()
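fun_tls_tx() and write_pkt_desc() show a driver that stores its small TX state directly in the driver_state area (reached via tls_driver_ctx()) and uses it to track the TCP sequence it expects next: an in-order skb goes straight to hardware, anything else is a retransmission or a jump that needs a resync or the software fallback. A sketch of the in-order check, assuming the state fits in driver_state (struct and helper names are hypothetical):

#include <linux/tcp.h>
#include <net/tls.h>

struct foo_ktls_tx_state {
        u64 hw_tls_id;          /* handle programmed into the NIC */
        u32 next_seq;           /* TCP sequence expected next */
};

static bool foo_tls_skb_in_order(struct sk_buff *skb)
{
        struct foo_ktls_tx_state *st;
        u32 seq = ntohl(tcp_hdr(skb)->seq);

        st = tls_driver_ctx(skb->sk, TLS_OFFLOAD_CTX_DIR_TX);
        if (likely(st->next_seq == seq))
                return true;    /* in order: hand straight to the NIC */

        /* Out of order: either a retransmission or a jump ahead; the
         * driver must fall back to software crypto or resynchronize the
         * NIC before resuming offload.
         */
        return false;
}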
funeth_ktls.c
  72  struct tls_context *tls_ctx,                    in fun_ktls_del() argument
  82  tx_ctx = __tls_driver_ctx(tls_ctx, direction);  in fun_ktls_del()
/linux-6.3-rc2/drivers/net/ethernet/netronome/nfp/crypto/
tls.c
  410  nfp_net_tls_del(struct net_device *netdev, struct tls_context *tls_ctx,  in nfp_net_tls_del() argument
  418  ntls = __tls_driver_ctx(tls_ctx, direction);                             in nfp_net_tls_del()
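fun_ktls_del() and nfp_net_tls_del() show the teardown side: the .tls_dev_del() callback in struct tlsdev_ops receives the tls_context rather than the socket (which may already be gone) together with the direction, and looks up its state with __tls_driver_ctx(). A sketch of such a callback (everything except the tlsdev_ops contract and the tls_* helpers is hypothetical):

#include <linux/netdevice.h>
#include <net/tls.h>

struct foo_conn_state {
        u32 hw_conn_id;         /* stored in driver_state[] at tls_dev_add() time */
};

static void foo_tls_dev_del(struct net_device *netdev,
                            struct tls_context *tls_ctx,
                            enum tls_offload_ctx_dir direction)
{
        struct foo_conn_state *state;

        if (direction != TLS_OFFLOAD_CTX_DIR_TX)
                return;         /* this sketch only offloads TX */

        state = __tls_driver_ctx(tls_ctx, direction);
        /* ... tell the NIC to drop the connection identified by
         * state->hw_conn_id and release any driver resources ...
         */
        (void)state;
        (void)netdev;
}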
/linux-6.3-rc2/drivers/net/ethernet/chelsio/cxgb4/
cxgb4_main.c
  6468  struct tls_context *tls_ctx,                                        in cxgb4_ktls_dev_del() argument
  6477  adap->uld[CXGB4_ULD_KTLS].tlsdev_ops->tls_dev_del(netdev, tls_ctx,  in cxgb4_ktls_dev_del()