Lines Matching refs:rx
285 static bool tipc_crypto_key_try_align(struct tipc_crypto *rx, u8 new_pending);
287 struct tipc_crypto *rx,
290 static void tipc_crypto_key_synch(struct tipc_crypto *rx, struct sk_buff *skb);
305 static bool tipc_crypto_key_rcv(struct tipc_crypto *rx, struct tipc_msg *hdr);
1213 struct tipc_crypto *tx, *rx; in tipc_crypto_key_flush() local
1219 rx = c; in tipc_crypto_key_flush()
1220 tx = tipc_net(rx->net)->crypto_tx; in tipc_crypto_key_flush()
1221 if (cancel_delayed_work(&rx->work)) { in tipc_crypto_key_flush()
1222 kfree(rx->skey); in tipc_crypto_key_flush()
1223 rx->skey = NULL; in tipc_crypto_key_flush()
1224 atomic_xchg(&rx->key_distr, 0); in tipc_crypto_key_flush()
1225 tipc_node_put(rx->node); in tipc_crypto_key_flush()
1228 k = atomic_xchg(&rx->peer_rx_active, 0); in tipc_crypto_key_flush()
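The tipc_crypto_key_flush() lines above cancel any queued RX work, free the stashed session key, and reset the key_distr / peer_rx_active counters with atomic exchanges; the old peer_rx_active value (line 1228) is kept in k so the caller can act on it afterwards. A minimal userspace sketch of that read-and-reset idiom, using C11 atomics instead of the kernel's atomic_t (the variable names are illustrative, not TIPC's):

#include <stdatomic.h>
#include <stdio.h>

/* Illustrative stand-ins for rx->key_distr / rx->peer_rx_active. */
static atomic_int key_distr = 3;
static atomic_int peer_rx_active = 2;

int main(void)
{
    /* atomic_exchange() returns the value it replaced, so a field can
     * be reset and its old contents acted upon in one step, as in
     * k = atomic_xchg(&rx->peer_rx_active, 0) on the flush path. */
    int old_distr = atomic_exchange(&key_distr, 0);
    int old_peer  = atomic_exchange(&peer_rx_active, 0);

    printf("flushed: key_distr was %d, peer_rx_active was %d\n",
           old_distr, old_peer);
    return 0;
}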
1257 static bool tipc_crypto_key_try_align(struct tipc_crypto *rx, u8 new_pending) in tipc_crypto_key_try_align() argument
1265 spin_lock(&rx->lock); in tipc_crypto_key_try_align()
1266 key = rx->key; in tipc_crypto_key_try_align()
1275 if (tipc_aead_users(rx->aead[key.pending]) > 0) in tipc_crypto_key_try_align()
1279 tmp1 = tipc_aead_rcu_ptr(rx->aead[key.pending], &rx->lock); in tipc_crypto_key_try_align()
1282 rcu_assign_pointer(rx->aead[key.pending], NULL); in tipc_crypto_key_try_align()
1286 tmp2 = rcu_replace_pointer(rx->aead[key.passive], tmp2, lockdep_is_held(&rx->lock)); in tipc_crypto_key_try_align()
1292 tipc_crypto_key_set_state(rx, new_passive, 0, new_pending); in tipc_crypto_key_try_align()
1293 rcu_assign_pointer(rx->aead[new_pending], tmp1); in tipc_crypto_key_try_align()
1295 rcu_assign_pointer(rx->aead[new_passive], tmp2); in tipc_crypto_key_try_align()
1298 pr_info_ratelimited("%s: key[%d] -> key[%d]\n", rx->name, key.pending, in tipc_crypto_key_try_align()
1302 spin_unlock(&rx->lock); in tipc_crypto_key_try_align()
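tipc_crypto_key_try_align() only shuffles pointers: when the peer encrypts with a different key index and the local pending key has no users yet, the AEAD is detached from its current slot and republished under the index the peer expects, all under rx->lock. Below is a rough userspace analogue of that slot realignment, using a plain mutex instead of the kernel's spinlock/RCU pair; realign(), struct aead and KEY_MAX are invented for the sketch, and the real code additionally relocates the passive key and relies on RCU so concurrent readers never observe a half-updated slot.

#include <pthread.h>
#include <stdio.h>

#define KEY_MAX 4 /* illustrative slot count, not TIPC's layout */

struct aead { int users; };

static struct aead *slot[KEY_MAX];
static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

/* Republish the unused AEAD from 'pending' under 'new_pending', the
 * index the peer is actually encrypting with.  The real function also
 * relocates the passive key and updates the key state word. */
static int realign(int pending, int new_pending)
{
    struct aead *tmp;
    int rc = -1;

    pthread_mutex_lock(&lock);
    tmp = slot[pending];
    if (!tmp || tmp->users > 0)    /* key already in use: give up */
        goto out;
    slot[pending] = NULL;
    slot[new_pending] = tmp;
    rc = 0;
out:
    pthread_mutex_unlock(&lock);
    return rc;
}

int main(void)
{
    static struct aead a;

    slot[1] = &a;
    printf("realign: %s\n", realign(1, 2) ? "skipped" : "done");
    return 0;
}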
1320 struct tipc_crypto *rx, in tipc_crypto_key_pick_tx() argument
1335 skb_cb->tx_clone_ctx.rx = rx; in tipc_crypto_key_pick_tx()
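tipc_crypto_key_pick_tx() handles inbound traffic that was encrypted with the node's own TX key; note line 1335, which parks the rx context in the skb control block so the asynchronous decrypt completion can find it again. A toy version of that "carry the context with the request" pattern is sketched here (struct decrypt_req, submit_decrypt() and decrypt_done() are invented names, not TIPC code):

#include <stdio.h>

struct rx_ctx { const char *name; };

/* Per-request control block: plays the role of skb_cb->tx_clone_ctx. */
struct decrypt_req {
    struct rx_ctx *rx;    /* context the completion needs later */
    int payload;
};

static void decrypt_done(struct decrypt_req *req, int err)
{
    /* The callback recovers the RX context from the request itself,
     * precisely because it was stashed there before submission. */
    printf("completion for %s: err=%d payload=%d\n",
           req->rx ? req->rx->name : "-", err, req->payload);
}

static void submit_decrypt(struct decrypt_req *req, struct rx_ctx *rx)
{
    req->rx = rx;          /* park the context with the request */
    decrypt_done(req, 0);  /* pretend the crypto completed at once */
}

int main(void)
{
    struct rx_ctx rx = { .name = "peer-1001" };
    struct decrypt_req req = { .payload = 42 };

    submit_decrypt(&req, &rx);
    return 0;
}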
1390 static void tipc_crypto_key_synch(struct tipc_crypto *rx, struct sk_buff *skb) in tipc_crypto_key_synch() argument
1393 struct tipc_crypto *tx = tipc_net(rx->net)->crypto_tx; in tipc_crypto_key_synch()
1395 u32 self = tipc_own_addr(rx->net); in tipc_crypto_key_synch()
1402 rx->key_master = ehdr->master_key; in tipc_crypto_key_synch()
1403 if (!rx->key_master) in tipc_crypto_key_synch()
1416 !atomic_cmpxchg(&rx->key_distr, 0, KEY_DISTR_SCHED)) { in tipc_crypto_key_synch()
1420 if (queue_delayed_work(tx->wq, &rx->work, delay)) in tipc_crypto_key_synch()
1421 tipc_node_get(rx->node); in tipc_crypto_key_synch()
1425 atomic_xchg(&rx->key_distr, 0); in tipc_crypto_key_synch()
1429 cur = atomic_read(&rx->peer_rx_active); in tipc_crypto_key_synch()
1433 atomic_cmpxchg(&rx->peer_rx_active, cur, new) == cur) { in tipc_crypto_key_synch()
1439 atomic64_set(&rx->sndnxt, 0); in tipc_crypto_key_synch()
1444 tx->name, cur, new, rx->name); in tipc_crypto_key_synch()
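Two compare-and-swap idioms carry tipc_crypto_key_synch(): atomic_cmpxchg(&rx->key_distr, 0, KEY_DISTR_SCHED) lets exactly one caller queue the key-distribution work, and atomic_cmpxchg(&rx->peer_rx_active, cur, new) updates the cached peer key only if nothing raced in between. A compact userspace sketch of the first idiom with C11 atomics (schedule_distr() and the flag values are invented for the example):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

enum { KEY_DISTR_IDLE = 0, KEY_DISTR_SCHED = 1 };

static atomic_int key_distr = KEY_DISTR_IDLE;

/* Only the caller that wins the 0 -> SCHED transition queues the work,
 * mirroring !atomic_cmpxchg(&rx->key_distr, 0, KEY_DISTR_SCHED). */
static bool schedule_distr(void)
{
    int expected = KEY_DISTR_IDLE;

    return atomic_compare_exchange_strong(&key_distr, &expected,
                                          KEY_DISTR_SCHED);
}

int main(void)
{
    printf("first caller schedules: %d\n", schedule_distr());  /* 1 */
    printf("second caller schedules: %d\n", schedule_distr()); /* 0 */
    return 0;
}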
1551 void tipc_crypto_timeout(struct tipc_crypto *rx) in tipc_crypto_timeout() argument
1553 struct tipc_net *tn = tipc_net(rx->net); in tipc_crypto_timeout()
1578 spin_lock(&rx->lock); in tipc_crypto_timeout()
1579 key = rx->key; in tipc_crypto_timeout()
1580 if (!key.pending || tipc_aead_users(rx->aead[key.pending]) <= 0) in tipc_crypto_timeout()
1586 rx->timer2 = jiffies; in tipc_crypto_timeout()
1587 tipc_crypto_key_set_state(rx, key.passive, key.active, 0); in tipc_crypto_timeout()
1588 this_cpu_inc(rx->stats->stat[STAT_SWITCHES]); in tipc_crypto_timeout()
1589 pr_info("%s: key[%d] is activated\n", rx->name, key.pending); in tipc_crypto_timeout()
1594 if (!key.pending || tipc_aead_users(rx->aead[key.pending]) > -10) in tipc_crypto_timeout()
1597 tipc_crypto_key_set_state(rx, key.passive, key.active, 0); in tipc_crypto_timeout()
1598 tipc_crypto_key_detach(rx->aead[key.pending], &rx->lock); in tipc_crypto_timeout()
1599 pr_debug("%s: key[%d] is removed\n", rx->name, key.pending); in tipc_crypto_timeout()
1606 if (time_before(jiffies, rx->timer1 + TIPC_RX_ACTIVE_LIM) && in tipc_crypto_timeout()
1607 tipc_aead_users(rx->aead[key.active]) > 0) in tipc_crypto_timeout()
1614 rx->timer2 = jiffies; in tipc_crypto_timeout()
1615 tipc_crypto_key_set_state(rx, key.passive, 0, key.pending); in tipc_crypto_timeout()
1616 tipc_aead_users_set(rx->aead[key.pending], 0); in tipc_crypto_timeout()
1617 pr_debug("%s: key[%d] is deactivated\n", rx->name, key.active); in tipc_crypto_timeout()
1624 if (time_before(jiffies, rx->timer2 + TIPC_RX_PASSIVE_LIM) && in tipc_crypto_timeout()
1625 tipc_aead_users(rx->aead[key.passive]) > -10) in tipc_crypto_timeout()
1628 tipc_crypto_key_set_state(rx, 0, key.active, key.pending); in tipc_crypto_timeout()
1629 tipc_crypto_key_detach(rx->aead[key.passive], &rx->lock); in tipc_crypto_timeout()
1630 pr_debug("%s: key[%d] is freed\n", rx->name, key.passive); in tipc_crypto_timeout()
1633 spin_unlock(&rx->lock); in tipc_crypto_timeout()
1647 tipc_crypto_do_cmd(rx->net, cmd); in tipc_crypto_timeout()
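tipc_crypto_timeout() is a small state machine driven by two timestamps, rx->timer1 (last activity on the active key) and rx->timer2 (last change to the pending/passive slot), compared against jiffies with time_before(). The sketch below expresses the same "is this slot still inside its grace period?" test in userspace terms, using CLOCK_MONOTONIC instead of jiffies; the limit value is a placeholder, not TIPC's TIPC_RX_PASSIVE_LIM.

#define _POSIX_C_SOURCE 199309L
#include <stdbool.h>
#include <stdio.h>
#include <time.h>

#define RX_PASSIVE_LIM_SEC 15 /* placeholder, not TIPC's real limit */

static double now_sec(void)
{
    struct timespec ts;

    clock_gettime(CLOCK_MONOTONIC, &ts);
    return ts.tv_sec + ts.tv_nsec / 1e9;
}

/* Equivalent of "time_before(jiffies, timer + LIM)": the slot is still
 * inside its grace period and must not be torn down yet. */
static bool still_in_grace(double stamp)
{
    return now_sec() < stamp + RX_PASSIVE_LIM_SEC;
}

int main(void)
{
    double timer2 = now_sec();    /* slot was just touched */

    printf("free passive key now? %s\n",
           still_in_grace(timer2) ? "no, keep it" : "yes");
    return 0;
}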
1814 int tipc_crypto_rcv(struct net *net, struct tipc_crypto *rx, in tipc_crypto_rcv() argument
1829 if (unlikely(!rx || tx_key == KEY_MASTER)) in tipc_crypto_rcv()
1833 key = rx->key; in tipc_crypto_rcv()
1839 if (tipc_crypto_key_try_align(rx, tx_key)) in tipc_crypto_rcv()
1844 aead = tipc_crypto_key_pick_tx(tx, rx, *skb, tx_key); in tipc_crypto_rcv()
1852 aead = tipc_aead_get(rx->aead[tx_key]); in tipc_crypto_rcv()
1857 stats = ((rx) ?: tx)->stats; in tipc_crypto_rcv()
1872 if (rx) { in tipc_crypto_rcv()
1878 rx->nokey = !(rx->skey || in tipc_crypto_rcv()
1879 rcu_access_pointer(rx->aead[n])); in tipc_crypto_rcv()
1881 rx->name, rx->nokey, in tipc_crypto_rcv()
1882 tx_key, rx->key.keys); in tipc_crypto_rcv()
1883 tipc_node_put(rx->node); in tipc_crypto_rcv()
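One detail in tipc_crypto_rcv() worth calling out is line 1857: ((rx) ?: tx)->stats uses the GNU "a ?: b" extension, which yields a when a is non-NULL and b otherwise, so packets with no per-peer RX context are accounted against the TX context instead. A tiny standalone illustration (gcc/clang only, since this is a GNU C extension; the stats struct is made up):

#include <stdio.h>

struct stats { long rx_hits; };

int main(void)
{
    struct stats tx_stats = { 0 }, rx_stats = { 0 };
    struct stats *rx = NULL, *tx = &tx_stats;

    /* GNU extension: "a ?: b" is "a ? a : b" with 'a' evaluated once.
     * With rx == NULL the TX counters absorb the event. */
    struct stats *s = rx ?: tx;

    s->rx_hits++;
    rx = &rx_stats;
    s = rx ?: tx;        /* now the per-peer RX counters are used */
    s->rx_hits++;

    printf("tx=%ld rx=%ld\n", tx_stats.rx_hits, rx_stats.rx_hits);
    return 0;
}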
1902 struct tipc_crypto *rx = aead->crypto; in tipc_crypto_rcv_complete() local
1909 rx = skb_cb->tx_clone_ctx.rx; in tipc_crypto_rcv_complete()
1911 (rx) ? tipc_node_get_id_str(rx->node) : "-", err, aead, in tipc_crypto_rcv_complete()
1928 if (!rx) { in tipc_crypto_rcv_complete()
1932 rx = tipc_node_crypto_rx(n); in tipc_crypto_rcv_complete()
1933 if (unlikely(!rx)) in tipc_crypto_rcv_complete()
1943 if (tipc_crypto_key_attach(rx, tmp, ehdr->tx_key, false) < 0) { in tipc_crypto_rcv_complete()
1960 rx->timer1 = jiffies; in tipc_crypto_rcv_complete()
1967 if (rx->key.passive && ehdr->tx_key == rx->key.passive) in tipc_crypto_rcv_complete()
1968 rx->timer2 = jiffies; in tipc_crypto_rcv_complete()
1982 tipc_crypto_key_synch(rx, *skb); in tipc_crypto_rcv_complete()
2003 if (rx) in tipc_crypto_rcv_complete()
2004 tipc_node_put(rx->node); in tipc_crypto_rcv_complete()
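In tipc_crypto_rcv_complete(), each successful decryption refreshes a per-slot timestamp: rx->timer1 when the active key was used (line 1960) and rx->timer2 when the passive one was (line 1968); these stamps are exactly what tipc_crypto_timeout() later compares against its grace limits. A trivial sketch of that "touch the slot you just used" bookkeeping (slot names and granularity are illustrative):

#include <stdio.h>
#include <time.h>

enum slot { SLOT_ACTIVE, SLOT_PASSIVE, SLOT_MAX };

static time_t last_used[SLOT_MAX];

/* Called from the decrypt-completion path: remember when each key
 * slot last did useful work, for the timeout state machine to read. */
static void touch_slot(enum slot s)
{
    last_used[s] = time(NULL);
}

int main(void)
{
    touch_slot(SLOT_ACTIVE);     /* packet decrypted with active key */
    touch_slot(SLOT_PASSIVE);    /* old key still in use by the peer */
    printf("active last used %ld, passive last used %ld\n",
           (long)last_used[SLOT_ACTIVE], (long)last_used[SLOT_PASSIVE]);
    return 0;
}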
2010 struct tipc_crypto *tx = tn->crypto_tx, *rx; in tipc_crypto_do_cmd() local
2035 rx = tipc_node_crypto_rx_by_list(p); in tipc_crypto_do_cmd()
2036 pr_info("RX(%7.7s)\n%s", tipc_node_get_id_str(rx->node), in tipc_crypto_do_cmd()
2037 tipc_crypto_key_dump(rx, buf)); in tipc_crypto_do_cmd()
2062 rx = tipc_node_crypto_rx_by_list(p); in tipc_crypto_do_cmd()
2064 tipc_node_get_id_str(rx->node)); in tipc_crypto_do_cmd()
2067 stat = per_cpu_ptr(rx->stats, cpu)->stat[i]; in tipc_crypto_do_cmd()
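The dump branch of tipc_crypto_do_cmd() walks every CPU and sums per_cpu_ptr(rx->stats, cpu)->stat[i]: counters are kept per CPU so hot-path updates avoid contention, and they are aggregated only on demand. A userspace approximation of that layout, with a fixed-size array standing in for the kernel's percpu allocator (NR_FAKE_CPUS, MAX_STATS and the field names are invented):

#include <stdio.h>

#define NR_FAKE_CPUS 4
#define MAX_STATS    3

struct crypto_stats { unsigned long stat[MAX_STATS]; };

/* One counter block per CPU so hot-path increments rarely contend. */
static struct crypto_stats percpu_stats[NR_FAKE_CPUS];

static unsigned long sum_stat(int i)
{
    unsigned long total = 0;

    for (int cpu = 0; cpu < NR_FAKE_CPUS; cpu++)
        total += percpu_stats[cpu].stat[i];
    return total;
}

int main(void)
{
    percpu_stats[0].stat[1] = 5;
    percpu_stats[3].stat[1] = 7;
    printf("stat[1] = %lu\n", sum_stat(1));    /* 12 */
    return 0;
}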
2166 struct tipc_crypto *rx; in tipc_crypto_msg_rcv() local
2173 rx = tipc_node_crypto_rx_by_addr(net, msg_prevnode(hdr)); in tipc_crypto_msg_rcv()
2174 if (unlikely(!rx)) in tipc_crypto_msg_rcv()
2179 if (tipc_crypto_key_rcv(rx, hdr)) in tipc_crypto_msg_rcv()
2186 tipc_node_put(rx->node); in tipc_crypto_msg_rcv()
2287 static bool tipc_crypto_key_rcv(struct tipc_crypto *rx, struct tipc_msg *hdr) in tipc_crypto_key_rcv() argument
2289 struct tipc_crypto *tx = tipc_net(rx->net)->crypto_tx; in tipc_crypto_key_rcv()
2298 pr_debug("%s: message data size is too small\n", rx->name); in tipc_crypto_key_rcv()
2307 pr_debug("%s: invalid MSG_CRYPTO key size\n", rx->name); in tipc_crypto_key_rcv()
2311 spin_lock(&rx->lock); in tipc_crypto_key_rcv()
2312 if (unlikely(rx->skey || (key_gen == rx->key_gen && rx->key.keys))) { in tipc_crypto_key_rcv()
2313 pr_err("%s: key existed <%p>, gen %d vs %d\n", rx->name, in tipc_crypto_key_rcv()
2314 rx->skey, key_gen, rx->key_gen); in tipc_crypto_key_rcv()
2321 pr_err("%s: unable to allocate memory for skey\n", rx->name); in tipc_crypto_key_rcv()
2331 rx->key_gen = key_gen; in tipc_crypto_key_rcv()
2332 rx->skey_mode = msg_key_mode(hdr); in tipc_crypto_key_rcv()
2333 rx->skey = skey; in tipc_crypto_key_rcv()
2334 rx->nokey = 0; in tipc_crypto_key_rcv()
2338 spin_unlock(&rx->lock); in tipc_crypto_key_rcv()
2342 if (likely(skey && queue_delayed_work(tx->wq, &rx->work, 0))) in tipc_crypto_key_rcv()
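tipc_crypto_key_rcv() distrusts the MSG_CRYPTO payload until both the outer data size and the embedded key-length field check out, refuses a key when one is already stashed for the same generation, and only then allocates and copies the material under rx->lock before handing it to the delayed worker. A minimal sketch of that validate-then-copy shape in plain C (struct key_msg and its fields are illustrative, not TIPC's wire format):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct key_msg {
    uint32_t keylen;    /* length claimed by the sender */
    uint8_t  key[];     /* key material follows */
};

/* Reject the message unless both the outer size and the embedded
 * length field are consistent; only then duplicate the key material. */
static uint8_t *key_rcv(const void *data, size_t size)
{
    const struct key_msg *msg = data;
    uint8_t *skey;

    if (size < sizeof(*msg))                   /* truncated header */
        return NULL;
    if (msg->keylen > size - sizeof(*msg))     /* lying length field */
        return NULL;

    skey = malloc(msg->keylen);
    if (!skey)
        return NULL;
    memcpy(skey, msg->key, msg->keylen);
    return skey;
}

int main(void)
{
    union {
        struct key_msg msg;
        uint8_t raw[sizeof(struct key_msg) + 16];
    } buf = { .raw = { 0 } };

    buf.msg.keylen = 16;    /* sender claims 16 bytes of key material */

    free(key_rcv(buf.raw, sizeof(buf.raw)));
    printf("short message accepted? %s\n",
           key_rcv(buf.raw, 4) ? "yes (bug)" : "no");
    return 0;
}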
2358 struct tipc_crypto *rx = container_of(dwork, struct tipc_crypto, work); in tipc_crypto_work_rx() local
2359 struct tipc_crypto *tx = tipc_net(rx->net)->crypto_tx; in tipc_crypto_work_rx()
2366 if (atomic_cmpxchg(&rx->key_distr, in tipc_crypto_work_rx()
2371 rc = tipc_crypto_key_distr(tx, key, rx->node); in tipc_crypto_work_rx()
2374 tx->name, key, tipc_node_get_id_str(rx->node), in tipc_crypto_work_rx()
2380 atomic_cmpxchg(&rx->key_distr, KEY_DISTR_COMPL, 0); in tipc_crypto_work_rx()
2384 if (rx->skey) { in tipc_crypto_work_rx()
2385 rc = tipc_crypto_key_init(rx, rx->skey, rx->skey_mode, false); in tipc_crypto_work_rx()
2388 rx->name, rc); in tipc_crypto_work_rx()
2397 kfree(rx->skey); in tipc_crypto_work_rx()
2398 rx->skey = NULL; in tipc_crypto_work_rx()
2403 if (resched && queue_delayed_work(tx->wq, &rx->work, delay)) in tipc_crypto_work_rx()
2406 tipc_node_put(rx->node); in tipc_crypto_work_rx()
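A pattern that ties the whole listing together is the node reference pinned by the delayed work: tipc_node_get() is taken only when queue_delayed_work() actually queues the item (key_synch, line 1421), the reference is kept across a self-reschedule (work_rx, line 2403), and tipc_node_put() releases it when the work finally finishes or is cancelled (work_rx line 2406, key_flush line 1225). A bare-bones refcount sketch of that pairing with C11 atomics (node_get()/node_put()/queue_work() are stand-ins, not the TIPC helpers):

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct node { atomic_int refcnt; };

static void node_get(struct node *n)
{
    atomic_fetch_add(&n->refcnt, 1);
}

static void node_put(struct node *n)
{
    if (atomic_fetch_sub(&n->refcnt, 1) == 1)
        printf("last reference dropped, node can be freed\n");
}

/* Pretend scheduler: returns false if the work was already pending. */
static bool queue_work(bool *pending)
{
    if (*pending)
        return false;
    *pending = true;
    return true;
}

int main(void)
{
    struct node n = { .refcnt = 1 };
    bool pending = false;

    if (queue_work(&pending))    /* queued: pin the node */
        node_get(&n);
    if (queue_work(&pending))    /* already pending: no extra ref */
        node_get(&n);

    /* ... the work runs here and drops the reference it owned ... */
    node_put(&n);

    node_put(&n);                /* original owner's reference */
    return 0;
}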