Lines Matching refs:dev_data (a reconstructed data-layout sketch and a caller-side usage sketch follow the listing)
49 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in bt_notif_enabled() local
51 (void)atomic_set(&dev_data->bt.enabled, enabled ? 1 : 0); in bt_notif_enabled()
55 if (!ring_buf_is_empty(dev_data->uart.tx_ringbuf)) { in bt_notif_enabled()
56 k_work_reschedule_for_queue(&nus_work_queue, &dev_data->uart.tx_work, K_NO_WAIT); in bt_notif_enabled()
68 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in bt_received() local
69 struct ring_buf *ringbuf = dev_data->uart.rx_ringbuf; in bt_received()
80 k_work_submit_to_queue(&nus_work_queue, &dev_data->uart.cb_work); in bt_received()
117 struct uart_bt_data *dev_data = CONTAINER_OF(work, struct uart_bt_data, uart.cb_work); in cb_work_handler() local
119 if (dev_data->uart.callback.cb) { in cb_work_handler()
120 dev_data->uart.callback.cb( in cb_work_handler()
121 dev_data->uart.callback.dev, in cb_work_handler()
122 dev_data->uart.callback.cb_data); in cb_work_handler()
129 struct uart_bt_data *dev_data = CONTAINER_OF(dwork, struct uart_bt_data, uart.tx_work); in tx_work_handler() local
134 __ASSERT_NO_MSG(dev_data); in tx_work_handler()
142 len = ring_buf_get_claim(dev_data->uart.tx_ringbuf, &data, chunk_size); in tx_work_handler()
144 err = bt_nus_inst_send(NULL, dev_data->bt.inst, data, len); in tx_work_handler()
150 ring_buf_get_finish(dev_data->uart.tx_ringbuf, len); in tx_work_handler()
153 if ((ring_buf_space_get(dev_data->uart.tx_ringbuf) > 0) && dev_data->uart.tx_irq_ena) { in tx_work_handler()
154 k_work_submit_to_queue(&nus_work_queue, &dev_data->uart.cb_work); in tx_work_handler()
160 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_fifo_fill() local
163 wrote = ring_buf_put(dev_data->uart.tx_ringbuf, tx_data, len); in uart_bt_fifo_fill()
168 if (atomic_get(&dev_data->bt.enabled)) { in uart_bt_fifo_fill()
169 k_work_reschedule_for_queue(&nus_work_queue, &dev_data->uart.tx_work, K_NO_WAIT); in uart_bt_fifo_fill()
177 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_fifo_read() local
179 return ring_buf_get(dev_data->uart.rx_ringbuf, rx_data, size); in uart_bt_fifo_read()
191 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_poll_out() local
192 struct ring_buf *ringbuf = dev_data->uart.tx_ringbuf; in uart_bt_poll_out()
196 if (k_is_in_isr() || !atomic_get(&dev_data->bt.enabled)) { in uart_bt_poll_out()
205 if (atomic_get(&dev_data->bt.enabled)) { in uart_bt_poll_out()
210 k_work_schedule_for_queue(&nus_work_queue, &dev_data->uart.tx_work, K_MSEC(1)); in uart_bt_poll_out()
216 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_irq_tx_ready() local
218 if ((ring_buf_space_get(dev_data->uart.tx_ringbuf) > 0) && dev_data->uart.tx_irq_ena) { in uart_bt_irq_tx_ready()
227 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_irq_tx_enable() local
229 dev_data->uart.tx_irq_ena = true; in uart_bt_irq_tx_enable()
232 k_work_submit_to_queue(&nus_work_queue, &dev_data->uart.cb_work); in uart_bt_irq_tx_enable()
238 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_irq_tx_disable() local
240 dev_data->uart.tx_irq_ena = false; in uart_bt_irq_tx_disable()
245 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_irq_rx_ready() local
247 if (!ring_buf_is_empty(dev_data->uart.rx_ringbuf) && dev_data->uart.rx_irq_ena) { in uart_bt_irq_rx_ready()
256 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_irq_rx_enable() local
258 dev_data->uart.rx_irq_ena = true; in uart_bt_irq_rx_enable()
260 k_work_submit_to_queue(&nus_work_queue, &dev_data->uart.cb_work); in uart_bt_irq_rx_enable()
265 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_irq_rx_disable() local
267 dev_data->uart.rx_irq_ena = false; in uart_bt_irq_rx_disable()
286 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_irq_callback_set() local
288 dev_data->uart.callback.cb = cb; in uart_bt_irq_callback_set()
289 dev_data->uart.callback.cb_data = cb_data; in uart_bt_irq_callback_set()
325 struct uart_bt_data *dev_data = (struct uart_bt_data *)dev->data; in uart_bt_init() local
330 dev_data->uart.callback.dev = dev; in uart_bt_init()
332 k_work_init_delayable(&dev_data->uart.tx_work, tx_work_handler); in uart_bt_init()
333 k_work_init(&dev_data->uart.cb_work, cb_work_handler); in uart_bt_init()
335 err = bt_nus_inst_cb_register(dev_data->bt.inst, &dev_data->bt.cb, (void *)dev); in uart_bt_init()
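Taken together, the dev_data accesses above imply roughly the following per-instance layout. This is a reconstruction for orientation only, not the driver's actual definition: the field names and their apparent types come from the listing (atomic_set(), k_work_init_delayable(), ring_buf_put(), bt_nus_inst_send(), and so on), while the header paths, member ordering, and anything not referenced above are assumptions.

/*
 * Sketch of the per-instance data implied by the dev_data references in the
 * listing. Field names are taken from the listing; exact types, ordering and
 * any members not referenced above are assumptions, not the driver source.
 */
#include <zephyr/kernel.h>
#include <zephyr/sys/ring_buffer.h>
#include <zephyr/drivers/uart.h>
#include <zephyr/bluetooth/services/nus.h>   /* bt_nus_inst_send(), bt_nus_inst_cb_register() */

struct uart_bt_data {
	struct {
		struct bt_nus_inst *inst;    /* passed to bt_nus_inst_send() in tx_work_handler() */
		struct bt_nus_cb cb;         /* registered against inst in uart_bt_init() */
		atomic_t enabled;            /* set in bt_notif_enabled(), checked before scheduling TX */
	} bt;
	struct {
		struct {
			uart_irq_callback_user_data_t cb;  /* set in uart_bt_irq_callback_set() */
			void *cb_data;
			const struct device *dev;          /* back-pointer set in uart_bt_init() */
		} callback;
		struct ring_buf *tx_ringbuf;      /* filled by fifo_fill()/poll_out(), drained by tx_work_handler() */
		struct ring_buf *rx_ringbuf;      /* filled by bt_received(), drained by fifo_read() */
		struct k_work_delayable tx_work;  /* pushes TX ring buffer contents out over NUS */
		struct k_work cb_work;            /* invokes callback.cb from nus_work_queue */
		bool tx_irq_ena;                  /* toggled by uart_bt_irq_tx_enable()/_disable() */
		bool rx_irq_ena;                  /* toggled by uart_bt_irq_rx_enable()/_disable() */
	} uart;
};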
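From the application's point of view, none of this dev_data bookkeeping is visible: the functions in the listing (uart_bt_fifo_fill(), uart_bt_irq_rx_enable(), uart_bt_irq_callback_set(), ...) are the backend of Zephyr's standard interrupt-driven UART API. A minimal usage sketch under that assumption follows; the devicetree node label and the payload are placeholders, not taken from the driver source.

/*
 * Minimal sketch of driving the NUS UART backend through the generic
 * interrupt-driven UART API. The node label (uart_nus) and the message are
 * hypothetical; error handling is trimmed to the essentials.
 */
#include <zephyr/kernel.h>
#include <zephyr/drivers/uart.h>

static void uart_cb(const struct device *dev, void *user_data)
{
	uint8_t buf[64];

	ARG_UNUSED(user_data);

	if (!uart_irq_update(dev)) {
		return;
	}

	/* Backed by uart_bt_irq_rx_ready()/uart_bt_fifo_read() in the listing:
	 * drains the RX ring buffer that bt_received() filled. */
	if (uart_irq_rx_ready(dev)) {
		int len = uart_fifo_read(dev, buf, sizeof(buf));

		/* ... consume len bytes received over NUS ... */
		ARG_UNUSED(len);
	}

	/* Backed by uart_bt_irq_tx_ready()/uart_bt_fifo_fill(): bytes queued here
	 * land in the TX ring buffer and are sent later by tx_work_handler(). */
	if (uart_irq_tx_ready(dev)) {
		static const uint8_t msg[] = "hello over NUS\r\n";

		uart_fifo_fill(dev, msg, sizeof(msg) - 1);
		uart_irq_tx_disable(dev);
	}
}

int main(void)
{
	const struct device *dev = DEVICE_DT_GET(DT_NODELABEL(uart_nus));

	if (!device_is_ready(dev)) {
		return -ENODEV;
	}

	uart_irq_callback_user_data_set(dev, uart_cb, NULL);
	uart_irq_rx_enable(dev);
	uart_irq_tx_enable(dev);

	return 0;
}

The only backend-specific behavior visible from this side is the deferral: data queued with uart_fifo_fill() is not on air when the call returns, it is flushed asynchronously by tx_work_handler() once notifications are enabled on the NUS characteristic (the bt.enabled gate set in bt_notif_enabled()).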