/drivers/net/hamradio/
baycom_ser_hdx.c
    181  #define SER12_ARB_DIVIDER(bc) (bc->opt_dcd ? 24 : 36)   [argument]
    183  #define SER12_DCD_INTERVAL(bc) (bc->opt_dcd ? 12 : 240)   [argument]
    196  bc->modem.ser12.tx_bit = !(bc->modem.ser12.tx_bit ^   in ser12_tx()
    234  bc->modem.ser12.dcd_sum2 = bc->modem.ser12.dcd_sum1;   in ser12_rx()
    235  bc->modem.ser12.dcd_sum1 = bc->modem.ser12.dcd_sum0;   in ser12_rx()
    238  bc->modem.ser12.dcd_time = SER12_DCD_INTERVAL(bc);   in ser12_rx()
    341  hdlcdrv_putbits(&bc->hdrv, bc->modem.shreg >> 1);   in ser12_rx()
    355  bc->modem.ser12.dcd_time = SER12_DCD_INTERVAL(bc);   in ser12_rx()
    368  if (!dev || !bc || bc->hdrv.magic != HDLCDRV_MAGIC)   in ser12_interrupt()
    404  bc->modem.arb_divider = SER12_ARB_DIVIDER(bc);   in ser12_interrupt()
    [all …]
|
baycom_epp.c
    306  bc->cfg.extmodem ? "ext" : "int", bc->cfg.fclk, bc->cfg.bps,   in eppconfig()
    307  (bc->cfg.fclk + 8 * bc->cfg.bps) / (16 * bc->cfg.bps),   in eppconfig()
    412  bc->hdlctx.bufptr = bc->hdlctx.buf;   in encode_hdlc()
    413  bc->hdlctx.bufcnt = wp - bc->hdlctx.buf;   in encode_hdlc()
    440  bc->hdlctx.slotcnt = bc->ch_params.slottime;   in transmit()
    447  bc->hdlctx.flags = tenms_to_flags(bc, bc->ch_params.tx_delay);   in transmit()
    472  bc->hdlctx.flags = tenms_to_flags(bc, bc->ch_params.tx_tail);   in transmit()
    594  bc->hdlcrx.bufptr = bc->hdlcrx.buf;   in receive()
    842  memset(&bc->modem, 0, sizeof(bc->modem));   in epp_open()
    927  bc->hdlctx.slotcnt = bc->ch_params.slottime;   in epp_open()
    [all …]
|
baycom_ser_fdx.c
    155  bc->debug_vals.last_intcnt = bc->debug_vals.cur_intcnt;   in baycom_int_freq()
    204  bc->modem.ser12.pll_time += bc->baud_us;   in ser12_rx()
    219  bc->modem.ser12.dcd_sum2 = bc->modem.ser12.dcd_sum1;   in ser12_rx()
    220  bc->modem.ser12.dcd_sum1 = bc->modem.ser12.dcd_sum0;   in ser12_rx()
    255  if (!bc || bc->hdrv.magic != HDLCDRV_MAGIC)   in ser12_interrupt()
    314  bc->modem.ser12.tx_bit = !(bc->modem.ser12.tx_bit ^ (bc->modem.ser12.txshreg & 1));   in ser12_interrupt()
    389  if (bc->baud < 300 || bc->baud > 4800) {   in ser12_open()
    399  memset(&bc->modem, 0, sizeof(bc->modem));   in ser12_open()
    400  bc->hdrv.par.bitrate = bc->baud;   in ser12_open()
    401  bc->baud_us = 1000000/bc->baud;   in ser12_open()
    [all …]
|
baycom_par.c
    153  bc->debug_vals.last_intcnt = bc->debug_vals.cur_intcnt;   in baycom_int_freq()
    155  bc->debug_vals.last_pllcorr = bc->debug_vals.cur_pllcorr;   in baycom_int_freq()
    187  bc->modem.par96.scram = ((bc->modem.par96.scram << 1) |   in par96_tx()
    213  bc->modem.par96.descram = (bc->modem.par96.descram << 1);   in par96_rx()
    232  bc->modem.par96.dcd_shreg = (bc->modem.par96.dcd_shreg >> 16)   in par96_rx()
    248  hdlcdrv_setdcd(&bc->hdrv, bc->modem.par96.dcd_count > 0);   in par96_rx()
    302  if (!dev || !bc)   in par96_open()
    319  memset(&bc->modem, 0, sizeof(bc->modem));   in par96_open()
    337  if (!bc->pdev) {   in par96_open()
    364  if (!dev || !bc)   in par96_close()
    [all …]
|
/drivers/tty/
ehv_bytechan.c
    422  ret = local_ev_byte_channel_send(bc->handle, &len, bc->buf + bc->tail);   in ehv_bc_tx_dequeue()
    426  bc->tail = (bc->tail + len) & (BUF_SIZE - 1);   in ehv_bc_tx_dequeue()
    428  count = CIRC_CNT(bc->head, bc->tail, BUF_SIZE);   in ehv_bc_tx_dequeue()
    433  if (CIRC_CNT(bc->head, bc->tail, BUF_SIZE))   in ehv_bc_tx_dequeue()
    485  memcpy(bc->buf + bc->head, s, len);   in ehv_bc_tty_write()
    486  bc->head = (bc->head + len) & (BUF_SIZE - 1);   in ehv_bc_tty_write()
    548  count = CIRC_SPACE(bc->head, bc->tail, BUF_SIZE);   in ehv_bc_tty_write_room()
    644  free_irq(bc->rx_irq, bc);   in ehv_bc_tty_port_activate()
    660  free_irq(bc->tx_irq, bc);   in ehv_bc_tty_port_shutdown()
    661  free_irq(bc->rx_irq, bc);   in ehv_bc_tty_port_shutdown()
    [all …]
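
The ehv_bytechan hits above all revolve around the kernel's power-of-two ring-buffer helpers. Below is a minimal userspace sketch of that head/tail arithmetic, with CIRC_CNT/CIRC_SPACE defined locally to mirror include/linux/circ_buf.h; the BUF_SIZE value and the bc_write()/bc_drain() names are illustrative, not the driver's.

```c
#include <stdio.h>

/* Local copies mirroring include/linux/circ_buf.h; size must be a power of two. */
#define CIRC_CNT(head, tail, size)   (((head) - (tail)) & ((size) - 1))
#define CIRC_SPACE(head, tail, size) CIRC_CNT((tail), ((head) + 1), (size))

#define BUF_SIZE 16	/* illustrative; any power of two works */

struct byte_channel {
	unsigned char buf[BUF_SIZE];
	unsigned int head;	/* producer index */
	unsigned int tail;	/* consumer index */
};

/* Producer: copy as much of s as fits, then advance head modulo BUF_SIZE. */
static unsigned int bc_write(struct byte_channel *bc, const char *s, unsigned int len)
{
	unsigned int space = CIRC_SPACE(bc->head, bc->tail, BUF_SIZE);
	unsigned int i;

	if (len > space)
		len = space;
	for (i = 0; i < len; i++)
		bc->buf[(bc->head + i) & (BUF_SIZE - 1)] = s[i];
	bc->head = (bc->head + len) & (BUF_SIZE - 1);
	return len;
}

/* Consumer: drain one byte at a time while CIRC_CNT() reports data. */
static void bc_drain(struct byte_channel *bc)
{
	while (CIRC_CNT(bc->head, bc->tail, BUF_SIZE)) {
		putchar(bc->buf[bc->tail]);
		bc->tail = (bc->tail + 1) & (BUF_SIZE - 1);
	}
	putchar('\n');
}

int main(void)
{
	struct byte_channel bc = { .head = 0, .tail = 0 };

	bc_write(&bc, "hello", 5);
	bc_drain(&bc);
	return 0;
}
```

The driver itself memcpy()s contiguous runs; this sketch indexes byte by byte so it also handles wrap-around without splitting the copy.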
|
/drivers/pmdomain/imx/
imx8mp-blk-ctrl.c
    67   struct imx8mp_blk_ctrl *bc;   [member]
    527  struct imx8mp_blk_ctrl *bc = domain->bc;   in imx8mp_blk_ctrl_power_on()   [local]
    545  bc->power_on(bc, domain);   in imx8mp_blk_ctrl_power_on()
    574  struct imx8mp_blk_ctrl *bc = domain->bc;   in imx8mp_blk_ctrl_power_off()   [local]
    584  bc->power_off(bc, domain);   in imx8mp_blk_ctrl_power_off()
    613  bc = devm_kzalloc(dev, sizeof(*bc), GFP_KERNEL);   in imx8mp_blk_ctrl_probe()
    614  if (!bc)   in imx8mp_blk_ctrl_probe()
    617  bc->dev = dev;   in imx8mp_blk_ctrl_probe()
    635  if (!bc->domains)   in imx8mp_blk_ctrl_probe()
    704  domain->bc = bc;   in imx8mp_blk_ctrl_probe()
    [all …]
|
imx8m-blk-ctrl.c
    68   struct imx8m_blk_ctrl *bc;   [member]
    89   struct imx8m_blk_ctrl *bc = domain->bc;   in imx8m_blk_ctrl_power_on()   [local]
    149  struct imx8m_blk_ctrl *bc = domain->bc;   in imx8m_blk_ctrl_power_off()   [local]
    173  struct imx8m_blk_ctrl *bc;   in imx8m_blk_ctrl_probe()   [local]
    183  bc = devm_kzalloc(dev, sizeof(*bc), GFP_KERNEL);   in imx8m_blk_ctrl_probe()
    184  if (!bc)   in imx8m_blk_ctrl_probe()
    187  bc->dev = dev;   in imx8m_blk_ctrl_probe()
    197  if (IS_ERR(bc->regmap))   in imx8m_blk_ctrl_probe()
    204  if (!bc->domains)   in imx8m_blk_ctrl_probe()
    275  domain->bc = bc;   in imx8m_blk_ctrl_probe()
    [all …]
|
imx93-blk-ctrl.c
    104  struct imx93_blk_ctrl *bc = domain->bc;   in imx93_blk_ctrl_set_qos()   [local]
    129  struct imx93_blk_ctrl *bc = domain->bc;   in imx93_blk_ctrl_power_on()   [local]
    132  ret = clk_bulk_prepare_enable(bc->num_clks, bc->clks);   in imx93_blk_ctrl_power_on()
    140  clk_bulk_disable_unprepare(bc->num_clks, bc->clks);   in imx93_blk_ctrl_power_on()
    165  clk_bulk_disable_unprepare(bc->num_clks, bc->clks);   in imx93_blk_ctrl_power_on()
    174  struct imx93_blk_ctrl *bc = domain->bc;   in imx93_blk_ctrl_power_off()   [local]
    185  clk_bulk_disable_unprepare(bc->num_clks, bc->clks);   in imx93_blk_ctrl_power_off()
    209  bc = devm_kzalloc(dev, sizeof(*bc), GFP_KERNEL);   in imx93_blk_ctrl_probe()
    210  if (!bc)   in imx93_blk_ctrl_probe()
    241  ret = devm_clk_bulk_get(dev, bc->num_clks, bc->clks);   in imx93_blk_ctrl_probe()
    [all …]
|
/drivers/isdn/hardware/mISDN/
netjet.c
    204  bc->bch.nr, bc->bch.state, protocol);   in mode_tiger()
    436  card->name, bc->bch.nr, bc->bch.maxlen);   in read_dma()
    482  __func__, bc->bch.nr, bc->free, bc->idx, card->send.idx);   in resync()
    498  __func__, bc->bch.nr, bc->free, bc->txstate,   in fill_hdlc_flag()
    503  bc->hsbuf, bc->free);   in fill_hdlc_flag()
    546  bc->bch.tx_idx, bc->bch.tx_skb->len, bc->txstate,   in fill_dma()
    554  bc->hsbuf, bc->free);   in fill_dma()
    606  if (bc->bch.tx_skb && bc->bch.tx_idx < bc->bch.tx_skb->len) {   in bc_next_frame()
    634  bc->bch.nr, bc->txstate);   in send_tiger_bc()
    646  bc->bch.nr, bc->free, bc->idx, card->send.idx);   in send_tiger_bc()
    [all …]
|
w6692.c
    66    struct w6692_ch bc[2];   [member]
    83    card->bc[0].bch.debug = debug;   in _set_debug()
    84    card->bc[1].bch.debug = debug;   in _set_debug()
    129   return inb(bc->addr + offset);   in ReadW6692B()
    135   outb(value, bc->addr + offset);   in WriteW6692B()
    947   W6692_fill_Bfifo(bc);   in w6692_l2l1B()
    965   w6692_mode(bc, ISDN_P_NONE);   in w6692_l2l1B()
    1040  w6692_mode(bc, ISDN_P_NONE);   in w6692_bctrl()
    1224  card->bc[0].addr = card->addr;   in setup_w6692()
    1297  card->bc[i].bch.hw = card;   in setup_instance()
    [all …]
|
hfcpci.c
    1249  fifo2 = bc;   in mode_hfcpci()
    1258  bc = bc & 0xff;   in mode_hfcpci()
    1266  if (bc & 2) {   in mode_hfcpci()
    1283  bch->nr = bc;   in mode_hfcpci()
    1288  if (bc & 2) {   in mode_hfcpci()
    1311  bch->nr = bc;   in mode_hfcpci()
    1317  bch->nr = bc;   in mode_hfcpci()
    1320  if (bc & 2) {   in mode_hfcpci()
    1352  bch->nr = bc;   in mode_hfcpci()
    1355  if (bc & 2) {   in mode_hfcpci()
    [all …]
|
/drivers/gpu/drm/i915/gt/
gen7_renderclear.c
    95   bc->vma = vma;   in batch_init()
    97   bc->start = start + bc->offset / sizeof(*bc->start);   in batch_init()
    98   bc->end = bc->start;   in batch_init()
    99   bc->max_items = max_bytes / sizeof(*bc->start);   in batch_init()
    104  return (cs - bc->start) * sizeof(*bc->start) + bc->offset;   in batch_offset()
    114  GEM_BUG_ON((bc->end - bc->start) >= bc->max_items);   in batch_add()
    115  *bc->end++ = d;   in batch_add()
    125  memset32(bc->end, 0, end - bc->end);   in batch_alloc_items()
    126  bc->end = end;   in batch_alloc_items()
    129  map = bc->end;   in batch_alloc_items()
    [all …]
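
The gen7_renderclear hits describe a small append-only buffer of 32-bit commands (start/end/max_items bookkeeping with `*bc->end++ = d`). The following is a rough standalone sketch of that bookkeeping under assumed, cut-down types, with a plain assert standing in for GEM_BUG_ON; it is not the i915 code itself.

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical cut-down version of the batch bookkeeping seen above. */
struct batch_chunk {
	uint32_t *start;	/* first dword of the chunk */
	uint32_t *end;		/* next free dword */
	unsigned int max_items;
	unsigned int offset;	/* byte offset of the chunk in its backing buffer */
};

static void batch_init(struct batch_chunk *bc, uint32_t *storage,
		       unsigned int offset, unsigned int max_bytes)
{
	bc->start = storage + offset / sizeof(*bc->start);
	bc->end = bc->start;
	bc->max_items = max_bytes / sizeof(*bc->start);
	bc->offset = offset;
}

/* Byte offset of a cursor within the overall buffer. */
static unsigned int batch_offset(const struct batch_chunk *bc, const uint32_t *cs)
{
	return (cs - bc->start) * sizeof(*bc->start) + bc->offset;
}

/* Append one dword, trapping on overflow (stand-in for GEM_BUG_ON). */
static void batch_add(struct batch_chunk *bc, uint32_t d)
{
	assert((unsigned int)(bc->end - bc->start) < bc->max_items);
	*bc->end++ = d;
}

int main(void)
{
	uint32_t storage[64] = { 0 };
	struct batch_chunk bc;

	batch_init(&bc, storage, 0, sizeof(storage));
	batch_add(&bc, 0x7a000004);	/* arbitrary example dwords */
	batch_add(&bc, 0x00000000);
	printf("emitted %u bytes\n", batch_offset(&bc, bc.end));
	return 0;
}
```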
|
/drivers/clk/imx/
clk-imx95-blk-ctl.c
    353  struct imx95_blk_ctl *bc;   in imx95_bc_probe()   [local]
    359  bc = devm_kzalloc(dev, sizeof(*bc), GFP_KERNEL);   in imx95_bc_probe()
    360  if (!bc)   in imx95_bc_probe()
    362  bc->dev = dev;   in imx95_bc_probe()
    363  dev_set_drvdata(&pdev->dev, bc);   in imx95_bc_probe()
    365  spin_lock_init(&bc->lock);   in imx95_bc_probe()
    371  bc->base = base;   in imx95_bc_probe()
    373  if (IS_ERR(bc->clk_apb))   in imx95_bc_probe()
    487  bc->clk_reg_restore = readl(bc->base + bc_data->clk_reg_offset);   in imx95_bc_suspend()
    501  writel(bc->clk_reg_restore, bc->base + bc_data->clk_reg_offset);   in imx95_bc_resume()
    [all …]
|
/drivers/spmi/
hisi-spmi-controller.c
    122  if (bc > SPMI_CONTROLLER_MAX_TRANS_BYTES) {   in spmi_read_cmd()
    125  SPMI_CONTROLLER_MAX_TRANS_BYTES, bc);   in spmi_read_cmd()
    165  if ((bc - i * SPMI_PER_DATAREG_BYTE) >> 2) {   in spmi_read_cmd()
    169  memcpy(buf, &data, bc % SPMI_PER_DATAREG_BYTE);   in spmi_read_cmd()
    170  buf += (bc % SPMI_PER_DATAREG_BYTE);   in spmi_read_cmd()
    179  opc, slave_id, slave_addr, bc + 1);   in spmi_read_cmd()
    198  if (bc > SPMI_CONTROLLER_MAX_TRANS_BYTES) {   in spmi_write_cmd()
    201  SPMI_CONTROLLER_MAX_TRANS_BYTES, bc);   in spmi_write_cmd()
    231  if ((bc - i * SPMI_PER_DATAREG_BYTE) >> 2) {   in spmi_write_cmd()
    236  buf += (bc % SPMI_PER_DATAREG_BYTE);   in spmi_write_cmd()
    [all …]
|
spmi-pmic-arb.c
    282  u32 reg, u8 bc)   in pmic_arb_write_data()   [argument]
    391  u8 bc = len - 1;   in pmic_arb_fmt_read_cmd()   [local]
    427  u8 bc = len - 1;   in pmic_arb_read_cmd_unlocked()   [local]
    437  min_t(u8, bc, 3));   in pmic_arb_read_cmd_unlocked()
    439  if (bc > 3)   in pmic_arb_read_cmd_unlocked()
    441  bc - 4);   in pmic_arb_read_cmd_unlocked()
    470  u8 bc = len - 1;   in pmic_arb_fmt_write_cmd()   [local]
    508  u8 bc = len - 1;   in pmic_arb_write_cmd_unlocked()   [local]
    512  min_t(u8, bc, 3));   in pmic_arb_write_cmd_unlocked()
    513  if (bc > 3)   in pmic_arb_write_cmd_unlocked()
    [all …]
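
In both SPMI controller drivers the byte count `bc` is the transfer length minus one, and the snippets suggest the payload moves through 32-bit data registers four bytes at a time: the first register takes `min(bc, 3) + 1` bytes and a second one carries whatever remains. A hedged standalone sketch of that split, using made-up register stubs (`data0_reg`, `data1_reg`, `pack_data()`) rather than the drivers' real accessors:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Stand-ins for the controller's 32-bit data registers (hypothetical). */
static uint32_t data0_reg, data1_reg;

/* Pack bc + 1 bytes of buf into a 32-bit register image (host byte order;
 * the real controller defines its own layout). */
static uint32_t pack_data(const uint8_t *buf, uint8_t bc)
{
	uint32_t word = 0;

	memcpy(&word, buf, bc + 1);	/* bc is "bytes - 1", as in the snippets */
	return word;
}

/*
 * Sketch of an extended-register write: the command's byte-count field
 * encodes len - 1, the first data register carries at most four bytes and
 * a second register carries the remainder.
 */
static void spmi_ext_write(const uint8_t *buf, uint8_t len)
{
	uint8_t bc = len - 1;

	data0_reg = pack_data(buf, bc > 3 ? 3 : bc);
	if (bc > 3)
		data1_reg = pack_data(buf + 4, bc - 4);

	printf("cmd bc field=%u data0=%08x data1=%08x\n", bc, data0_reg, data1_reg);
}

int main(void)
{
	const uint8_t payload[6] = { 0x11, 0x22, 0x33, 0x44, 0x55, 0x66 };

	spmi_ext_write(payload, sizeof(payload));
	return 0;
}
```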
|
/drivers/md/
dm-bufio.c
    407  read_lock_bh(&bc->trees[cache_index(block, bc->num_locks)].u.spinlock);   in cache_read_lock()
    409  down_read(&bc->trees[cache_index(block, bc->num_locks)].u.lock);   in cache_read_lock()
    415  read_unlock_bh(&bc->trees[cache_index(block, bc->num_locks)].u.spinlock);   in cache_read_unlock()
    417  up_read(&bc->trees[cache_index(block, bc->num_locks)].u.lock);   in cache_read_unlock()
    423  write_lock_bh(&bc->trees[cache_index(block, bc->num_locks)].u.spinlock);   in cache_write_lock()
    425  down_write(&bc->trees[cache_index(block, bc->num_locks)].u.lock);   in cache_write_lock()
    433  up_write(&bc->trees[cache_index(block, bc->num_locks)].u.lock);   in cache_write_unlock()
    570  return cache_count(bc, LIST_CLEAN) + cache_count(bc, LIST_DIRTY);   in cache_total()
    678  le = lru_evict(&bc->lru[list_mode], __evict_pred, &w, bc->no_sleep);   in __cache_evict()
    732  le = lru_evict(&bc->lru[old_mode], __evict_pred, &w, bc->no_sleep);   in __cache_mark_many()
    [all …]
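
dm-bufio stripes its buffer cache over several locks and picks one per lookup by hashing the block number (`cache_index(block, bc->num_locks)`), taking either a spinlock or an rwsem depending on whether the client may sleep. A small pthread-based sketch of the striping idea only, with a hypothetical hash in place of the driver's cache_index() and a plain rwlock per stripe:

```c
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

#define NUM_LOCKS 8	/* must be a power of two for the mask below */

/* One stripe per lock; a real cache would hang a tree of buffers off each stripe. */
static pthread_rwlock_t stripe_lock[NUM_LOCKS];

/* Hypothetical stand-in for cache_index(): hash the block, mask to a stripe. */
static unsigned int cache_index(uint64_t block, unsigned int num_locks)
{
	block *= 0x9e3779b97f4a7c15ULL;	/* cheap multiplicative hash */
	return (block >> 32) & (num_locks - 1);
}

static void cache_read_lock(uint64_t block)
{
	pthread_rwlock_rdlock(&stripe_lock[cache_index(block, NUM_LOCKS)]);
}

static void cache_read_unlock(uint64_t block)
{
	pthread_rwlock_unlock(&stripe_lock[cache_index(block, NUM_LOCKS)]);
}

int main(void)
{
	uint64_t block;

	for (block = 0; block < NUM_LOCKS; block++)
		pthread_rwlock_init(&stripe_lock[block], NULL);

	/* Lookups on different blocks usually land on different stripes,
	 * so readers of unrelated blocks rarely contend. */
	for (block = 0; block < 4; block++) {
		cache_read_lock(block);
		printf("block %llu -> stripe %u\n",
		       (unsigned long long)block, cache_index(block, NUM_LOCKS));
		cache_read_unlock(block);
	}
	return 0;
}
```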
|
dm-io-rewind.c
    96  struct bio_crypt_ctx *bc = bio->bi_crypt_context;   in dm_bio_crypt_rewind()   [local]
    98  dm_bio_crypt_dun_decrement(bc->bc_dun,   in dm_bio_crypt_rewind()
    99  bytes >> bc->bc_key->data_unit_size_bits);   in dm_bio_crypt_rewind()
|
/drivers/net/ethernet/qlogic/qlcnic/
qlcnic_sriov_common.c
    158   bc = &sriov->bc;   in qlcnic_sriov_init()
    174   bc->bc_trans_wq = wq;   in qlcnic_sriov_init()
    183   bc->bc_async_wq = wq;   in qlcnic_sriov_init()
    187   bc->adapter = adapter;   in qlcnic_sriov_init()
    264   struct qlcnic_back_channel *bc = &sriov->bc;   in __qlcnic_sriov_cleanup()   [local]
    1537  spin_lock(&bc->queue_lock);   in qlcnic_sriov_cleanup_async_list()
    1612  head = &bc->async_cmd_list;   in qlcnic_sriov_handle_async_issue_cmd()
    1614  spin_lock(&bc->queue_lock);   in qlcnic_sriov_handle_async_issue_cmd()
    1626  queue_work(bc->bc_async_wq, &bc->vf_async_work);   in qlcnic_sriov_handle_async_issue_cmd()
    1662  queue_work(bc->bc_async_wq, &bc->vf_async_work);   in qlcnic_sriov_schedule_async_cmd()
    [all …]
|
/drivers/net/ethernet/davicom/
dm9051.c
    112   struct rx_ctl_mach bc;   [member]
    285   db->bc.fifo_rst_counter++;   in dm9051_core_reset()
    723   db->bc.status_err_counter + db->bc.large_err_counter,   in dm9051_all_restart()
    724   db->bc.fifo_rst_counter);   in dm9051_all_restart()
    774   db->bc.large_err_counter++;   in dm9051_loop_rx()
    792   db->bc.rx_err_counter++;   in dm9051_loop_rx()
    856   db->bc.tx_err_counter++;   in dm9051_loop_tx()
    1101  db->bc.large_err_counter = 0;   in dm9051_operation_clear()
    1102  db->bc.rx_err_counter = 0;   in dm9051_operation_clear()
    1103  db->bc.tx_err_counter = 0;   in dm9051_operation_clear()
    [all …]
|
/drivers/media/dvb-frontends/
bcm3510.c
    363  static int bcm3510_tuner_cmd(struct bcm3510_state* st,u8 bc, u16 n, u8 a)   in bcm3510_tuner_cmd()   [argument]
    379  c.ctl_dat[0].data = 0x80 | bc;   in bcm3510_tuner_cmd()
    454  u8 bc,a;   in bcm3510_set_freq()   [local]
    463  bc = 0x1c;   in bcm3510_set_freq()
    465  bc = 0x2c;   in bcm3510_set_freq()
    467  bc = 0x30;   in bcm3510_set_freq()
    488  deb_info(" BC1_2_3_4: %x, N: %x A: %x\n", bc, n, a);   in bcm3510_set_freq()
    490  return bcm3510_tuner_cmd(st,bc,n,a);   in bcm3510_set_freq()
|
/drivers/gpu/drm/bridge/
lontium-lt9211.c
    226  u8 bc[3];   in lt9211_autodetect_rx()   [local]
    238  ret = regmap_bulk_read(ctx->regmap, 0x8608, bc, sizeof(bc));   in lt9211_autodetect_rx()
    243  byteclk = ((bc[0] & 0xf) << 16) | (bc[1] << 8) | bc[2];   in lt9211_autodetect_rx()
|
/drivers/parisc/
pdc_stable.c
    239  if (devpath->path.bc[i] < 0)   in pdcspath_hwpath_read()
    241  out += sprintf(out, "%d/", devpath->path.bc[i]);   in pdcspath_hwpath_read()
    296  hwpath.bc[i] = simple_strtoul(temp+1, NULL, 10);   in pdcspath_hwpath_write()
    298  DPRINTK("%s: bc[%d]: %d\n", __func__, i, hwpath.path.bc[i]);   in pdcspath_hwpath_write()
    302  hwpath.bc[i] = simple_strtoul(in, NULL, 10);   in pdcspath_hwpath_write()
    303  DPRINTK("%s: bc[%d]: %d\n", __func__, i, hwpath.path.bc[i]);   in pdcspath_hwpath_write()
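
The pdc_stable hits print a PA-RISC hardware path as slash-separated bus-converter numbers and parse it back with simple_strtoul(). A userspace approximation of that round trip, with strtoul() in place of simple_strtoul(), an illustrative BC_DEPTH, and hypothetical helper names:

```c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define BC_DEPTH 6	/* illustrative depth for the bus-converter array */

struct hwpath {
	int bc[BC_DEPTH];	/* -1 marks an unused slot; the read side skips those */
	int mod;
};

/* Format "b0/b1/.../mod", skipping unused (-1) entries. */
static void hwpath_print(const struct hwpath *p, char *out, size_t len)
{
	size_t used = 0;
	int i;

	out[0] = '\0';
	for (i = 0; i < BC_DEPTH; i++) {
		if (p->bc[i] < 0)
			continue;
		used += snprintf(out + used, len - used, "%d/", p->bc[i]);
	}
	snprintf(out + used, len - used, "%d", p->mod);
}

/* Parse the same form back: each '/'-separated token is a decimal number. */
static void hwpath_parse(const char *in, struct hwpath *p)
{
	int i;

	for (i = 0; i < BC_DEPTH; i++)
		p->bc[i] = -1;

	for (i = 0; i < BC_DEPTH && strchr(in, '/'); i++) {
		p->bc[i] = (int)strtoul(in, NULL, 10);
		in = strchr(in, '/') + 1;
	}
	p->mod = (int)strtoul(in, NULL, 10);
}

int main(void)
{
	struct hwpath p;
	char buf[64];

	hwpath_parse("10/0/4/0", &p);
	hwpath_print(&p, buf, sizeof(buf));
	printf("%s\n", buf);	/* prints 10/0/4/0 */
	return 0;
}
```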
|
/drivers/gpu/drm/ci/
container.yml
    8  …pciaccess-dev libproc2-dev libudev-dev libunwind-dev python3-docutils bc python3-ply libssl-dev bc"
|
/drivers/infiniband/hw/hfi1/
trace_tx.h
    763  struct buffer_control *bc),
    764  TP_ARGS(dd, bc),
    766  __dynamic_array(u8, bct, sizeof(*bc))
    769  memcpy(__get_dynamic_array(bct), bc,
    770  sizeof(*bc));
    805  TP_PROTO(struct hfi1_devdata *dd, struct buffer_control *bc),
    806  TP_ARGS(dd, bc));
    809  TP_PROTO(struct hfi1_devdata *dd, struct buffer_control *bc),
    810  TP_ARGS(dd, bc));
|
/drivers/net/ethernet/tehuti/
tn40.h
    220  #define TN40_TXD_W1_VAL(bc, checksum, vtag, lgsnd, vlan_id) ( \   [argument]
    222  FIELD_PREP(GENMASK(4, 0), (bc)) | \
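
TN40_TXD_W1_VAL() builds a descriptor word by OR-ing together FIELD_PREP(GENMASK(...), value) terms. A standalone sketch of that bitfield packing: GENMASK/FIELD_PREP are re-expressed with plain shifts instead of linux/bits.h and linux/bitfield.h, only the bits 4:0 field is taken from the snippet above, and the second "length" field is a made-up example, not tn40.h.

```c
#include <stdint.h>
#include <stdio.h>

/* Userspace stand-ins for the linux/bits.h and linux/bitfield.h helpers. */
#define GENMASK(h, l) \
	(((~0u) >> (31 - (h))) & ((~0u) << (l)))
#define FIELD_PREP(mask, val) \
	(((uint32_t)(val) << __builtin_ctz(mask)) & (mask))

/*
 * Descriptor word layout: bits 4:0 carry the macro's bc argument, as in the
 * snippet above; the 16:5 "length" field is invented for illustration.
 */
#define TXD_W1_BC	GENMASK(4, 0)
#define TXD_W1_LEN	GENMASK(16, 5)

static uint32_t txd_w1_val(unsigned int bc, unsigned int len)
{
	return FIELD_PREP(TXD_W1_BC, bc) | FIELD_PREP(TXD_W1_LEN, len);
}

int main(void)
{
	/* 3 in the bc field, 1500 in the length field -> one packed 32-bit word */
	printf("w1 = 0x%08x\n", txd_w1_val(3, 1500));	/* prints 0x0000bb83 */
	return 0;
}
```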
|