/drivers/scsi/bfa/

bfa_ioc_ct.c
    185  void __iomem *rb;  in bfa_ioc_ct_reg_init() local
    188  rb = bfa_ioc_bar0(ioc);  in bfa_ioc_ct_reg_init()
    244  void __iomem *rb;  in bfa_ioc_ct2_reg_init() local
    247  rb = bfa_ioc_bar0(ioc);  in bfa_ioc_ct2_reg_init()
    597  writel(0, (rb + OP_MODE));  in bfa_ioc_ct_pll_init()
    821  bfa_ioc_ct2_sclk_init(rb);  in bfa_ioc_ct2_clk_reset()
    822  bfa_ioc_ct2_lclk_init(rb);  in bfa_ioc_ct2_clk_reset()
    898  bfa_ioc_ct2_clk_reset(rb);  in bfa_ioc_ct2_pll_init()
    901  bfa_ioc_ct2_mac_reset(rb);  in bfa_ioc_ct2_pll_init()
    903  bfa_ioc_ct2_clk_reset(rb);  in bfa_ioc_ct2_pll_init()
    [all …]
|
bfa_ioc_cb.c
    138  void __iomem *rb;  in bfa_ioc_cb_reg_init() local
    141  rb = bfa_ioc_bar0(ioc);  in bfa_ioc_cb_reg_init()
    186  ioc->ioc_regs.err_set = (rb + ERR_SET_REG);  in bfa_ioc_cb_reg_init()
    369  join_bits = readl(rb + BFA_IOC0_STATE_REG) &  in bfa_ioc_cb_pll_init()
    372  join_bits = readl(rb + BFA_IOC1_STATE_REG) &  in bfa_ioc_cb_pll_init()
    375  writel(0xffffffffU, (rb + HOSTFN0_INT_MSK));  in bfa_ioc_cb_pll_init()
    376  writel(0xffffffffU, (rb + HOSTFN1_INT_MSK));  in bfa_ioc_cb_pll_init()
    383  rb + APP_PLL_SCLK_CTL_REG);  in bfa_ioc_cb_pll_init()
    386  rb + APP_PLL_LCLK_CTL_REG);  in bfa_ioc_cb_pll_init()
    391  rb + APP_PLL_SCLK_CTL_REG);  in bfa_ioc_cb_pll_init()
    [all …]
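In both bfa files, rb is the ioremapped BAR0 register base returned by bfa_ioc_bar0(); every hit is a readl()/writel() at a fixed offset from that base. A minimal sketch of the base-plus-offset MMIO idiom (the function and sequence are illustrative; the register macros are the driver's own, so this compiles only against the bfa headers):

```c
#include <linux/io.h>

/* Hypothetical helper: sample IOC state, mask host interrupts, and
 * clear the operating mode through the BAR0 base "rb", mirroring the
 * readl()/writel() hits above.
 */
static void example_ioc_quiesce(void __iomem *rb)
{
    u32 state;

    state = readl(rb + BFA_IOC0_STATE_REG);     /* read IOC state */
    (void)state;

    writel(0xffffffffU, rb + HOSTFN0_INT_MSK);  /* mask host fn 0 irqs */
    writel(0xffffffffU, rb + HOSTFN1_INT_MSK);  /* mask host fn 1 irqs */
    writel(0, rb + OP_MODE);                    /* clear operating mode */
}
```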
|
/drivers/net/ethernet/brocade/bna/

bfa_ioc_ct.c
    251  void __iomem *rb;  in bfa_ioc_ct_reg_init() local
    254  rb = bfa_ioc_bar0(ioc);  in bfa_ioc_ct_reg_init()
    310  void __iomem *rb;  in bfa_ioc_ct2_reg_init() local
    313  rb = bfa_ioc_bar0(ioc);  in bfa_ioc_ct2_reg_init()
    616  writel(0, (rb + OP_MODE));  in bfa_ioc_ct_pll_init()
    620  (rb + ETH_MAC_SER_REG));  in bfa_ioc_ct_pll_init()
    624  (rb + ETH_MAC_SER_REG));  in bfa_ioc_ct_pll_init()
    789  bfa_ioc_ct2_sclk_init(rb);  in bfa_ioc_ct2_mac_reset()
    790  bfa_ioc_ct2_lclk_init(rb);  in bfa_ioc_ct2_mac_reset()
    888  bfa_ioc_ct2_mac_reset(rb);  in bfa_ioc_ct2_pll_init()
    [all …]
|
/drivers/hid/intel-ish-hid/ishtp/

client-buffers.c
    29   if (!rb) {  in ishtp_cl_alloc_rx_ring()
    109  kfree(rb);  in ishtp_cl_free_rx_ring()
    119  kfree(rb);  in ishtp_cl_free_rx_ring()
    171  kfree(rb);  in ishtp_io_rb_free()
    187  if (!rb)  in ishtp_io_rb_init()
    191  rb->cl = cl;  in ishtp_io_rb_init()
    193  return rb;  in ishtp_io_rb_init()
    207  if (!rb)  in ishtp_io_rb_alloc_buf()
    235  if (!rb || !rb->cl)  in ishtp_cl_io_rb_recycle()
    270  if (rb)  in ishtp_cl_rx_get_rb()
    [all …]
|
client.c
    31    if (rb->cl && ishtp_cl_cmp_id(cl, rb->cl)) {  in ishtp_read_list_flush()
    623   rb = NULL;  in ishtp_cl_read_start()
    631   rb->cl = cl;  in ishtp_cl_read_start()
    632   rb->buf_idx = 0;  in ishtp_cl_read_start()
    1004  cl = rb->cl;  in recv_ishtp_cl_msg()
    1011  if (rb->buffer.size == 0 || rb->buffer.data == NULL) {  in recv_ishtp_cl_msg()
    1027  if (rb->buffer.size < ishtp_hdr->length + rb->buf_idx) {  in recv_ishtp_cl_msg()
    1039  buffer = rb->buffer.data + rb->buf_idx;  in recv_ishtp_cl_msg()
    1124  cl = rb->cl;  in recv_ishtp_cl_msg_dma()
    1133  if (rb->buffer.size == 0 || rb->buffer.data == NULL) {  in recv_ishtp_cl_msg_dma()
    [all …]
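The recv_ishtp_cl_msg() hits outline the receive contract for a read buffer rb: reject an unallocated buffer, bounds-check the incoming fragment against the remaining space, then append at buf_idx. A self-contained plain-C sketch of that check-then-append step (types and names are stand-ins, not the ishtp structures):

```c
#include <stddef.h>
#include <string.h>

struct rx_buf {
    unsigned char *data;    /* backing storage */
    size_t size;            /* total capacity */
    size_t buf_idx;         /* current fill offset */
};

/* Append one message fragment, mirroring the checks in the hits above. */
static int rx_append(struct rx_buf *rb, const void *frag, size_t len)
{
    if (rb->size == 0 || rb->data == NULL)
        return -1;                      /* buffer not allocated */
    if (rb->size < len + rb->buf_idx)
        return -1;                      /* fragment would overflow */
    memcpy(rb->data + rb->buf_idx, frag, len);
    rb->buf_idx += len;
    return 0;
}
```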
|
/drivers/gpu/drm/

drm_mm.c
    175  rb = &hole_node->rb;  in drm_mm_interval_tree_add_node()
    177  parent = rb_entry(rb, struct drm_mm_node, rb);  in drm_mm_interval_tree_add_node()
    182  rb = rb_parent(rb);  in drm_mm_interval_tree_add_node()
    185  rb = &hole_node->rb;  in drm_mm_interval_tree_add_node()
    189  rb = NULL;  in drm_mm_interval_tree_add_node()
    195  rb = *link;  in drm_mm_interval_tree_add_node()
    196  parent = rb_entry(rb, struct drm_mm_node, rb);  in drm_mm_interval_tree_add_node()
    207  rb_link_node(&node->rb, rb, link);  in drm_mm_interval_tree_add_node()
    316  rb = rb->rb_right;  in best_hole()
    318  rb = rb->rb_left;  in best_hole()
    [all …]
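drm_mm_interval_tree_add_node() follows the standard kernel rbtree insertion walk: descend child links to a NULL slot, then rb_link_node() and recolor. A minimal sketch with a hypothetical node type (struct item and its start key are stand-ins, not the drm_mm structures):

```c
#include <linux/rbtree.h>
#include <linux/types.h>

struct item {
    struct rb_node rb;
    u64 start;    /* sort key */
};

static void item_insert(struct rb_root *root, struct item *node)
{
    struct rb_node **link = &root->rb_node, *parent = NULL;

    /* Descend to the NULL child slot where the new node belongs. */
    while (*link) {
        struct item *host;

        parent = *link;
        host = rb_entry(parent, struct item, rb);
        if (node->start < host->start)
            link = &parent->rb_left;
        else
            link = &parent->rb_right;
    }

    rb_link_node(&node->rb, parent, link);    /* splice in as a leaf */
    rb_insert_color(&node->rb, root);         /* rebalance and recolor */
}
```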
|
drm_prime.c
    110  rb = NULL;  in drm_prime_add_buf_handle()
    115  rb = *p;  in drm_prime_add_buf_handle()
    125  rb = NULL;  in drm_prime_add_buf_handle()
    130  rb = *p;  in drm_prime_add_buf_handle()
    149  while (rb) {  in drm_prime_lookup_buf_by_handle()
    156  rb = rb->rb_right;  in drm_prime_lookup_buf_by_handle()
    158  rb = rb->rb_left;  in drm_prime_lookup_buf_by_handle()
    179  rb = rb->rb_right;  in drm_prime_lookup_buf_handle()
    181  rb = rb->rb_left;  in drm_prime_lookup_buf_handle()
    208  rb = rb->rb_right;  in drm_prime_remove_buf_handle()
    [all …]
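The drm_prime lookups are the matching descent: compare the key at each node and branch left or right until a hit or NULL. A sketch reusing the hypothetical struct item from the previous example:

```c
static struct item *item_lookup(struct rb_root *root, u64 start)
{
    struct rb_node *rb = root->rb_node;

    while (rb) {
        struct item *host = rb_entry(rb, struct item, rb);

        if (start < host->start)
            rb = rb->rb_left;
        else if (start > host->start)
            rb = rb->rb_right;
        else
            return host;    /* exact match */
    }
    return NULL;
}
```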
|
/drivers/misc/mchp_pci1xxxx/

mchp_pci1xxxx_otpe2p.c
    99   void __iomem *rb = priv->reg_base;  in is_eeprom_responsive() local
    104  rb + MMAP_EEPROM_OFFSET(EEPROM_CMD_REG));  in is_eeprom_responsive()
    106  rb + MMAP_EEPROM_OFFSET(EEPROM_CMD_REG));  in is_eeprom_responsive()
    124  void __iomem *rb = priv->reg_base;  in pci1xxxx_eeprom_read() local
    148  rb + MMAP_EEPROM_OFFSET(EEPROM_CMD_REG));  in pci1xxxx_eeprom_read()
    165  void __iomem *rb = priv->reg_base;  in pci1xxxx_eeprom_write() local
    193  rb + MMAP_EEPROM_OFFSET(EEPROM_CMD_REG));  in pci1xxxx_eeprom_write()
    219  void __iomem *rb = priv->reg_base;  in pci1xxxx_otp_read() local
    249  rb + MMAP_OTP_OFFSET(OTP_STATUS_OFFSET));  in pci1xxxx_otp_read()
    268  void __iomem *rb = priv->reg_base;  in pci1xxxx_otp_write() local
    [all …]
|
/drivers/target/iscsi/

iscsi_target_configfs.c
    44   ssize_t rb;  in lio_target_np_driver_show() local
    52   return rb;  in lio_target_np_driver_show()
    546  rb += sysfs_emit_at(page, rb,  in lio_target_nacl_info_show()
    595  rb += sysfs_emit_at(page, rb,  in lio_target_nacl_info_show()
    599  rb += sysfs_emit_at(page, rb,  in lio_target_nacl_info_show()
    603  rb += sysfs_emit_at(page, rb,  in lio_target_nacl_info_show()
    607  rb += sysfs_emit_at(page, rb,  in lio_target_nacl_info_show()
    611  rb += sysfs_emit_at(page, rb,  in lio_target_nacl_info_show()
    615  rb += sysfs_emit_at(page, rb,  in lio_target_nacl_info_show()
    619  rb += sysfs_emit_at(page, rb,  in lio_target_nacl_info_show()
    [all …]
|
/drivers/firmware/arm_scmi/

raw_mode.c
    271   return rb;  in scmi_raw_buffer_get()
    280   rb->msg.len = rb->max_len;  in scmi_raw_buffer_put()
    309   return rb;  in scmi_raw_buffer_dequeue_unlocked()
    321   return rb;  in scmi_raw_buffer_dequeue()
    330   if (rb)  in scmi_raw_buffer_queue_flush()
    332   } while (rb);  in scmi_raw_buffer_queue_flush()
    731   return rb;  in scmi_raw_message_dequeue()
    769   memcpy(buf, rb->msg.buf, rb->msg.len);  in scmi_raw_message_receive()
    1088  if (!rb)  in scmi_raw_queue_init()
    1405  if (!rb) {  in scmi_raw_message_report()
    [all …]
|
/drivers/gpu/drm/amd/display/dmub/inc/

dmub_cmd.h
    6374  if (rb->wrpt >= rb->rptr)  in dmub_rb_num_outstanding()
    6377  data_count = rb->capacity - (rb->rptr - rb->wrpt);  in dmub_rb_num_outstanding()
    6392  if (rb->wrpt >= rb->rptr)  in dmub_rb_num_free()
    6395  data_count = rb->capacity - (rb->rptr - rb->wrpt);  in dmub_rb_num_free()
    6414  if (rb->wrpt >= rb->rptr)  in dmub_rb_full()
    6417  data_count = rb->capacity - (rb->rptr - rb->wrpt);  in dmub_rb_full()
    6447  if (rb->wrpt >= rb->capacity)  in dmub_rb_push_front()
    6448  rb->wrpt %= rb->capacity;  in dmub_rb_push_front()
    6474  if (rb->wrpt >= rb->capacity)  in dmub_rb_out_push_front()
    6475  rb->wrpt %= rb->capacity;  in dmub_rb_out_push_front()
    [all …]
|
/drivers/infiniband/hw/hfi1/

pin_system.c
    17   struct mmu_rb_node rb;  member
    74   return node->rb.handler->mn.mm;  in mm_from_sdma_node()
    151  node->rb.addr = start_address;  in pin_system_pages()
    152  node->rb.len = length;  in pin_system_pages()
    181  kref_init(&node->rb.refcount);  in add_system_pinning()
    184  kref_get(&node->rb.refcount);  in add_system_pinning()
    239  if (node->rb.addr <= start) {  in get_system_cache_entry()
    249  node->rb.addr, kref_read(&node->rb.refcount));  in get_system_cache_entry()
    250  prepend_len = node->rb.addr - start;  in get_system_cache_entry()
    366  from_this_cache_entry = cache_entry->rb.len - (start - cache_entry->rb.addr);  in add_system_iovec_to_sdma_packet()
    [all …]
|
/drivers/xen/xenbus/

xenbus_dev_frontend.c
    151  size_t sz = min_t(size_t, len - i, rb->len - rb->cons);  in xenbus_file_read()
    153  ret = copy_to_user(ubuf + i, &rb->msg[rb->cons], sz);  in xenbus_file_read()
    156  rb->cons += sz - ret;  in xenbus_file_read()
    165  if (rb->cons == rb->len) {  in xenbus_file_read()
    167  kfree(rb);  in xenbus_file_read()
    198  rb = kmalloc(struct_size(rb, msg, len), GFP_KERNEL);  in queue_reply()
    199  if (rb == NULL)  in queue_reply()
    202  rb->cons = 0;  in queue_reply()
    203  rb->len = len;  in queue_reply()
    222  kfree(rb);  in queue_cleanup()
    [all …]
|
/drivers/block/drbd/

drbd_interval.c
    12   struct drbd_interval *this = rb_entry(node, struct drbd_interval, rb);  in interval_end()
    19   struct drbd_interval, rb, sector_t, end, NODE_END);
    34   rb_entry(*new, struct drbd_interval, rb);  in drbd_insert_interval()
    52   rb_link_node(&this->rb, parent, new);  in drbd_insert_interval()
    53   rb_insert_augmented(&this->rb, root, &augment_callbacks);  in drbd_insert_interval()
    76   rb_entry(node, struct drbd_interval, rb);  in drbd_contains_interval()
    102  rb_erase_augmented(&this->rb, root, &augment_callbacks);  in drbd_remove_interval()
    128  rb_entry(node, struct drbd_interval, rb);  in drbd_find_overlap()
    154  node = rb_next(&i->rb);  in drbd_next_overlap()
    157  i = rb_entry(node, struct drbd_interval, rb);  in drbd_next_overlap()
|
/drivers/tty/hvc/

hvc_iucv.c
    246  rb->mbuf = kmalloc(rb->msg.length, GFP_ATOMIC | GFP_DMA);  in hvc_iucv_write()
    247  if (!rb->mbuf)  in hvc_iucv_write()
    251  rb->mbuf, rb->msg.length, NULL);  in hvc_iucv_write()
    264  (rb->msg.length != MSG_SIZE(rb->mbuf->datalen)))  in hvc_iucv_write()
    270  written = min_t(int, rb->mbuf->datalen - rb->offset, count);  in hvc_iucv_write()
    271  memcpy(buf, rb->mbuf->data + rb->offset, written);  in hvc_iucv_write()
    272  if (written < (rb->mbuf->datalen - rb->offset)) {  in hvc_iucv_write()
    294  list_del(&rb->list);  in hvc_iucv_write()
    295  destroy_tty_buffer(rb);  in hvc_iucv_write()
    923  if (!rb) {  in hvc_iucv_msg_pending()
    [all …]
|
/drivers/gpu/drm/i915/

i915_scheduler.c
    36  return rb_entry(rb, struct i915_priolist, node);  in to_priolist()
    41  struct rb_node *rb;  in assert_priolists() local
    51  for (rb = rb_first_cached(&sched_engine->queue); rb; rb = rb_next(rb)) {  in assert_priolists()
    52  const struct i915_priolist *p = to_priolist(rb);  in assert_priolists()
    63  struct rb_node **parent, *rb;  in i915_sched_lookup_priolist() local
    74  rb = NULL;  in i915_sched_lookup_priolist()
    77  rb = *parent;  in i915_sched_lookup_priolist()
    78  p = to_priolist(rb);  in i915_sched_lookup_priolist()
    80  parent = &rb->rb_left;  in i915_sched_lookup_priolist()
    82  parent = &rb->rb_right;  in i915_sched_lookup_priolist()
    [all …]
|
/drivers/mtd/ubi/

wl.c
    1638  struct rb_node *rb;  local
    1641  rb = root->rb_node;
    1642  while (rb) {
    1643  if (rb->rb_left)
    1644  rb = rb->rb_left;
    1645  else if (rb->rb_right)
    1646  rb = rb->rb_right;
    1648  e = rb_entry(rb, struct ubi_wl_entry, u.rb);
    1650  rb = rb_parent(rb);
    1651  if (rb) {
    [all …]
|
attach.c
    1315  struct rb_node *rb;  in destroy_ai() local
    1340  while (rb) {  in destroy_ai()
    1341  if (rb->rb_left)  in destroy_ai()
    1342  rb = rb->rb_left;  in destroy_ai()
    1343  else if (rb->rb_right)  in destroy_ai()
    1344  rb = rb->rb_right;  in destroy_ai()
    1346  av = rb_entry(rb, struct ubi_ainf_volume, rb);  in destroy_ai()
    1348  rb = rb_parent(rb);  in destroy_ai()
    1349  if (rb) {  in destroy_ai()
    1350  if (rb->rb_left == &av->rb)  in destroy_ai()
    [all …]
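wl.c and attach.c tear their trees down with the same trick: descend to a leaf, detach it from its parent by hand (no rebalancing, since the whole tree is being destroyed), free it, and repeat. This avoids both recursion and rb_erase()'s rebalance cost. A sketch with a stand-in entry type:

```c
#include <linux/rbtree.h>
#include <linux/slab.h>

struct entry {
    struct rb_node rb;
    /* driver-specific payload */
};

/* Free every node without rebalancing, as in the UBI hits above. */
static void tree_destroy(struct rb_root *root)
{
    struct rb_node *rb = root->rb_node;

    while (rb) {
        if (rb->rb_left) {
            rb = rb->rb_left;           /* keep descending */
        } else if (rb->rb_right) {
            rb = rb->rb_right;
        } else {
            struct entry *e = rb_entry(rb, struct entry, rb);

            rb = rb_parent(rb);
            if (rb) {
                /* detach the leaf from its parent by hand */
                if (rb->rb_left == &e->rb)
                    rb->rb_left = NULL;
                else
                    rb->rb_right = NULL;
            }
            kfree(e);
        }
    }
    root->rb_node = NULL;
}
```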
|
ubi.h
    172   struct rb_node rb;  member
    194   struct rb_node rb;  member
    684   struct rb_node rb;  member
    719   struct rb_node rb;  member
    1004  ubi_rb_for_each_entry((tmp_rb), (e), &(ubi)->free, u.rb)
    1013  ubi_rb_for_each_entry((tmp_rb), (e), &(ubi)->used, u.rb)
    1022  ubi_rb_for_each_entry((tmp_rb), (e), &(ubi)->scrub, u.rb)
    1043  pos = (rb ? container_of(rb, typeof(*pos), member) : NULL); \
    1045  rb = rb_next(rb), \
    1046  pos = (rb ? container_of(rb, typeof(*pos), member) : NULL))
    [all …]
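The 1043-1046 fragments are the tail of the ubi_rb_for_each_entry() iteration macro. Reassembled from the visible pieces plus the standard rb_first()/rb_next() walk, it reads approximately as follows (a reconstruction, not a verbatim copy of ubi.h):

```c
#define ubi_rb_for_each_entry(rb, pos, root, member)                      \
    for (rb = rb_first(root),                                             \
         pos = (rb ? container_of(rb, typeof(*pos), member) : NULL);      \
         rb;                                                              \
         rb = rb_next(rb),                                                \
         pos = (rb ? container_of(rb, typeof(*pos), member) : NULL))
```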
|
/drivers/gpu/drm/amd/display/dmub/src/

dmub_srv.c
    815   dmub->inbox1.rb.wrpt = 0;  in dmub_srv_hw_reset()
    816   dmub->inbox1.rb.rptr = 0;  in dmub_srv_hw_reset()
    841   if (dmub->inbox1.rb.rptr > dmub->inbox1.rb.capacity ||  in dmub_srv_fb_cmd_queue()
    842   dmub->inbox1.rb.wrpt > dmub->inbox1.rb.capacity) {  in dmub_srv_fb_cmd_queue()
    869   flush_rb = dmub->inbox1.rb;  in dmub_srv_fb_cmd_execute()
    974   if (scratch_inbox1.rb.rptr > dmub->inbox1.rb.capacity)  in dmub_srv_wait_for_pending()
    1124  const uint64_t *src = (const uint64_t *)(rb->base_address) + rb->rptr / sizeof(uint64_t);  in dmub_rb_out_trace_buffer_front()
    1129  if (rb->rptr == rb->wrpt)  in dmub_rb_out_trace_buffer_front()
    1139  rb->rptr %= rb->capacity;  in dmub_rb_out_trace_buffer_front()
    1288  if (rptr > dmub->inbox1.rb.capacity || wptr > dmub->inbox1.rb.capacity) {  in dmub_srv_sync_inbox1()
    [all …]
|
/drivers/gpu/drm/tests/

drm_cmdline_parser_test.c
    26   KUNIT_EXPECT_FALSE(test, mode.rb);  in drm_test_cmdline_force_e_only()
    44   KUNIT_EXPECT_FALSE(test, mode.rb);  in drm_test_cmdline_force_D_only_not_digital()
    66   KUNIT_EXPECT_FALSE(test, mode.rb);  in drm_test_cmdline_force_D_only_hdmi()
    88   KUNIT_EXPECT_FALSE(test, mode.rb);  in drm_test_cmdline_force_D_only_dvi()
    106  KUNIT_EXPECT_FALSE(test, mode.rb);  in drm_test_cmdline_force_d_only()
    128  KUNIT_EXPECT_FALSE(test, mode.rb);  in drm_test_cmdline_res()
    150  KUNIT_EXPECT_FALSE(test, mode.rb);  in drm_test_cmdline_res_vesa()
    172  KUNIT_EXPECT_TRUE(test, mode.rb);  in drm_test_cmdline_res_vesa_rblank()
    194  KUNIT_EXPECT_TRUE(test, mode.rb);  in drm_test_cmdline_res_rblank()
    217  KUNIT_EXPECT_FALSE(test, mode.rb);  in drm_test_cmdline_res_bpp()
    [all …]
|
/drivers/i2c/

i2c-stub.c
    93   struct smbus_block_data *b, *rb = NULL;  in stub_find_block() local
    97   rb = b;  in stub_find_block()
    101  if (rb == NULL && create) {  in stub_find_block()
    102  rb = devm_kzalloc(dev, sizeof(*rb), GFP_KERNEL);  in stub_find_block()
    103  if (rb == NULL)  in stub_find_block()
    104  return rb;  in stub_find_block()
    105  rb->command = command;  in stub_find_block()
    106  list_add(&rb->node, &chip->smbus_blocks);  in stub_find_block()
    108  return rb;  in stub_find_block()
|
/drivers/gpu/drm/i915/gt/

intel_execlists_submission.c
    284   if (!rb)  in queue_prio()
    294   return rb ? rb_entry(rb, struct ve_node, rb)->prio : INT_MIN;  in virtual_prio()
    1021  while (rb) {  in first_virtual_engine()
    1023  rb_entry(rb, typeof(*ve), nodes[engine->id].rb);  in first_virtual_engine()
    1439  rb = &ve->nodes[engine->id].rb;  in execlists_dequeue()
    3194  rb_entry(rb, typeof(*ve), nodes[engine->id].rb);  in execlists_reset_cancel()
    3857  other = rb_entry(rb, typeof(*other), rb);  in virtual_submission_tasklet()
    3866  rb_link_node(&node->rb, rb, parent);  in virtual_submission_tasklet()
    4107  for (rb = rb_first_cached(&sched_engine->queue); rb; rb = rb_next(rb)) {  in intel_execlists_show_requests()
    4128  for (rb = rb_first_cached(&execlists->virtual); rb; rb = rb_next(rb)) {  in intel_execlists_show_requests()
    [all …]
|
/drivers/net/wireless/microchip/wilc1000/

spi.c
    369  .rx_buf = rb,  in wilc_spi_rx()
    410  .rx_buf = rb,  in wilc_spi_tx_rx()
    524  u8 wb[32], rb[32];  in wilc_spi_single_read() local
    532  memset(rb, 0x0, sizeof(rb));  in wilc_spi_single_read()
    571  r = (struct wilc_spi_rsp_data *)&rb[cmd_len];  in wilc_spi_single_read()
    619  u8 wb[32], rb[32];  in wilc_spi_write_cmd() local
    625  memset(rb, 0x0, sizeof(rb));  in wilc_spi_write_cmd()
    694  u8 wb[32], rb[32];  in wilc_spi_dma_rw() local
    702  memset(rb, 0x0, sizeof(rb));  in wilc_spi_dma_rw()
    820  u8 wb[32], rb[32];  in wilc_spi_special_cmd() local
    [all …]
|
/drivers/net/ethernet/fungible/funeth/

funeth_rx.c
    79   *rb = *buf;  in cache_get()
    81   refresh_refs(rb);  in cache_get()
    103  if (cache_get(q, rb))  in funeth_alloc_page()
    120  rb->page = p;  in funeth_alloc_page()
    121  rb->pg_refs = 1;  in funeth_alloc_page()
    122  refresh_refs(rb);  in funeth_alloc_page()
    123  rb->node = page_is_pfmemalloc(p) ? -1 : page_to_nid(p);  in funeth_alloc_page()
    129  if (rb->page) {  in funeth_free_page()
    130  dma_unmap_page(q->dma_dev, rb->dma_addr, PAGE_SIZE,  in funeth_free_page()
    132  __page_frag_cache_drain(rb->page, rb->pg_refs);  in funeth_free_page()
    [all …]
|