
Searched refs:bb (Results 1 – 25 of 69) sorted by relevance


/drivers/gpu/drm/xe/
xe_bb.c
35 struct xe_bb *bb = kmalloc(sizeof(*bb), GFP_KERNEL); in xe_bb_new() local
38 if (!bb) in xe_bb_new()
49 if (IS_ERR(bb->bo)) { in xe_bb_new()
54 bb->cs = xe_sa_bo_cpu_addr(bb->bo); in xe_bb_new()
55 bb->len = 0; in xe_bb_new()
57 return bb; in xe_bb_new()
59 kfree(bb); in xe_bb_new()
68 if (bb->len == 0 || bb->cs[bb->len - 1] != MI_BATCH_BUFFER_END) in __xe_bb_create_job()
69 bb->cs[bb->len++] = MI_BATCH_BUFFER_END; in __xe_bb_create_job()
108 if (!bb) in xe_bb_free()
[all …]
xe_migrate.c
584 bb->cs[bb->len++] = ofs; in emit_pte()
585 bb->cs[bb->len++] = 0; in emit_pte()
629 u32 *cs = bb->cs + bb->len; in emit_copy_ccs()
660 bb->len = cs - bb->cs; in emit_copy_ccs()
685 bb->cs[bb->len++] = 0; in emit_copy()
689 bb->cs[bb->len++] = 0; in emit_copy()
947 u32 *cs = bb->cs + bb->len; in emit_clear_link_copy()
970 u32 *cs = bb->cs + bb->len; in emit_clear_main_copy()
1227 bb->cs[bb->len++] = MI_NOOP; in write_pgtable()
1395 bb->cs[bb->len++] = ofs; in __xe_migrate_update_pgtables()
[all …]
xe_gsc_submit.c
177 struct xe_bb *bb; in xe_gsc_pkt_submit_kernel() local
189 if (IS_ERR(bb)) in xe_gsc_pkt_submit_kernel()
190 return PTR_ERR(bb); in xe_gsc_pkt_submit_kernel()
192 bb->cs[bb->len++] = GSC_HECI_CMD_PKT; in xe_gsc_pkt_submit_kernel()
193 bb->cs[bb->len++] = lower_32_bits(addr_in); in xe_gsc_pkt_submit_kernel()
194 bb->cs[bb->len++] = upper_32_bits(addr_in); in xe_gsc_pkt_submit_kernel()
195 bb->cs[bb->len++] = size_in; in xe_gsc_pkt_submit_kernel()
196 bb->cs[bb->len++] = lower_32_bits(addr_out); in xe_gsc_pkt_submit_kernel()
197 bb->cs[bb->len++] = upper_32_bits(addr_out); in xe_gsc_pkt_submit_kernel()
198 bb->cs[bb->len++] = size_out; in xe_gsc_pkt_submit_kernel()
[all …]
xe_gsc.c
74 struct xe_bb *bb; in emit_gsc_upload() local
79 bb = xe_bb_new(gt, 4, false); in emit_gsc_upload()
80 if (IS_ERR(bb)) in emit_gsc_upload()
81 return PTR_ERR(bb); in emit_gsc_upload()
83 bb->cs[bb->len++] = GSC_FW_LOAD; in emit_gsc_upload()
84 bb->cs[bb->len++] = lower_32_bits(offset); in emit_gsc_upload()
85 bb->cs[bb->len++] = upper_32_bits(offset); in emit_gsc_upload()
86 bb->cs[bb->len++] = (xe_bo_size(gsc->private) / SZ_4K) | in emit_gsc_upload()
89 job = xe_bb_create_job(gsc->q, bb); in emit_gsc_upload()
91 xe_bb_free(bb, NULL); in emit_gsc_upload()
[all …]
xe_gt.c
176 struct xe_bb *bb; in emit_nop_job() local
179 bb = xe_bb_new(gt, 4, false); in emit_nop_job()
180 if (IS_ERR(bb)) in emit_nop_job()
181 return PTR_ERR(bb); in emit_nop_job()
184 xe_bb_free(bb, NULL); in emit_nop_job()
195 struct xe_bb *bb; in emit_wa_job() local
223 if (IS_ERR(bb)) in emit_wa_job()
224 return PTR_ERR(bb); in emit_wa_job()
226 cs = bb->cs; in emit_wa_job()
304 bb->len = cs - bb->cs; in emit_wa_job()
[all …]
xe_bb.h
19 struct xe_bb *bb);
21 struct xe_bb *bb, u64 batch_ofs,
23 void xe_bb_free(struct xe_bb *bb, struct dma_fence *fence);
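
The xe_bb.h prototypes above, together with the emit_gsc_upload() and emit_nop_job() hits, outline the usual batch-buffer flow in the xe driver: allocate a small buffer, emit dwords into bb->cs, wrap it in a job, and release the buffer against the job's fence. A minimal sketch of that flow, assuming an already-initialized struct xe_gt *gt and struct xe_exec_queue *q; the helper name submit_small_bb() is hypothetical and error handling beyond the visible pattern is omitted:

static int submit_small_bb(struct xe_gt *gt, struct xe_exec_queue *q)
{
	struct xe_sched_job *job;
	struct xe_bb *bb;

	bb = xe_bb_new(gt, 4, false);		/* room for 4 dwords */
	if (IS_ERR(bb))
		return PTR_ERR(bb);

	bb->cs[bb->len++] = MI_NOOP;		/* emit commands one dword at a time */

	job = xe_bb_create_job(q, bb);		/* appends MI_BATCH_BUFFER_END if missing */
	if (IS_ERR(job)) {
		xe_bb_free(bb, NULL);		/* no fence yet, free immediately */
		return PTR_ERR(job);
	}

	/* Arm and push the job, then hand its fence to xe_bb_free(),
	 * as the callers listed above do. */
	return 0;
}
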
xe_oa.c
108 struct xe_bb *bb; member
680 bb->cs[bb->len++] = MI_LOAD_REGISTER_IMM | MI_LRI_NUM_REGS(n_lri); in write_cs_mi_lri()
682 bb->cs[bb->len++] = reg_data[i].addr.addr; in write_cs_mi_lri()
683 bb->cs[bb->len++] = reg_data[i].value; in write_cs_mi_lri()
717 struct xe_bb *bb; in xe_oa_load_with_lri() local
721 if (IS_ERR(bb)) { in xe_oa_load_with_lri()
722 err = PTR_ERR(bb); in xe_oa_load_with_lri()
904 struct xe_bb *bb; in __xe_oa_alloc_config_buffer() local
914 if (IS_ERR(bb)) in __xe_oa_alloc_config_buffer()
919 oa_bo->bb = bb; in __xe_oa_alloc_config_buffer()
[all …]
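
The write_cs_mi_lri() hits above show how a batch of register writes is encoded: a single MI_LOAD_REGISTER_IMM header carrying the register count, followed by one address/value pair per register. A sketch of that emit loop, assuming the register array uses the addr.addr and value fields seen above; the type name xe_oa_reg and the helper name emit_lri() are assumptions here:

static void emit_lri(struct xe_bb *bb, const struct xe_oa_reg *reg_data, int n_lri)
{
	int i;

	/* One header dword encodes how many register writes follow. */
	bb->cs[bb->len++] = MI_LOAD_REGISTER_IMM | MI_LRI_NUM_REGS(n_lri);

	for (i = 0; i < n_lri; i++) {
		bb->cs[bb->len++] = reg_data[i].addr.addr;	/* register offset */
		bb->cs[bb->len++] = reg_data[i].value;		/* value to load */
	}
}
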
/drivers/net/wireless/realtek/rtw89/
phy.c
5349 bb->phy_idx); in rtw89_phy_ccx_top_setting_init()
5351 bb->phy_idx); in rtw89_phy_ccx_top_setting_init()
5516 bb->phy_idx); in rtw89_phy_ifs_clm_setting_init()
5518 bb->phy_idx); in rtw89_phy_ifs_clm_setting_init()
5520 bb->phy_idx); in rtw89_phy_ifs_clm_setting_init()
5522 bb->phy_idx); in rtw89_phy_ifs_clm_setting_init()
5524 bb->phy_idx); in rtw89_phy_ifs_clm_setting_init()
5568 bb->phy_idx); in rtw89_phy_ccx_trigger()
5570 bb->phy_idx); in rtw89_phy_ccx_trigger()
5572 bb->phy_idx); in rtw89_phy_ccx_trigger()
[all …]
/drivers/nvdimm/
badrange.c
165 static void set_badblock(struct badblocks *bb, sector_t s, int num) in set_badblock() argument
167 dev_dbg(bb->dev, "Found a bad range (0x%llx, 0x%llx)\n", in set_badblock()
170 if (!badblocks_set(bb, s, num, 1)) in set_badblock()
171 dev_info_once(bb->dev, "%s: failed for sector %llx\n", in set_badblock()
205 set_badblock(bb, s, done); in __add_badblock_range()
210 set_badblock(bb, start_sector, num_sectors); in __add_badblock_range()
214 struct badblocks *bb, const struct range *range) in badblocks_populate() argument
239 __add_badblock_range(bb, start - range->start, len); in badblocks_populate()
253 __add_badblock_range(bb, 0, len); in badblocks_populate()
270 struct badblocks *bb, const struct range *range) in nvdimm_badblocks_populate() argument
[all …]
pmem.c
40 return pmem->bb.dev; in to_dev()
92 badblocks_clear(&pmem->bb, sector, blks); in pmem_clear_bb()
249 struct badblocks *bb = &pmem->bb; in __pmem_direct_access() local
258 if (bb->count && in __pmem_direct_access()
283 if (bb->count) in __pmem_direct_access()
331 struct device *dev = pmem->bb.dev; in pmem_recovery_write()
551 if (devm_init_badblocks(dev, &pmem->bb)) in pmem_attach_disk()
554 disk->bb = &pmem->bb; in pmem_attach_disk()
676 struct badblocks *bb; in pmem_revalidate_poison() local
686 bb = &nsio->bb; in pmem_revalidate_poison()
[all …]
claim.c
252 if (unlikely(is_bad_pmem(&nsio->bb, sector, sz_align))) in nsio_rw_bytes()
259 if (unlikely(is_bad_pmem(&nsio->bb, sector, sz_align))) { in nsio_rw_bytes()
271 badblocks_clear(&nsio->bb, sector, cleared); in nsio_rw_bytes()
303 if (devm_init_badblocks(dev, &nsio->bb)) in devm_nsio_enable()
305 nvdimm_badblocks_populate(to_nd_region(ndns->dev.parent), &nsio->bb, in devm_nsio_enable()
318 devm_exit_badblocks(dev, &nsio->bb); in devm_nsio_disable()
region.c
38 if (devm_init_badblocks(dev, &nd_region->bb)) in nd_region_probe()
44 nvdimm_badblocks_populate(nd_region, &nd_region->bb, &range); in nd_region_probe()
135 &nd_region->bb, &range); in nd_region_notify()
/drivers/gpu/drm/xe/tests/
xe_migrate.c
192 struct xe_bb *bb; in xe_migrate_sanity_test() local
231 if (IS_ERR(bb)) { in xe_migrate_sanity_test()
233 PTR_ERR(bb)); in xe_migrate_sanity_test()
255 run_sanity_job(m, xe, bb, bb->len, "Writing PTE for our fake PT", test); in xe_migrate_sanity_test()
262 bb->len = 0; in xe_migrate_sanity_test()
263 bb->cs[bb->len++] = MI_BATCH_BUFFER_END; in xe_migrate_sanity_test()
322 xe_bb_free(bb, NULL); in xe_migrate_sanity_test()
396 struct xe_bb *bb; in blt_copy() local
424 if (IS_ERR(bb)) { in blt_copy()
425 err = PTR_ERR(bb); in blt_copy()
[all …]
/drivers/spi/
spi-gpio.c
347 struct spi_bitbang *bb; in spi_gpio_probe() local
386 bb = &spi_gpio->bitbang; in spi_gpio_probe()
387 bb->ctlr = host; in spi_gpio_probe()
394 bb->chipselect = spi_gpio_chipselect; in spi_gpio_probe()
395 bb->set_line_direction = spi_gpio_set_direction; in spi_gpio_probe()
396 bb->set_mosi_idle = spi_gpio_set_mosi_idle; in spi_gpio_probe()
404 bb->txrx_word[SPI_MODE_0] = spi_gpio_txrx_word_mode0; in spi_gpio_probe()
405 bb->txrx_word[SPI_MODE_1] = spi_gpio_txrx_word_mode1; in spi_gpio_probe()
406 bb->txrx_word[SPI_MODE_2] = spi_gpio_txrx_word_mode2; in spi_gpio_probe()
407 bb->txrx_word[SPI_MODE_3] = spi_gpio_txrx_word_mode3; in spi_gpio_probe()
[all …]
/drivers/gpu/drm/amd/display/dc/dml/dcn20/
dcn20_fpu.c
1863 memset(bb->clock_limits, 0, sizeof(bb->clock_limits)); in dcn20_update_bounding_box()
1911 bb->clock_limits[num_calculated_states].state = bb->num_states; in dcn20_update_bounding_box()
1960 if (bb->clock_limits[i-1].dcfclk_mhz != bb->clock_limits[i].dcfclk_mhz) in dcn20_cap_soc_clocks()
1962 if (bb->clock_limits[i-1].dispclk_mhz != bb->clock_limits[i].dispclk_mhz) in dcn20_cap_soc_clocks()
1964 if (bb->clock_limits[i-1].dppclk_mhz != bb->clock_limits[i].dppclk_mhz) in dcn20_cap_soc_clocks()
1966 if (bb->clock_limits[i-1].dram_speed_mts != bb->clock_limits[i].dram_speed_mts) in dcn20_cap_soc_clocks()
1968 if (bb->clock_limits[i-1].dscclk_mhz != bb->clock_limits[i].dscclk_mhz) in dcn20_cap_soc_clocks()
1970 if (bb->clock_limits[i-1].fabricclk_mhz != bb->clock_limits[i].fabricclk_mhz) in dcn20_cap_soc_clocks()
1972 if (bb->clock_limits[i-1].phyclk_mhz != bb->clock_limits[i].phyclk_mhz) in dcn20_cap_soc_clocks()
1974 if (bb->clock_limits[i-1].socclk_mhz != bb->clock_limits[i].socclk_mhz) in dcn20_cap_soc_clocks()
[all …]
dcn20_fpu.h
55 void dcn20_cap_soc_clocks(struct _vcs_dpi_soc_bounding_box_st *bb,
58 struct _vcs_dpi_soc_bounding_box_st *bb,
63 struct _vcs_dpi_soc_bounding_box_st *bb);
/drivers/gpu/drm/i915/gt/
selftest_ring_submission.c
207 struct i915_vma *bb; in __live_ctx_switch_wa() local
211 bb = create_wally(engine); in __live_ctx_switch_wa()
212 if (IS_ERR(bb)) in __live_ctx_switch_wa()
213 return PTR_ERR(bb); in __live_ctx_switch_wa()
215 result = i915_gem_object_pin_map_unlocked(bb->obj, I915_MAP_WC); in __live_ctx_switch_wa()
217 intel_context_put(bb->private); in __live_ctx_switch_wa()
218 i915_vma_unpin_and_release(&bb, 0); in __live_ctx_switch_wa()
223 engine->wa_ctx.vma = bb; in __live_ctx_switch_wa()
/drivers/gpu/drm/i915/gvt/
scheduler.c
539 if (bb->bb_offset) in prepare_shadow_batch_buffer()
541 + bb->bb_offset; in prepare_shadow_batch_buffer()
552 if (!bb->ppgtt) { in prepare_shadow_batch_buffer()
557 bb->vma = i915_gem_object_ggtt_pin_ww(bb->obj, &ww, in prepare_shadow_batch_buffer()
559 if (IS_ERR(bb->vma)) { in prepare_shadow_batch_buffer()
570 bb->bb_start_cmd_va[1] = i915_ggtt_offset(bb->vma); in prepare_shadow_batch_buffer()
668 if (bb->obj) { in release_shadow_batch_buffer()
670 if (bb->va && !IS_ERR(bb->va)) in release_shadow_batch_buffer()
673 if (bb->vma && !IS_ERR(bb->vma)) in release_shadow_batch_buffer()
679 list_del(&bb->list); in release_shadow_batch_buffer()
[all …]
cmd_parser.c
1920 bb = kzalloc(sizeof(*bb), GFP_KERNEL); in perform_bb_shadow()
1921 if (!bb) in perform_bb_shadow()
1938 if (bb->ppgtt) in perform_bb_shadow()
1944 if (IS_ERR(bb->obj)) { in perform_bb_shadow()
1945 ret = PTR_ERR(bb->obj); in perform_bb_shadow()
1949 bb->va = i915_gem_object_pin_map(bb->obj, I915_MAP_WB); in perform_bb_shadow()
1950 if (IS_ERR(bb->va)) { in perform_bb_shadow()
1951 ret = PTR_ERR(bb->va); in perform_bb_shadow()
1969 INIT_LIST_HEAD(&bb->list); in perform_bb_shadow()
1977 bb->bb_offset = 0; in perform_bb_shadow()
[all …]
/drivers/pci/
p2pdma.c
588 struct pci_dev *a = provider, *b = client, *bb; in calc_map_type_and_dist() local
612 bb = b; in calc_map_type_and_dist()
614 while (bb) { in calc_map_type_and_dist()
615 if (a == bb) in calc_map_type_and_dist()
618 bb = pci_upstream_bridge(bb); in calc_map_type_and_dist()
630 bb = b; in calc_map_type_and_dist()
632 while (bb) { in calc_map_type_and_dist()
633 if (a == bb) in calc_map_type_and_dist()
636 if (pci_bridge_has_acs_redir(bb)) { in calc_map_type_and_dist()
637 seq_buf_print_bus_devfn(&acs_list, bb); in calc_map_type_and_dist()
[all …]
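
The calc_map_type_and_dist() hits walk pci_upstream_bridge() from the client device upward, checking at each level whether the provider is reached; that is how the P2PDMA code decides whether two devices share an upstream path and how far apart they are. A stripped-down sketch of that loop (the helper name hops_to_provider() is hypothetical):

static int hops_to_provider(struct pci_dev *provider, struct pci_dev *client)
{
	struct pci_dev *bb = client;
	int dist = 0;

	while (bb) {
		if (bb == provider)
			return dist;			/* provider is an upstream ancestor */
		dist++;
		bb = pci_upstream_bridge(bb);		/* NULL once the root bus is passed */
	}

	return -1;					/* not on the same upstream path */
}
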
/drivers/md/
dm-dust.c
19 sector_t bb; member
43 if (bblk->bb > blk) in dust_rb_search()
45 else if (bblk->bb < blk) in dust_rb_search()
58 sector_t value = new->bb; in dust_rb_insert()
64 if (bblk->bb > value) in dust_rb_insert()
66 else if (bblk->bb < value) in dust_rb_insert()
119 bblock->bb = block; in dust_add_block()
301 DMEMIT("%llu\n", bblk->bb); in dust_list_badblocks()
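
dm-dust keys its bad-block rbtree on the bb sector, as the comparisons in dust_rb_search() and dust_rb_insert() show. A sketch of that lookup, assuming a struct badblock that carries the sector_t bb member above plus an rb_node link (the link field name node is an assumption):

static struct badblock *dust_rb_search(struct rb_root *root, sector_t blk)
{
	struct rb_node *node = root->rb_node;

	while (node) {
		struct badblock *bblk = rb_entry(node, struct badblock, node);

		if (bblk->bb > blk)
			node = node->rb_left;		/* stored key is larger, go left */
		else if (bblk->bb < blk)
			node = node->rb_right;		/* stored key is smaller, go right */
		else
			return bblk;			/* exact match */
	}

	return NULL;
}
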
/drivers/net/wireless/ti/wl1251/
acx.c
644 struct acx_beacon_broadcast *bb; in wl1251_acx_bcn_dtim_options() local
649 bb = kzalloc(sizeof(*bb), GFP_KERNEL); in wl1251_acx_bcn_dtim_options()
650 if (!bb) in wl1251_acx_bcn_dtim_options()
653 bb->beacon_rx_timeout = BCN_RX_TIMEOUT_DEF_VALUE; in wl1251_acx_bcn_dtim_options()
654 bb->broadcast_timeout = BROADCAST_RX_TIMEOUT_DEF_VALUE; in wl1251_acx_bcn_dtim_options()
655 bb->rx_broadcast_in_ps = RX_BROADCAST_IN_PS_DEF_VALUE; in wl1251_acx_bcn_dtim_options()
656 bb->ps_poll_threshold = CONSECUTIVE_PS_POLL_FAILURE_DEF; in wl1251_acx_bcn_dtim_options()
658 ret = wl1251_cmd_configure(wl, ACX_BCN_DTIM_OPTIONS, bb, sizeof(*bb)); in wl1251_acx_bcn_dtim_options()
665 kfree(bb); in wl1251_acx_bcn_dtim_options()
/drivers/gpu/drm/vmwgfx/
vmwgfx_kms.c
1735 struct drm_rect bb; in vmw_du_helper_plane_update() local
1812 bb.x1 = INT_MAX; in vmw_du_helper_plane_update()
1813 bb.y1 = INT_MAX; in vmw_du_helper_plane_update()
1814 bb.x2 = INT_MIN; in vmw_du_helper_plane_update()
1815 bb.y2 = INT_MIN; in vmw_du_helper_plane_update()
1829 bb.x1 = min_t(int, bb.x1, clip.x1); in vmw_du_helper_plane_update()
1830 bb.y1 = min_t(int, bb.y1, clip.y1); in vmw_du_helper_plane_update()
1831 bb.x2 = max_t(int, bb.x2, clip.x2); in vmw_du_helper_plane_update()
1832 bb.y2 = max_t(int, bb.y2, clip.y2); in vmw_du_helper_plane_update()
1835 curr_size = update->post_clip(update, cmd_next, &bb); in vmw_du_helper_plane_update()
vmwgfx_scrn.c
514 void *cmd, struct drm_rect *bb) in vmw_stud_bo_post_clip() argument
631 void *cmd, struct drm_rect *bb) in vmw_sou_surface_post_clip() argument
651 src_bb = *bb; in vmw_sou_surface_post_clip()
664 blit->body.destRect.left = bb->x1; in vmw_sou_surface_post_clip()
665 blit->body.destRect.top = bb->y1; in vmw_sou_surface_post_clip()
666 blit->body.destRect.right = bb->x2; in vmw_sou_surface_post_clip()
667 blit->body.destRect.bottom = bb->y2; in vmw_sou_surface_post_clip()
671 rect->left -= bb->x1; in vmw_sou_surface_post_clip()
672 rect->top -= bb->y1; in vmw_sou_surface_post_clip()
673 rect->right -= bb->x1; in vmw_sou_surface_post_clip()
[all …]
vmwgfx_stdu.c
1169 struct drm_rect *bb) in vmw_stdu_bo_populate_update_cpu() argument
1186 width = bb->x2 - bb->x1; in vmw_stdu_bo_populate_update_cpu()
1187 height = bb->y2 - bb->y1; in vmw_stdu_bo_populate_update_cpu()
1193 dst_offset = bb->y1 * dst_pitch + bb->x1 * stdu->cpp; in vmw_stdu_bo_populate_update_cpu()
1333 struct drm_rect *bb) in vmw_stdu_surface_populate_update() argument
1335 vmw_stdu_populate_update(cmd, update->du->unit, bb->x1, bb->x2, bb->y1, in vmw_stdu_surface_populate_update()
1336 bb->y2); in vmw_stdu_surface_populate_update()

