
Searched refs:hba (Results 1 – 25 of 70) sorted by relevance


/drivers/ufs/host/
ufs-renesas.c
57 return ufshcd_readl(hba, reg); in ufs_renesas_read()
62 ufshcd_writel(hba, value, reg); in ufs_renesas_write()
98 ufs_renesas_write(hba, 0xf0, 1); in ufs_renesas_write_phy()
105 ufs_renesas_write(hba, 0xf0, 0); in ufs_renesas_write_phy()
112 ufs_renesas_write(hba, 0xf0, 1); in ufs_renesas_set_phy()
124 ufs_renesas_write(hba, 0xf0, 0); in ufs_renesas_set_phy()
143 ufs_renesas_write(hba, 0xf0, 0); in ufs_renesas_reset_indirect_update()
152 ufs_renesas_write(hba, 0xf0, 0); in ufs_renesas_indirect_write()
425 priv->pre_init(hba); in ufs_renesas_hce_enable_notify()
438 pm_runtime_put(hba->dev); in ufs_renesas_setup_clocks()
[all …]
ufs-mediatek.c
160 ufshcd_dme_get(hba, in ufs_mtk_cfg_unipro_cg()
271 hba->ahit = 0; in ufs_mtk_hce_enable_notify()
1098 host->mcq_intr_info[i].hba = hba; in ufs_mtk_init_mcq_irq()
1138 host->hba = hba; in ufs_mtk_init()
1378 if (ufshcd_is_auto_hibern8_supported(hba) && hba->ahit) in ufs_mtk_setup_clk_gating()
1480 ufshcd_mcq_config_mac(hba, hba->nutrs); in ufs_mtk_link_set_hpm()
1537 if (!hba->vreg_info.vccq && !hba->vreg_info.vccq2) in ufs_mtk_dev_vreg_set_lpm()
1545 if (lpm && hba->vreg_info.vcc && hba->vreg_info.vcc->enabled) { in ufs_mtk_dev_vreg_set_lpm()
1716 if (ufs_mtk_is_broken_vcc(hba) && hba->vreg_info.vcc && in ufs_mtk_fixup_dev_quirks()
1967 hba->mcq_base = hba->mmio_base + MCQ_QUEUE_OFFSET(hba->mcq_capabilities); in ufs_mtk_mcq_config_resource()
[all …]
ufs-hisi.c
35 err |= ufshcd_dme_get(hba, in ufs_hisi_check_hibern8()
52 err |= ufshcd_dme_get(hba, in ufs_hisi_check_hibern8()
222 dev_info(hba->dev, in ufs_hisi_link_startup_pre_change()
372 dev_err(hba->dev, in ufs_hisi_pwr_change_notify()
383 dev_err(hba->dev, in ufs_hisi_pwr_change_notify()
478 host->hba = hba; in ufs_hisi_init_common()
488 ufs_hisi_set_pm_lvl(hba); in ufs_hisi_init_common()
512 ufs_hisi_clk_init(hba); in ufs_hi3660_init()
514 ufs_hisi_soc_init(hba); in ufs_hi3660_init()
531 ufs_hisi_clk_init(hba); in ufs_hi3670_init()
[all …]
ufs-qcom.c
142 struct ufs_hba *hba = host->hba; in ufs_qcom_config_ice_allocator() local
166 struct ufs_hba *hba = host->hba; in ufs_qcom_ice_init() local
245 ufshcd_hold(hba); in ufs_qcom_ice_keyslot_program()
247 ufshcd_release(hba); in ufs_qcom_ice_keyslot_program()
259 ufshcd_hold(hba); in ufs_qcom_ice_keyslot_evict()
1281 host->hba = hba; in ufs_qcom_init()
2064 struct ufs_hba *hba = qi->hba; in ufs_qcom_mcq_esi_handler() local
2076 devm_free_irq(q->hba->dev, q->irq, q->hba); in ufs_qcom_irq_free()
2096 nr_irqs = hba->nr_hw_queues - hba->nr_queues[HCTX_TYPE_POLL]; in DEFINE_FREE()
2103 qi[0].hba = hba; in DEFINE_FREE()
[all …]
ufs-exynos.c
226 struct ufs_hba *hba = ufs->hba; in gs101_ufs_drv_init() local
249 struct ufs_hba *hba = ufs->hba; in exynosauto_ufs_post_hce_enable() local
263 struct ufs_hba *hba = ufs->hba; in exynosauto_ufs_pre_link() local
318 struct ufs_hba *hba = ufs->hba; in exynosauto_ufs_pre_pwr_change() local
331 struct ufs_hba *hba = ufs->hba; in exynosauto_ufs_post_pwr_change() local
346 struct ufs_hba *hba = ufs->hba; in exynos7_ufs_pre_link() local
376 struct ufs_hba *hba = ufs->hba; in exynos7_ufs_post_link() local
406 struct ufs_hba *hba = ufs->hba; in exynos7_ufs_post_pwr_change() local
452 struct ufs_hba *hba = ufs->hba; in exynos_ufs_get_clk_info() local
519 struct ufs_hba *hba = ufs->hba; in exynos_ufs_set_pwm_clk_div() local
[all …]
ufshcd-pci.c
163 (hba->pwr_info.lane_rx != 2 || hba->pwr_info.lane_tx != 2)) in ufs_intel_lkf_pwr_change_notify()
270 intel_cache_ltr(hba); in intel_ltr_set()
292 intel_cache_ltr(hba); in intel_add_debugfs()
369 intel_add_debugfs(hba); in ufs_intel_common_init()
375 intel_remove_debugfs(hba); in ufs_intel_common_exit()
376 intel_ltr_hide(hba->dev); in ufs_intel_common_exit()
448 hba->mcq_base = hba->mmio_base + ufshcd_mcq_queue_cfg_addr(hba); in ufs_qemu_mcq_config_resource()
469 opr = &hba->mcq_opr[i]; in ufs_qemu_op_runtime_config()
537 ufshcd_set_link_off(hba); in ufshcd_pci_restore()
554 ufshcd_remove(hba); in ufshcd_pci_remove()
[all …]
ufs-sprd.c
134 struct device *dev = hba->dev; in ufs_sprd_common_init()
149 host->hba = hba; in ufs_sprd_common_init()
150 ufshcd_set_variant(hba, host); in ufs_sprd_common_init()
230 ufs_sprd_n6_host_reset(hba); in ufs_sprd_n6_key_acc_enable()
248 hba->caps &= ~UFSHCD_CAP_CRYPTO; in ufs_sprd_n6_key_acc_enable()
256 ret = ufs_sprd_common_init(hba); in ufs_sprd_n6_init()
266 if (hba->caps & UFSHCD_CAP_CRYPTO) in ufs_sprd_n6_init()
267 ufs_sprd_n6_key_acc_enable(hba); in ufs_sprd_n6_init()
334 ufs_sprd_n6_host_reset(hba); in sprd_ufs_n6_hce_enable_notify()
341 err = ufs_sprd_n6_phy_init(hba); in sprd_ufs_n6_hce_enable_notify()
[all …]
cdns-pltfrm.c
47 ufshcd_dme_get(hba, UIC_ARG_MIB(T_PROTOCOLID), in cdns_ufs_get_l4_attr()
49 ufshcd_dme_get(hba, UIC_ARG_MIB(T_CPORTFLAGS), in cdns_ufs_get_l4_attr()
61 ufshcd_dme_get(hba, UIC_ARG_MIB(T_CPORTMODE), in cdns_ufs_get_l4_attr()
139 ufshcd_readl(hba, CDNS_UFS_REG_HCLKDIV); in cdns_ufs_set_hclkdiv()
157 return cdns_ufs_set_hclkdiv(hba); in cdns_ufs_hce_enable_notify()
170 cdns_ufs_get_l4_attr(hba); in cdns_ufs_hibern8_notify()
172 cdns_ufs_set_l4_attr(hba); in cdns_ufs_hibern8_notify()
195 ufshcd_disable_host_tx_lcc(hba); in cdns_ufs_link_startup_notify()
201 hba->ahit = 0; in cdns_ufs_link_startup_notify()
216 struct device *dev = hba->dev; in cdns_ufs_init()
[all …]
ufs-rockchip.c
29 err = ufshcd_dme_reset(hba); in ufs_rockchip_hce_enable_notify()
33 err = ufshcd_dme_enable(hba); in ufs_rockchip_hce_enable_notify()
45 hba->rpm_lvl = UFS_PM_LVL_5; in ufs_rockchip_set_pm_lvl()
46 hba->spm_lvl = UFS_PM_LVL_5; in ufs_rockchip_set_pm_lvl()
130 struct device *dev = hba->dev; in ufs_rockchip_common_init()
177 host->hba = hba; in ufs_rockchip_common_init()
179 ufshcd_set_variant(hba, host); in ufs_rockchip_common_init()
186 struct device *dev = hba->dev; in ufs_rockchip_rk3576_init()
198 hba->caps |= UFSHCD_CAP_WB_EN; in ufs_rockchip_rk3576_init()
201 ufs_rockchip_set_pm_lvl(hba); in ufs_rockchip_rk3576_init()
[all …]
/drivers/ufs/core/
ufshcd.c
684 hba->saved_err, hba->saved_uic_err); in ufshcd_print_host_state()
697 hba->eh_flags, hba->req_abort_count); in ufshcd_print_host_state()
699 hba->ufs_version, hba->capabilities, hba->caps); in ufshcd_print_host_state()
2468 hba->reserved_slot = hba->nutrs - 1; in ufshcd_hba_capabilities()
3967 hba->lrb = devm_kcalloc(hba->dev, in ufshcd_memory_alloc()
6527 (!(hba->saved_err || hba->saved_uic_err || hba->force_reset || in ufshcd_err_handling_should_stop()
6649 hba->is_powered, hba->shutting_down, hba->saved_err, in ufshcd_err_handler()
6956 hba->saved_err |= hba->errors; in ufshcd_check_errors()
8859 devm_kfree(hba->dev, hba->lrb); in ufshcd_release_sdb_queue()
9807 hba->rpm_lvl : hba->spm_lvl; in __ufshcd_wl_suspend()
[all …]
ufshcd-priv.h
109 if (hba->vops && hba->vops->exit) in ufshcd_vops_exit()
110 return hba->vops->exit(hba); in ufshcd_vops_exit()
134 if (hba->vops && hba->vops->event_notify) in ufshcd_vops_event_notify()
200 hba->vops->fixup_dev_quirks(hba); in ufshcd_vops_fixup_dev_quirks()
206 if (hba->vops && hba->vops->suspend) in ufshcd_vops_suspend()
214 if (hba->vops && hba->vops->resume) in ufshcd_vops_resume()
215 return hba->vops->resume(hba, op); in ufshcd_vops_resume()
223 hba->vops->dbg_register_dump(hba); in ufshcd_vops_dbg_register_dump()
229 return hba->vops->device_reset(hba); in ufshcd_vops_device_reset()
269 if (hba->vops && hba->vops->config_esi) in ufshcd_mcq_vops_config_esi()
[all …]
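
The ufshcd-priv.h hits above all share one shape: each ufshcd_vops_*() helper checks both that a vendor-ops table is installed and that the individual callback is non-NULL before dispatching to it. Below is a minimal, self-contained C sketch of that null-checked dispatch pattern; the struct layouts are simplified stand-ins for illustration, not the real struct ufs_hba or struct ufs_hba_variant_ops.

#include <stdio.h>
#include <stddef.h>

struct ufs_hba;

/* Simplified stand-in for the vendor-ops table. */
struct ufs_hba_variant_ops {
	int (*exit)(struct ufs_hba *hba);
};

/* Simplified stand-in for the host structure. */
struct ufs_hba {
	const struct ufs_hba_variant_ops *vops;
};

/* Dispatch only when a vendor driver installed the callback. */
static int ufshcd_vops_exit(struct ufs_hba *hba)
{
	if (hba->vops && hba->vops->exit)
		return hba->vops->exit(hba);
	return 0;
}

static int demo_exit(struct ufs_hba *hba)
{
	(void)hba;
	puts("vendor exit callback invoked");
	return 0;
}

int main(void)
{
	static const struct ufs_hba_variant_ops demo_ops = { .exit = demo_exit };
	struct ufs_hba hba = { .vops = &demo_ops };

	ufshcd_vops_exit(&hba);	/* dispatches to demo_exit() */
	hba.vops = NULL;
	ufshcd_vops_exit(&hba);	/* vendor ops absent: silently a no-op */
	return 0;
}
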
ufs-mcq.c
147 if (!hba->vops || !hba->vops->get_hba_mac) { in ufshcd_mcq_decide_queue_depth()
158 mac = hba->vops->get_hba_mac(hba); in ufshcd_mcq_decide_queue_depth()
373 ufsmcq_writelx(hba, ufshcd_mcq_opr_offset(hba, OPR_SQD, i), in ufshcd_mcq_make_queues_operational()
376 ufsmcq_writelx(hba, ufshcd_mcq_opr_offset(hba, OPR_SQIS, i), in ufshcd_mcq_make_queues_operational()
386 ufsmcq_writelx(hba, ufshcd_mcq_opr_offset(hba, OPR_CQD, i), in ufshcd_mcq_make_queues_operational()
389 ufsmcq_writelx(hba, ufshcd_mcq_opr_offset(hba, OPR_CQIS, i), in ufshcd_mcq_make_queues_operational()
402 if (i < hba->nr_hw_queues - hba->nr_queues[HCTX_TYPE_POLL]) in ufshcd_mcq_make_queues_operational()
435 ufshcd_writel(hba, ufshcd_readl(hba, REG_UFS_MEM_CFG) | 0x2, in ufshcd_mcq_enable_esi()
467 hba->uhq = devm_kzalloc(hba->dev, in ufshcd_mcq_init()
470 if (!hba->uhq) { in ufshcd_mcq_init()
[all …]
ufs-debugfs.c
64 *val = hba->ee_usr_mask; in ee_usr_mask_get()
69 __acquires(&hba->host_sem) in ufs_debugfs_get_user_access()
71 down(&hba->host_sem); in ufs_debugfs_get_user_access()
73 up(&hba->host_sem); in ufs_debugfs_get_user_access()
76 ufshcd_rpm_get_sync(hba); in ufs_debugfs_get_user_access()
81 __releases(&hba->host_sem) in ufs_debugfs_put_user_access()
84 up(&hba->host_sem); in ufs_debugfs_put_user_access()
114 ee_ctrl_mask = hba->ee_drv_mask | (hba->ee_usr_mask & ~status); in ufs_debugfs_exception_event()
135 if (!hba->ee_usr_mask || pm_runtime_suspended(hba->dev) || in ufs_debugfs_restart_ee()
149 p = &hba->saved_err; in ufs_saved_err_show()
[all …]
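
The ufs-debugfs.c hits show the gate every user-triggered access goes through: take hba->host_sem, bail out (releasing it) if the controller is not accepting user access, otherwise pin the device with a runtime-PM reference; the put side drops the reference and releases the semaphore. Below is a self-contained sketch of that get/put bracket, with a POSIX semaphore and a plain counter standing in for the kernel semaphore and ufshcd_rpm_get_sync()/ufshcd_rpm_put_sync(); the field names are illustrative, not the real struct ufs_hba layout.

#include <semaphore.h>
#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-in for struct ufs_hba: only what the sketch needs. */
struct ufs_hba {
	sem_t host_sem;		/* serialises user access against teardown */
	bool  user_access_ok;	/* stands in for the operational-state check */
	int   rpm_refs;		/* stands in for the runtime-PM reference count */
};

/* Mirrors the shape of ufs_debugfs_get_user_access(): fail with the
 * semaphore released, or return holding both the semaphore and a
 * runtime-PM reference. */
static int get_user_access(struct ufs_hba *hba)
{
	sem_wait(&hba->host_sem);
	if (!hba->user_access_ok) {
		sem_post(&hba->host_sem);
		return -1;		/* -EBUSY in the kernel */
	}
	hba->rpm_refs++;		/* ufshcd_rpm_get_sync() */
	return 0;
}

/* Mirrors ufs_debugfs_put_user_access(): undo in reverse order. */
static void put_user_access(struct ufs_hba *hba)
{
	hba->rpm_refs--;		/* ufshcd_rpm_put_sync() */
	sem_post(&hba->host_sem);
}

int main(void)
{
	struct ufs_hba hba = { .user_access_ok = true };

	sem_init(&hba.host_sem, 0, 1);
	if (get_user_access(&hba) == 0) {
		puts("debugfs access granted");
		put_user_access(&hba);
	}
	sem_destroy(&hba.host_sem);
	return 0;
}
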
ufshcd-crypto.c
26 ufshcd_hold(hba); in ufshcd_program_key()
40 ufshcd_release(hba); in ufshcd_program_key()
168 hba->crypto_cfg_register = in ufshcd_hba_init_crypto_capabilities()
170 hba->crypto_cap_array = in ufshcd_hba_init_crypto_capabilities()
171 devm_kcalloc(hba->dev, hba->crypto_capabilities.num_crypto_cap, in ufshcd_hba_init_crypto_capabilities()
173 if (!hba->crypto_cap_array) { in ufshcd_hba_init_crypto_capabilities()
180 hba->dev, &hba->crypto_profile, in ufshcd_hba_init_crypto_capabilities()
189 hba->crypto_profile.dev = hba->dev; in ufshcd_hba_init_crypto_capabilities()
198 cpu_to_le32(ufshcd_readl(hba, in ufshcd_hba_init_crypto_capabilities()
212 hba->caps &= ~UFSHCD_CAP_CRYPTO; in ufshcd_hba_init_crypto_capabilities()
[all …]
ufs-sysfs.c
253 up(&hba->host_sem); in ufshcd_read_hci_reg()
258 ufshcd_hold(hba); in ufshcd_read_hci_reg()
263 up(&hba->host_sem); in ufshcd_read_hci_reg()
310 up(&hba->host_sem); in auto_hibern8_store()
329 if (!ufshcd_is_wb_allowed(hba) || (ufshcd_is_clkscaling_supported(hba) in wb_on_store()
355 up(&hba->host_sem); in wb_on_store()
427 up(&hba->host_sem); in enable_wb_buf_flush_store()
476 if (!ufshcd_is_wb_allowed(hba) || !hba->dev_info.wb_enabled in wb_resize_enable_store()
496 up(&hba->host_sem); in wb_resize_enable_store()
756 memset(&hba->monitor, 0, sizeof(hba->monitor)); in monitor_enable_store()
[all …]
ufs-hwmon.c
14 struct ufs_hba *hba; member
54 struct ufs_hba *hba = data->hba; in ufs_hwmon_read() local
57 down(&hba->host_sem); in ufs_hwmon_read()
60 up(&hba->host_sem); in ufs_hwmon_read()
64 ufshcd_rpm_get_sync(hba); in ufs_hwmon_read()
91 up(&hba->host_sem); in ufs_hwmon_read()
100 struct ufs_hba *hba = data->hba; in ufs_hwmon_write() local
109 down(&hba->host_sem); in ufs_hwmon_write()
112 up(&hba->host_sem); in ufs_hwmon_write()
125 up(&hba->host_sem); in ufs_hwmon_write()
[all …]
ufs_trace.h
88 TP_ARGS(hba, state),
96 __entry->hba = hba;
121 __entry->hba = hba;
137 TP_ARGS(hba, state),
145 __entry->hba = hba;
167 __entry->hba = hba;
210 __entry->hba = hba;
295 __entry->hba = hba;
333 __entry->hba = hba;
363 __entry->hba = hba;
[all …]
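
The ufs_trace.h hits (TP_ARGS(hba, state), __entry->hba = hba;) come from kernel tracepoint definitions that copy the hba pointer into the trace ring buffer. As a reminder of the shape such a definition takes, here is a generic TRACE_EVENT skeleton; the event name, fields, and header guard below are invented for illustration and are not the actual event classes defined in ufs_trace.h.

/* SPDX-License-Identifier: GPL-2.0 */
#undef TRACE_SYSTEM
#define TRACE_SYSTEM ufs_demo

#if !defined(_TRACE_UFS_DEMO_H) || defined(TRACE_HEADER_MULTI_READ)
#define _TRACE_UFS_DEMO_H

#include <linux/tracepoint.h>

struct ufs_hba;

TRACE_EVENT(ufs_demo_state_change,
	TP_PROTO(struct ufs_hba *hba, int state),
	TP_ARGS(hba, state),

	TP_STRUCT__entry(
		__field(struct ufs_hba *, hba)
		__field(int, state)
	),

	TP_fast_assign(
		__entry->hba = hba;
		__entry->state = state;
	),

	TP_printk("hba=%p state=%d", __entry->hba, __entry->state)
);

#endif /* _TRACE_UFS_DEMO_H */

/* This part must be outside the include guard. */
#include <trace/define_trace.h>
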
/drivers/scsi/
hptiop.c
349 hba->ops->post_req(hba, &(hba->u.mvfrey.internal_req)); in iop_send_sync_request_mvfrey()
384 hba->ops->disable_intr(hba); in iop_send_sync_msg()
385 hba->ops->post_msg(hba, msg); in iop_send_sync_msg()
389 hba->ops->iop_intr(hba); in iop_send_sync_msg()
396 hba->ops->enable_intr(hba); in iop_send_sync_msg()
571 hba->ops->enable_intr(hba); in hptiop_initialize_iop()
773 free_req(hba, &hba->reqs[tag]); in hptiop_finish_scsi_req()
1050 hba->ops->post_req(hba, _req); in hptiop_queuecommand_lck()
1487 hba->ops->unmap_pci_bar(hba); in hptiop_probe()
1515 hba->ops->disable_intr(hba); in hptiop_shutdown()
[all …]
stex.c
411 struct req_msg *req = hba->dma_mem + hba->req_head * hba->rq_size; in stex_alloc_req()
414 hba->req_head %= hba->rq_count+1; in stex_alloc_req()
548 addr = hba->dma_handle + hba->req_head * hba->rq_size; in stex_ss_send_cmd()
553 hba->req_head %= hba->rq_count+1; in stex_ss_send_cmd()
700 req = hba->alloc_rq(hba); in stex_queuecommand_lck()
719 if (!hba->map_sg(hba, req, &hba->ccb[tag])) { in stex_queuecommand_lck()
724 hba->send(hba, req, tag); in stex_queuecommand_lck()
1074 status_phys = hba->dma_handle + (hba->rq_count+1) * hba->rq_size; in stex_common_handshake()
1263 hba->wait_ccb = &hba->ccb[tag]; in stex_abort()
1869 req = hba->alloc_rq(hba); in stex_hba_stop()
[all …]
/drivers/scsi/bnx2i/
bnx2i_init.c
101 hba->pci_did); in bnx2i_identify_device()
120 hba = tmp_hba; in get_adapter_list_head()
126 return hba; in get_adapter_list_head()
143 return hba; in bnx2i_find_hba_for_cnic()
285 hba->cnic = cnic; in bnx2i_init_one()
288 hba->age++; in bnx2i_init_one()
323 if (!hba) { in bnx2i_ulp_init()
347 if (!hba) { in bnx2i_ulp_exit()
357 hba->cnic->unregister_device(hba->cnic, CNIC_ULP_ISCSI); in bnx2i_ulp_exit()
378 if (!hba) in bnx2i_get_stats()
[all …]
bnx2i_iscsi.c
398 bnx2i_ep->hba = hba; in bnx2i_alloc_ep()
543 hba->mp_bd_tbl, hba->mp_bd_dma); in bnx2i_setup_mp_bdt()
571 hba->mp_bd_tbl, hba->mp_bd_dma); in bnx2i_free_mp_bdt()
805 hba->pci_did = hba->pcidev->device; in bnx2i_alloc_hba()
806 hba->pci_vid = hba->pcidev->vendor; in bnx2i_alloc_hba()
863 hba->num_ccell = hba->max_sqes / 2; in bnx2i_alloc_hba()
1371 bnx2i_conn->hba = hba; in bnx2i_conn_create()
1441 if (bnx2i_ep->hba != hba) { in bnx2i_conn_bind()
1531 struct bnx2i_hba *hba = bnx2i_ep->hba; in bnx2i_ep_get_param() local
1676 if (hba && hba->cnic) in bnx2i_check_route()
[all …]
/drivers/parisc/
lba_pci.c
113 return container_of(hba, struct lba_device, hba); in LBA_DEV()
739 ldev->hba.io_space.name, in lba_fixup_bus()
740 ldev->hba.io_space.start, ldev->hba.io_space.end, in lba_fixup_bus()
741 ldev->hba.io_space.flags); in lba_fixup_bus()
743 ldev->hba.lmmio_space.name, in lba_fixup_bus()
744 ldev->hba.lmmio_space.start, ldev->hba.lmmio_space.end, in lba_fixup_bus()
1141 r = &lba_dev->hba.io_space; in lba_pat_resources()
1183 r = &(lba_dev->hba.bus_num); in lba_legacy_resources()
1323 r = &(lba_dev->hba.io_space); in lba_legacy_resources()
1358 d->hba.base_addr, in lba_hw_init()
[all …]
/drivers/scsi/bnx2fc/
bnx2fc_fcoe.c
193 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_cleanup() local
284 hba = interface->hba; in bnx2fc_xmit()
804 hba = interface->hba; in bnx2fc_net_config()
899 if (interface->hba == hba && in bnx2fc_indicate_netevent()
913 if (interface->hba != hba) in bnx2fc_indicate_netevent()
1373 hba->max_xid = (hba->max_tasks - 1); in bnx2fc_hba_create()
1452 interface->hba = hba; in bnx2fc_interface_create()
1837 if (interface->hba == hba) { in bnx2fc_ulp_start()
1966 if (interface->hba == hba) in bnx2fc_ulp_stop()
2185 hba = interface->hba; in __bnx2fc_enable()
[all …]
bnx2fc_hwi.c
47 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_stat_req()
141 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_fw_fcoe_init_msg()
160 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_fw_fcoe_destroy_msg()
177 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_ofld_req() local
344 if (hba->cnic && hba->cnic->submit_kwqes) in bnx2fc_send_session_ofld_req()
362 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_enable_req() local
431 struct bnx2fc_hba *hba = interface->hba; in bnx2fc_send_session_disable_req() local
542 hba = unsol_els->hba; in bnx2fc_unsol_els_work()
615 unsol_els->hba = interface->hba; in bnx2fc_process_l2_frame_compl()
1171 if (hba != interface->hba) { in bnx2fc_process_ofld_cmpl()
[all …]
/drivers/target/
target_core_hba.c
111 struct se_hba *hba; in core_alloc_hba() local
114 hba = kzalloc(sizeof(*hba), GFP_KERNEL); in core_alloc_hba()
115 if (!hba) { in core_alloc_hba()
127 if (!hba->backend) { in core_alloc_hba()
132 ret = hba->backend->ops->attach_hba(hba, plugin_dep_id); in core_alloc_hba()
144 return hba; in core_alloc_hba()
148 hba->backend = NULL; in core_alloc_hba()
150 kfree(hba); in core_alloc_hba()
159 hba->backend->ops->detach_hba(hba); in core_delete_hba()
170 hba->backend = NULL; in core_delete_hba()
[all …]
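
core_alloc_hba() in target_core_hba.c follows the classic allocate / attach backend / unwind-on-error shape visible in the hits above: kzalloc the se_hba, resolve the backend, call its attach_hba() op, and on any failure clear the backend pointer and kfree the partially set up structure. Below is a self-contained userspace sketch of the same unwind pattern; the types and helper names are invented stand-ins, not the real se_hba or backend ops.

#include <stdio.h>
#include <stdlib.h>

struct se_hba;

/* Stand-in for the backend's attach/detach operations. */
struct backend_ops {
	int  (*attach_hba)(struct se_hba *hba, unsigned int dep_id);
	void (*detach_hba)(struct se_hba *hba);
};

/* Stand-in for struct se_hba: only what the sketch needs. */
struct se_hba {
	const struct backend_ops *backend;
};

/* Allocate an HBA and attach its backend, unwinding on any failure. */
static struct se_hba *alloc_hba_sketch(const struct backend_ops *ops,
				       unsigned int dep_id)
{
	struct se_hba *hba = calloc(1, sizeof(*hba));	/* kzalloc() */

	if (!hba)
		return NULL;

	hba->backend = ops;
	if (!hba->backend)
		goto out_free;

	if (hba->backend->attach_hba(hba, dep_id) != 0)
		goto out_free;

	return hba;

out_free:
	hba->backend = NULL;
	free(hba);					/* kfree() */
	return NULL;
}

static int demo_attach(struct se_hba *hba, unsigned int dep_id)
{
	(void)hba;
	printf("backend attached, dep_id=%u\n", dep_id);
	return 0;
}

static void demo_detach(struct se_hba *hba)
{
	(void)hba;
	puts("backend detached");
}

int main(void)
{
	static const struct backend_ops demo = {
		.attach_hba = demo_attach,
		.detach_hba = demo_detach,
	};
	struct se_hba *hba = alloc_hba_sketch(&demo, 1);

	if (!hba)
		return EXIT_FAILURE;

	/* core_delete_hba() analogue: detach, then free. */
	hba->backend->detach_hba(hba);
	free(hba);
	return EXIT_SUCCESS;
}
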

Completed in 764 milliseconds
