Lines Matching refs:trans_pcie

42 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_regs() local
43 struct pci_dev *pdev = trans_pcie->pci_dev; in iwl_trans_pcie_dump_regs()
47 if (trans_pcie->pcie_dbg_dumped_once) in iwl_trans_pcie_dump_regs()
126 trans_pcie->pcie_dbg_dumped_once = 1; in iwl_trans_pcie_dump_regs()
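The first cluster (source lines 42-126) shows iwl_trans_pcie_dump_regs() guarding an expensive PCI config-space dump with a dump-once flag, so a cascade of errors logs the registers only one time. A minimal sketch of that guard, assuming a simplified dump loop over pci_read_config_dword(); the real function also walks the parent bridge and uses the driver's own logging:

    static void dump_regs_once_sketch(struct iwl_trans_pcie *trans_pcie)
    {
            struct pci_dev *pdev = trans_pcie->pci_dev;
            u32 val;
            int i;

            /* dump at most once per device lifetime */
            if (trans_pcie->pcie_dbg_dumped_once)
                    return;

            /* illustrative: the first 64 bytes of config space */
            for (i = 0; i < 64; i += 4) {
                    pci_read_config_dword(pdev, i, &val);
                    dev_err(&pdev->dev, "cfg 0x%02x: 0x%08x\n", i, val);
            }

            trans_pcie->pcie_dbg_dumped_once = 1;
    }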
256 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_apm_config() local
267 pcie_capability_read_word(trans_pcie->pci_dev, PCI_EXP_LNKCTL, &lctl); in iwl_pcie_apm_config()
270 pcie_capability_read_word(trans_pcie->pci_dev, PCI_EXP_DEVCTL2, &cap); in iwl_pcie_apm_config()
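In iwl_pcie_apm_config() the two pcie_capability_read_word() calls probe link power management: PCI_EXP_LNKCTL tells whether the platform left ASPM L1 enabled on the link, and PCI_EXP_DEVCTL2 whether Latency Tolerance Reporting is on. A sketch of those probes; the ltr_enabled assignment reflects the apparent intent of the excerpt, not a confirmed field use:

    static void apm_config_sketch(struct iwl_trans *trans,
                                  struct iwl_trans_pcie *trans_pcie)
    {
            u16 lctl, cap;

            /* did the platform leave ASPM L1 enabled on the link? */
            pcie_capability_read_word(trans_pcie->pci_dev,
                                      PCI_EXP_LNKCTL, &lctl);
            if (lctl & PCI_EXP_LNKCTL_ASPM_L1)
                    dev_info(&trans_pcie->pci_dev->dev, "ASPM L1 on\n");

            /* record whether Latency Tolerance Reporting is enabled */
            pcie_capability_read_word(trans_pcie->pci_dev,
                                      PCI_EXP_DEVCTL2, &cap);
            trans->ltr_enabled = !!(cap & PCI_EXP_DEVCTL2_LTR_EN);
    }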
540 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_nic_init() local
544 spin_lock_bh(&trans_pcie->irq_lock); in iwl_pcie_nic_init()
546 spin_unlock_bh(&trans_pcie->irq_lock); in iwl_pcie_nic_init()
688 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_load_firmware_chunk() local
691 trans_pcie->ucode_write_complete = false; in iwl_pcie_load_firmware_chunk()
700 ret = wait_event_timeout(trans_pcie->ucode_write_waitq, in iwl_pcie_load_firmware_chunk()
701 trans_pcie->ucode_write_complete, 5 * HZ); in iwl_pcie_load_firmware_chunk()
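Source lines 688-701 show the driver's standard completion handshake for firmware DMA: clear a flag, kick the hardware, then sleep on a waitqueue until the ISR sets the flag, with wait_event_timeout() returning 0 on timeout. The same flag-plus-waitqueue shape recurs below for sx_waitq (the D3 handshake, lines 1539-1543) and imr_waitq (lines 3733-3741). A sketch of the shape, with the DMA kick elided:

    static int load_chunk_sketch(struct iwl_trans_pcie *trans_pcie)
    {
            int ret;

            trans_pcie->ucode_write_complete = false;

            /* ... program the FH DMA channel for this chunk ... */

            /* the ISR sets ucode_write_complete and wakes the queue;
             * wait_event_timeout() returns 0 if 5 seconds elapse first
             */
            ret = wait_event_timeout(trans_pcie->ucode_write_waitq,
                                     trans_pcie->ucode_write_complete,
                                     5 * HZ);
            return ret ? 0 : -ETIMEDOUT;
    }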
1065 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_check_hw_rf_kill() local
1075 if (trans_pcie->opmode_down) in iwl_pcie_check_hw_rf_kill()
1143 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_map_non_rx_causes() local
1144 int val = trans_pcie->def_irq | MSIX_NON_AUTO_CLEAR_CAUSE; in iwl_pcie_map_non_rx_causes()
1162 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_map_rx_causes() local
1164 trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS ? 1 : 0; in iwl_pcie_map_rx_causes()
1182 if (trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_NON_RX) in iwl_pcie_map_rx_causes()
1186 if (trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS) in iwl_pcie_map_rx_causes()
1190 void iwl_pcie_conf_msix_hw(struct iwl_trans_pcie *trans_pcie) in iwl_pcie_conf_msix_hw() argument
1192 struct iwl_trans *trans = trans_pcie->trans; in iwl_pcie_conf_msix_hw()
1194 if (!trans_pcie->msix_enabled) { in iwl_pcie_conf_msix_hw()
1221 static void iwl_pcie_init_msix(struct iwl_trans_pcie *trans_pcie) in iwl_pcie_init_msix() argument
1223 struct iwl_trans *trans = trans_pcie->trans; in iwl_pcie_init_msix()
1225 iwl_pcie_conf_msix_hw(trans_pcie); in iwl_pcie_init_msix()
1227 if (!trans_pcie->msix_enabled) in iwl_pcie_init_msix()
1230 trans_pcie->fh_init_mask = ~iwl_read32(trans, CSR_MSIX_FH_INT_MASK_AD); in iwl_pcie_init_msix()
1231 trans_pcie->fh_mask = trans_pcie->fh_init_mask; in iwl_pcie_init_msix()
1232 trans_pcie->hw_init_mask = ~iwl_read32(trans, CSR_MSIX_HW_INT_MASK_AD); in iwl_pcie_init_msix()
1233 trans_pcie->hw_mask = trans_pcie->hw_init_mask; in iwl_pcie_init_msix()
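iwl_pcie_conf_msix_hw() and iwl_pcie_init_msix() capture the reset-time interrupt routing: the CSR_MSIX_*_INT_MASK_AD registers hold "masked" bits, so the inverted reads at source lines 1230-1233 record which FH and HW causes start out enabled, and fh_mask/hw_mask then track the live state. Unmasking one cause later is a read-modify-write on the same register; a sketch under a hypothetical helper name (the real driver has equivalent enable/disable helpers):

    /* msix_fh_unmask() is a hypothetical name, for illustration only */
    static void msix_fh_unmask(struct iwl_trans *trans, u32 cause)
    {
            struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
            u32 masked = iwl_read32(trans, CSR_MSIX_FH_INT_MASK_AD);

            /* clearing a bit in the mask register enables that cause */
            iwl_write32(trans, CSR_MSIX_FH_INT_MASK_AD, masked & ~cause);
            trans_pcie->fh_mask |= cause;
    }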
1238 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in _iwl_trans_pcie_stop_device() local
1240 lockdep_assert_held(&trans_pcie->mutex); in _iwl_trans_pcie_stop_device()
1242 if (trans_pcie->is_down) in _iwl_trans_pcie_stop_device()
1245 trans_pcie->is_down = true; in _iwl_trans_pcie_stop_device()
1295 iwl_pcie_conf_msix_hw(trans_pcie); in _iwl_trans_pcie_stop_device()
1320 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_synchronize_irqs() local
1322 if (trans_pcie->msix_enabled) { in iwl_pcie_synchronize_irqs()
1325 for (i = 0; i < trans_pcie->alloc_vecs; i++) in iwl_pcie_synchronize_irqs()
1326 synchronize_irq(trans_pcie->msix_entries[i].vector); in iwl_pcie_synchronize_irqs()
1328 synchronize_irq(trans_pcie->pci_dev->irq); in iwl_pcie_synchronize_irqs()
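iwl_pcie_synchronize_irqs() shows how quiescing differs by interrupt mode: with MSI-X every allocated vector has its own Linux IRQ and must be synchronized individually, while MSI/INTx has the single pdev->irq. Reassembled from the excerpts above:

    /* body of iwl_pcie_synchronize_irqs(), reassembled from the excerpts */
    if (trans_pcie->msix_enabled) {
            int i;

            /* wait out any in-flight handler on every vector */
            for (i = 0; i < trans_pcie->alloc_vecs; i++)
                    synchronize_irq(trans_pcie->msix_entries[i].vector);
    } else {
            synchronize_irq(trans_pcie->pci_dev->irq);
    }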
1335 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_start_fw() local
1359 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_start_fw()
1369 if (trans_pcie->is_down) { in iwl_trans_pcie_start_fw()
1415 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_start_fw()
1456 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_stop_device() local
1463 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_stop_device()
1464 trans_pcie->opmode_down = true; in iwl_trans_pcie_stop_device()
1468 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_stop_device()
1473 struct iwl_trans_pcie __maybe_unused *trans_pcie = in iwl_trans_pcie_rf_kill() local
1476 lockdep_assert_held(&trans_pcie->mutex); in iwl_trans_pcie_rf_kill()
1522 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_d3_handshake() local
1539 ret = wait_event_timeout(trans_pcie->sx_waitq, in iwl_pcie_d3_handshake()
1540 trans_pcie->sx_complete, 2 * HZ); in iwl_pcie_d3_handshake()
1543 trans_pcie->sx_complete = false; in iwl_pcie_d3_handshake()
1577 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_d3_resume() local
1602 iwl_pcie_conf_msix_hw(trans_pcie); in iwl_trans_pcie_d3_resume()
1603 if (!trans_pcie->msix_enabled) in iwl_trans_pcie_d3_resume()
1644 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_set_interrupt_capa() local
1657 trans_pcie->msix_entries[i].entry = i; in iwl_pcie_set_interrupt_capa()
1659 num_irqs = pci_enable_msix_range(pdev, trans_pcie->msix_entries, in iwl_pcie_set_interrupt_capa()
1668 trans_pcie->def_irq = (num_irqs == max_irqs) ? num_irqs - 1 : 0; in iwl_pcie_set_interrupt_capa()
1682 trans_pcie->trans->num_rx_queues = num_irqs + 1; in iwl_pcie_set_interrupt_capa()
1683 trans_pcie->shared_vec_mask = IWL_SHARED_IRQ_NON_RX | in iwl_pcie_set_interrupt_capa()
1686 trans_pcie->trans->num_rx_queues = num_irqs; in iwl_pcie_set_interrupt_capa()
1687 trans_pcie->shared_vec_mask = IWL_SHARED_IRQ_NON_RX; in iwl_pcie_set_interrupt_capa()
1689 trans_pcie->trans->num_rx_queues = num_irqs - 1; in iwl_pcie_set_interrupt_capa()
1694 trans_pcie->trans->num_rx_queues, trans_pcie->shared_vec_mask); in iwl_pcie_set_interrupt_capa()
1696 WARN_ON(trans_pcie->trans->num_rx_queues > IWL_MAX_RX_HW_QUEUES); in iwl_pcie_set_interrupt_capa()
1698 trans_pcie->alloc_vecs = num_irqs; in iwl_pcie_set_interrupt_capa()
1699 trans_pcie->msix_enabled = true; in iwl_pcie_set_interrupt_capa()
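iwl_pcie_set_interrupt_capa() (source lines 1644-1699) requests a range of MSI-X vectors and degrades gracefully: pci_enable_msix_range() returns the number of vectors actually granted (or a negative errno), and on a partial grant the driver parks the non-RX causes on vector 0 and shrinks num_rx_queues to match (lines 1682-1689). A sketch of the allocation step; max_irqs and the MSIX_MIN_INTERRUPT_VECTORS floor are inferred from the excerpts' context:

    static bool enable_msix_sketch(struct pci_dev *pdev,
                                   struct iwl_trans_pcie *trans_pcie,
                                   int max_irqs)
    {
            int num_irqs, i;

            for (i = 0; i < max_irqs; i++)
                    trans_pcie->msix_entries[i].entry = i;

            num_irqs = pci_enable_msix_range(pdev, trans_pcie->msix_entries,
                                             MSIX_MIN_INTERRUPT_VECTORS,
                                             max_irqs);
            if (num_irqs < 0)
                    return false;   /* caller falls back to MSI / INTx */

            /* only a full grant leaves a dedicated vector for non-RX
             * causes; on a partial grant they share vector 0
             */
            trans_pcie->def_irq = (num_irqs == max_irqs) ? num_irqs - 1 : 0;
            trans_pcie->alloc_vecs = num_irqs;
            trans_pcie->msix_enabled = true;
            return true;
    }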
1718 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_irq_set_affinity() local
1720 i = trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS ? 0 : 1; in iwl_pcie_irq_set_affinity()
1721 iter_rx_q = trans_pcie->trans->num_rx_queues - 1 + i; in iwl_pcie_irq_set_affinity()
1729 cpumask_set_cpu(cpu, &trans_pcie->affinity_mask[i]); in iwl_pcie_irq_set_affinity()
1730 ret = irq_set_affinity_hint(trans_pcie->msix_entries[i].vector, in iwl_pcie_irq_set_affinity()
1731 &trans_pcie->affinity_mask[i]); in iwl_pcie_irq_set_affinity()
1733 IWL_ERR(trans_pcie->trans, in iwl_pcie_irq_set_affinity()
1735 trans_pcie->msix_entries[i].vector); in iwl_pcie_irq_set_affinity()
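iwl_pcie_irq_set_affinity() spreads the RX vectors across CPUs so each RX queue's NAPI work stays cache-local: the offset computed at source line 1720 skips vector 0 unless it also carries the first RSS queue, and the hint mask must outlive the call because the kernel keeps a pointer to it, hence the per-vector affinity_mask[] array. A loose sketch; offset and iter_rx_q follow the excerpts, the exact CPU walk is illustrative:

    static void set_affinity_sketch(struct iwl_trans_pcie *trans_pcie,
                                    int offset, int iter_rx_q)
    {
            int cpu = -1, i, ret;

            for (i = offset; i < offset + iter_rx_q; i++) {
                    /* walk the online CPUs, one per RX vector */
                    cpu = cpumask_next(cpu, cpu_online_mask);
                    cpumask_set_cpu(cpu, &trans_pcie->affinity_mask[i]);
                    ret = irq_set_affinity_hint(
                                    trans_pcie->msix_entries[i].vector,
                                    &trans_pcie->affinity_mask[i]);
                    if (ret)
                            IWL_ERR(trans_pcie->trans,
                                    "Failed to set affinity mask for IRQ %d\n",
                                    trans_pcie->msix_entries[i].vector);
            }
    }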
1740 struct iwl_trans_pcie *trans_pcie) in iwl_pcie_init_msix_handler() argument
1744 for (i = 0; i < trans_pcie->alloc_vecs; i++) { in iwl_pcie_init_msix_handler()
1747 const char *qname = queue_name(&pdev->dev, trans_pcie, i); in iwl_pcie_init_msix_handler()
1752 msix_entry = &trans_pcie->msix_entries[i]; in iwl_pcie_init_msix_handler()
1756 (i == trans_pcie->def_irq) ? in iwl_pcie_init_msix_handler()
1763 IWL_ERR(trans_pcie->trans, in iwl_pcie_init_msix_handler()
1769 iwl_pcie_irq_set_affinity(trans_pcie->trans); in iwl_pcie_init_msix_handler()
1828 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in _iwl_trans_pcie_start_hw() local
1831 lockdep_assert_held(&trans_pcie->mutex); in _iwl_trans_pcie_start_hw()
1858 iwl_pcie_init_msix(trans_pcie); in _iwl_trans_pcie_start_hw()
1863 trans_pcie->opmode_down = false; in _iwl_trans_pcie_start_hw()
1866 trans_pcie->is_down = false; in _iwl_trans_pcie_start_hw()
1876 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_start_hw() local
1879 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_start_hw()
1881 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_start_hw()
1888 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_op_mode_leave() local
1890 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_op_mode_leave()
1901 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_op_mode_leave()
1951 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_configure() local
1964 trans_pcie->n_no_reclaim_cmds = 0; in iwl_trans_pcie_configure()
1966 trans_pcie->n_no_reclaim_cmds = trans_cfg->n_no_reclaim_cmds; in iwl_trans_pcie_configure()
1967 if (trans_pcie->n_no_reclaim_cmds) in iwl_trans_pcie_configure()
1968 memcpy(trans_pcie->no_reclaim_cmds, trans_cfg->no_reclaim_cmds, in iwl_trans_pcie_configure()
1969 trans_pcie->n_no_reclaim_cmds * sizeof(u8)); in iwl_trans_pcie_configure()
1971 trans_pcie->rx_buf_size = trans_cfg->rx_buf_size; in iwl_trans_pcie_configure()
1972 trans_pcie->rx_page_order = in iwl_trans_pcie_configure()
1973 iwl_trans_get_rb_size_order(trans_pcie->rx_buf_size); in iwl_trans_pcie_configure()
1974 trans_pcie->rx_buf_bytes = in iwl_trans_pcie_configure()
1975 iwl_trans_get_rb_size(trans_pcie->rx_buf_size); in iwl_trans_pcie_configure()
1976 trans_pcie->supported_dma_mask = DMA_BIT_MASK(12); in iwl_trans_pcie_configure()
1978 trans_pcie->supported_dma_mask = DMA_BIT_MASK(11); in iwl_trans_pcie_configure()
1981 trans_pcie->scd_set_active = trans_cfg->scd_set_active; in iwl_trans_pcie_configure()
1991 if (trans_pcie->napi_dev.reg_state != NETREG_DUMMY) in iwl_trans_pcie_configure()
1992 init_dummy_netdev(&trans_pcie->napi_dev); in iwl_trans_pcie_configure()
1994 trans_pcie->fw_reset_handshake = trans_cfg->fw_reset_handshake; in iwl_trans_pcie_configure()
1999 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_free() local
2010 if (trans_pcie->rba.alloc_wq) { in iwl_trans_pcie_free()
2011 destroy_workqueue(trans_pcie->rba.alloc_wq); in iwl_trans_pcie_free()
2012 trans_pcie->rba.alloc_wq = NULL; in iwl_trans_pcie_free()
2015 if (trans_pcie->msix_enabled) { in iwl_trans_pcie_free()
2016 for (i = 0; i < trans_pcie->alloc_vecs; i++) { in iwl_trans_pcie_free()
2018 trans_pcie->msix_entries[i].vector, in iwl_trans_pcie_free()
2022 trans_pcie->msix_enabled = false; in iwl_trans_pcie_free()
2029 if (trans_pcie->pnvm_dram.size) in iwl_trans_pcie_free()
2030 dma_free_coherent(trans->dev, trans_pcie->pnvm_dram.size, in iwl_trans_pcie_free()
2031 trans_pcie->pnvm_dram.block, in iwl_trans_pcie_free()
2032 trans_pcie->pnvm_dram.physical); in iwl_trans_pcie_free()
2034 if (trans_pcie->reduce_power_dram.size) in iwl_trans_pcie_free()
2036 trans_pcie->reduce_power_dram.size, in iwl_trans_pcie_free()
2037 trans_pcie->reduce_power_dram.block, in iwl_trans_pcie_free()
2038 trans_pcie->reduce_power_dram.physical); in iwl_trans_pcie_free()
2040 mutex_destroy(&trans_pcie->mutex); in iwl_trans_pcie_free()
2126 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in __iwl_trans_pcie_grab_nic_access() local
2132 spin_lock(&trans_pcie->reg_lock); in __iwl_trans_pcie_grab_nic_access()
2134 if (trans_pcie->cmd_hold_nic_awake) in __iwl_trans_pcie_grab_nic_access()
2184 spin_unlock(&trans_pcie->reg_lock); in __iwl_trans_pcie_grab_nic_access()
2193 __release(&trans_pcie->reg_lock); in __iwl_trans_pcie_grab_nic_access()
2213 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_release_nic_access() local
2215 lockdep_assert_held(&trans_pcie->reg_lock); in iwl_trans_pcie_release_nic_access()
2221 __acquire(&trans_pcie->reg_lock); in iwl_trans_pcie_release_nic_access()
2223 if (trans_pcie->cmd_hold_nic_awake) in iwl_trans_pcie_release_nic_access()
2238 spin_unlock_bh(&trans_pcie->reg_lock); in iwl_trans_pcie_release_nic_access()
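The grab/release pair around reg_lock (source lines 2126-2238) holds a spinlock across a function boundary: __iwl_trans_pcie_grab_nic_access() returns with the lock held unless it fails, and iwl_trans_pcie_release_nic_access() drops it. Sparse cannot track a lock held across functions, so grab ends with a fake __release() and release opens with a matching fake __acquire(); both are no-ops at runtime and exist purely to balance sparse's lock-context checking. A simplified sketch (the excerpts show plain spin_lock() in grab; _bh variants are used here for self-consistency):

    static bool grab_sketch(struct iwl_trans_pcie *trans_pcie)
    {
            spin_lock_bh(&trans_pcie->reg_lock);
            /* ... wake the NIC; on failure unlock and return false ... */

            /* the lock intentionally stays held for the caller; the fake
             * release keeps sparse's context counting balanced
             */
            __release(&trans_pcie->reg_lock);
            return true;
    }

    static void release_sketch(struct iwl_trans_pcie *trans_pcie)
    {
            /* matching fake acquire: sparse thinks we take it here */
            __acquire(&trans_pcie->reg_lock);
            /* ... allow the NIC to sleep again ... */
            spin_unlock_bh(&trans_pcie->reg_lock);
    }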
2334 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_rxq_dma_data() local
2336 if (queue >= trans->num_rx_queues || !trans_pcie->rxq) in iwl_trans_pcie_rxq_dma_data()
2339 data->fr_bd_cb = trans_pcie->rxq[queue].bd_dma; in iwl_trans_pcie_rxq_dma_data()
2340 data->urbd_stts_wrptr = trans_pcie->rxq[queue].rb_stts_dma; in iwl_trans_pcie_rxq_dma_data()
2341 data->ur_bd_cb = trans_pcie->rxq[queue].used_bd_dma; in iwl_trans_pcie_rxq_dma_data()
2436 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_set_bits_mask() local
2438 spin_lock_bh(&trans_pcie->reg_lock); in iwl_trans_pcie_set_bits_mask()
2440 spin_unlock_bh(&trans_pcie->reg_lock); in iwl_trans_pcie_set_bits_mask()
2641 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rx_queue_read() local
2648 if (!trans_pcie->rxq) in iwl_dbgfs_rx_queue_read()
2656 struct iwl_rxq *rxq = &trans_pcie->rxq[i]; in iwl_dbgfs_rx_queue_read()
2692 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_interrupt_read() local
2693 struct isr_statistics *isr_stats = &trans_pcie->isr_stats; in iwl_dbgfs_interrupt_read()
2750 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_interrupt_write() local
2751 struct isr_statistics *isr_stats = &trans_pcie->isr_stats; in iwl_dbgfs_interrupt_write()
2798 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rfkill_read() local
2803 trans_pcie->debug_rfkill, in iwl_dbgfs_rfkill_read()
2815 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rfkill_write() local
2822 if (new_value == trans_pcie->debug_rfkill) in iwl_dbgfs_rfkill_write()
2825 trans_pcie->debug_rfkill, new_value); in iwl_dbgfs_rfkill_write()
2826 trans_pcie->debug_rfkill = new_value; in iwl_dbgfs_rfkill_write()
2836 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_monitor_data_open() local
2844 if (trans_pcie->fw_mon_data.state != IWL_FW_MON_DBGFS_STATE_CLOSED) in iwl_dbgfs_monitor_data_open()
2847 trans_pcie->fw_mon_data.state = IWL_FW_MON_DBGFS_STATE_OPEN; in iwl_dbgfs_monitor_data_open()
2854 struct iwl_trans_pcie *trans_pcie = in iwl_dbgfs_monitor_data_release() local
2857 if (trans_pcie->fw_mon_data.state == IWL_FW_MON_DBGFS_STATE_OPEN) in iwl_dbgfs_monitor_data_release()
2858 trans_pcie->fw_mon_data.state = IWL_FW_MON_DBGFS_STATE_CLOSED; in iwl_dbgfs_monitor_data_release()
2885 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_monitor_data_read() local
2887 struct cont_rec *data = &trans_pcie->fw_mon_data; in iwl_dbgfs_monitor_data_read()
2968 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rf_read() local
2970 if (!trans_pcie->rf_name[0]) in iwl_dbgfs_rf_read()
2974 trans_pcie->rf_name, in iwl_dbgfs_rf_read()
2975 strlen(trans_pcie->rf_name)); in iwl_dbgfs_rf_read()
3016 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_debugfs_cleanup() local
3017 struct cont_rec *data = &trans_pcie->fw_mon_data; in iwl_trans_pcie_debugfs_cleanup()
3040 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_rbs() local
3041 int max_len = trans_pcie->rx_buf_bytes; in iwl_trans_pcie_dump_rbs()
3043 struct iwl_rxq *rxq = &trans_pcie->rxq[0]; in iwl_trans_pcie_dump_rbs()
3297 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_data() local
3342 struct iwl_rxq *rxq = &trans_pcie->rxq[0]; in iwl_trans_pcie_dump_data()
3350 (PAGE_SIZE << trans_pcie->rx_page_order)); in iwl_trans_pcie_dump_data()
3455 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_sync_nmi() local
3457 if (trans_pcie->msix_enabled) { in iwl_trans_pcie_sync_nmi()
3549 struct iwl_trans_pcie *trans_pcie; in iwl_trans_pcie_alloc() local
3567 trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_alloc()
3569 trans_pcie->trans = trans; in iwl_trans_pcie_alloc()
3570 trans_pcie->opmode_down = true; in iwl_trans_pcie_alloc()
3571 spin_lock_init(&trans_pcie->irq_lock); in iwl_trans_pcie_alloc()
3572 spin_lock_init(&trans_pcie->reg_lock); in iwl_trans_pcie_alloc()
3573 spin_lock_init(&trans_pcie->alloc_page_lock); in iwl_trans_pcie_alloc()
3574 mutex_init(&trans_pcie->mutex); in iwl_trans_pcie_alloc()
3575 init_waitqueue_head(&trans_pcie->ucode_write_waitq); in iwl_trans_pcie_alloc()
3576 init_waitqueue_head(&trans_pcie->fw_reset_waitq); in iwl_trans_pcie_alloc()
3577 init_waitqueue_head(&trans_pcie->imr_waitq); in iwl_trans_pcie_alloc()
3579 trans_pcie->rba.alloc_wq = alloc_workqueue("rb_allocator", in iwl_trans_pcie_alloc()
3581 if (!trans_pcie->rba.alloc_wq) { in iwl_trans_pcie_alloc()
3585 INIT_WORK(&trans_pcie->rba.rx_alloc, iwl_pcie_rx_allocator_work); in iwl_trans_pcie_alloc()
3587 trans_pcie->debug_rfkill = -1; in iwl_trans_pcie_alloc()
3600 trans_pcie->def_rx_queue = 0; in iwl_trans_pcie_alloc()
3628 trans_pcie->hw_base = table[0]; in iwl_trans_pcie_alloc()
3629 if (!trans_pcie->hw_base) { in iwl_trans_pcie_alloc()
3639 trans_pcie->pci_dev = pdev; in iwl_trans_pcie_alloc()
3667 init_waitqueue_head(&trans_pcie->sx_waitq); in iwl_trans_pcie_alloc()
3670 if (trans_pcie->msix_enabled) { in iwl_trans_pcie_alloc()
3671 ret = iwl_pcie_init_msix_handler(pdev, trans_pcie); in iwl_trans_pcie_alloc()
3690 trans_pcie->fw_mon_data.state = IWL_FW_MON_DBGFS_STATE_CLOSED; in iwl_trans_pcie_alloc()
3691 mutex_init(&trans_pcie->fw_mon_data.mutex); in iwl_trans_pcie_alloc()
3701 destroy_workqueue(trans_pcie->rba.alloc_wq); in iwl_trans_pcie_alloc()
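iwl_trans_pcie_alloc() (source lines 3549-3701) initializes every lock, waitqueue, and workqueue before the MSI-X handlers are requested at line 3671, so an interrupt that fires immediately finds consistent state, and the error path at line 3701 tears the workqueue back down. A condensed sketch of that ordering; the alloc_workqueue() flags and the unwind labels are illustrative assumptions:

    /* inside iwl_trans_pcie_alloc(), before any IRQ is requested: */
    spin_lock_init(&trans_pcie->irq_lock);
    spin_lock_init(&trans_pcie->reg_lock);
    mutex_init(&trans_pcie->mutex);
    init_waitqueue_head(&trans_pcie->ucode_write_waitq);

    /* the RX-buffer allocator worker must exist before RX IRQs do */
    trans_pcie->rba.alloc_wq = alloc_workqueue("rb_allocator",
                                               WQ_HIGHPRI | WQ_UNBOUND, 1);
    if (!trans_pcie->rba.alloc_wq)
            goto out_free_trans;            /* hypothetical unwind label */
    INIT_WORK(&trans_pcie->rba.rx_alloc, iwl_pcie_rx_allocator_work);

    /* ... map BARs, record trans_pcie->pci_dev, enable MSI-X ... */

    if (trans_pcie->msix_enabled) {
            ret = iwl_pcie_init_msix_handler(pdev, trans_pcie);
            if (ret)
                    goto out_free_wq;       /* unwinds via destroy_workqueue() */
    }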
3728 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_copy_imr() local
3731 trans_pcie->imr_status = IMR_D2S_REQUESTED; in iwl_trans_pcie_copy_imr()
3733 ret = wait_event_timeout(trans_pcie->imr_waitq, in iwl_trans_pcie_copy_imr()
3734 trans_pcie->imr_status != in iwl_trans_pcie_copy_imr()
3736 if (!ret || trans_pcie->imr_status == IMR_D2S_ERROR) { in iwl_trans_pcie_copy_imr()
3741 trans_pcie->imr_status = IMR_D2S_IDLE; in iwl_trans_pcie_copy_imr()