Lines Matching refs:phb
145 struct pnv_phb *phb = pci_bus_to_pnvhb(pdev->bus); in pnv_pci_ioda_fixup_iov_resources() local
156 mul = phb->ioda.total_pe_num; in pnv_pci_ioda_fixup_iov_resources()
180 if (vf_bar_sz > (phb->ioda.m64_segsize >> 2)) { in pnv_pci_ioda_fixup_iov_resources()
251 struct pnv_phb *phb = pci_bus_to_pnvhb(pdev->bus); in pnv_pci_iov_resource_alignment() local
281 return phb->ioda.total_pe_num * align; in pnv_pci_iov_resource_alignment()
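The references above come from the two sizing hooks of what appears to be the powernv SR-IOV code (arch/powerpc/platforms/powernv/pci-sriov.c): pnv_pci_ioda_fixup_iov_resources() scales each IOV BAR so the PHB can hand one M64 segment to every PE, and pnv_pci_iov_resource_alignment() reports an alignment that matches that scaling. A condensed sketch of the rule implied by those lines, assuming the surrounding per-BAR loop and variables (vf_bar_sz, iov, the single-mode flag) roughly as in the upstream file:

    mul = phb->ioda.total_pe_num;                 /* one M64 segment per PE */

    /* A VF BAR larger than a quarter of an M64 segment cannot share a
     * segmented window; mark it for single-PE M64 windows instead. */
    if (vf_bar_sz > (phb->ioda.m64_segsize >> 2)) {
        iov->m64_single_mode[i] = true;           /* flag name assumed */
        continue;
    }

The alignment hook then returns phb->ioda.total_pe_num * align for BARs expanded this way, so the PCI core reserves enough space for the segmented window.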
287 struct pnv_phb *phb; in pnv_pci_vf_release_m64() local
290 phb = pci_bus_to_pnvhb(pdev->bus); in pnv_pci_vf_release_m64()
294 opal_pci_phb_mmio_enable(phb->opal_id, in pnv_pci_vf_release_m64()
299 clear_bit(window_id, &phb->ioda.m64_bar_alloc); in pnv_pci_vf_release_m64()
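pnv_pci_vf_release_m64() is the teardown side of the window setup below: each M64 window recorded for the VFs is disabled through OPAL and its slot is handed back to the PHB-wide allocation bitmap. A sketch of the per-window release, assuming a loop over the windows tracked in iov and the OPAL_M64_WINDOW_TYPE constant:

    /* Disable the window in the PHB, then free its bit in the allocator. */
    opal_pci_phb_mmio_enable(phb->opal_id,
                             OPAL_M64_WINDOW_TYPE, window_id, 0);
    clear_bit(window_id, &phb->ioda.m64_bar_alloc);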
311 static int64_t pnv_ioda_map_m64_segmented(struct pnv_phb *phb, in pnv_ioda_map_m64_segmented() argument
318 rc = opal_pci_set_phb_mem_window(phb->opal_id, in pnv_ioda_map_m64_segmented()
327 rc = opal_pci_phb_mmio_enable(phb->opal_id, in pnv_ioda_map_m64_segmented()
338 static int64_t pnv_ioda_map_m64_single(struct pnv_phb *phb, in pnv_ioda_map_m64_single() argument
363 rc = opal_pci_map_pe_mmio_window(phb->opal_id, in pnv_ioda_map_m64_single()
374 rc = opal_pci_set_phb_mem_window(phb->opal_id, in pnv_ioda_map_m64_single()
387 rc = opal_pci_phb_mmio_enable(phb->opal_id, in pnv_ioda_map_m64_single()
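The two mapping helpers cover the two M64 layouts. pnv_ioda_map_m64_segmented() programs a single window over the whole IOV BAR and lets the PHB split it into one segment per PE, while pnv_ioda_map_m64_single() programs one window per VF and binds it to that VF's PE explicitly. A sketch of the two OPAL sequences; the window_id/base/size/pe_num names are placeholders and the enable constants are taken from the OPAL API as an assumption:

    /* Segmented: one window, hardware-split across the PHB's PEs. */
    opal_pci_set_phb_mem_window(phb->opal_id, OPAL_M64_WINDOW_TYPE,
                                window_id, base, 0 /* unused */, size);
    opal_pci_phb_mmio_enable(phb->opal_id, OPAL_M64_WINDOW_TYPE,
                             window_id, OPAL_ENABLE_M64_SPLIT);

    /* Single-PE: bind the window to one PE, then enable it un-split. */
    opal_pci_map_pe_mmio_window(phb->opal_id, pe_num,
                                OPAL_M64_WINDOW_TYPE, window_id, 0);
    opal_pci_set_phb_mem_window(phb->opal_id, OPAL_M64_WINDOW_TYPE,
                                window_id, base, 0 /* unused */, size);
    opal_pci_phb_mmio_enable(phb->opal_id, OPAL_M64_WINDOW_TYPE,
                             window_id, OPAL_ENABLE_M64_NON_SPLIT);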
398 static int pnv_pci_alloc_m64_bar(struct pnv_phb *phb, struct pnv_iov_data *iov) in pnv_pci_alloc_m64_bar() argument
403 win = find_next_zero_bit(&phb->ioda.m64_bar_alloc, in pnv_pci_alloc_m64_bar()
404 phb->ioda.m64_bar_idx + 1, 0); in pnv_pci_alloc_m64_bar()
406 if (win >= phb->ioda.m64_bar_idx + 1) in pnv_pci_alloc_m64_bar()
408 } while (test_and_set_bit(win, &phb->ioda.m64_bar_alloc)); in pnv_pci_alloc_m64_bar()
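pnv_pci_alloc_m64_bar() is the allocator both mapping paths use: it claims a free M64 window from the PHB-wide bitmap, retrying if another caller grabs the same bit between the lookup and the claim. The listed lines already show most of the loop; the bookkeeping of the claimed window in iov is an assumption:

    do {
        /* Look for a free window at or below the last usable M64 BAR. */
        win = find_next_zero_bit(&phb->ioda.m64_bar_alloc,
                                 phb->ioda.m64_bar_idx + 1, 0);
        if (win >= phb->ioda.m64_bar_idx + 1)
            return -1;                            /* no window left */
    } while (test_and_set_bit(win, &phb->ioda.m64_bar_alloc));

    set_bit(win, iov->used_m64_bar_mask);         /* field name assumed */
    return win;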
418 struct pnv_phb *phb; in pnv_pci_vf_assign_m64() local
426 phb = pci_bus_to_pnvhb(pdev->bus); in pnv_pci_vf_assign_m64()
436 win = pnv_pci_alloc_m64_bar(phb, iov); in pnv_pci_vf_assign_m64()
443 rc = pnv_ioda_map_m64_segmented(phb, win, start, size); in pnv_pci_vf_assign_m64()
455 win = pnv_pci_alloc_m64_bar(phb, iov); in pnv_pci_vf_assign_m64()
460 rc = pnv_ioda_map_m64_single(phb, win, in pnv_pci_vf_assign_m64()
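pnv_pci_vf_assign_m64() ties the pieces above together: a BAR in shared mode gets one window mapped segmented, while a BAR in single-PE mode gets one window per VF, each mapped to that VF's PE. A compressed sketch of the per-BAR decision; the loop bounds, the start/size computation and the m64_failed unwind label are assumptions:

    if (!iov->m64_single_mode[i]) {               /* shared, segmented BAR */
        win = pnv_pci_alloc_m64_bar(phb, iov);
        if (win < 0)
            goto m64_failed;
        rc = pnv_ioda_map_m64_segmented(phb, win, start, size);
    } else {
        for (j = 0; j < num_vfs; j++) {           /* one window per VF */
            win = pnv_pci_alloc_m64_bar(phb, iov);
            if (win < 0)
                goto m64_failed;
            rc = pnv_ioda_map_m64_single(phb, win, pe_num, start, size);
        }
    }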
477 struct pnv_phb *phb; in pnv_ioda_release_vf_PE() local
480 phb = pci_bus_to_pnvhb(pdev->bus); in pnv_ioda_release_vf_PE()
486 list_for_each_entry_safe(pe, pe_n, &phb->ioda.pe_list, list) { in pnv_ioda_release_vf_PE()
493 mutex_lock(&phb->ioda.pe_list_mutex); in pnv_ioda_release_vf_PE()
495 mutex_unlock(&phb->ioda.pe_list_mutex); in pnv_ioda_release_vf_PE()
497 pnv_ioda_deconfigure_pe(phb, pe); in pnv_ioda_release_vf_PE()
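pnv_ioda_release_vf_PE() walks the PHB's PE list looking for PEs that belong to this PF's VFs, unlinks each one under pe_list_mutex, and deconfigures it. A sketch of that walk; the ownership test and the final free step are assumptions:

    list_for_each_entry_safe(pe, pe_n, &phb->ioda.pe_list, list) {
        if (pe->parent_dev != pdev)               /* not one of our VFs */
            continue;

        mutex_lock(&phb->ioda.pe_list_mutex);
        list_del(&pe->list);
        mutex_unlock(&phb->ioda.pe_list_mutex);

        pnv_ioda_deconfigure_pe(phb, pe);
        pnv_ioda_free_pe(pe);                     /* helper name assumed */
    }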
616 struct pnv_phb *phb; in pnv_ioda_setup_vf_PE() local
626 phb = pci_bus_to_pnvhb(pdev->bus); in pnv_ioda_setup_vf_PE()
637 pe->phb = phb; in pnv_ioda_setup_vf_PE()
649 if (pnv_ioda_configure_pe(phb, pe)) { in pnv_ioda_setup_vf_PE()
657 mutex_lock(&phb->ioda.pe_list_mutex); in pnv_ioda_setup_vf_PE()
658 list_add_tail(&pe->list, &phb->ioda.pe_list); in pnv_ioda_setup_vf_PE()
659 mutex_unlock(&phb->ioda.pe_list_mutex); in pnv_ioda_setup_vf_PE()
670 pnv_pci_ioda2_setup_dma_pe(phb, pe); in pnv_ioda_setup_vf_PE()
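pnv_ioda_setup_vf_PE() does the inverse for each new VF: it picks up a PE reserved at enable time, fills it in (including pe->phb), configures it in hardware, links it into the PHB's PE list under the mutex, and sets up its IODA2 DMA/IOMMU state. A per-VF sketch; the PE lookup, the flag name and the rollback are assumptions:

    for (vf_index = 0; vf_index < num_vfs; vf_index++) {
        pe = &phb->ioda.pe_array[pe_num];         /* PE reserved at enable time */

        pe->phb = phb;
        pe->flags = PNV_IODA_PE_VF;               /* flag name assumed */

        if (pnv_ioda_configure_pe(phb, pe)) {
            pnv_ioda_free_pe(pe);                 /* roll back this VF's PE */
            continue;
        }

        mutex_lock(&phb->ioda.pe_list_mutex);
        list_add_tail(&pe->list, &phb->ioda.pe_list);
        mutex_unlock(&phb->ioda.pe_list_mutex);

        pnv_pci_ioda2_setup_dma_pe(phb, pe);
    }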
678 struct pnv_phb *phb; in pnv_pci_sriov_enable() local
682 phb = pci_bus_to_pnvhb(pdev->bus); in pnv_pci_sriov_enable()
692 if (phb->type != PNV_PHB_IODA2) { in pnv_pci_sriov_enable()
703 base_pe = pnv_ioda_alloc_pe(phb, num_vfs); in pnv_pci_sriov_enable()
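pnv_pci_sriov_enable() is the entry point the PCI core calls when SR-IOV is turned on: it refuses anything other than an IODA2 PHB, reserves a contiguous block of PE numbers for the VFs, and then runs the M64 assignment and VF PE setup shown above. A sketch of that top-level flow; the exact error codes, messages and ordering of the later steps are assumptions:

    phb = pci_bus_to_pnvhb(pdev->bus);

    if (phb->type != PNV_PHB_IODA2) {
        pci_err(pdev, "SR-IOV is not supported on this PHB\n");
        return -ENXIO;
    }

    /* Reserve a contiguous range of PEs, one per VF. */
    base_pe = pnv_ioda_alloc_pe(phb, num_vfs);
    if (!base_pe)
        return -EBUSY;

    ret = pnv_pci_vf_assign_m64(pdev, num_vfs);   /* map the M64 windows */
    if (ret)
        return ret;

    pnv_ioda_setup_vf_PE(pdev, num_vfs);          /* build the per-VF PEs */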