Searched refs:iommu (Results 1 – 25 of 32) sorted by relevance

/xen-4.10.0-shim-comet/xen/drivers/passthrough/amd/
iommu_init.c
50 iommu->mmio_base = ioremap(iommu->mmio_base_phys, in map_iommu_mmio_region()
360 BUG_ON(!iommu || ((log != &iommu->event_log) && (log != &iommu->ppr_log))); in iommu_read_log()
406 BUG_ON(!iommu || ((log != &iommu->event_log) && (log != &iommu->ppr_log))); in iommu_reset_log()
441 __msi_set_enable(iommu->seg, PCI_BUS(iommu->bdf), PCI_SLOT(iommu->bdf), in amd_iommu_msi_enable()
606 iommu_read_log(iommu, &iommu->event_log, in iommu_check_event_log()
695 iommu_read_log(iommu, &iommu->ppr_log, in iommu_check_ppr_log()
791 iommu->msi.dev = pci_get_pdev(iommu->seg, PCI_BUS(iommu->bdf), in set_iommu_interrupt_handler()
797 iommu->seg, PCI_BUS(iommu->bdf), in set_iommu_interrupt_handler()
798 PCI_SLOT(iommu->bdf), PCI_FUNC(iommu->bdf)); in set_iommu_interrupt_handler()
801 control = pci_conf_read16(iommu->seg, PCI_BUS(iommu->bdf), in set_iommu_interrupt_handler()
[all …]
iommu_guest.c
187 if ( !iommu ) in guest_iommu_add_ppr_log()
236 if ( !iommu ) in guest_iommu_add_event_log()
282 if ( !iommu ) in do_complete_ppr_request()
334 if ( !iommu ) in do_invalidate_iotlb_pages()
447 if ( !iommu ) in do_invalidate_dte()
588 guest_iommu_enable_ring_buffer(iommu, &iommu->cmd_buffer, in guest_iommu_write_ctrl()
596 guest_iommu_enable_ring_buffer(iommu, &iommu->event_log, in guest_iommu_write_ctrl()
604 guest_iommu_enable_ring_buffer(iommu, &iommu->ppr_log, in guest_iommu_write_ctrl()
813 if ( !iommu ) in guest_iommu_set_base()
873 return iommu && addr >= iommu->mmio_base && in guest_iommu_mmio_range()
[all …]
iommu_detect.c
79 ASSERT( iommu->mmio_base ); in get_iommu_features()
83 iommu->features = 0; in get_iommu_features()
105 struct amd_iommu *iommu; in amd_iommu_detect_one_acpi() local
123 if ( !iommu ) in amd_iommu_detect_one_acpi()
129 spin_lock_init(&iommu->lock); in amd_iommu_detect_one_acpi()
140 bus = PCI_BUS(iommu->bdf); in amd_iommu_detect_one_acpi()
141 dev = PCI_SLOT(iommu->bdf); in amd_iommu_detect_one_acpi()
142 func = PCI_FUNC(iommu->bdf); in amd_iommu_detect_one_acpi()
145 iommu->cap_offset, iommu); in amd_iommu_detect_one_acpi()
149 rt = get_iommu_msi_capabilities(iommu->seg, bus, dev, func, iommu); in amd_iommu_detect_one_acpi()
[all …]
iommu_cmd.c
30 tail = iommu->cmd_buffer.tail; in queue_iommu_command()
290 struct amd_iommu *iommu; in amd_iommu_flush_iotlb() local
301 if ( !iommu ) in amd_iommu_flush_iotlb()
319 flush_command_buffer(iommu); in amd_iommu_flush_iotlb()
348 struct amd_iommu *iommu; in _amd_iommu_flush_pages() local
352 for_each_amd_iommu ( iommu ) in _amd_iommu_flush_pages()
380 flush_command_buffer(iommu); in amd_iommu_flush_device()
388 flush_command_buffer(iommu); in amd_iommu_flush_intremap()
395 invalidate_iommu_all(iommu); in amd_iommu_flush_all_caches()
396 flush_command_buffer(iommu); in amd_iommu_flush_all_caches()
[all …]
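
The iommu_cmd.c hits above all follow one pattern: a command is written into the ring at iommu->cmd_buffer.tail, the tail is advanced, and flush_command_buffer() makes the hardware consume the queue (amd_iommu_flush_all_caches(), for example, calls invalidate_iommu_all() and then flushes). A minimal self-contained sketch of that ring discipline, using hypothetical names and sizes rather than the real Xen structures:

    #include <stdint.h>
    #include <string.h>

    #define CMD_ENTRY_SIZE   16u          /* hypothetical: bytes per IOMMU command  */
    #define CMD_BUFFER_SIZE  (512u * 16u) /* hypothetical: ring size, power of two  */

    struct cmd_ring {
        uint8_t  *base;   /* virtual address of the command buffer          */
        uint32_t  tail;   /* byte offset of the next free slot in the ring  */
    };

    /* Copy one command into the ring and advance the software tail,
     * wrapping at the end of the buffer. */
    void queue_command(struct cmd_ring *ring, const uint8_t cmd[CMD_ENTRY_SIZE])
    {
        memcpy(ring->base + ring->tail, cmd, CMD_ENTRY_SIZE);
        ring->tail = (ring->tail + CMD_ENTRY_SIZE) % CMD_BUFFER_SIZE;
    }

    /* Publish the new tail to the hardware: in the real driver this is an MMIO
     * write to the command-buffer tail register, after which the IOMMU executes
     * every command between its head pointer and the new tail. */
    void flush_commands(const struct cmd_ring *ring, volatile uint32_t *tail_reg)
    {
        *tail_reg = ring->tail;
    }

The real flush path also queues a COMPLETION_WAIT command and polls for its completion, so callers know the earlier invalidations have actually finished.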
pci_amd_iommu.c
47 tmp.iommu = NULL; in find_iommu_for_device()
57 ivrs_mappings[bdf].iommu = ivrs_mappings[bd0].iommu; in find_iommu_for_device()
61 return ivrs_mappings[bdf].iommu; in find_iommu_for_device()
254 const struct amd_iommu *iommu; in amd_iommu_hwdom_init() local
290 for_each_amd_iommu ( iommu ) in amd_iommu_hwdom_init()
341 struct amd_iommu *iommu; in reassign_device() local
347 if ( !iommu ) in reassign_device()
466 struct amd_iommu *iommu; in amd_iommu_add_device() local
474 if ( unlikely(!iommu) ) in amd_iommu_add_device()
498 struct amd_iommu *iommu; in amd_iommu_remove_device() local
[all …]
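
find_iommu_for_device(), whose body the first hits above come from, resolves a device's segment/BDF to the IOMMU serving it through the ivrs_mappings table; line 57 shows an entry inheriting the IOMMU recorded for another mapping (bd0, presumably function 0 of the same device), and callers such as reassign_device() and amd_iommu_add_device() bail out when the result is NULL. A rough, self-contained sketch of that lookup with a hypothetical table layout:

    #include <stdint.h>

    struct amd_iommu;                      /* opaque here */

    /* Hypothetical per-BDF mapping entry; the real ivrs_mappings carry more state. */
    struct ivrs_mapping {
        struct amd_iommu *iommu;
    };

    #define NR_BDF 0x10000                 /* 8-bit bus, 5-bit slot, 3-bit function */
    #define BDF_FN0(bdf) ((uint16_t)((bdf) & ~7u))   /* same slot, function 0       */

    static struct ivrs_mapping ivrs_mappings[NR_BDF];

    /* Return the IOMMU serving this device, falling back to the entry of
     * function 0 of the same slot when the function itself has no mapping. */
    struct amd_iommu *lookup_iommu_for_device(uint16_t bdf)
    {
        if ( !ivrs_mappings[bdf].iommu )
            ivrs_mappings[bdf].iommu = ivrs_mappings[BDF_FN0(bdf)].iommu;

        return ivrs_mappings[bdf].iommu;   /* may still be NULL: callers must check */
    }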
iommu_intr.c
160 struct amd_iommu *iommu, in update_intremap_entry_from_ioapic() argument
215 if ( iommu->enabled ) in update_intremap_entry_from_ioapic()
235 struct amd_iommu *iommu; in amd_iommu_setup_ioapic_remapping() local
258 if ( !iommu ) in amd_iommu_setup_ioapic_remapping()
308 struct amd_iommu *iommu; in amd_iommu_ioapic_update_ire() local
325 if ( !iommu ) in amd_iommu_ioapic_update_ire()
490 if ( iommu->enabled ) in update_intremap_entry_from_msi_msg()
504 struct amd_iommu *iommu; in _find_iommu_for_device() local
507 if ( iommu->seg == seg && iommu->bdf == bdf ) in _find_iommu_for_device()
511 if ( iommu ) in _find_iommu_for_device()
[all …]
iommu_acpi.c
85 ivrs_mappings[bdf].iommu = iommu; in add_ivrs_mapping_entry()
91 struct amd_iommu *iommu; in find_iommu_from_bdf_cap() local
94 if ( (iommu->seg == seg) && (iommu->bdf == bdf) && in find_iommu_from_bdf_cap()
96 return iommu; in find_iommu_from_bdf_cap()
119 struct amd_iommu *iommu, in reserve_iommu_exclusion_range_all() argument
159 struct amd_iommu *iommu; in register_exclusion_range_for_all_devices() local
194 struct amd_iommu *iommu; in register_exclusion_range_for_device() local
198 if ( !iommu ) in register_exclusion_range_for_device()
254 if ( iommu == find_iommu_for_device(iommu->seg, bdf) ) in register_exclusion_range_for_iommu_devices()
330 if ( !iommu ) in parse_ivmd_device_iommu()
[all …]
iommu_map.c
550 struct amd_iommu *iommu = NULL; in update_paging_mode() local
602 iommu = find_iommu_for_device(pdev->seg, bdf); in update_paging_mode()
603 if ( !iommu ) in update_paging_mode()
609 spin_lock_irqsave(&iommu->lock, flags); in update_paging_mode()
612 device_entry = iommu->dev_table.buffer + in update_paging_mode()
621 amd_iommu_flush_device(iommu, req_id); in update_paging_mode()
625 spin_unlock_irqrestore(&iommu->lock, flags); in update_paging_mode()
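
The tail of the update_paging_mode() excerpt shows the locking discipline used whenever a device table entry is rewritten: take the per-IOMMU lock with interrupts disabled, patch the entry selected by the requestor ID, issue amd_iommu_flush_device() so the IOMMU re-fetches it, then unlock. A schematic version of that critical section, with hypothetical types and a plain mutex standing in for Xen's IRQ-safe spinlock:

    #include <stdint.h>
    #include <pthread.h>

    /* Hypothetical stand-ins: the real driver uses a per-IOMMU IRQ-safe spinlock
     * and 256-bit device table entries indexed by requestor ID. */
    struct dev_table_entry { uint64_t raw[4]; };

    struct toy_iommu {
        pthread_mutex_t lock;                 /* stands in for iommu->lock       */
        struct dev_table_entry *dev_table;    /* stands in for iommu->dev_table  */
    };

    /* Stand-in for amd_iommu_flush_device(): in the real driver this queues an
     * invalidation command so the IOMMU re-reads the device table entry. */
    static void flush_device(struct toy_iommu *iommu, uint16_t req_id)
    {
        (void)iommu; (void)req_id;
    }

    /* Rewrite one device table entry under the IOMMU's lock, then flush, so the
     * hardware never keeps using a stale or half-updated entry. */
    void update_dev_table_entry(struct toy_iommu *iommu, uint16_t req_id, uint64_t new_root)
    {
        pthread_mutex_lock(&iommu->lock);

        iommu->dev_table[req_id].raw[0] = new_root;   /* e.g. new page-table root/mode */

        flush_device(iommu, req_id);
        pthread_mutex_unlock(&iommu->lock);
    }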
/xen-4.10.0-shim-comet/xen/drivers/passthrough/vtd/
qinval.c
34 static int __must_check invalidate_sync(struct iommu *iommu);
36 static void print_qi_regs(struct iommu *iommu) in print_qi_regs() argument
50 static unsigned int qinval_next_index(struct iommu *iommu) in qinval_next_index() argument
202 static int __must_check invalidate_sync(struct iommu *iommu) in invalidate_sync() argument
211 static int __must_check dev_invalidate_sync(struct iommu *iommu, in dev_invalidate_sync() argument
313 int iommu_flush_iec_global(struct iommu *iommu) in iommu_flush_iec_global() argument
318 int iommu_flush_iec_index(struct iommu *iommu, u8 im, u16 iidx) in iommu_flush_iec_index() argument
327 struct iommu *iommu = (struct iommu *)_iommu; in flush_context_qi() local
357 struct iommu *iommu = (struct iommu *)_iommu; in flush_iotlb_qi() local
397 int enable_qinval(struct iommu *iommu) in enable_qinval() argument
[all …]
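
qinval.c implements VT-d queued invalidation: descriptors are appended to a ring (qinval_next_index() picks the slot) and invalidate_sync() waits for them to complete, in the usual VT-d fashion of posting a wait descriptor whose status write the CPU then polls. A toy sketch of that wait-and-poll handshake, with hypothetical names and a plain flag standing in for the status write performed by the hardware:

    #include <stdint.h>
    #include <stdbool.h>

    /* Hypothetical queue depth; the real size comes from queue setup in enable_qinval(). */
    #define QINVAL_ENTRIES 256u

    struct qinval_queue {
        uint32_t tail;                    /* next free descriptor index            */
        volatile uint32_t status;         /* written by the hardware on completion */
    };

    /* Pick the next free slot, wrapping around the ring. */
    static uint32_t next_index(struct qinval_queue *q)
    {
        uint32_t idx = q->tail;
        q->tail = (q->tail + 1) % QINVAL_ENTRIES;
        return idx;
    }

    /* Post a wait descriptor asking the IOMMU to write to q->status, then spin
     * (a bounded number of tries) until that write is observed. This is the shape
     * of invalidate_sync(); the real code also reports an error on timeout. */
    bool wait_for_completion(struct qinval_queue *q, unsigned long max_tries)
    {
        q->status = 0;
        (void)next_index(q);              /* slot where the wait descriptor would go   */
        /* ...descriptor written here, tail register updated via MMIO...              */

        while ( max_tries-- )
            if ( q->status != 0 )
                return true;

        return false;                     /* timed out */
    }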
extern.h
35 int enable_qinval(struct iommu *iommu);
36 void disable_qinval(struct iommu *iommu);
37 int enable_intremap(struct iommu *iommu, int eim);
38 void disable_intremap(struct iommu *iommu);
45 int iommu_flush_iec_global(struct iommu *iommu);
47 void clear_fault_bits(struct iommu *iommu);
53 struct acpi_drhd_unit * iommu_to_drhd(struct iommu *iommu);
60 int dev_invalidate_iotlb(struct iommu *iommu, u16 did,
63 int __must_check qinval_device_iotlb_sync(struct iommu *iommu,
94 void vtd_ops_preamble_quirk(struct iommu* iommu);
[all …]
iommu.c
343 struct iommu *iommu = (struct iommu *) _iommu; in flush_context_reg() local
416 struct iommu *iommu = (struct iommu *) _iommu; in flush_iotlb_reg() local
552 struct iommu *iommu; in iommu_flush_all() local
594 struct iommu *iommu; in iommu_flush_iotlb() local
748 struct iommu *iommu = drhd->iommu; in iommu_enable_translation() local
1127 struct iommu *iommu = drhd->iommu; in iommu_set_interrupt() local
1160 struct iommu *iommu; in iommu_alloc() local
1185 drhd->iommu = iommu; in iommu_alloc()
1258 struct iommu *iommu = drhd->iommu; in iommu_free() local
1325 struct iommu *iommu, in domain_context_mapping_one() argument
[all …]
utils.c
32 void disable_pmr(struct iommu *iommu) in disable_pmr() argument
37 val = dmar_readl(iommu->reg, DMAR_PMEN_REG); in disable_pmr()
54 struct iommu *iommu = drhd->iommu; in print_iommu_regs() local
90 void print_vtd_entries(struct iommu *iommu, int bus, int devfn, u64 gmfn) in print_vtd_entries() argument
99 iommu->index, iommu->intel->drhd->segment, bus, in print_vtd_entries()
102 if ( iommu->root_maddr == 0 ) in print_vtd_entries()
178 struct iommu *iommu; in vtd_dump_iommu_info() local
185 iommu = drhd->iommu; in vtd_dump_iommu_info()
187 iommu->nr_pt_levels); in vtd_dump_iommu_info()
189 if ( ecap_queued_inval(iommu->ecap) || ecap_intr_remap(iommu->ecap) ) in vtd_dump_iommu_info()
[all …]
intremap.c
220 static void free_remap_entry(struct iommu *iommu, int index) in free_remap_entry() argument
325 static int ioapic_rte_to_remap_entry(struct iommu *iommu, in ioapic_rte_to_remap_entry() argument
421 struct iommu *iommu = ioapic_to_iommu(IO_APIC_ID(apic)); in io_apic_read_remap_rte() local
446 struct iommu *iommu = ioapic_to_iommu(IO_APIC_ID(apic)); in io_apic_write_remap_rte() local
600 struct iommu *iommu, struct pci_dev *pdev, in msi_msg_to_remap_entry() argument
756 int enable_intremap(struct iommu *iommu, int eim) in enable_intremap() argument
838 void disable_intremap(struct iommu *iommu) in disable_intremap() argument
887 struct iommu *iommu; in iommu_enable_x2apic_IR() local
902 iommu = drhd->iommu; in iommu_enable_x2apic_IR()
918 iommu = drhd->iommu; in iommu_enable_x2apic_IR()
[all …]
quirks.c
140 static int cantiga_vtd_ops_preamble(struct iommu* iommu) in cantiga_vtd_ops_preamble() argument
142 struct intel_iommu *intel = iommu->intel; in cantiga_vtd_ops_preamble()
175 static void snb_vtd_ops_preamble(struct iommu* iommu) in snb_vtd_ops_preamble() argument
177 struct intel_iommu *intel = iommu->intel; in snb_vtd_ops_preamble()
205 static void snb_vtd_ops_postamble(struct iommu* iommu) in snb_vtd_ops_postamble() argument
207 struct intel_iommu *intel = iommu->intel; in snb_vtd_ops_postamble()
224 void vtd_ops_preamble_quirk(struct iommu* iommu) in vtd_ops_preamble_quirk() argument
226 cantiga_vtd_ops_preamble(iommu); in vtd_ops_preamble_quirk()
232 snb_vtd_ops_preamble(iommu); in vtd_ops_preamble_quirk()
239 void vtd_ops_postamble_quirk(struct iommu* iommu) in vtd_ops_postamble_quirk() argument
[all …]
dmar.h
66 struct iommu *iommu; member
111 #define IOMMU_WAIT_OP(iommu, offset, op, cond, sts) \ argument
115 sts = op(iommu->reg, offset); \
130 void disable_pmr(struct iommu *iommu);
iommu.h
517 int __must_check (*context)(void *iommu, u16 did, u16 source_id,
520 int __must_check (*iotlb)(void *iommu, u16 did, u64 addr,
533 struct iommu { struct
550 static inline struct qi_ctrl *iommu_qi_ctrl(struct iommu *iommu) in iommu_qi_ctrl() argument
552 return iommu ? &iommu->intel->qi_ctrl : NULL; in iommu_qi_ctrl()
555 static inline struct ir_ctrl *iommu_ir_ctrl(struct iommu *iommu) in iommu_ir_ctrl() argument
557 return iommu ? &iommu->intel->ir_ctrl : NULL; in iommu_ir_ctrl()
560 static inline struct iommu_flush *iommu_get_flush(struct iommu *iommu) in iommu_get_flush() argument
562 return iommu ? &iommu->intel->flush : NULL; in iommu_get_flush()
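
The iommu.h accessors above (iommu_qi_ctrl(), iommu_ir_ctrl(), iommu_get_flush()) share one NULL-tolerant idiom: return a pointer into iommu->intel when an IOMMU is supplied, and NULL otherwise, so callers on paths where a unit may be absent need no separate pre-check. A generic sketch of the idiom with hypothetical types:

    #include <stddef.h>

    struct qi_ctrl     { int dummy; };
    struct intel_iommu { struct qi_ctrl qi_ctrl; };
    struct toy_iommu   { struct intel_iommu *intel; };

    /* NULL-tolerant accessor: a NULL iommu simply yields NULL instead of a
     * dereference, so callers can test the result directly. */
    static inline struct qi_ctrl *get_qi_ctrl(struct toy_iommu *iommu)
    {
        return iommu ? &iommu->intel->qi_ctrl : NULL;
    }

Only a NULL iommu is guarded; the intel sub-structure is assumed to have been set up when the unit was allocated (see iommu_alloc() in iommu.c above).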
Makefile
3 obj-y += iommu.o
dmar.c
139 struct acpi_drhd_unit * iommu_to_drhd(struct iommu *iommu) in iommu_to_drhd() argument
143 if ( iommu == NULL ) in iommu_to_drhd()
147 if ( drhd->iommu == iommu ) in iommu_to_drhd()
153 struct iommu * ioapic_to_iommu(unsigned int apic_id) in ioapic_to_iommu()
159 return drhd->iommu; in ioapic_to_iommu()
184 struct iommu *hpet_to_iommu(unsigned int hpet_id) in hpet_to_iommu()
188 return drhd ? drhd->iommu : NULL; in hpet_to_iommu()
/xen-4.10.0-shim-comet/xen/drivers/passthrough/vtd/x86/
ats.c
33 struct acpi_drhd_unit * find_ats_dev_drhd(struct iommu *iommu) in find_ats_dev_drhd() argument
38 if ( drhd->iommu == iommu ) in find_ats_dev_drhd()
52 if ( !ecap_queued_inval(drhd->iommu->ecap) || in ats_device()
53 !ecap_dev_iotlb(drhd->iommu->ecap) ) in ats_device()
59 ats_drhd = find_ats_dev_drhd(drhd->iommu); in ats_device()
74 static int device_in_domain(const struct iommu *iommu, in device_in_domain() argument
109 int dev_invalidate_iotlb(struct iommu *iommu, u16 did, in dev_invalidate_iotlb() argument
115 if ( !ecap_dev_iotlb(iommu->ecap) ) in dev_invalidate_iotlb()
126 if ( !device_in_domain(iommu, pdev, did) ) in dev_invalidate_iotlb()
133 rc = qinval_device_iotlb_sync(iommu, pdev, did, sbit, addr); in dev_invalidate_iotlb()
[all …]
/xen-4.10.0-shim-comet/xen/include/asm-x86/hvm/svm/
amd-iommu-proto.h
48 void get_iommu_features(struct amd_iommu *iommu);
84 void amd_iommu_flush_device(struct amd_iommu *iommu, uint16_t bdf);
85 void amd_iommu_flush_intremap(struct amd_iommu *iommu, uint16_t bdf);
86 void amd_iommu_flush_all_caches(struct amd_iommu *iommu);
134 void amd_iommu_send_guest_cmd(struct amd_iommu *iommu, u32 cmd[]);
215 static inline int iommu_has_cap(struct amd_iommu *iommu, uint32_t bit) in iommu_has_cap() argument
217 return !!(iommu->cap.header & (1u << bit)); in iommu_has_cap()
220 static inline int amd_iommu_has_feature(struct amd_iommu *iommu, uint32_t bit) in amd_iommu_has_feature() argument
222 if ( !iommu_has_cap(iommu, PCI_CAP_EFRSUP_SHIFT) ) in amd_iommu_has_feature()
224 return !!(iommu->features & (1U << bit)); in amd_iommu_has_feature()
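
The two inline helpers at the end of amd-iommu-proto.h show the feature-discovery layering: iommu_has_cap() tests a bit in the cached PCI capability header, and amd_iommu_has_feature() only consults the iommu->features word after confirming PCI_CAP_EFRSUP_SHIFT, i.e. that the extended feature register exists at all. A compact stand-alone illustration of that two-level check, with hypothetical bit numbers:

    #include <stdint.h>
    #include <stdbool.h>

    struct toy_amd_iommu {
        uint32_t cap_header;   /* mirrors iommu->cap.header                        */
        uint32_t features;     /* mirrors iommu->features (extended feature bits)  */
    };

    /* Hypothetical bit positions, for illustration only. */
    #define CAP_EFRSUP_BIT   27u   /* "extended feature register supported" */
    #define FEAT_EXAMPLE_BIT  7u   /* some optional feature bit             */

    static bool has_cap(const struct toy_amd_iommu *iommu, unsigned int bit)
    {
        return iommu->cap_header & (1u << bit);
    }

    /* Only consult the features word if the capability header says the
     * extended feature register is implemented at all. */
    bool has_feature(const struct toy_amd_iommu *iommu, unsigned int bit)
    {
        if ( !has_cap(iommu, CAP_EFRSUP_BIT) )
            return false;
        return iommu->features & (1u << bit);
    }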
/xen-4.10.0-shim-comet/xen/drivers/passthrough/arm/
Makefile
1 obj-y += iommu.o
/xen-4.10.0-shim-comet/xen/drivers/passthrough/x86/
Makefile
2 obj-y += iommu.o
/xen-4.10.0-shim-comet/xen/drivers/passthrough/
Makefile
6 obj-y += iommu.o
/xen-4.10.0-shim-comet/xen/include/asm-arm/
device.h
12 void *iommu; /* IOMMU private data */ member
/xen-4.10.0-shim-comet/xen/include/asm-x86/
amd-iommu.h
119 struct amd_iommu *iommu; member

Completed in 49 milliseconds
