| /arch/s390/kvm/ |
| gmap-vsie.c |
     40  struct gmap *sg;                      in gmap_find_shadow() (local)
     46  if (!sg->initialized)                 in gmap_find_shadow()
     49  return sg;                            in gmap_find_shadow()
     71  struct gmap *sg, *new;                in gmap_shadow() (local)
     81  if (sg)                               in gmap_shadow()
     82  return sg;                            in gmap_shadow()
     99  if (sg) {                             in gmap_shadow()
    102  return sg;                            in gmap_shadow()
    109  gmap_unshadow(sg);                    in gmap_shadow()
    111  list_del(&sg->list);                  in gmap_shadow()
    [all …]
|
| gaccess.c |
   1227  kvm = sg->private;                    in kvm_s390_shadow_tables()
   1228  parent = sg->parent;                  in kvm_s390_shadow_tables()
   1230  asce.val = sg->orig_asce;             in kvm_s390_shadow_tables()
   1281  if (sg->edat_level >= 1)              in kvm_s390_shadow_tables()
   1309  if (sg->edat_level >= 1)              in kvm_s390_shadow_tables()
   1347  if (sg->edat_level >= 1)              in kvm_s390_shadow_tables()
   1377  if (ste.fc && sg->edat_level >= 1) {  in kvm_s390_shadow_tables()
   1418  spin_lock(&sg->guest_table_lock);     in shadow_pgt_lookup()
   1431  spin_unlock(&sg->guest_table_lock);   in shadow_pgt_lookup()
   1461  mmap_read_lock(sg->mm);               in kvm_s390_shadow_fault()
   [all …]
|
| /arch/s390/mm/ |
| gmap.c |
    279  struct gmap *sg, *next;               in gmap_remove() (local)
    286  list_del(&sg->list);                  in gmap_remove()
    287  gmap_put(sg);                         in gmap_remove()
   1089  parent = sg->parent;                  in gmap_protect_rmap()
   1428  if (sg->removed)                      in gmap_unshadow()
   1430  sg->removed = 1;                      in gmap_unshadow()
   1432  gmap_flush_tlb(sg);                   in gmap_unshadow()
   1816  parent = sg->parent;                  in gmap_shadow_page()
   1882  if (sg->removed) {                    in gmap_shadow_notify()
   1892  gmap_unshadow(sg);                    in gmap_shadow_notify()
   [all …]
|
| /arch/powerpc/platforms/powernv/ |
| opal-sensor-groups.c |
     27  struct attribute_group sg;                             (member)
    139  sg->sg.attrs[count] =                                   in add_attr_group()
    144  return sysfs_create_group(sg_kobj, &sg->sg);            in add_attr_group()
    162  struct device_node *sg, *node;                          in opal_sensor_groups_init() (local)
    166  if (!sg) {                                              in opal_sensor_groups_init()
    202  if (!sgs[i].sg.attrs) {                                 in opal_sensor_groups_init()
    217  sgs[i].sg.name = sgs[i].name;                           in opal_sensor_groups_init()
    220  sgs[i].sg.name);                                        in opal_sensor_groups_init()
    225  of_node_put(sg);                                        in opal_sensor_groups_init()
    232  kfree(sgs[i].sg.attrs);                                 in opal_sensor_groups_init()
    [all …]
|
| opal.c |
   1134  struct opal_sg_list *sg, *first = NULL;                 in opal_vmalloc_to_sg_list() (local)
   1137  sg = kzalloc(PAGE_SIZE, GFP_KERNEL);                    in opal_vmalloc_to_sg_list()
   1138  if (!sg)                                                in opal_vmalloc_to_sg_list()
   1141  first = sg;                                             in opal_vmalloc_to_sg_list()
   1158  sg->length = cpu_to_be64(                               in opal_vmalloc_to_sg_list()
   1161  sg->next = cpu_to_be64(__pa(next));                     in opal_vmalloc_to_sg_list()
   1162  sg = next;                                              in opal_vmalloc_to_sg_list()
   1181  while (sg) {                                            in opal_free_sg_list()
   1184  kfree(sg);                                              in opal_free_sg_list()
   1187  sg = __va(next);                                        in opal_free_sg_list()
   [all …]
|
| /arch/alpha/kernel/ |
| pci_iommu.c |
    488  leader = sg;                                            in sg_classify()
    495  for (++sg; sg < end; ++sg) {                            in sg_classify()
    514  leader = sg;                                            in sg_classify()
    593  sg = leader;                                            in sg_fill()
    602  while (sg+1 < end && (int) sg[1].dma_address == -1) {   in sg_fill()
    604  sg = sg_next(sg);                                       in sg_fill()
    623  } while (++sg < end && (int) sg->dma_address < 0);      in sg_fill()
    645  sg->dma_length = sg->length;                            in alpha_pci_map_sg()
    654  start = sg;                                             in alpha_pci_map_sg()
    675  for (out = sg; sg < end; ++sg) {                        in alpha_pci_map_sg()
    [all …]
|
| /arch/mips/jazz/ |
| jazzdma.c |
    547  struct scatterlist *sg;                                 in jazz_dma_map_sg() (local)
    551  arch_sync_dma_for_device(sg_phys(sg), sg->length,       in jazz_dma_map_sg()
    553  sg->dma_address = vdma_alloc(sg_phys(sg), sg->length);  in jazz_dma_map_sg()
    556  sg_dma_len(sg) = sg->length;                            in jazz_dma_map_sg()
    566  struct scatterlist *sg;                                 in jazz_dma_unmap_sg() (local)
    570  arch_sync_dma_for_cpu(sg_phys(sg), sg->length, dir);    in jazz_dma_unmap_sg()
    571  vdma_free(sg->dma_address);                             in jazz_dma_unmap_sg()
    590  struct scatterlist *sg;                                 in jazz_dma_sync_sg_for_device() (local)
    594  arch_sync_dma_for_device(sg_phys(sg), sg->length, dir); in jazz_dma_sync_sg_for_device()
    600  struct scatterlist *sg;                                 in jazz_dma_sync_sg_for_cpu() (local)
    [all …]
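The jazzdma.c hits also show the sync hooks of a non-coherent architecture: both sync_sg callbacks simply walk the list with for_each_sg() and push each entry through arch_sync_dma_for_device()/arch_sync_dma_for_cpu(). The following is a minimal sketch of that shape, not the verbatim MIPS code; the function names are illustrative and the include location of arch_sync_dma_* assumes a recent kernel.

```c
/* Sketch of the sync_sg pattern seen in jazz_dma_sync_sg_for_device()/for_cpu(). */
#include <linux/dma-map-ops.h>
#include <linux/scatterlist.h>

static void sketch_sync_sg_for_device(struct device *dev, struct scatterlist *sgl,
                                      int nents, enum dma_data_direction dir)
{
        struct scatterlist *sg;
        int i;

        /* write back CPU caches before the device touches the buffers */
        for_each_sg(sgl, sg, nents, i)
                arch_sync_dma_for_device(sg_phys(sg), sg->length, dir);
}

static void sketch_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl,
                                   int nents, enum dma_data_direction dir)
{
        struct scatterlist *sg;
        int i;

        /* make device-written data visible to the CPU again */
        for_each_sg(sgl, sg, nents, i)
                arch_sync_dma_for_cpu(sg_phys(sg), sg->length, dir);
}
```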
|
| /arch/sparc/mm/ |
| io-unit.c |
    167  struct scatterlist *sg;                                 in iounit_map_sg() (local)
    173  for_each_sg(sgl, sg, nents, i) {                        in iounit_map_sg()
    174  sg->dma_address = iounit_get_area(iounit, (unsigned long) sg_virt(sg), sg->length);   in iounit_map_sg()
    175  sg->dma_length = sg->length;                            in iounit_map_sg()
    201  struct scatterlist *sg;                                 in iounit_unmap_sg() (local)
    205  for_each_sg(sgl, sg, nents, i) {                        in iounit_unmap_sg()
    206  len = ((sg->dma_address & ~PAGE_MASK) + sg->length + (PAGE_SIZE-1)) >> PAGE_SHIFT;    in iounit_unmap_sg()
    207  vaddr = (sg->dma_address - IOUNIT_DMA_BASE) >> PAGE_SHIFT;                            in iounit_unmap_sg()
|
| iommu.c |
    253  struct scatterlist *sg;                                 in __sbus_iommu_map_sg() (local)
    256  for_each_sg(sgl, sg, nents, j) {                        in __sbus_iommu_map_sg()
    257  sg->dma_address = __sbus_iommu_map_page(dev, sg_page(sg),     in __sbus_iommu_map_sg()
    258  sg->offset, sg->length, per_page_flush);                in __sbus_iommu_map_sg()
    259  if (sg->dma_address == DMA_MAPPING_ERROR)               in __sbus_iommu_map_sg()
    261  sg->dma_length = sg->length;                            in __sbus_iommu_map_sg()
    302  struct scatterlist *sg;                                 in sbus_iommu_unmap_sg() (local)
    305  for_each_sg(sgl, sg, nents, i) {                        in sbus_iommu_unmap_sg()
    306  sbus_iommu_unmap_page(dev, sg->dma_address, sg->length, dir,  in sbus_iommu_unmap_sg()
    308  sg->dma_address = 0x21212121;                           in sbus_iommu_unmap_sg()
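Both sparc entries above follow the same map_sg contract: map every entry, mirror the byte count into the DMA fields, and if one entry fails, unwind the entries that were already mapped. A hedged sketch of that contract follows; the scatterlist helpers are the real API, while my_iommu_map_page()/my_iommu_unmap_page() are hypothetical placeholders for the arch-specific single-entry primitives.

```c
/* Sketch of the per-entry map_sg contract visible in __sbus_iommu_map_sg(). */
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* hypothetical arch-specific primitives, declared only so the sketch is complete */
extern dma_addr_t my_iommu_map_page(struct device *dev, struct page *page,
                                    unsigned int offset, size_t len);
extern void my_iommu_unmap_page(struct device *dev, dma_addr_t addr, size_t len,
                                enum dma_data_direction dir);

static void sketch_unmap_sg(struct device *dev, struct scatterlist *sgl,
                            int nents, enum dma_data_direction dir)
{
        struct scatterlist *sg;
        int i;

        for_each_sg(sgl, sg, nents, i)
                my_iommu_unmap_page(dev, sg->dma_address, sg->length, dir);
}

static int sketch_map_sg(struct device *dev, struct scatterlist *sgl,
                         int nents, enum dma_data_direction dir)
{
        struct scatterlist *sg;
        int i;

        for_each_sg(sgl, sg, nents, i) {
                sg->dma_address = my_iommu_map_page(dev, sg_page(sg),
                                                    sg->offset, sg->length);
                if (sg->dma_address == DMA_MAPPING_ERROR)
                        goto out_unmap;
                sg_dma_len(sg) = sg->length;
        }
        return nents;

out_unmap:
        /* entry i failed; release the i entries that were already mapped */
        sketch_unmap_sg(dev, sgl, i, dir);
        return -EIO;
}
```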
|
| /arch/s390/include/asm/ |
| gmap.h |
    118  void gmap_unshadow(struct gmap *sg);
    119  int gmap_shadow_r2t(struct gmap *sg, unsigned long saddr, unsigned long r2t,
    121  int gmap_shadow_r3t(struct gmap *sg, unsigned long saddr, unsigned long r3t,
    123  int gmap_shadow_sgt(struct gmap *sg, unsigned long saddr, unsigned long sgt,
    125  int gmap_shadow_pgt(struct gmap *sg, unsigned long saddr, unsigned long pgt,
    127  int gmap_shadow_page(struct gmap *sg, unsigned long saddr, pte_t pte);
|
| /arch/arm/mach-rpc/ |
| dma.c |
     58  if (idma->dma.sg) {                                     in iomd_get_next_sg()
     77  idma->dma.sg = sg_next(idma->dma.sg);                   in iomd_get_next_sg()
     78  idma->dma_addr = idma->dma.sg->dma_address;             in iomd_get_next_sg()
     79  idma->dma_len = idma->dma.sg->length;                   in iomd_get_next_sg()
     82  idma->dma.sg = NULL;                                    in iomd_get_next_sg()
    168  if (!idma->dma.sg) {                                    in iomd_enable_dma()
    169  idma->dma.sg = &idma->dma.buf;                          in iomd_enable_dma()
    178  idma->dma_addr = idma->dma.sg->dma_address;             in iomd_enable_dma()
    179  idma->dma_len = idma->dma.sg->length;                   in iomd_enable_dma()
    272  if (fdma->dma.sg)                                       in floppy_enable_dma()
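The RiscPC entry shows the consumer side of a mapped list: the IRQ path keeps a cursor into the scatterlist, loads one entry's address and length into the controller, and advances with sg_next() until the list runs out. A small hedged sketch of that cursor pattern; struct sketch_dma and its fields are illustrative, not a real kernel structure.

```c
/* Sketch of the cursor pattern in iomd_get_next_sg(). */
#include <linux/scatterlist.h>
#include <linux/types.h>

struct sketch_dma {
        struct scatterlist *sg;  /* next entry to hand to the hardware */
        dma_addr_t cur_addr;
        unsigned int cur_len;
};

static bool sketch_load_next_sg(struct sketch_dma *dma)
{
        if (!dma->sg)
                return false;           /* list exhausted */

        /* program the current entry into the (imaginary) controller registers */
        dma->cur_addr = sg_dma_address(dma->sg);
        dma->cur_len  = sg_dma_len(dma->sg);

        /* advance the cursor; sg_next() returns NULL after the last entry */
        dma->sg = sg_next(dma->sg);
        return true;
}
```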
|
| /arch/arm/kernel/ |
| dma.c |
    121  void set_dma_sg (unsigned int chan, struct scatterlist *sg, int nr_sg)   in set_dma_sg() (argument)
    128  dma->sg = sg;                                           in set_dma_sg()
    145  dma->sg = NULL;                                         in __set_dma_addr()
    162  dma->sg = NULL;                                         in set_dma_count()
|
| /arch/sparc/kernel/ |
| iommu_common.h |
     42  struct scatterlist *sg)                                 in is_span_boundary() (argument)
     45  int nr = iommu_num_pages(paddr, outs->dma_length + sg->length,   in is_span_boundary()
|
| iommu.c |
    587  static unsigned long fetch_sg_ctx(struct iommu *iommu, struct scatterlist *sg)   in fetch_sg_ctx() (argument)
    596  bus_addr = sg->dma_address & IO_PAGE_MASK;              in fetch_sg_ctx()
    610  struct scatterlist *sg;                                 in dma_4u_unmap_sg() (local)
    623  sg = sglist;                                            in dma_4u_unmap_sg()
    625  dma_addr_t dma_handle = sg->dma_address;                in dma_4u_unmap_sg()
    626  unsigned int len = sg->dma_length;                      in dma_4u_unmap_sg()
    649  sg = sg_next(sg);                                       in dma_4u_unmap_sg()
    702  struct scatterlist *sg, *sgprv;                         in dma_4u_sync_sg_for_cpu() (local)
    728  for_each_sg(sglist, sg, nelems, i) {                    in dma_4u_sync_sg_for_cpu()
    729  if (sg->dma_length == 0)                                in dma_4u_sync_sg_for_cpu()
    [all …]
|
| /arch/x86/kernel/ |
| amd_gart_64.c |
    282  for_each_sg(sg, s, nents, i) {                          in gart_unmap_sg()
    290  static int dma_map_sg_nonforce(struct device *dev, struct scatterlist *sg,   in dma_map_sg_nonforce() (argument)
    300  for_each_sg(sg, s, nents, i) {                          in dma_map_sg_nonforce()
    307  gart_unmap_sg(dev, sg, i, dir, 0);                      in dma_map_sg_nonforce()
    309  sg[0].dma_length = 0;                                   in dma_map_sg_nonforce()
    377  static int gart_map_sg(struct device *dev, struct scatterlist *sg, int nents,   in gart_map_sg() (argument)
    391  start_sg = sg;                                          in gart_map_sg()
    392  sgmap = sg;                                             in gart_map_sg()
    397  for_each_sg(sg, s, nents, i) {                          in gart_map_sg()
    447  gart_unmap_sg(dev, sg, out, dir, 0);                    in gart_map_sg()
    [all …]
|
| /arch/powerpc/platforms/pseries/ |
| ibmebus.c |
    113  struct scatterlist *sg;                                 in ibmebus_map_sg() (local)
    116  for_each_sg(sgl, sg, nents, i) {                        in ibmebus_map_sg()
    117  sg->dma_address = (dma_addr_t) sg_virt(sg);             in ibmebus_map_sg()
    118  sg->dma_length = sg->length;                            in ibmebus_map_sg()
    125  struct scatterlist *sg,                                 in ibmebus_unmap_sg() (argument)
|
| /arch/powerpc/platforms/512x/ |
| mpc512x_lpbfifo.c |
    166  struct scatterlist sg;                                  in mpc512x_lpbfifo_kick() (local)
    246  sg_init_table(&sg, 1);                                  in mpc512x_lpbfifo_kick()
    248  sg_dma_address(&sg) = dma_map_single(dma_dev->dev,      in mpc512x_lpbfifo_kick()
    250  if (dma_mapping_error(dma_dev->dev, sg_dma_address(&sg)))   in mpc512x_lpbfifo_kick()
    253  lpbfifo.ram_bus_addr = sg_dma_address(&sg); /* For freeing later */   in mpc512x_lpbfifo_kick()
    255  sg_dma_len(&sg) = lpbfifo.req->size;                    in mpc512x_lpbfifo_kick()
    257  dma_tx = dmaengine_prep_slave_sg(lpbfifo.chan, &sg,     in mpc512x_lpbfifo_kick()
    325  dma_unmap_single(dma_dev->dev, sg_dma_address(&sg),     in mpc512x_lpbfifo_kick()
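The LPB FIFO hits above show a common dmaengine idiom: map one linear buffer with dma_map_single(), describe it with a single scatterlist entry, and hand it to dmaengine_prep_slave_sg(). Below is a hedged sketch of that sequence, not the driver's actual code; the function name and the chosen flags are illustrative, and the caller is still responsible for unmapping the buffer after completion.

```c
/* Sketch of the one-entry scatterlist setup used by mpc512x_lpbfifo_kick(). */
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static struct dma_async_tx_descriptor *
sketch_prep_one(struct dma_chan *chan, void *buf, size_t size,
                enum dma_transfer_direction dir)
{
        struct device *dev = chan->device->dev;  /* device that performs the DMA */
        struct scatterlist sg;
        dma_addr_t addr;

        sg_init_table(&sg, 1);                   /* one-entry list */

        addr = dma_map_single(dev, buf, size,
                              dir == DMA_MEM_TO_DEV ? DMA_TO_DEVICE
                                                    : DMA_FROM_DEVICE);
        if (dma_mapping_error(dev, addr))
                return NULL;

        sg_dma_address(&sg) = addr;
        sg_dma_len(&sg) = size;

        /*
         * prep_slave_sg() reads the list here, so a stack scatterlist is fine;
         * the caller must dma_unmap_single() the buffer once the transfer ends.
         */
        return dmaengine_prep_slave_sg(chan, &sg, 1, dir,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}
```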
|
| /arch/powerpc/kernel/ |
| dma-iommu.c |
     35  bool arch_dma_map_sg_direct(struct device *dev, struct scatterlist *sg,   in arch_dma_map_sg_direct() (argument)
     44  for_each_sg(sg, s, nents, i) {                          in arch_dma_map_sg_direct()
     52  bool arch_dma_unmap_sg_direct(struct device *dev, struct scatterlist *sg,   in arch_dma_unmap_sg_direct() (argument)
     61  for_each_sg(sg, s, nents, i) {                          in arch_dma_unmap_sg_direct()
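The powerpc hooks shown here do not map anything themselves; they only decide whether every entry of the list can bypass the IOMMU and take the direct-mapping path. A hedged sketch of that kind of check follows: the function name and the exact limit test are illustrative (the real powerpc code also accounts for the device's bypass window), but dev->bus_dma_limit and the scatterlist helpers are the real API.

```c
/* Sketch of an arch_dma_map_sg_direct()-style "can this list bypass the IOMMU?" check. */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static bool sketch_sg_fits_direct(struct device *dev, struct scatterlist *sgl,
                                  int nents)
{
        struct scatterlist *s;
        int i;

        /* no known direct-mapping limit for this device: take the IOMMU path */
        if (!dev->bus_dma_limit)
                return false;

        for_each_sg(sgl, s, nents, i) {
                /* one entry reaching past the limit forces the whole list through the IOMMU */
                if (sg_phys(s) + s->length > dev->bus_dma_limit)
                        return false;
        }
        return true;
}
```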
|
| iommu.c |
    619  struct scatterlist *sg;                                 in ppc_iommu_unmap_sg() (local)
    626  sg = sglist;                                            in ppc_iommu_unmap_sg()
    629  dma_addr_t dma_handle = sg->dma_address;                in ppc_iommu_unmap_sg()
    631  if (sg->dma_length == 0)                                in ppc_iommu_unmap_sg()
    633  npages = iommu_num_pages(dma_handle, sg->dma_length,    in ppc_iommu_unmap_sg()
    636  sg = sg_next(sg);                                       in ppc_iommu_unmap_sg()
|
| /arch/powerpc/platforms/ps3/ |
| system-bus.c |
    638  struct scatterlist *sg;                                 in ps3_sb_map_sg()
    641  for_each_sg(sgl, sg, nents, i) {                        in ps3_sb_map_sg()
    642  int result = ps3_dma_map(dev->d_region, sg_phys(sg),    in ps3_sb_map_sg()
    643  sg->length, &sg->dma_address, 0);                       in ps3_sb_map_sg()
    651  sg->dma_length = sg->length;                            in ps3_sb_map_sg()
    658  static int ps3_ioc0_map_sg(struct device *_dev, struct scatterlist *sg,   in ps3_ioc0_map_sg() (argument)
    667  static void ps3_sb_unmap_sg(struct device *_dev, struct scatterlist *sg,   in ps3_sb_unmap_sg() (argument)
    675  static void ps3_ioc0_unmap_sg(struct device *_dev, struct scatterlist *sg,   in ps3_ioc0_unmap_sg() (argument)
|
| /arch/arm/include/asm/mach/ |
| dma.h |
     29  struct scatterlist *sg;    /* DMA Scatter-Gather List */    (member)
|
| /arch/arm/mach-footbridge/ |
| dma-isa.c |
     87  if (!dma->sg) {                                         in isa_enable_dma()
     92  dma->sg = &dma->buf;                                    in isa_enable_dma()
|
| /arch/arm/include/asm/ |
| dma.h |
    102  extern void set_dma_sg(unsigned int chan, struct scatterlist *sg, int nr_sg);
|
| /arch/arm/mm/ |
| dma-mapping.c |
   1185  static int __map_sg_chunk(struct device *dev, struct scatterlist *sg,   in __map_sg_chunk() (argument)
   1203  for (count = 0, s = sg; count < (size >> PAGE_SHIFT); s = sg_next(s)) {   in __map_sg_chunk()
   1240  static int arm_iommu_map_sg(struct device *dev, struct scatterlist *sg,   in arm_iommu_map_sg() (argument)
   1243  struct scatterlist *s = sg, *dma = sg, *start = sg;     in arm_iommu_map_sg()
   1280  for_each_sg(sg, s, count, i)                            in arm_iommu_map_sg()
   1298  struct scatterlist *sg, int nents,                      in arm_iommu_unmap_sg() (argument)
   1305  for_each_sg(sg, s, nents, i) {                          in arm_iommu_unmap_sg()
   1323  struct scatterlist *sg,                                 in arm_iommu_sync_sg_for_cpu() (argument)
   1332  for_each_sg(sg, s, nents, i)                            in arm_iommu_sync_sg_for_cpu()
   1345  struct scatterlist *sg,                                 in arm_iommu_sync_sg_for_device() (argument)
   [all …]
|
| /arch/um/drivers/ |
| virtio_pcidev.c |
    395  struct scatterlist sg[1];                               in virtio_pcidev_irq_vq_addbuf() (local)
    397  sg_init_one(sg, buf, MAX_IRQ_MSG_SIZE);                 in virtio_pcidev_irq_vq_addbuf()
    398  if (virtqueue_add_inbuf(vq, sg, 1, buf, GFP_ATOMIC))    in virtio_pcidev_irq_vq_addbuf()
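The UML virtio entry shows the standard way a driver posts a receive buffer: wrap it in a one-entry scatterlist with sg_init_one(), queue it with virtqueue_add_inbuf(), and kick the queue when appropriate. A minimal hedged sketch of that pattern; the function name and BUF_SIZE are illustrative, the virtio and scatterlist calls are the real API.

```c
/* Sketch of the receive-buffer pattern in virtio_pcidev_irq_vq_addbuf(). */
#include <linux/gfp.h>
#include <linux/scatterlist.h>
#include <linux/virtio.h>

#define BUF_SIZE 256    /* illustrative receive-buffer size */

static int sketch_post_inbuf(struct virtqueue *vq, void *buf, bool kick)
{
        struct scatterlist sg[1];
        int ret;

        sg_init_one(sg, buf, BUF_SIZE);

        /* 'buf' doubles as the token later returned by virtqueue_get_buf() */
        ret = virtqueue_add_inbuf(vq, sg, 1, buf, GFP_ATOMIC);
        if (ret)
                return ret;

        if (kick)
                virtqueue_kick(vq);
        return 0;
}
```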
|