Lines matching references to ctx (each entry gives the source line number, the matching code, the enclosing function, and whether ctx is a local or an argument there)

150 	struct vmctx *ctx;  in vm_create()  local
156 ctx = calloc(1, sizeof(struct vmctx) + strnlen(name, PATH_MAX) + 1); in vm_create()
157 if ((ctx == NULL) || (devfd != -1)) in vm_create()
172 ctx->gvt_enabled = false; in vm_create()
173 ctx->fd = devfd; in vm_create()
174 ctx->lowmem_limit = PCI_EMUL_MEMBASE32; in vm_create()
175 ctx->highmem_gpa_base = HIGHRAM_START_ADDR; in vm_create()
176 ctx->name = (char *)(ctx + 1); in vm_create()
177 strncpy(ctx->name, name, strnlen(name, PATH_MAX) + 1); in vm_create()
206 error = ioctl(ctx->fd, ACRN_IOCTL_CREATE_VM, &create_vm); in vm_create()
214 pr_err("failed to create VM %s, %s.\n", ctx->name, errormsg(errno)); in vm_create()
219 ctx->vmid = create_vm.vmid; in vm_create()
221 return ctx; in vm_create()
224 if (ctx != NULL) in vm_create()
225 free(ctx); in vm_create()
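
Lines 156 and 176-177 show how the context and its name share one allocation: calloc() reserves sizeof(struct vmctx) plus room for the PATH_MAX-bounded name, ctx->name is then pointed just past the struct, and the error path at lines 224-225 (like vm_destroy later) only ever frees a single block. Below is a minimal, self-contained sketch of that pattern; struct demo_ctx is a stand-in for illustration, not the real struct vmctx.

/*
 * Minimal sketch of the single-allocation pattern from lines 156/176-177:
 * the name buffer lives in the same calloc() block, right after the struct,
 * so one free() releases both.  struct demo_ctx is a stand-in, not the real
 * struct vmctx.
 */
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct demo_ctx {
	int fd;
	char *name;		/* points into the same allocation */
};

static struct demo_ctx *
demo_create(const char *name)
{
	size_t namelen = strnlen(name, PATH_MAX);
	struct demo_ctx *ctx;

	ctx = calloc(1, sizeof(*ctx) + namelen + 1);
	if (ctx == NULL)
		return NULL;

	ctx->fd = -1;
	ctx->name = (char *)(ctx + 1);		/* storage just past the struct */
	memcpy(ctx->name, name, namelen);	/* calloc() already zeroed the terminator */
	return ctx;
}

int
main(void)
{
	struct demo_ctx *ctx = demo_create("vm1");

	if (ctx == NULL)
		return 1;
	printf("%s\n", ctx->name);
	free(ctx);	/* releases the struct and the name together */
	return 0;
}
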
231 vm_create_ioreq_client(struct vmctx *ctx) in vm_create_ioreq_client() argument
234 error = ioctl(ctx->fd, ACRN_IOCTL_CREATE_IOREQ_CLIENT, 0); in vm_create_ioreq_client()
242 vm_destroy_ioreq_client(struct vmctx *ctx) in vm_destroy_ioreq_client() argument
245 error = ioctl(ctx->fd, ACRN_IOCTL_DESTROY_IOREQ_CLIENT, ctx->ioreq_client); in vm_destroy_ioreq_client()
253 vm_attach_ioreq_client(struct vmctx *ctx) in vm_attach_ioreq_client() argument
257 error = ioctl(ctx->fd, ACRN_IOCTL_ATTACH_IOREQ_CLIENT, ctx->ioreq_client); in vm_attach_ioreq_client()
266 vm_notify_request_done(struct vmctx *ctx, int vcpu) in vm_notify_request_done() argument
272 notify.vmid = ctx->vmid; in vm_notify_request_done()
275 error = ioctl(ctx->fd, ACRN_IOCTL_NOTIFY_REQUEST_FINISH, &notify); in vm_notify_request_done()
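
Lines 231-275 cover the I/O request client: creation, attach, and destruction each issue one ioctl on ctx->fd (attach and destroy also pass ctx->ioreq_client), and vm_notify_request_done() reports a completed request for one vcpu, stamping the notification with ctx->vmid. The sketch below is a hedged picture of how these calls might be ordered in a service loop: the return conventions, the assumption that attach returns once requests are pending, and the "vmmapi.h" header name are not confirmed by the listing.

/*
 * Hedged usage sketch for the ioreq-client wrappers listed above.  Return
 * conventions and the "vmmapi.h" header name are assumptions taken from the
 * ioctl-based pattern in the listing.
 */
#include "vmmapi.h"

static void
serve_io_requests(struct vmctx *ctx, int vcpu_num)
{
	if (vm_create_ioreq_client(ctx) < 0)
		return;

	for (;;) {
		int vcpu;

		/* assumed to return once requests are pending; not shown in the listing */
		if (vm_attach_ioreq_client(ctx) < 0)
			break;

		for (vcpu = 0; vcpu < vcpu_num; vcpu++) {
			/* In a real loop, only vcpus whose pending request was just
			 * emulated would be notified; that bookkeeping is omitted. */
			vm_notify_request_done(ctx, vcpu);	/* ACRN_IOCTL_NOTIFY_REQUEST_FINISH */
		}
	}

	vm_destroy_ioreq_client(ctx);
}
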
285 vm_destroy(struct vmctx *ctx) in vm_destroy() argument
287 if (!ctx) in vm_destroy()
289 if (ioctl(ctx->fd, ACRN_IOCTL_DESTROY_VM, NULL)) { in vm_destroy()
292 close(ctx->fd); in vm_destroy()
293 free(ctx); in vm_destroy()
298 vm_setup_asyncio(struct vmctx *ctx, uint64_t base) in vm_setup_asyncio() argument
302 error = ioctl(ctx->fd, ACRN_IOCTL_SETUP_ASYNCIO, base); in vm_setup_asyncio()
347 vm_get_lowmem_limit(struct vmctx *ctx) in vm_get_lowmem_limit() argument
349 return ctx->lowmem_limit; in vm_get_lowmem_limit()
353 vm_map_memseg_vma(struct vmctx *ctx, size_t len, vm_paddr_t gpa, in vm_map_memseg_vma() argument
364 error = ioctl(ctx->fd, ACRN_IOCTL_SET_MEMSEG, &memmap); in vm_map_memseg_vma()
372 vm_setup_memory(struct vmctx *ctx, size_t memsize) in vm_setup_memory() argument
378 if (memsize > ctx->lowmem_limit) { in vm_setup_memory()
379 ctx->lowmem = ctx->lowmem_limit; in vm_setup_memory()
380 ctx->highmem = memsize - ctx->lowmem_limit; in vm_setup_memory()
382 ctx->lowmem = memsize; in vm_setup_memory()
383 ctx->highmem = 0; in vm_setup_memory()
386 ctx->biosmem = high_bios_size(); in vm_setup_memory()
387 ctx->fbmem = (16 * 1024 * 1024); in vm_setup_memory()
389 return hugetlb_setup_memory(ctx); in vm_setup_memory()
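
Lines 378-383 split the requested guest RAM at ctx->lowmem_limit: everything up to the limit stays in the low window, the remainder becomes highmem (which the mapping code at lines 433-438 places at ctx->highmem_gpa_base), and line 389 then hands the layout to hugetlb_setup_memory(). The following is a small self-contained restatement of that split; struct mem_layout is a stand-in with the same field names, and the 2 GiB limit in main() is illustrative only, not the value of PCI_EMUL_MEMBASE32.

/*
 * Self-contained restatement of the lowmem/highmem split in lines 378-383.
 * Memory up to lowmem_limit stays low; any remainder becomes highmem.
 */
#include <stdint.h>
#include <stdio.h>

struct mem_layout {
	uint64_t lowmem_limit;	/* plays the role of PCI_EMUL_MEMBASE32 in the listing */
	uint64_t lowmem;
	uint64_t highmem;
};

static void
split_memsize(struct mem_layout *m, uint64_t memsize)
{
	if (memsize > m->lowmem_limit) {
		m->lowmem = m->lowmem_limit;
		m->highmem = memsize - m->lowmem_limit;
	} else {
		m->lowmem = memsize;
		m->highmem = 0;
	}
}

int
main(void)
{
	struct mem_layout m = { .lowmem_limit = 2ULL << 30 };	/* 2 GiB, illustrative only */

	split_memsize(&m, 4ULL << 30);	/* 4 GiB guest: 2 GiB low, 2 GiB high */
	printf("lowmem=%llu highmem=%llu\n",
	       (unsigned long long)m.lowmem, (unsigned long long)m.highmem);
	return 0;
}
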
393 vm_unsetup_memory(struct vmctx *ctx) in vm_unsetup_memory() argument
409 bzero((void *)ctx->baseaddr, ctx->lowmem); in vm_unsetup_memory()
410 bzero((void *)(ctx->baseaddr + ctx->highmem_gpa_base), ctx->highmem); in vm_unsetup_memory()
413 hugetlb_unsetup_memory(ctx); in vm_unsetup_memory()
424 vm_map_gpa(struct vmctx *ctx, vm_paddr_t gaddr, size_t len) in vm_map_gpa() argument
427 if (ctx->lowmem > 0) { in vm_map_gpa()
428 if (gaddr < ctx->lowmem && len <= ctx->lowmem && in vm_map_gpa()
429 gaddr + len <= ctx->lowmem) in vm_map_gpa()
430 return (ctx->baseaddr + gaddr); in vm_map_gpa()
433 if (ctx->highmem > 0) { in vm_map_gpa()
434 if (gaddr >= ctx->highmem_gpa_base) { in vm_map_gpa()
435 if (gaddr < ctx->highmem_gpa_base + ctx->highmem && in vm_map_gpa()
436 len <= ctx->highmem && in vm_map_gpa()
437 gaddr + len <= ctx->highmem_gpa_base + ctx->highmem) in vm_map_gpa()
438 return (ctx->baseaddr + gaddr); in vm_map_gpa()
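
Lines 427-438 translate a guest-physical address into a host-virtual one by adding it to ctx->baseaddr, but only after checking that the whole [gaddr, gaddr+len) range fits inside either the low window starting at GPA 0 or the high window starting at ctx->highmem_gpa_base; anything else falls through (presumably to a NULL return, though that line is not in the listing). A self-contained sketch of the same bounds check, using a stand-in struct with the listing's field names:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct gpa_map {
	char *baseaddr;			/* host-virtual base of the guest mapping */
	uint64_t lowmem;		/* size of the low window, which starts at GPA 0 */
	uint64_t highmem;		/* size of the high window */
	uint64_t highmem_gpa_base;	/* GPA where the high window starts */
};

static void *
map_gpa(const struct gpa_map *m, uint64_t gaddr, size_t len)
{
	if (m->lowmem > 0 &&
	    gaddr < m->lowmem && len <= m->lowmem &&
	    gaddr + len <= m->lowmem)
		return m->baseaddr + gaddr;

	if (m->highmem > 0 && gaddr >= m->highmem_gpa_base &&
	    gaddr < m->highmem_gpa_base + m->highmem &&
	    len <= m->highmem &&
	    gaddr + len <= m->highmem_gpa_base + m->highmem)
		return m->baseaddr + gaddr;

	return NULL;	/* range does not fit either window */
}

int
main(void)
{
	static char backing[64 * 1024];		/* stands in for the real guest mapping */
	struct gpa_map m = {
		.baseaddr = backing,
		.lowmem = 16 * 1024,
		.highmem = 16 * 1024,
		.highmem_gpa_base = 32 * 1024,	/* illustrative, not HIGHRAM_START_ADDR */
	};

	printf("low ok: %p\n", map_gpa(&m, 0x1000, 256));
	printf("hole:   %p\n", map_gpa(&m, 20 * 1024, 256));	/* between windows -> NULL */
	return 0;
}
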
447 vm_get_lowmem_size(struct vmctx *ctx) in vm_get_lowmem_size() argument
449 return ctx->lowmem; in vm_get_lowmem_size()
453 vm_get_highmem_size(struct vmctx *ctx) in vm_get_highmem_size() argument
455 return ctx->highmem; in vm_get_highmem_size()
459 vm_run(struct vmctx *ctx) in vm_run() argument
463 error = ioctl(ctx->fd, ACRN_IOCTL_START_VM, &ctx->vmid); in vm_run()
471 vm_pause(struct vmctx *ctx) in vm_pause() argument
473 if (ioctl(ctx->fd, ACRN_IOCTL_PAUSE_VM, &ctx->vmid)) { in vm_pause()
479 vm_reset(struct vmctx *ctx) in vm_reset() argument
481 if (ioctl(ctx->fd, ACRN_IOCTL_RESET_VM, &ctx->vmid)) { in vm_reset()
487 vm_clear_ioreq(struct vmctx *ctx) in vm_clear_ioreq() argument
489 if (ioctl(ctx->fd, ACRN_IOCTL_CLEAR_VM_IOREQ, NULL)) { in vm_clear_ioreq()
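
Lines 459-489 are the lifecycle controls: starting, pausing, and resetting the VM each pass &ctx->vmid to the corresponding ioctl, while clearing pending I/O requests takes no argument. Below is a hedged sketch of a reset path built on these wrappers; the listing does not show their return types, so only vm_run()'s result is checked (its error = ioctl(...) pattern suggests it propagates the ioctl result), and the "vmmapi.h" header name is an assumption.

#include "vmmapi.h"

static int
reset_guest(struct vmctx *ctx)
{
	vm_pause(ctx);		/* ACRN_IOCTL_PAUSE_VM on ctx->vmid */
	vm_clear_ioreq(ctx);	/* ACRN_IOCTL_CLEAR_VM_IOREQ: drop pending requests */
	vm_reset(ctx);		/* ACRN_IOCTL_RESET_VM on ctx->vmid */

	return vm_run(ctx);	/* ACRN_IOCTL_START_VM; assumed to return the ioctl result */
}
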
510 vm_suspend(struct vmctx *ctx, enum vm_suspend_how how) in vm_suspend() argument
520 vm_lapic_msi(struct vmctx *ctx, uint64_t addr, uint64_t msg) in vm_lapic_msi() argument
528 error = ioctl(ctx->fd, ACRN_IOCTL_INJECT_MSI, &msi); in vm_lapic_msi()
536 vm_set_gsi_irq(struct vmctx *ctx, int gsi, uint32_t operation) in vm_set_gsi_irq() argument
544 error = ioctl(ctx->fd, ACRN_IOCTL_SET_IRQLINE, *req); in vm_set_gsi_irq()
552 vm_assign_pcidev(struct vmctx *ctx, struct acrn_pcidev *pcidev) in vm_assign_pcidev() argument
555 error = ioctl(ctx->fd, ACRN_IOCTL_ASSIGN_PCIDEV, pcidev); in vm_assign_pcidev()
563 vm_deassign_pcidev(struct vmctx *ctx, struct acrn_pcidev *pcidev) in vm_deassign_pcidev() argument
566 error = ioctl(ctx->fd, ACRN_IOCTL_DEASSIGN_PCIDEV, pcidev); in vm_deassign_pcidev()
574 vm_assign_mmiodev(struct vmctx *ctx, struct acrn_mmiodev *mmiodev) in vm_assign_mmiodev() argument
577 error = ioctl(ctx->fd, ACRN_IOCTL_ASSIGN_MMIODEV, mmiodev); in vm_assign_mmiodev()
585 vm_deassign_mmiodev(struct vmctx *ctx, struct acrn_mmiodev *mmiodev) in vm_deassign_mmiodev() argument
588 error = ioctl(ctx->fd, ACRN_IOCTL_DEASSIGN_MMIODEV, mmiodev); in vm_deassign_mmiodev()
596 vm_map_ptdev_mmio(struct vmctx *ctx, int bus, int slot, int func, in vm_map_ptdev_mmio() argument
607 error = ioctl(ctx->fd, ACRN_IOCTL_SET_MEMSEG, &memmap); in vm_map_ptdev_mmio()
615 vm_unmap_ptdev_mmio(struct vmctx *ctx, int bus, int slot, int func, in vm_unmap_ptdev_mmio() argument
627 error = ioctl(ctx->fd, ACRN_IOCTL_UNSET_MEMSEG, &memmap); in vm_unmap_ptdev_mmio()
635 vm_add_hv_vdev(struct vmctx *ctx, struct acrn_vdev *dev) in vm_add_hv_vdev() argument
638 error = ioctl(ctx->fd, ACRN_IOCTL_CREATE_VDEV, dev); in vm_add_hv_vdev()
646 vm_remove_hv_vdev(struct vmctx *ctx, struct acrn_vdev *dev) in vm_remove_hv_vdev() argument
649 error = ioctl(ctx->fd, ACRN_IOCTL_DESTROY_VDEV, dev); in vm_remove_hv_vdev()
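
Lines 552-649 (and most of the remaining entries) repeat one shape: forward the caller's descriptor to a single ioctl on ctx->fd, log on failure, and return the ioctl result. Here is a self-contained sketch of that thin-wrapper pattern with placeholder types; DEMO_IOCTL_ASSIGN, struct demo_ctx, and struct demo_dev are illustrations, not ACRN definitions.

#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>

struct demo_ctx { int fd; };			/* stand-in for struct vmctx */
struct demo_dev { unsigned int bdf; };		/* stand-in for acrn_pcidev/acrn_vdev/... */
#define DEMO_IOCTL_ASSIGN	0u		/* placeholder request number */

/* Same shape as vm_assign_pcidev(), vm_add_hv_vdev(), etc.: one ioctl,
 * an error message on failure, and the result handed back to the caller. */
static int
demo_assign(struct demo_ctx *ctx, struct demo_dev *dev)
{
	int error = ioctl(ctx->fd, DEMO_IOCTL_ASSIGN, dev);

	if (error)
		fprintf(stderr, "DEMO_IOCTL_ASSIGN failed: %s\n", strerror(errno));
	return error;
}

int
main(void)
{
	struct demo_ctx ctx = { .fd = -1 };	/* invalid fd: the ioctl fails with EBADF */
	struct demo_dev dev = { .bdf = 0 };

	return demo_assign(&ctx, &dev) ? 1 : 0;
}
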
657 vm_set_ptdev_intx_info(struct vmctx *ctx, uint16_t virt_bdf, uint16_t phys_bdf, in vm_set_ptdev_intx_info() argument
670 error = ioctl(ctx->fd, ACRN_IOCTL_SET_PTDEV_INTR, &ptirq); in vm_set_ptdev_intx_info()
678 vm_reset_ptdev_intx_info(struct vmctx *ctx, uint16_t virt_bdf, uint16_t phys_bdf, in vm_reset_ptdev_intx_info() argument
690 error = ioctl(ctx->fd, ACRN_IOCTL_RESET_PTDEV_INTR, &ptirq); in vm_reset_ptdev_intx_info()
698 vm_set_vcpu_regs(struct vmctx *ctx, struct acrn_vcpu_regs *vcpu_regs) in vm_set_vcpu_regs() argument
701 error = ioctl(ctx->fd, ACRN_IOCTL_SET_VCPU_REGS, vcpu_regs); in vm_set_vcpu_regs()
709 vm_get_cpu_state(struct vmctx *ctx, void *state_buf) in vm_get_cpu_state() argument
712 error = ioctl(ctx->fd, ACRN_IOCTL_PM_GET_CPU_STATE, state_buf); in vm_get_cpu_state()
720 vm_intr_monitor(struct vmctx *ctx, void *intr_buf) in vm_intr_monitor() argument
723 error = ioctl(ctx->fd, ACRN_IOCTL_VM_INTR_MONITOR, intr_buf); in vm_intr_monitor()
731 vm_ioeventfd(struct vmctx *ctx, struct acrn_ioeventfd *args) in vm_ioeventfd() argument
734 error = ioctl(ctx->fd, ACRN_IOCTL_IOEVENTFD, args); in vm_ioeventfd()
742 vm_irqfd(struct vmctx *ctx, struct acrn_irqfd *args) in vm_irqfd() argument
745 error = ioctl(ctx->fd, ACRN_IOCTL_IRQFD, args); in vm_irqfd()
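
Taken together, the listing sketches how the device model drives a VM through its context: size and map guest memory, set up the I/O request client, start the vCPUs, and tear everything back down. The following is a heavily hedged end-to-end sketch using only calls whose parameter lists are visible above; return-value conventions and the "vmmapi.h" header name are assumptions, and creation of ctx itself is left to the caller because vm_create()'s full prototype is not shown here.

#include <stddef.h>
#include "vmmapi.h"	/* assumed header for the wrappers in this listing */

/*
 * Hedged bring-up/tear-down sketch.  ctx is assumed to have been returned by
 * vm_create() already; error conventions (0 on success, negative on failure)
 * follow the ioctl-based pattern in the listing, not a documented contract.
 */
static int
bring_up_guest(struct vmctx *ctx, size_t memsize)
{
	if (vm_setup_memory(ctx, memsize) != 0)		/* lowmem/highmem split + hugetlb backing */
		return -1;

	if (vm_create_ioreq_client(ctx) < 0) {
		vm_unsetup_memory(ctx);
		return -1;
	}

	if (vm_run(ctx) != 0) {				/* ACRN_IOCTL_START_VM */
		vm_destroy_ioreq_client(ctx);
		vm_unsetup_memory(ctx);
		return -1;
	}
	return 0;
}

static void
tear_down_guest(struct vmctx *ctx)
{
	vm_pause(ctx);			/* ACRN_IOCTL_PAUSE_VM */
	vm_destroy_ioreq_client(ctx);
	vm_unsetup_memory(ctx);		/* zeroes and releases the low/high windows */
	vm_destroy(ctx);		/* ACRN_IOCTL_DESTROY_VM, close(ctx->fd), free(ctx) */
}
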