
Searched refs:seg (Results 1 – 25 of 32) sorted by relevance


/qemu/target/i386/hvf/
x86_descr.c
24 #define VMX_SEGMENT_FIELD(seg) \ argument
25 [R_##seg] = { \
26 .selector = VMCS_GUEST_##seg##_SELECTOR, \
27 .base = VMCS_GUEST_##seg##_BASE, \
28 .limit = VMCS_GUEST_##seg##_LIMIT, \
29 .ar_bytes = VMCS_GUEST_##seg##_ACCESS_RIGHTS, \
48 uint32_t vmx_read_segment_limit(CPUState *cpu, X86Seg seg) in vmx_read_segment_limit() argument
53 uint32_t vmx_read_segment_ar(CPUState *cpu, X86Seg seg) in vmx_read_segment_ar() argument
58 uint64_t vmx_read_segment_base(CPUState *cpu, X86Seg seg) in vmx_read_segment_base() argument
60 return rvmcs(cpu->accel->fd, vmx_segment_fields[seg].base); in vmx_read_segment_base()
[all …]
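Note: the VMX_SEGMENT_FIELD() macro above builds a lookup table that maps each x86 segment register index to the VMCS field encodings for its selector, base, limit and access rights. A rough, self-contained sketch of the same designated-initializer pattern (the enum names and numeric encodings below are placeholders, not QEMU's real R_*/VMCS_GUEST_* values):

    #include <stdio.h>
    #include <stdint.h>

    /* Hypothetical stand-ins for QEMU's R_* segment indices. */
    enum x86_seg { R_ES, R_CS, R_SS, R_DS, R_FS, R_GS, SEG_COUNT };

    struct seg_fields {
        uint32_t selector;
        uint32_t base;
        uint32_t limit;
        uint32_t ar_bytes;
    };

    /* Same designated-initializer trick as VMX_SEGMENT_FIELD(seg): one
     * expansion fills the table slot keyed by the segment enum value.
     * The numeric encodings below are placeholders, not real VMCS ones. */
    #define SEGMENT_FIELD(seg, sel, base_, lim, ar) \
        [R_##seg] = { .selector = (sel), .base = (base_), .limit = (lim), .ar_bytes = (ar) }

    static const struct seg_fields vmx_segment_fields[SEG_COUNT] = {
        SEGMENT_FIELD(CS, 0x10, 0x11, 0x12, 0x13),
        SEGMENT_FIELD(DS, 0x20, 0x21, 0x22, 0x23),
        SEGMENT_FIELD(SS, 0x30, 0x31, 0x32, 0x33),
    };

    int main(void)
    {
        /* Readers such as vmx_read_segment_base() simply index the table. */
        printf("CS base field: 0x%x\n", vmx_segment_fields[R_CS].base);
        return 0;
    }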
x86_descr.h
33 struct vmx_segment *desc, enum X86Seg seg);
35 enum X86Seg seg);
38 enum X86Seg seg);
41 enum X86Seg seg);
43 uint64_t vmx_read_segment_base(CPUState *cpu, enum X86Seg seg);
44 void vmx_write_segment_base(CPUState *cpu, enum X86Seg seg,
52 uint32_t vmx_read_segment_limit(CPUState *cpu, enum X86Seg seg);
53 uint32_t vmx_read_segment_ar(CPUState *cpu, enum X86Seg seg);
x86hvf.c
88 struct vmx_segment seg; in hvf_put_segments() local
122 hvf_set_segment(cs, &seg, &env->tr, true); in hvf_put_segments()
170 struct vmx_segment seg; in hvf_get_segments() local
175 hvf_get_segment(&env->segs[R_CS], &seg); in hvf_get_segments()
178 hvf_get_segment(&env->segs[R_DS], &seg); in hvf_get_segments()
181 hvf_get_segment(&env->segs[R_ES], &seg); in hvf_get_segments()
184 hvf_get_segment(&env->segs[R_FS], &seg); in hvf_get_segments()
187 hvf_get_segment(&env->segs[R_GS], &seg); in hvf_get_segments()
190 hvf_get_segment(&env->segs[R_SS], &seg); in hvf_get_segments()
193 hvf_get_segment(&env->tr, &seg); in hvf_get_segments()
[all …]
x86.c
161 target_ulong linear_addr(CPUState *cpu, target_ulong addr, X86Seg seg) in linear_addr() argument
163 return vmx_read_segment_base(cpu, seg) + addr; in linear_addr()
167 X86Seg seg) in linear_addr_size() argument
179 return linear_addr(cpu, addr, seg); in linear_addr_size()
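Note: linear_addr() above resolves a segment-relative guest address by adding the segment base read from the VMCS; linear_addr_size() first truncates the offset to the effective address size. A minimal sketch of that arithmetic with a stubbed-out segment base instead of vmx_read_segment_base() (the 2/4-byte masking is an assumption based on the usual x86 addressing rules):

    #include <stdint.h>
    #include <stdio.h>

    /* Stub standing in for vmx_read_segment_base(cpu, seg). */
    static uint64_t segment_base(int seg) { (void)seg; return 0x100000; }

    static uint64_t linear_addr(uint64_t addr, int seg)
    {
        return segment_base(seg) + addr;        /* base + offset */
    }

    /* Truncate the offset to the effective address size, then add the base. */
    static uint64_t linear_addr_size(uint64_t addr, int size, int seg)
    {
        switch (size) {
        case 2: addr = (uint16_t)addr; break;
        case 4: addr = (uint32_t)addr; break;
        default: break;
        }
        return linear_addr(addr, seg);
    }

    int main(void)
    {
        printf("0x%llx\n", (unsigned long long)linear_addr_size(0x12345678, 2, 0));
        return 0;
    }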
x86_decode.c
1639 X86Seg seg = R_DS; in calc_modrm_operand16() local
1659 seg = R_SS; in calc_modrm_operand16()
1663 seg = R_SS; in calc_modrm_operand16()
1673 seg = R_SS; in calc_modrm_operand16()
1758 X86Seg seg = R_DS; in calc_modrm_operand32() local
2175 seg = R_CS; in decode_linear_addr()
2178 seg = R_SS; in decode_linear_addr()
2181 seg = R_DS; in decode_linear_addr()
2184 seg = R_ES; in decode_linear_addr()
2187 seg = R_FS; in decode_linear_addr()
[all …]
x86.h
271 target_ulong linear_addr(CPUState *cpu, target_ulong addr, enum X86Seg seg);
273 enum X86Seg seg);
x86_decode.h
313 target_ulong addr, enum X86Seg seg);
/qemu/hw/ssi/
aspeed_smc.c
250 new->addr < seg.addr + seg.size) { in aspeed_smc_flash_overlap()
255 i, seg.addr, seg.addr + seg.size); in aspeed_smc_flash_overlap()
292 trace_aspeed_smc_flash_set_segment(cs, new, seg.addr, seg.addr + seg.size); in aspeed_smc_flash_set_segment()
309 seg.addr + seg.size != asc->segments[cs].addr + in aspeed_smc_flash_set_segment()
312 HWADDR_PRIx, cs, seg.addr + seg.size); in aspeed_smc_flash_set_segment()
319 if (seg.size && in aspeed_smc_flash_set_segment()
320 (seg.addr + seg.size <= asc->flash_window_base || in aspeed_smc_flash_set_segment()
324 cs, seg.addr, seg.addr + seg.size); in aspeed_smc_flash_set_segment()
329 if (seg.size && !QEMU_IS_ALIGNED(seg.addr, seg.size)) { in aspeed_smc_flash_set_segment()
332 cs, seg.addr, seg.addr + seg.size); in aspeed_smc_flash_set_segment()
[all …]
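Note: aspeed_smc_flash_set_segment() above rejects a mapping segment that falls outside the controller's flash window or whose base is not aligned to its size (QEMU_IS_ALIGNED). A hedged standalone sketch of those two checks; the second half of the window test and the example window values are assumptions, since the result listing only shows part of the condition:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Power-of-two alignment check, same idea as QEMU_IS_ALIGNED(). */
    #define IS_ALIGNED(addr, size) (((addr) & ((size) - 1)) == 0)

    struct segment { uint64_t addr; uint64_t size; };

    static bool segment_is_valid(const struct segment *seg,
                                 uint64_t window_base, uint64_t window_size)
    {
        if (seg->size == 0) {
            return true;                                     /* disabled segment */
        }
        if (seg->addr + seg->size <= window_base ||          /* ends before the window */
            seg->addr >= window_base + window_size) {        /* starts after it (assumed) */
            return false;
        }
        if (!IS_ALIGNED(seg->addr, seg->size)) {
            return false;                                    /* base not aligned to size */
        }
        return true;
    }

    int main(void)
    {
        struct segment s = { 0x20000000, 0x2000000 };
        printf("%s\n", segment_is_valid(&s, 0x20000000, 0x10000000) ? "ok" : "bad");
        return 0;
    }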
/qemu/hw/xen/
xen-operations.c
52 XenGrantCopySegment *seg = &segs[i]; in libxengnttab_fallback_grant_copy() local
54 refs[i] = to_domain ? seg->dest.foreign.ref : in libxengnttab_fallback_grant_copy()
55 seg->source.foreign.ref; in libxengnttab_fallback_grant_copy()
68 XenGrantCopySegment *seg = &segs[i]; in libxengnttab_fallback_grant_copy() local
72 memcpy(page + seg->dest.foreign.offset, seg->source.virt, in libxengnttab_fallback_grant_copy()
73 seg->len); in libxengnttab_fallback_grant_copy()
75 memcpy(seg->dest.virt, page + seg->source.foreign.offset, in libxengnttab_fallback_grant_copy()
76 seg->len); in libxengnttab_fallback_grant_copy()
106 XenGrantCopySegment *seg = &segs[i]; in libxengnttab_backend_grant_copy() local
120 seg->source.foreign.offset; in libxengnttab_backend_grant_copy()
[all …]
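Note: the per-segment loop above (and the backend in xen_gnttab.c further down) picks the foreign grant reference and the copy direction from each XenGrantCopySegment: copying to the domain moves bytes from seg->source.virt into the mapped page at the destination offset, and the reverse otherwise. A reduced sketch of that dispatch; the struct below keeps only the members visible in the results:

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* Cut-down grant-copy segment: virtual side or (ref, offset) foreign side. */
    typedef struct {
        union { struct { uint32_t ref; uint32_t offset; } foreign; void *virt; } source, dest;
        uint32_t len;
    } copy_segment;

    /* 'page' stands for the foreign granted page after it has been mapped. */
    static void copy_one_segment(const copy_segment *seg, uint8_t *page, bool to_domain)
    {
        if (to_domain) {
            memcpy(page + seg->dest.foreign.offset, seg->source.virt, seg->len);
        } else {
            memcpy(seg->dest.virt, page + seg->source.foreign.offset, seg->len);
        }
    }

    int main(void)
    {
        uint8_t page[16] = { 0 }, buf[4] = { 1, 2, 3, 4 };
        copy_segment seg = { .source.virt = buf, .dest.foreign.offset = 8, .len = 4 };
        copy_one_segment(&seg, page, true);
        return page[8] == 1 ? 0 : 1;
    }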
/qemu/hw/block/
xen_blkif.h
28 struct blkif_request_segment seg[BLKIF_MAX_SEGMENTS_PER_REQUEST]; member
54 struct blkif_request_segment seg[BLKIF_MAX_SEGMENTS_PER_REQUEST]; member
115 dst->seg[i] = src->seg[i]; in blkif_get_x86_32_req()
141 dst->seg[i] = src->seg[i]; in blkif_get_x86_64_req()
/qemu/hw/net/
virtio-net.c
2136 ret = virtio_net_do_receive(seg->nc, seg->buf, seg->size); in virtio_net_rsc_drain_seg()
2138 g_free(seg->buf); in virtio_net_rsc_drain_seg()
2139 g_free(seg); in virtio_net_rsc_drain_seg()
2193 seg->size = size; in virtio_net_rsc_cache_buf()
2194 seg->packets = 1; in virtio_net_rsc_cache_buf()
2195 seg->dup_ack = 0; in virtio_net_rsc_cache_buf()
2197 seg->nc = nc; in virtio_net_rsc_cache_buf()
2204 virtio_net_rsc_extract_unit4(chain, seg->buf, &seg->unit); in virtio_net_rsc_cache_buf()
2207 virtio_net_rsc_extract_unit6(chain, seg->buf, &seg->unit); in virtio_net_rsc_cache_buf()
2307 memmove(seg->buf + seg->size, data, n_unit->payload); in virtio_net_rsc_coalesce_data()
[all …]
/qemu/hw/i386/kvm/
xen_gnttab.c
464 XenGrantCopySegment *seg = &segs[i]; in xen_be_gnttab_copy() local
466 uint32_t ref = to_domain ? seg->dest.foreign.ref : in xen_be_gnttab_copy()
467 seg->source.foreign.ref; in xen_be_gnttab_copy()
479 memcpy(page + seg->dest.foreign.offset, seg->source.virt, in xen_be_gnttab_copy()
480 seg->len); in xen_be_gnttab_copy()
482 memcpy(seg->dest.virt, page + seg->source.foreign.offset, in xen_be_gnttab_copy()
483 seg->len); in xen_be_gnttab_copy()
/qemu/hw/block/dataplane/
xen-block.c
185 if (request->req.seg[i].first_sect > request->req.seg[i].last_sect) { in xen_block_parse_request()
189 if (request->req.seg[i].last_sect * dataplane->sector_size >= in xen_block_parse_request()
195 len = (request->req.seg[i].last_sect - in xen_block_parse_request()
196 request->req.seg[i].first_sect + 1) * dataplane->sector_size; in xen_block_parse_request()
228 segs[i].dest.foreign.ref = request->req.seg[i].gref; in xen_block_copy_request()
229 segs[i].dest.foreign.offset = request->req.seg[i].first_sect * in xen_block_copy_request()
233 segs[i].source.foreign.ref = request->req.seg[i].gref; in xen_block_copy_request()
234 segs[i].source.foreign.offset = request->req.seg[i].first_sect * in xen_block_copy_request()
238 segs[i].len = (request->req.seg[i].last_sect - in xen_block_copy_request()
239 request->req.seg[i].first_sect + 1) * in xen_block_copy_request()
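Note: xen_block_parse_request() and xen_block_copy_request() above turn each sector-granular blkif segment into a byte offset and length for the grant copy: offset = first_sect * sector_size, length = (last_sect - first_sect + 1) * sector_size, with first_sect > last_sect rejected as malformed. A small sketch of that conversion (struct and field names simplified):

    #include <stdint.h>
    #include <stdio.h>

    struct blk_seg { uint8_t first_sect; uint8_t last_sect; uint32_t gref; };

    /* Convert a sector-granular segment into a byte offset/length pair. */
    static int seg_to_bytes(const struct blk_seg *seg, uint32_t sector_size,
                            uint64_t *offset, uint64_t *len)
    {
        if (seg->first_sect > seg->last_sect) {
            return -1;                                   /* malformed request */
        }
        *offset = (uint64_t)seg->first_sect * sector_size;
        *len = (uint64_t)(seg->last_sect - seg->first_sect + 1) * sector_size;
        return 0;
    }

    int main(void)
    {
        struct blk_seg seg = { .first_sect = 1, .last_sect = 3, .gref = 42 };
        uint64_t off, len;
        if (seg_to_bytes(&seg, 512, &off, &len) == 0) {
            printf("gref %u: offset %llu, len %llu\n", seg.gref,
                   (unsigned long long)off, (unsigned long long)len);
        }
        return 0;
    }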
/qemu/tests/tcg/i386/
test-i386.c
1263 uint16_t mseg = seg;\
1298 uint16_t seg; in test_segs() member
1463 uint16_t seg; in test_misc()
1618 int seg, ret; in test_vm86() local
1633 seg = VM86_CODE_CS; in test_vm86()
1634 r->cs = seg; in test_vm86()
1635 r->ss = seg; in test_vm86()
1636 r->ds = seg; in test_vm86()
1637 r->es = seg; in test_vm86()
1638 r->fs = seg; in test_vm86()
[all …]
/qemu/hw/usb/
xen-usb.c
148 struct usbif_request_segment *seg; in usbback_gnttab_map() local
163 if ((unsigned)usbback_req->req.seg[i].offset + in usbback_gnttab_map()
164 (unsigned)usbback_req->req.seg[i].length > XEN_PAGE_SIZE) { in usbback_gnttab_map()
176 ref[i] = usbback_req->req.seg[i].gref; in usbback_gnttab_map()
187 seg = usbback_req->req.seg + i; in usbback_gnttab_map()
188 addr = usbback_req->buffer + i * XEN_PAGE_SIZE + seg->offset; in usbback_gnttab_map()
189 qemu_iovec_add(&usbback_req->packet.iov, addr, seg->length); in usbback_gnttab_map()
210 ref[i] = usbback_req->req.seg[i + usbback_req->req.nr_buffer_segs].gref; in usbback_gnttab_map()
300 ref[i] = usbback_req->req.seg[i].gref; in usbback_do_response()
309 ref[i] = usbback_req->req.seg[i + usbback_req->req.nr_buffer_segs].gref; in usbback_do_response()
hcd-xhci.c
800 XHCIEvRingSeg seg; in xhci_er_reset() local
815 if (dma_memory_read(xhci->as, erstba, &seg, sizeof(seg), in xhci_er_reset()
823 le32_to_cpus(&seg.addr_low); in xhci_er_reset()
824 le32_to_cpus(&seg.addr_high); in xhci_er_reset()
825 le32_to_cpus(&seg.size); in xhci_er_reset()
826 if (seg.size < 16 || seg.size > 4096) { in xhci_er_reset()
827 DPRINTF("xhci: invalid value for segment size: %d\n", seg.size); in xhci_er_reset()
831 intr->er_start = xhci_addr64(seg.addr_low, seg.addr_high); in xhci_er_reset()
832 intr->er_size = seg.size; in xhci_er_reset()
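Note: xhci_er_reset() above loads one event-ring segment table entry over DMA, converts the fields from little-endian, rejects segment sizes outside 16..4096 TRBs, and joins the two 32-bit halves into the ring base address. A sketch of just the validation and address assembly (the DMA read and endian swap are omitted):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Reduced event-ring segment entry: low/high address halves and size in TRBs. */
    struct ev_ring_seg {
        uint32_t addr_low;
        uint32_t addr_high;
        uint32_t size;
    };

    static bool apply_er_segment(const struct ev_ring_seg *seg,
                                 uint64_t *er_start, uint32_t *er_size)
    {
        if (seg->size < 16 || seg->size > 4096) {
            fprintf(stderr, "invalid segment size: %u\n", seg->size);
            return false;
        }
        *er_start = ((uint64_t)seg->addr_high << 32) | seg->addr_low;
        *er_size = seg->size;
        return true;
    }

    int main(void)
    {
        struct ev_ring_seg seg = { 0x1000, 0x1, 256 };
        uint64_t start; uint32_t size;
        return apply_er_segment(&seg, &start, &size) ? 0 : 1;
    }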
/qemu/include/hw/ssi/
aspeed_smc.h
114 const AspeedSegments *seg);
116 AspeedSegments *seg);
/qemu/scripts/
xen-detect.c
75 xengnttab_grant_copy_segment_t* seg = NULL; in main() local
92 xengnttab_grant_copy(xg, 0, seg); in main()
/qemu/pc-bios/optionrom/
linuxboot_dma.c
71 uint32_t seg = (uint32_t)addr >> 4; in set_es() local
72 asm("movl %0, %%es" : : "r"(seg)); in set_es()
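Note: set_es() above derives a real-mode segment value from a flat pointer: with an offset of zero, the segment is simply the address shifted right by four, because real-mode linear addresses are segment * 16 + offset. The arithmetic on its own, without the inline asm:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint32_t addr = 0x7c00;            /* example flat address below 1 MiB */
        uint32_t seg  = addr >> 4;         /* real-mode segment, offset assumed 0 */
        printf("addr 0x%x -> seg 0x%x (back to linear: 0x%x)\n", addr, seg, seg << 4);
        return 0;
    }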
/qemu/linux-user/
flatload.c
501 abi_ulong seg; in load_flt_binary() local
502 seg = libinfo[i].start_data; in load_flt_binary()
504 seg -= 4; in load_flt_binary()
509 seg)) in load_flt_binary()
/qemu/include/hw/xen/interface/
physdev.h
273 uint16_t seg; member
301 uint16_t seg; member
/qemu/tcg/i386/
tcg-target.c.inc
1980 int seg;
2151 h->seg = 0;
2306 tcg_out_modrm_sib_offset(s, movop + h.seg, datalo,
2338 tcg_out_modrm_sib_offset(s, movop + h.seg, datalo,
2340 tcg_out_modrm_sib_offset(s, movop + h.seg, datahi,
2462 tcg_out_modrm_sib_offset(s, movop + h.seg, datalo,
2475 tcg_out_modrm_sib_offset(s, movop + h.seg, datalo,
2477 tcg_out_modrm_sib_offset(s, movop + h.seg, datahi,
4362 int seg = setup_guest_base_seg();
4363 if (seg != 0) {
[all …]
/qemu/include/hw/xen/interface/io/
blkif.h
644 struct blkif_request_segment seg[BLKIF_MAX_SEGMENTS_PER_REQUEST]; member
usbif.h
363 struct usbif_request_segment seg[USBIF_MAX_SEGMENTS_PER_REQUEST]; member
/qemu/target/i386/
monitor.c
609 #define SEG(name, seg) \ argument
610 { name, offsetof(CPUX86State, segs[seg].selector), NULL, MD_I32 },\
611 { name ".base", offsetof(CPUX86State, segs[seg].base) },\
612 { name ".limit", offsetof(CPUX86State, segs[seg].limit), NULL, MD_I32 },
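Note: the SEG() macro above emits three monitor register-map rows per segment (selector, base, limit), each pointing at the corresponding offset inside CPUX86State via offsetof(). The same table-generation idea in miniature; the struct layout and entry type below are simplified stand-ins, not the monitor's real definitions:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct seg_cache { uint32_t selector; uint64_t base; uint32_t limit; };
    struct cpu_state { struct seg_cache segs[6]; };

    struct reg_entry { const char *name; size_t offset; };

    /* One macro expansion -> three table rows, mirroring SEG(name, seg). */
    #define SEG(name, idx) \
        { name,          offsetof(struct cpu_state, segs[idx].selector) }, \
        { name ".base",  offsetof(struct cpu_state, segs[idx].base) },     \
        { name ".limit", offsetof(struct cpu_state, segs[idx].limit) },

    static const struct reg_entry regs[] = {
        SEG("cs", 1)
        SEG("ds", 3)
    };

    int main(void)
    {
        for (size_t i = 0; i < sizeof(regs) / sizeof(regs[0]); i++) {
            printf("%-10s at offset %zu\n", regs[i].name, regs[i].offset);
        }
        return 0;
    }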

Completed in 107 milliseconds
