| /devicemodel/hw/platform/acpi/ |
| acpi_pm.c |
| 146 | int i; | in dsdt_write_cst() (local) |
| 164 | for (i = 1; i <= vcpu_cx_cnt; i++) { | in dsdt_write_cst() |
| 165 | if (get_vcpu_cx_data(ctx, vcpu_id, i, vcpu_cx_data + i - 1)) { | in dsdt_write_cst() |
| 180 | for (i = 0; i < vcpu_cx_cnt; i++) { | in dsdt_write_cst() |
| 260 | int i; | in dsdt_write_pss() (local) |
| 278 | for (i = 0; i < vcpu_px_cnt; i++) { | in dsdt_write_pss() |
| 279 | if (get_vcpu_px_data(ctx, vcpu_id, i, vcpu_px_data + i)) { | in dsdt_write_pss() |
| 292 | for (i = 0; i < vcpu_px_cnt; i++) { | in dsdt_write_pss() |
| 362 | int i; | in pm_write_dsdt() (local) |
| 371 | for (i = 0; i < ncpu; i++) { | in pm_write_dsdt() |
| [all …] |
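Worth noting in the dsdt_write_cst() hits: C-states are numbered from C1, so the fetch loop runs from i = 1 to vcpu_cx_cnt while storing into a 0-based array via `vcpu_cx_data + i - 1`, whereas the P-state loop in dsdt_write_pss() is plain 0-based. A toy, self-contained illustration of that off-by-one mapping follows; the data source and types here are invented stubs, not ACRN's get_vcpu_cx_data() API.

```c
#include <stdio.h>

struct cx_data { int cx_index; };     /* invented placeholder type */

/* stub: pretend to fetch the data for C-state number 'cx' (1-based) */
static int get_cx_data(int cx, struct cx_data *out)
{
    out->cx_index = cx;
    return 0;                         /* 0 == success, nonzero == failure */
}

int main(void)
{
    struct cx_data buf[4];            /* holds C1..C4 at indexes 0..3 */
    int cx_cnt = 4, i;

    /* C-state numbers start at 1; slot i-1 stores Ci */
    for (i = 1; i <= cx_cnt; i++) {
        if (get_cx_data(i, buf + i - 1))
            return 1;
    }

    for (i = 0; i < cx_cnt; i++)
        printf("slot %d holds C%d\n", i, buf[i].cx_index);
    return 0;
}
```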
|
| /devicemodel/hw/platform/ |
| hpet.c |
| 549 | int i; | in vhpet_start_counting() (local) |
| 555 | for (i = 0; i < VHPET_NUM_TIMERS; i++) { | in vhpet_start_counting() |
| 570 | int i; | in vhpet_stop_counting() (local) |
| 575 | for (i = 0; i < VHPET_NUM_TIMERS; i++) { | in vhpet_stop_counting() |
| 762 | for (i = 0; i < VHPET_NUM_TIMERS; i++) { | in vhpet_mmio_write() |
| 781 | for (i = 0; i < VHPET_NUM_TIMERS; i++) { | in vhpet_mmio_write() |
| 810 | " - should be 0", i, vhpet->timer[i].comprate); | in vhpet_mmio_write() |
| 884 | for (i = 0; i < VHPET_NUM_TIMERS; i++) { | in vhpet_mmio_read() |
| 944 | int i, j; | in vhpet_deinit_timers() (local) |
| 947 | for (i = 0; i < VHPET_NUM_TIMERS; i++) { | in vhpet_deinit_timers() |
| [all …] |
|
| ioc_cbc.c |
| 38 | for (i = 0; i < size; i++) { | in cbc_copy_to_ring() |
| 71 | for (i = 0; i < size; i++) | in cbc_cal_chksum() |
| 85 | for (i = 0; i < size; i++) { | in cbc_verify_chksum() |
| 227 | for (i = 0; i < size; i++) { | in cbc_find_signal() |
| 242 | for (i = 0; i < size; i++) { | in cbc_find_signal_group() |
| 297 | for (i = 0; i < size; i++) { | in wlist_find_signal() |
| 312 | for (i = 0; i < size; i++) { | in wlist_find_group() |
| 373 | for (i = 0; i < num; i++) { | in cbc_set_invalidation() |
| 401 | for (i = 0; i < payload[0]; i++) { | in cbc_forward_signals() |
| 989 | for (i = 0; i < wlist_size; i++) { | in wlist_init_group() |
| [all …] |
|
| usb_pmapper.c |
| 80 | int i; | in internal_scan() (local) |
| 98 | for (i = 0; i < list_sz; i++) { | in internal_scan() |
| 111 | for (i = 0; i < list_sz; i++) { | in internal_scan() |
| 245 | for (i = 0; i < trn->num_iso_packets; i++) | in usb_dev_comp_cb() |
| 381 | for (i = 0; i < xfer->ndata; | in usb_dev_prepare_xfer() |
| 513 | for (i = 0; i < cfg->bNumInterfaces; i++) { | in usb_dev_update_ep() |
| 548 | for (i = 0; i < config->bNumInterfaces; i++) { | in usb_dev_native_toggle_if() |
| 596 | for (i = 0; i < config->bNumInterfaces; i++) { | in usb_dev_native_toggle_if_drivers() |
| 714 | for (i = 0; i < xfer->ndata; i++) { | in usb_dev_prepare_ctrl_xfer() |
| 918 | if (data[i] == 9 && data[i+1] == 0x4 && data[i+7] == 0x62) { | in clear_uas_desc() |
| [all …] |
|
| ioc.c |
| 713 | for (i = 0, chl = ioc_ch_tbl; i < ARRAY_SIZE(ioc_ch_tbl); i++, chl++) { | in ioc_ch_init() |
| 787 | int i; | in ioc_ch_deinit() (local) |
| 790 | for (i = 0, chl = ioc_ch_tbl; i < ARRAY_SIZE(ioc_ch_tbl); i++, chl++) { | in ioc_ch_deinit() |
| 959 | int i; | in process_shutdown_event() (local) |
| 967 | for (i = 0, chl = ioc_ch_tbl; i < ARRAY_SIZE(ioc_ch_tbl); i++, chl++) { | in process_shutdown_event() |
| 995 | for (i = 0, chl = ioc_ch_tbl; i < ARRAY_SIZE(ioc_ch_tbl); i++, chl++) { | in process_resume_event() |
| 1043 | for (i = 0; i < ARRAY_SIZE(ioc_state_tbl); i++) { | in ioc_process_events() |
| 1088 | for (i = 0; i < link_len; i++) { | in ioc_build_request() |
| 1208 | for (i = 0, chl = ioc_ch_tbl; i < chl_size; i++, chl++) { | in ioc_core_thread() |
| 1227 | for (i = 0; i < n; i++) | in ioc_core_thread() |
| [all …] |
|
| /devicemodel/hw/platform/vssram/ |
| vssram.c |
| 109 | for (i = 0; i < length; i++) { | in vrtct_checksum() |
| 406 | for (i = 0; i < MAX_VSSRAM_BUFFER_NUM; i++) { | in vssram_close_buffers() |
| 570 | for (i = 0; i < guest_vcpu_num; i++) | in vssram_add_buffer() |
| 590 | for (i = 0; i < MAX_VSSRAM_BUFFER_NUM; i++) { | in vssram_add_buffer() |
| 636 | for (i = 0; i < MAX_VSSRAM_BUFFER_NUM; i++) { | in vssram_extend_l3buf_for_l2buf() |
| 713 | for (i = 0; i < rgn_cnt; i++) { | in load_tcc_memory_info() |
| 765 | for (i = 0; i < mem_info->region_cnt; i++) { | in vssram_request_buffer() |
| 833 | for (i = 0; i < MAX_VSSRAM_BUFFER_NUM; i++) { | in vssram_prepare_buffers() |
| 866 | for (i = 0; i < MAX_VSSRAM_BUFFER_NUM; i++) { | in vssram_merge_l2l3_gpa_regions() |
| 940 | for (i = 0; i < MAX_VSSRAM_BUFFER_NUM; i++) { | in vssram_ept_map_buffers() |
| [all …] |
|
| /devicemodel/hw/pci/ |
| gsi_sharing.c |
| 120 | int i, error, msi_support; | in create_gsi_sharing_groups() (local) |
| 127 | for (i = 0; i < num_gsi_dev_mapping_tables; i++) { | in create_gsi_sharing_groups() |
| 129 | gsi = gsi_dev_mapping_tables[i].gsi; | in create_gsi_sharing_groups() |
| 166 | int i, bus, slot, func; | in update_pt_info() (local) |
| 170 | for (i = 0; i < (group->shared_dev_num); i++) { | in update_pt_info() |
| 171 | name = group->dev[i].dev_name; | in update_pt_info() |
| 186 | int i, error, violation; | in check_gsi_sharing_violation() (local) |
| 204 | for (i = 1; i < (group->shared_dev_num); i++) { | in check_gsi_sharing_violation() |
| 220 | for (i = 0; i < (group->shared_dev_num); i++) { | in check_gsi_sharing_violation() |
| 222 | group->dev[i].dev_name, | in check_gsi_sharing_violation() |
| [all …] |
|
| core.c |
| 136 | for(i = 0; i < REGION_NUMS; i++){ | in reserve_io_rgn() |
| 160 | for(i = 0; i < REGION_NUMS; i++) | in destory_io_rsvd_rgns() |
| 184 | for(i = 0; i < REGION_NUMS; i++){ | in adjust_bar_region() |
| 505 | for (i = 0; i <= PCI_BARMAX; i++) { | in pci_emul_io_handler() |
| 757 | for(i = 0; i < REGION_NUMS; i++){ | in get_io_rsvd_rgn_by_vdev_idx() |
| 935 | for (i = 0; i < PCI_BARMAX; i++) | in pci_emul_free_bars() |
| 966 | for (i = 0; i < caplen; i++) | in pci_emul_add_capability() |
| 1229 | for (i = 0; i < table_entries; i++) | in pci_msix_table_init() |
| 1526 | for (i = 0; i < 2; i++) { | in init_pci() |
| 1584 | for(i = 0; i < REGION_NUMS; i++){ | in init_pci() |
| [all …] |
|
| xhci.c |
| 535 | for (i = ports; i <= porte; i++) { | in pci_xhci_get_free_vport() |
| 634 | for (i = 1; i <= info->maxchild; i++) { | in pci_xhci_assign_hub_ports() |
| 1220 | for (i = 1; i <= XHCI_MAX_DEVS; i++) | in pci_xhci_reset() |
| 1233 | for (i = 1; i <= XHCI_MAX_SLOTS; i++) | in pci_xhci_reset() |
| 1646 | for (i = 0; i < max_blk_cnt; ++i) { | in pci_xhci_alloc_usb_xfer() |
| 1739 | for (i = 0; i < pstreams; i++) { | in pci_xhci_init_ep() |
| 1954 | for (i = 1; i <= XHCI_MAX_DEVS; ++i) | in pci_xhci_cmd_disable_slot() |
| 2045 | for (i = 2; i <= 31; i++) { | in pci_xhci_cmd_reset_device() |
| 2253 | for (i = 2; i < 32; i++) | in pci_xhci_cmd_config_ep() |
| 2293 | for (i = 2; i <= 31; i++) { | in pci_xhci_cmd_config_ep() |
| [all …] |
|
| irq.c |
| 126 | int i; | in pci_irq_init() (local) |
| 128 | for (i = 0; i < nitems(pirqs); i++) { | in pci_irq_init() |
| 129 | pirqs[i].reg = PIRQ_DIS; | in pci_irq_init() |
| 130 | pirqs[i].use_count = 0; | in pci_irq_init() |
| 131 | pirqs[i].active_count = 0; | in pci_irq_init() |
| 132 | pthread_mutex_init(&pirqs[i].lock, NULL); | in pci_irq_init() |
| 134 | for (i = 0; i < nitems(irq_counts); i++) { | in pci_irq_init() |
| 135 | if (IRQ_PERMITTED(i)) | in pci_irq_init() |
| 136 | irq_counts[i] = 0; | in pci_irq_init() |
| 138 | irq_counts[i] = IRQ_DISABLED; | in pci_irq_init() |
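The irq.c hits above capture essentially all of pci_irq_init(): one pass over the PIRQ table resetting each entry and initializing its mutex, and a second pass zeroing counters for permitted IRQ lines while marking the rest disabled. Below is a minimal standalone sketch of that two-pass pattern; the struct layout and the PIRQ_DIS / IRQ_DISABLED / IRQ_PERMITTED() definitions are placeholders for illustration, not ACRN's actual ones.

```c
#include <stddef.h>
#include <pthread.h>

#define NITEMS(a)        (sizeof(a) / sizeof((a)[0]))  /* same idea as BSD nitems() */
#define PIRQ_DIS         0x80                          /* placeholder "disabled" register value */
#define IRQ_DISABLED     0xff                          /* placeholder "never use this IRQ" marker */
#define IRQ_PERMITTED(i) ((i) >= 3 && (i) != 8)        /* stand-in policy check */

struct pirq {                    /* hypothetical layout; ACRN's struct differs */
    int reg;
    int use_count;
    int active_count;
    pthread_mutex_t lock;
};

static struct pirq pirqs[8];
static int irq_counts[16];

static void pirq_table_init(void)
{
    size_t i;

    /* pass 1: reset every PIRQ slot and give it a usable lock */
    for (i = 0; i < NITEMS(pirqs); i++) {
        pirqs[i].reg = PIRQ_DIS;
        pirqs[i].use_count = 0;
        pirqs[i].active_count = 0;
        pthread_mutex_init(&pirqs[i].lock, NULL);
    }

    /* pass 2: zero counters for permitted IRQs, mark the rest unusable */
    for (i = 0; i < NITEMS(irq_counts); i++)
        irq_counts[i] = IRQ_PERMITTED(i) ? 0 : IRQ_DISABLED;
}
```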
|
| /devicemodel/hw/pci/virtio/ |
| virtio_rpmb.c |
| 130 | __u32 i; | in rpmb_check_response() (local) |
| 133 | for (i = 0; i < frame_cnt; i++) { | in rpmb_check_response() |
| 252 | __u32 i; | in rpmb_write_block() (local) |
| 278 | for (i = 0; i < count; i++) { | in rpmb_write_block() |
| 279 | memset(&frame_rel[i], 0, sizeof(frame_rel[i])); | in rpmb_write_block() |
| 280 | memcpy(frame_rel[i].data, buf + i * sizeof(frame_rel[i].data), sizeof(frame_rel[i].data)); | in rpmb_write_block() |
| 337 | __u32 i; | in rpmb_read_block() (local) |
| 388 | for (bufp = buf, i = 0; i < count; i++, bufp += sizeof(frame_out[i].data)) | in rpmb_read_block() |
| 531 | for (i = 0; i < block_num; i++) { | in rpmb_keybox_retrieve() |
| 565 | int i; | in virtio_rpmb_seq_handler() (local) |
| [all …] |
|
| virtio_gpio.c |
| 321 | for (i = 0; i < chip->ngpio; i++) { | in native_gpio_close_chip() |
| 638 | for (i = 0; i < gpio->nvline; i++) { | in virtio_gpio_proc() |
| 743 | for (i = 0; i < chip->ngpio; i++) { | in native_gpio_open_chip() |
| 777 | for (i = 0; i < chip->ngpio; i++) { | in native_gpio_get_offset() |
| 1265 | for (i = 0; i < gpio->nvline; i++) { | in gpio_irq_deinit() |
| 1291 | for (i = 0; i < gpio->nvline; i++) { | in gpio_irq_init() |
| 1411 | for (i = 0; i < gpio->nchip; i++) | in virtio_gpio_init() |
| 1434 | for (i = 0; i < gpio->nchip; i++) | in virtio_gpio_deinit() |
| 1562 | for (i = 0; i < gpio->nvline; i++) { | in print_gpio_info() |
| 1573 | for (i = 0; i < gpio->nchip; i++) { | in print_gpio_info() |
| [all …] |
|
| vhost.c |
| 522 | int i, rc; | in vhost_dev_init() (local) |
| 548 | for (i = 0; i < vdev->nvqs; i++) { | in vhost_dev_init() |
| 588 | int i; | in vhost_dev_deinit() (local) |
| 593 | for (i = 0; i < vdev->nvqs; i++) | in vhost_dev_deinit() |
| 615 | int i, rc; | in vhost_dev_start() (local) |
| 663 | for (i = 0; i < vdev->nvqs; i++) { | in vhost_dev_start() |
| 664 | state.index = i; | in vhost_dev_start() |
| 675 | for (i = 0; i < vdev->nvqs; i++) { | in vhost_dev_start() |
| 685 | while (--i >= 0) | in vhost_dev_start() |
| 703 | int i, rc = 0; | in vhost_dev_stop() (local) |
| [all …] |
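The vhost_dev_start() hits show the usual bring-up-with-rollback shape: virtqueues are set up one by one, and on failure the `while (--i >= 0)` loop tears down the ones already started before returning. A generic sketch of that pattern is below, using dummy start/stop callbacks rather than the real vhost ioctl sequence.

```c
/* Start 'n' items in order; on failure, undo the ones already started. */
static int start_all(int n,
                     int (*start_one)(int idx, void *arg),
                     void (*stop_one)(int idx, void *arg),
                     void *arg)
{
    int i, rc;

    for (i = 0; i < n; i++) {
        rc = start_one(i, arg);
        if (rc < 0)
            goto fail;
    }
    return 0;

fail:
    /* roll back in reverse order, skipping the item that just failed */
    while (--i >= 0)
        stop_one(i, arg);
    return rc;
}
```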
|
| virtio_i2c.c |
| 453 | int i; | in native_adapter_create() (local) |
| 485 | for (i = 0; i < n_client; i++) { | in native_adapter_create() |
| 486 | if (client_addr[i]) { | in native_adapter_create() |
| 509 | int i; | in native_adapter_remove() (local) |
| 512 | for (i = 0; i < MAX_NATIVE_I2C_ADAPTER; i++) { | in native_adapter_remove() |
| 518 | vi2c->native_adapter[i] = NULL; | in native_adapter_remove() |
| 617 | int i, client_addr; | in virtio_i2c_map() (local) |
| 630 | for (i = 0; i < vi2c->native_adapter_num; i++) { | in virtio_i2c_map() |
| 725 | int i, j, node_num; | in virtio_i2c_dsdt() (local) |
| 739 | for (i = 0; i < MAX_I2C_VDEV; i++) { | in virtio_i2c_dsdt() |
| [all …] |
|
| virtio_gpu.c |
| 594 | for (i = 0; i < gpu->scanout_num; i++) { | in virtio_gpu_cmd_get_display_info() |
| 820 | for (i = 1; i < (cmd->iovcnt - 1); i++) { | in virtio_gpu_cmd_resource_attach_backing() |
| 824 | for (i = 0; i < req.nr_entries; i++) { | in virtio_gpu_cmd_resource_attach_backing() |
| 829 | r2d->iov[i].iov_len = entries[i].length; | in virtio_gpu_cmd_resource_attach_backing() |
| 986 | for (i = 0; i < r2d->iovcnt; i++) { | in virtio_gpu_cmd_transfer_to_host_2d() |
| 1081 | for (i = 0; i < gpu->scanout_num; i++) { | in virtio_gpu_cmd_resource_flush() |
| 1096 | for (i = 0; i < gpu->scanout_num; i++) { | in virtio_gpu_cmd_resource_flush() |
| 1159 | for (i = 0; i < nr_entries; i++) { | in virtio_gpu_create_udmabuf() |
| 1268 | for (i = 1; i < (cmd->iovcnt - 1); i++) { | in virtio_gpu_cmd_create_blob() |
| 1305 | for (i = 0; i < req.nr_entries; i++) { | in virtio_gpu_cmd_create_blob() |
| [all …] |
|
| /devicemodel/hw/mmio/ |
| core.c |
| 55 | int i; | in get_mmiodev() (local) |
| 58 | for (i = 0; i < mmio_dev_idx; i++) { | in get_mmiodev() |
| 59 | dev = &mmio_devs[i]; | in get_mmiodev() |
| 270 | int i, err = 0; | in init_mmio_devs() (local) |
| 273 | for (i = 0; i < MAX_MMIO_DEV_NUM; i++) { | in init_mmio_devs() |
| 274 | ops = mmio_dev_finddev(mmio_devs[i].name); | in init_mmio_devs() |
| 278 | mmio_devs[i].dev.res[0].host_pa, mmio_devs[i].dev.res[0].user_vm_pa, | in init_mmio_devs() |
| 279 | mmio_devs[i].dev.res[0].size, err); | in init_mmio_devs() |
| 289 | for (; i>=0; i--) { | in init_mmio_devs() |
| 300 | int i; | in deinit_mmio_devs() (local) |
| [all …] |
|
| /devicemodel/core/ |
| sw_load_common.c |
| 169 | int i, length = len; | in add_e820_entry() (local) |
| 172 | for (i = 0; i < len; i++) { | in add_e820_entry() |
| 173 | e_s = e820[i].baseaddr; | in add_e820_entry() |
| 174 | e_e = e820[i].baseaddr + e820[i].length; | in add_e820_entry() |
| 185 | pt_t[0] = e820[i].type; | in add_e820_entry() |
| 187 | pt_t[2] = e820[i].type; | in add_e820_entry() |
| 199 | memmove(&e820[i + index_e - index_s], | in add_e820_entry() |
| 200 | &e820[i + 1], (len - i - 1) * | in add_e820_entry() |
| 204 | for (; index_s < index_e; index_s++, i++) { | in add_e820_entry() |
| 205 | e820[i].baseaddr = pt[index_s]; | in add_e820_entry() |
| [all …] |
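The add_e820_entry() hits suggest the familiar array-splice step: the tail of the e820 table is shifted with memmove() to open room, and replacement entries are then written in place of the one being split. A tiny standalone illustration of that memmove-based splice follows, with a generic entry type and helper name rather than the actual e820 structures and logic.

```c
#include <string.h>
#include <stdint.h>

struct range {                 /* hypothetical stand-in for an e820 entry */
    uint64_t baseaddr;
    uint64_t length;
    uint32_t type;
};

/*
 * Replace table[i] with 'cnt' new entries, shifting the tail of the table
 * to make room. Returns the new number of valid entries. The caller must
 * guarantee the table has capacity for (len - 1 + cnt) entries.
 */
static int splice_entries(struct range *table, int len, int i,
                          const struct range *repl, int cnt)
{
    /* open a gap: move everything after slot i up by (cnt - 1) slots */
    memmove(&table[i + cnt], &table[i + 1],
            (len - i - 1) * sizeof(*table));

    /* drop the replacement entries into the gap */
    memcpy(&table[i], repl, cnt * sizeof(*repl));

    return len + cnt - 1;
}
```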
|
| inout.c |
| 151 | int i; | in register_inout() (local) |
| 164 | for (i = iop->port; i < iop->port + iop->size; i++) { | in register_inout() |
| 165 | if ((inout_handlers[i].flags & IOPORT_F_DEFAULT) == 0) | in register_inout() |
| 170 | for (i = iop->port; i < iop->port + iop->size; i++) { | in register_inout() |
| 171 | inout_handlers[i].name = iop->name; | in register_inout() |
| 172 | inout_handlers[i].flags = iop->flags; | in register_inout() |
| 173 | inout_handlers[i].handler = iop->handler; | in register_inout() |
| 174 | inout_handlers[i].arg = iop->arg; | in register_inout() |
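register_inout() is visible almost in full above: it first walks the requested port range and presumably bails out if any slot has lost its default-handler flag, and only then claims the whole range by filling in name, flags, handler, and argument. A self-contained sketch of that check-then-claim pattern is below; the handler type, table entry layout, and flag value are invented for illustration, not the device model's real definitions.

```c
#include <stdint.h>

#define MAX_IOPORTS       0x10000
#define IOPORT_F_DEFAULT  0x4          /* placeholder flag: slot still holds the default handler */

typedef int (*ioport_handler_t)(int port, int in, uint32_t *eax, void *arg);

struct inout_port {                    /* hypothetical descriptor, also used as table entry here */
    const char *name;
    int port;
    int size;
    int flags;
    ioport_handler_t handler;
    void *arg;
};

/* a real setup would first mark every slot IOPORT_F_DEFAULT during init */
static struct inout_port inout_handlers[MAX_IOPORTS];

static int register_ports(const struct inout_port *iop)
{
    int i;

    /* refuse the whole range if any port in it is no longer at its default */
    for (i = iop->port; i < iop->port + iop->size; i++) {
        if ((inout_handlers[i].flags & IOPORT_F_DEFAULT) == 0)
            return -1;
    }

    /* claim every port in the range */
    for (i = iop->port; i < iop->port + iop->size; i++) {
        inout_handlers[i].name = iop->name;
        inout_handlers[i].flags = iop->flags;
        inout_handlers[i].handler = iop->handler;
        inout_handlers[i].arg = iop->arg;
    }
    return 0;
}
```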
|
| iothread.c |
| 37 | int i, n; | in io_thread() (local) |
| 53 | for (i = 0; i < n; i++) { | in io_thread() |
| 54 | aevp = eventlist[i].data.ptr; | in io_thread() |
| 151 | int i; | in iothread_deinit() (local) |
| 155 | for (i = 0; i < ioctx_active_cnt; i++) { | in iothread_deinit() |
| 156 | ioctx_x = &ioctxes[i]; | in iothread_deinit() |
| 193 | int i, ret, base, end; | in iothread_create() (local) |
| 212 | for (i = base; i < end; i++) { | in iothread_create() |
| 213 | ioctx_x = &ioctxes[i]; | in iothread_create() |
| 220 | ioctx_x->idx = i; | in iothread_create() |
| [all …] |
|
| vm_event.c |
| 146 | int i; | in vm_event_throttle_init() (local) |
| 151 | for (i = 0; i < ARRAY_SIZE(ve_proc); i++) { | in vm_event_throttle_init() |
| 152 | ctl = &ve_proc[i].throttle_ctl; | in vm_event_throttle_init() |
| 169 | pr_warn("failed to set timer for vm_event %d, throttle disabled\n", i); | in vm_event_throttle_init() |
| 178 | int i; | in vm_event_throttle_deinit() (local) |
| 181 | for (i = 0; i < ARRAY_SIZE(ve_proc); i++) { | in vm_event_throttle_deinit() |
| 182 | ctl = &ve_proc[i].throttle_ctl; | in vm_event_throttle_deinit() |
| 308 | int n, i; | in vm_event_thread() (local) |
| 324 | for (i = 0; i < n; i++) { | in vm_event_thread() |
| 325 | if (i < MAX_EPOLL_EVENTS) { | in vm_event_thread() |
| [all …] |
|
| sw_load_elf.c |
| 114 | int i; | in load_elf32() (local) |
| 132 | for (i = 0; i < elf32_header->e_phnum; i++) { | in load_elf32() |
| 166 | int i, ret = 0; | in acrn_load_elf() (local) |
| 197 | for (i = 0; i <= ((ELF_BUF_LEN/4) - 3); i++) { | in acrn_load_elf() |
| 198 | if (ptr32[i] == MULTIBOOT_HEAD_MAGIC) { | in acrn_load_elf() |
| 211 | sum += ptr32[j + i]; | in acrn_load_elf() |
| 216 | *multiboot_flags = ptr32[i + 1]; | in acrn_load_elf() |
|
| /devicemodel/hw/platform/rpmb/ |
| rpmb_sim.c |
| 74 | int i; | in rpmb_mac() (local) |
| 86 | for (i = 0; i < frame_cnt; i++) { | in rpmb_mac() |
| 114 | int i; | in rpmb_mac() (local) |
| 133 | for (i = 0; i < frame_cnt; i++) { | in rpmb_mac() |
| 162 | int i; | in rpmb_mac() (local) |
| 179 | for (i = 0; i < frame_cnt; i++) { | in rpmb_mac() |
| 475 | for (i = 0; i < in_cnt; i++) | in rpmb_sim_write() |
| 476 | memcpy(data + i * 256, in_frame[i].data, 256); | in rpmb_sim_write() |
| 548 | for (i = 0; i < out_cnt; i++) { | in rpmb_sim_read() |
| 553 | memcpy(out_frame[i].data, data+256*i, 256); | in rpmb_sim_read() |
| [all …] |
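In the rpmb_sim_write()/rpmb_sim_read() hits, the loops simply gather and scatter the fixed 256-byte data field of each RPMB frame to and from one contiguous buffer, as the memcpy calls show. A minimal sketch of that gather/scatter step, assuming a simplified frame layout (a real RPMB frame also carries MAC, nonce, address, and other fields):

```c
#include <string.h>
#include <stdint.h>

#define RPMB_DATA_SIZE 256

struct rpmb_frame {                     /* simplified; real frames carry more fields */
    uint8_t data[RPMB_DATA_SIZE];
};

/* gather: pack the data blocks of 'cnt' frames into one flat buffer */
static void frames_to_buf(uint8_t *buf, const struct rpmb_frame *frm, unsigned int cnt)
{
    unsigned int i;

    for (i = 0; i < cnt; i++)
        memcpy(buf + i * RPMB_DATA_SIZE, frm[i].data, RPMB_DATA_SIZE);
}

/* scatter: split a flat buffer back into per-frame data blocks */
static void buf_to_frames(struct rpmb_frame *frm, const uint8_t *buf, unsigned int cnt)
{
    unsigned int i;

    for (i = 0; i < cnt; i++)
        memcpy(frm[i].data, buf + i * RPMB_DATA_SIZE, RPMB_DATA_SIZE);
}
```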
|
| rpmb_backend.c |
| 150 | uint32_t i; | in rpmb_replace_frame() (local) |
| 152 | for (i = 0; i < frame_cnt; i++) { | in rpmb_replace_frame() |
| 154 | memcpy(frames[i].nonce, nonce, sizeof(frames[i].nonce)); | in rpmb_replace_frame() |
| 158 | frames[i].addr = swap16(*addr); | in rpmb_replace_frame() |
| 162 | frames[i].result = swap16(*result); | in rpmb_replace_frame() |
| 164 | frames[i].req_resp = swap16(*req_resp); | in rpmb_replace_frame() |
| 182 | uint32_t i; | in rpmb_check_frame() (local) |
| 186 | for (i = 0; i < frame_cnt; i++) { | in rpmb_check_frame() |
| 197 | cmd_str, swap16(frames[i].addr), *addr)); | in rpmb_check_frame() |
| 472 | uint16_t i; | in rpmb_handler() (local) |
| [all …] |
|
| /devicemodel/hw/ |
| block_if.c |
| 234 | for (i = 0; i < breq->iovcnt; i++) | in blockif_enqueue() |
| 338 | for (i = 0; i < n_range; i++) { | in blockif_process_discard() |
| 358 | for (i = 0; i < segment; i++) { | in blockif_process_discard() |
| 370 | arg[i][0], arg[i][1]); | in blockif_process_discard() |
| 395 | for (i = 0; i < br->iovcnt; i++) { | in blockif_init_iov_align_info() |
| 432 | for (i = 0; i < br->iovcnt; i++) { | in blockif_dump_align_info() |
| 637 | for (i = 0; i < br->iovcnt; i++) { | in blockif_complete_bounced_read() |
| 767 | for (i = 0; i < br->iovcnt; i++) { | in blockif_init_bounced_write() |
| 983 | for (i = 0; i < BLOCKIF_NUMTHR; i++) { | in thread_pool_init() |
| 1001 | for (i = 0; i < BLOCKIF_NUMTHR; i++) | in thread_pool_deinit() |
| [all …] |
|
| vdisplay_sdl.c |
| 423 | int i; | in vdpy_edid_get_checksum() (local) |
| 426 | for (i = 0; i < 127; i++) { | in vdpy_edid_get_checksum() |
| 582 | int i; | in sdl_gl_display_init() (local) |
| 599 | for (i = 0; i < vdpy.vscrs_num; i++) { | in sdl_gl_display_init() |
| 939 | int i; | in vdpy_sdl_ui_refresh() (local) |
| 943 | for (i = 0; i < vdpy.vscrs_num; i++) { | in vdpy_sdl_ui_refresh() |
| 1082 | int i; | in vdpy_sdl_display_thread() (local) |
| 1084 | for (i = 0; i < vdpy.vscrs_num; i++) { | in vdpy_sdl_display_thread() |
| 1155 | for (i = 0; i < vdpy.vscrs_num; i++) { | in vdpy_sdl_display_thread() |
| 1177 | for (i = 0; i < vdpy.vscrs_num; i++) { | in vdpy_sdl_display_thread() |
| [all …] |
|