Searched refs:page_count (Results 1 – 25 of 91) sorted by relevance

/drivers/media/pci/ivtv/
ivtv-udma.c
104 dma->SG_length, dma->page_count); in ivtv_udma_setup()
110 if (user_dma.page_count <= 0) { in ivtv_udma_setup()
120 if (user_dma.page_count != err) { in ivtv_udma_setup()
122 err, user_dma.page_count); in ivtv_udma_setup()
130 dma->page_count = user_dma.page_count; in ivtv_udma_setup()
137 dma->page_count = 0; in ivtv_udma_setup()
143 dma->page_count, DMA_TO_DEVICE); in ivtv_udma_setup()
147 dma->page_count = 0; in ivtv_udma_setup()
158 return dma->page_count; in ivtv_udma_setup()
168 if (dma->page_count == 0) in ivtv_udma_unmap()
[all …]
ivtv-yuv.c
54 if (dma->SG_length || dma->page_count) { in ivtv_yuv_prep_user_dma()
57 dma->SG_length, dma->page_count); in ivtv_yuv_prep_user_dma()
66 y_dma.page_count, &dma->map[0], 0); in ivtv_yuv_prep_user_dma()
68 if (y_pages == y_dma.page_count) { in ivtv_yuv_prep_user_dma()
73 if (y_pages != y_dma.page_count || uv_pages != uv_dma.page_count) { in ivtv_yuv_prep_user_dma()
76 if (y_pages == y_dma.page_count) { in ivtv_yuv_prep_user_dma()
79 uv_pages, uv_dma.page_count); in ivtv_yuv_prep_user_dma()
90 y_pages, y_dma.page_count); in ivtv_yuv_prep_user_dma()
106 dma->page_count = y_pages + uv_pages; in ivtv_yuv_prep_user_dma()
112 dma->page_count = 0; in ivtv_yuv_prep_user_dma()
[all …]
/drivers/hv/
mshv_root_hv_call.c
204 page_count >>= large_shift; in hv_do_map_gpa_hcall()
207 while (done < page_count) { in hv_do_map_gpa_hcall()
298 if (page_count == 0) in hv_call_unmap_gpa_pages()
306 page_count >>= large_shift; in hv_call_unmap_gpa_pages()
309 while (done < page_count) { in hv_call_unmap_gpa_pages()
431 if (!page_count && !ret_output) in hv_call_get_vp_state()
490 else if (page_count) in hv_call_set_vp_state()
491 varhead_sz = page_count; in hv_call_set_vp_state()
800 if (page_count == 0) in hv_call_modify_spa_host_access()
807 page_count >>= large_shift; in hv_call_modify_spa_host_access()
[all …]
hv_proc.c
24 int i, j, page_count; in hv_call_deposit_pages() local
82 for (i = 0, page_count = 0; i < num_allocations; ++i) { in hv_call_deposit_pages()
84 for (j = 0; j < counts[i]; ++j, ++page_count) in hv_call_deposit_pages()
85 input_page->gpa_page_list[page_count] = base_pfn + j; in hv_call_deposit_pages()
88 page_count, 0, input_page, NULL); in hv_call_deposit_pages()
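
The hv_call_deposit_pages() hits above flatten several contiguous allocations into a single list of guest page frame numbers, with page_count indexing the output across all of them. A minimal user-space sketch of that bookkeeping (build_pfn_list and all types here are illustrative stand-ins, not the driver's API):

    #include <stdint.h>
    #include <stdio.h>

    /* Flatten several runs of contiguous page frame numbers into one flat
     * list, the way hv_call_deposit_pages() fills gpa_page_list. */
    static size_t build_pfn_list(uint64_t *gpa_page_list,
                                 const uint64_t *base_pfns,
                                 const unsigned int *counts,
                                 size_t num_allocations)
    {
        size_t i, page_count = 0;
        unsigned int j;

        for (i = 0; i < num_allocations; ++i)
            for (j = 0; j < counts[i]; ++j, ++page_count)
                gpa_page_list[page_count] = base_pfns[i] + j;

        return page_count;
    }

    int main(void)
    {
        uint64_t base_pfns[] = { 0x1000, 0x8000 };
        unsigned int counts[] = { 2, 3 };
        uint64_t list[5];
        size_t n = build_pfn_list(list, base_pfns, counts, 2);

        for (size_t k = 0; k < n; k++)
            printf("pfn[%zu] = 0x%llx\n", k, (unsigned long long)list[k]);
        return 0;
    }
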
mshv_root_main.c
623 if (page_count > INT_MAX) in mshv_vp_ioctl_get_set_state_pfn()
642 int remaining = page_count - completed; in mshv_vp_ioctl_get_set_state_pfn()
656 state_data, page_count, pages, in mshv_vp_ioctl_get_set_state_pfn()
661 state_data, page_count, pages, in mshv_vp_ioctl_get_set_state_pfn()
746 size_t page_count = PFN_DOWN(args.buf_sz); in mshv_vp_ioctl_get_set_state() local
749 page_count, is_set); in mshv_vp_ioctl_get_set_state()
1089 u64 page_offset, u64 page_count) in mshv_region_remap_pages() argument
1100 page_count, map_flags, in mshv_region_remap_pages()
1115 u64 page_offset, u64 page_count) in mshv_region_evict_pages() argument
1132 u64 page_offset, u64 page_count) in mshv_region_populate_pages() argument
[all …]
mshv_root.h
259 int hv_call_map_gpa_pages(u64 partition_id, u64 gpa_target, u64 page_count,
261 int hv_call_unmap_gpa_pages(u64 partition_id, u64 gpa_target, u64 page_count,
275 u64 page_count, struct page **pages,
279 struct hv_vp_state_data state_data, u64 page_count,
/drivers/gpu/drm/nouveau/
nouveau_bo85b5.c
47 u32 page_count = PFN_UP(new_reg->size); in nva3_bo_move_copy() local
50 page_count = PFN_UP(new_reg->size); in nva3_bo_move_copy()
51 while (page_count) { in nva3_bo_move_copy()
52 int line_count = (page_count > 8191) ? 8191 : page_count; in nva3_bo_move_copy()
68 page_count -= line_count; in nva3_bo_move_copy()
nouveau_bo90b5.c
40 u32 page_count = PFN_UP(new_reg->size); in nvc0_bo_move_copy() local
43 page_count = PFN_UP(new_reg->size); in nvc0_bo_move_copy()
44 while (page_count) { in nvc0_bo_move_copy()
45 int line_count = (page_count > 8191) ? 8191 : page_count; in nvc0_bo_move_copy()
61 page_count -= line_count; in nvc0_bo_move_copy()
nouveau_bo0039.c
55 u32 page_count = PFN_UP(new_reg->size); in nv04_bo_move_m2mf() local
65 page_count = PFN_UP(new_reg->size); in nv04_bo_move_m2mf()
66 while (page_count) { in nv04_bo_move_m2mf()
67 int line_count = (page_count > 2047) ? 2047 : page_count; in nv04_bo_move_m2mf()
88 page_count -= line_count; in nv04_bo_move_m2mf()
nouveau_bo9039.c
45 u32 page_count = PFN_UP(new_reg->size); in nvc0_bo_move_m2mf() local
48 page_count = PFN_UP(new_reg->size); in nvc0_bo_move_m2mf()
49 while (page_count) { in nvc0_bo_move_m2mf()
50 int line_count = (page_count > 2047) ? 2047 : page_count; in nvc0_bo_move_m2mf()
78 page_count -= line_count; in nvc0_bo_move_m2mf()
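
All four nouveau hits share the same chunking loop: the buffer's page count is computed with PFN_UP(), then consumed in batches capped by the engine's per-command line limit (8191 lines in the copy-engine files, 2047 in the M2MF ones). A minimal sketch of that loop, with the hypothetical MAX_LINES and submit_copy() standing in for the real command submission:

    #include <stdint.h>
    #include <stdio.h>

    #define MAX_LINES 8191u   /* per-command cap; 2047 on the M2MF paths */

    /* Hypothetical stand-in for queueing one copy command. */
    static void submit_copy(uint32_t line_count)
    {
        printf("copy %u lines\n", line_count);
    }

    /* Consume the page count in batches of at most MAX_LINES, as the
     * nvXX_bo_move_*() loops above do. */
    static void move_pages(uint32_t page_count)
    {
        while (page_count) {
            uint32_t line_count = page_count > MAX_LINES ? MAX_LINES : page_count;

            submit_copy(line_count);
            page_count -= line_count;
        }
    }

    int main(void)
    {
        move_pages(20000);   /* emits 8191 + 8191 + 3618 */
        return 0;
    }
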
/drivers/char/agp/
generic.c
181 if (curr->page_count != 0) { in agp_free_memory()
186 for (i = 0; i < curr->page_count; i++) { in agp_free_memory()
219 size_t page_count, u32 type) in agp_allocate_memory() argument
231 (cur_memory + page_count < page_count)) in agp_allocate_memory()
264 for (i = 0; i < page_count; i++) { in agp_allocate_memory()
272 new->page_count++; in agp_allocate_memory()
1038 if (mem->page_count == 0) in agp_generic_insert_memory()
1117 if (mem->page_count == 0) in agp_generic_remove_memory()
1170 for (i = 0; i < page_count; i++) in agp_generic_alloc_user()
1172 new->page_count = 0; in agp_generic_alloc_user()
[all …]
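
The agp_allocate_memory() hit at source line 231 rejects requests where cur_memory + page_count would wrap around, which it detects by checking whether the unsigned sum ended up smaller than page_count. A small sketch of that guard (the function name is assumed, not from the driver):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Unsigned arithmetic wraps, so a sum smaller than one of its operands
     * means the addition overflowed; this mirrors the
     * (cur_memory + page_count < page_count) test in agp_allocate_memory(). */
    static bool pages_would_overflow(size_t cur_memory, size_t page_count)
    {
        return cur_memory + page_count < page_count;
    }

    int main(void)
    {
        printf("%d\n", pages_would_overflow(100, 200));        /* 0 */
        printf("%d\n", pages_would_overflow((size_t)-1, 200)); /* 1 */
        return 0;
    }
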
nvidia-agp.c
211 if (mem->page_count == 0) in nvidia_insert_memory()
214 if ((pg_start + mem->page_count) > in nvidia_insert_memory()
218 for (j = pg_start; j < (pg_start + mem->page_count); j++) { in nvidia_insert_memory()
227 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in nvidia_insert_memory()
251 if (mem->page_count == 0) in nvidia_remove_memory()
254 for (i = pg_start; i < (mem->page_count + pg_start); i++) in nvidia_remove_memory()
intel-gtt.c
129 DBG("try unmapping %lu pages\n", (unsigned long)mem->page_count); in intel_gtt_unmap_memory()
219 if ((pg_start + mem->page_count) in i810_insert_dcache_entries()
226 for (i = pg_start; i < (pg_start + mem->page_count); i++) { in i810_insert_dcache_entries()
271 new->page_count = pg_count; in alloc_agpphysmem_i8xx()
282 if (curr->page_count == 4) in intel_i810_free_by_type()
946 if (mem->page_count == 0) in intel_fake_agp_insert_entries()
949 if (pg_start + mem->page_count > intel_private.gtt_total_entries) in intel_fake_agp_insert_entries()
964 ret = intel_gtt_map_memory(mem->pages, mem->page_count, &st); in intel_fake_agp_insert_entries()
999 if (mem->page_count == 0) in intel_fake_agp_remove_entries()
1002 intel_gmch_gtt_clear_range(pg_start, mem->page_count); in intel_fake_agp_remove_entries()
[all …]
ati-agp.c
273 if (mem->page_count == 0) in ati_insert_memory()
276 if ((pg_start + mem->page_count) > num_entries) in ati_insert_memory()
280 while (j < (pg_start + mem->page_count)) { in ati_insert_memory()
294 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) { in ati_insert_memory()
319 if (mem->page_count == 0) in ati_remove_memory()
322 for (i = pg_start; i < (mem->page_count + pg_start); i++) { in ati_remove_memory()
uninorth-agp.c
166 if (mem->page_count == 0) in uninorth_insert_memory()
172 if ((pg_start + mem->page_count) > num_entries) in uninorth_insert_memory()
176 for (i = 0; i < mem->page_count; ++i) { in uninorth_insert_memory()
185 for (i = 0; i < mem->page_count; i++) { in uninorth_insert_memory()
215 if (mem->page_count == 0) in uninorth_remove_memory()
219 for (i = 0; i < mem->page_count; ++i) { in uninorth_remove_memory()
efficeon-agp.c
238 int i, count = mem->page_count, num_entries; in efficeon_insert_memory()
246 if ((pg_start + mem->page_count) > num_entries) in efficeon_insert_memory()
287 int i, count = mem->page_count, num_entries; in efficeon_remove_memory()
293 if ((pg_start + mem->page_count) > num_entries) in efficeon_remove_memory()
agp.h
194 struct agp_memory *agp_generic_alloc_by_type(size_t page_count, int type);
198 struct agp_memory *memory, size_t page_count);
215 struct agp_memory *agp_generic_alloc_user(size_t page_count, int type);
/drivers/firewire/
core-iso.c
35 buffer->page_count = 0; in fw_iso_buffer_alloc()
42 for (i = 0; i < page_count; i++) { in fw_iso_buffer_alloc()
47 buffer->page_count = i; in fw_iso_buffer_alloc()
48 if (i < page_count) { in fw_iso_buffer_alloc()
64 for (i = 0; i < buffer->page_count; i++) { in fw_iso_buffer_map_dma()
73 if (i < buffer->page_count) in fw_iso_buffer_map_dma()
80 int page_count, enum dma_data_direction direction) in fw_iso_buffer_init() argument
84 ret = fw_iso_buffer_alloc(buffer, page_count); in fw_iso_buffer_init()
107 for (i = 0; i < buffer->page_count; i++) in fw_iso_buffer_destroy()
112 buffer->page_count = 0; in fw_iso_buffer_destroy()
[all …]
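
The fw_iso_buffer_alloc() hits show the usual partial-allocation bookkeeping: page_count starts at zero, allocation stops at the first failure, the number of pages actually obtained is recorded, and teardown frees only that many. A user-space sketch of the same idea, with iso_buffer and both helpers as stand-ins for the firewire structures:

    #include <stdlib.h>

    struct iso_buffer {          /* stand-in for struct fw_iso_buffer */
        void **pages;
        int page_count;
    };

    /* Try to allocate page_count pages; record how many actually succeeded
     * so teardown knows exactly what to free. */
    static int buffer_alloc(struct iso_buffer *buffer, int page_count,
                            size_t page_size)
    {
        int i;

        buffer->page_count = 0;
        buffer->pages = calloc(page_count, sizeof(buffer->pages[0]));
        if (!buffer->pages)
            return -1;

        for (i = 0; i < page_count; i++) {
            buffer->pages[i] = malloc(page_size);
            if (!buffer->pages[i])
                break;
        }
        buffer->page_count = i;             /* pages actually allocated */
        return i < page_count ? -1 : 0;     /* partial allocation is an error */
    }

    /* Free only the pages that were obtained, then the page array itself. */
    static void buffer_destroy(struct iso_buffer *buffer)
    {
        for (int i = 0; i < buffer->page_count; i++)
            free(buffer->pages[i]);
        free(buffer->pages);
        buffer->page_count = 0;
    }

    int main(void)
    {
        struct iso_buffer b;
        int ret = buffer_alloc(&b, 16, 4096);

        buffer_destroy(&b);   /* safe for both full and partial allocations */
        return ret ? 1 : 0;
    }
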
/drivers/vfio/pci/pds/
dirty.c
67 le32_to_cpu(region_info[i].page_count), in pds_vfio_print_guest_region_info()
145 u32 page_count) in pds_vfio_dirty_alloc_sgl() argument
154 max_sge = DIV_ROUND_UP(page_count, PAGE_SIZE * 8); in pds_vfio_dirty_alloc_sgl()
199 u32 page_count; in pds_vfio_dirty_alloc_regions() local
202 page_count = le32_to_cpu(ri->page_count); in pds_vfio_dirty_alloc_regions()
204 region_size = page_count * region_page_size; in pds_vfio_dirty_alloc_regions()
207 page_count / BITS_PER_BYTE); in pds_vfio_dirty_alloc_regions()
226 dev_bmp_offset_byte += page_count / BITS_PER_BYTE; in pds_vfio_dirty_alloc_regions()
301 u32 page_count; in pds_vfio_dirty_enable() local
306 ri->page_count = cpu_to_le32(page_count); in pds_vfio_dirty_enable()
[all …]
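
In dirty.c, both a region's size and its dirty-bitmap size are derived from page_count: the region covers page_count pages of region_page_size bytes, and the bitmap needs one bit per page, i.e. page_count / BITS_PER_BYTE bytes. A small sketch of that arithmetic with illustrative struct and function names:

    #include <stdint.h>
    #include <stdio.h>
    #include <stddef.h>

    #define BITS_PER_BYTE 8

    /* Mirrors the sizing arithmetic in pds_vfio_dirty_alloc_regions();
     * the names here are illustrative. */
    struct region_layout {
        uint64_t region_size;   /* bytes of address space covered */
        size_t bitmap_bytes;    /* bytes of dirty bitmap required */
    };

    static struct region_layout layout_for(uint32_t page_count,
                                           uint64_t region_page_size)
    {
        struct region_layout l = {
            .region_size = (uint64_t)page_count * region_page_size,
            .bitmap_bytes = page_count / BITS_PER_BYTE,
        };
        return l;
    }

    int main(void)
    {
        struct region_layout l = layout_for(0x10000, 4096);

        printf("region %llu bytes, bitmap %zu bytes\n",
               (unsigned long long)l.region_size, l.bitmap_bytes);
        return 0;
    }
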
/drivers/target/
target_core_rd.c
68 u32 i, j, page_count = 0, sg_per_table; in rd_release_sgl_table() local
78 page_count++; in rd_release_sgl_table()
85 return page_count; in rd_release_sgl_table()
90 u32 page_count; in rd_release_device_space() local
95 page_count = rd_release_sgl_table(rd_dev, rd_dev->sg_table_array, in rd_release_device_space()
100 rd_dev->rd_host->rd_host_id, rd_dev->rd_dev_id, page_count, in rd_release_device_space()
101 rd_dev->sg_table_count, (unsigned long)page_count * PAGE_SIZE); in rd_release_device_space()
216 u32 page_count; in rd_release_prot_space() local
221 page_count = rd_release_sgl_table(rd_dev, rd_dev->sg_prot_array, in rd_release_prot_space()
226 rd_dev->rd_host->rd_host_id, rd_dev->rd_dev_id, page_count, in rd_release_prot_space()
[all …]
/drivers/w1/slaves/
w1_ds2433.c
44 unsigned int page_count; /* number of 256 bits pages */ member
50 .page_count = 16,
56 .page_count = 80,
94 bitmap_zero(data->validcrc, data->cfg->page_count); in w1_f23_refresh_block()
337 if (data->cfg->page_count > W1_VALIDCRC_MAX) { in w1_f23_add_slave()
347 bitmap_zero(data->validcrc, data->cfg->page_count); in w1_f23_add_slave()
/drivers/gpu/drm/i915/gem/
i915_gem_shmem.c
68 unsigned int page_count; /* restricted by sg_alloc_table */ in shmem_sg_alloc_table() local
75 if (overflows_type(size / PAGE_SIZE, page_count)) in shmem_sg_alloc_table()
78 page_count = size / PAGE_SIZE; in shmem_sg_alloc_table()
86 if (sg_alloc_table(st, page_count, GFP_KERNEL | __GFP_NOWARN)) in shmem_sg_alloc_table()
101 for (i = 0; i < page_count; i++) { in shmem_sg_alloc_table()
121 i915_gem_shrink(NULL, i915, 2 * page_count, NULL, *s++); in shmem_sg_alloc_table()
155 folio_nr_pages(folio), page_count - i); in shmem_sg_alloc_table()
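
The shmem_sg_alloc_table() hits guard page_count with the driver's overflows_type() macro before calling sg_alloc_table(), which takes an unsigned int: an object whose page count does not fit in that type is rejected up front. A stand-alone approximation of that check (not the i915 macro itself):

    #include <limits.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096ULL

    /* Reject sizes whose page count cannot be represented in the unsigned
     * int that sg_alloc_table() takes; a stand-in for overflows_type(). */
    static bool page_count_overflows(unsigned long long size)
    {
        return size / PAGE_SIZE > UINT_MAX;
    }

    int main(void)
    {
        printf("%d\n", page_count_overflows(1ULL << 30)); /* 0 */
        printf("%d\n", page_count_overflows(~0ULL));      /* 1 */
        return 0;
    }
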
/drivers/virt/vboxguest/
vboxguest_utils.c
199 u32 page_count; in hgcm_call_add_pagelist_size() local
201 page_count = hgcm_call_buf_size_in_pages(buf, len); in hgcm_call_add_pagelist_size()
202 *extra += offsetof(struct vmmdev_hgcm_pagelist, pages[page_count]); in hgcm_call_add_pagelist_size()
340 u32 i, page_count; in hgcm_call_init_linaddr() local
351 page_count = hgcm_call_buf_size_in_pages(buf, len); in hgcm_call_init_linaddr()
359 dst_pg_lst->page_count = page_count; in hgcm_call_init_linaddr()
361 for (i = 0; i < page_count; i++) { in hgcm_call_init_linaddr()
371 *off_extra += offsetof(struct vmmdev_hgcm_pagelist, pages[page_count]); in hgcm_call_init_linaddr()
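
The vboxguest hits size the HGCM page list as offsetof(struct vmmdev_hgcm_pagelist, pages[page_count]), i.e. the fixed header plus page_count entries of the flexible array member. A sketch of that sizing idiom with an illustrative struct (not copied from the VMMDev headers):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative layout with a flexible array member at the end. */
    struct pagelist {
        uint32_t flags;
        uint16_t offset_first_page;
        uint16_t page_count;
        uint64_t pages[];        /* flexible array member */
    };

    int main(void)
    {
        uint32_t page_count = 4;
        /* The driver writes this as
         *   offsetof(struct vmmdev_hgcm_pagelist, pages[page_count]);
         * the portable equivalent, spelled out: */
        size_t bytes = offsetof(struct pagelist, pages)
                       + page_count * sizeof(uint64_t);

        printf("%zu bytes for %u pages\n", bytes, page_count);
        return 0;
    }
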
/drivers/net/wireless/mediatek/mt76/mt7603/
init.c
44 int page_count; in mt7603_dma_sched_init() local
55 page_count = mt76_get_field(dev, MT_PSE_FC_P0, in mt7603_dma_sched_init()
73 mt76_wr(dev, MT_SCH_1, page_count | (2 << 28)); in mt7603_dma_sched_init()
92 mt76_wr(dev, MT_RSV_MAX_THRESH, page_count - reserved_count); in mt7603_dma_sched_init()
96 page_count - beacon_pages - mcu_pages); in mt7603_dma_sched_init()
102 mt76_wr(dev, MT_GROUP_THRESH(0), page_count); in mt7603_dma_sched_init()
/drivers/infiniband/hw/hns/
hns_roce_mr.c
555 static inline int mtr_check_direct_pages(dma_addr_t *pages, int page_count, in mtr_check_direct_pages() argument
561 for (i = 1; i < page_count; i++) in mtr_check_direct_pages()
644 int page_count = cal_mtr_pg_cnt(mtr); in mtr_map_bufs() local
653 pages = kvcalloc(page_count, sizeof(dma_addr_t), GFP_KERNEL); in mtr_map_bufs()
658 npage = hns_roce_get_umem_bufs(pages, page_count, in mtr_map_bufs()
661 npage = hns_roce_get_kmem_bufs(hr_dev, pages, page_count, in mtr_map_bufs()
664 if (npage != page_count) { in mtr_map_bufs()
666 page_count); in mtr_map_bufs()
681 ret = hns_roce_mtr_map(hr_dev, mtr, pages, page_count); in mtr_map_bufs()

Completed in 57 milliseconds
