| /drivers/media/pci/cx18/ |
| cx18-mailbox.c |
    240  mb = &order->mb;  in epu_dma_done()
    349  order->mb.cmd);  in epu_cmd()
    356  order->mb.cmd);  in epu_cmd()
    374  epu_cmd(cx, order);  in cx18_in_work_handler()
    399  order->rpu, order->mb.cmd);  in mb_ack_irq()
    408  rpu_str[order->rpu], rpu_str[order->rpu], req);  in mb_ack_irq()
    423  mb = &order->mb;  in epu_dma_done_irq()
    489  order->mb.cmd);  in epu_cmd_irq()
    518  return order;  in alloc_in_work_order_irq()
    546  order->flags = 0;  in cx18_api_epu_cmd_irq()
    [all …]
|
| /drivers/gpu/drm/ttm/ |
| ttm_pool.c |
    113  unsigned int order;  member
    147  if (order)  in ttm_pool_alloc_page()
    162  if (order)  in ttm_pool_alloc_page()
    208  if (order)  in ttm_pool_free_page()
    315  pt->order = order;  in ttm_pool_type_init()
    418  if (!order)  in ttm_pool_split_for_swap()
    422  nr = 1UL << order;  in ttm_pool_split_for_swap()
    590  restore->order = order;  in ttm_pool_page_allocated_restore()
    731  order = ttm_pool_alloc_find_order(order, alloc)) {  in __ttm_pool_alloc()
    752  if (order) {  in __ttm_pool_alloc()
    [all …]
|
| /drivers/gpu/drm/ |
| drm_buddy.c |
    31  block->header |= order;  in drm_block_alloc()
    128  unsigned int order;  in __drm_buddy_free()  local
    164  return order;  in __drm_buddy_free()
    172  unsigned int order;  in __force_merge()  local
    331  unsigned int order;  in drm_buddy_fini()  local
    729  if (tmp != order)  in alloc_from_freelist()
    862  if (order == 0)  in __alloc_contig_try_harder()
    1082  order = min(order, (unsigned int)fls(pages) - 1);  in drm_buddy_alloc_blocks()
    1089  order,  in drm_buddy_alloc_blocks()
    1198  int order;  in drm_buddy_print()  local
    [all …]
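A note on the drm_buddy_alloc_blocks() hit at line 1082: clamping the requested
order to fls(pages) - 1, i.e. floor(log2(pages)), keeps the allocator from
asking for a block larger than the pages still to be allocated. A minimal
sketch of that clamp with a portable stand-in for the kernel's fls(); both
helper names below are invented for the example and assume pages > 0:

static unsigned int toy_fls(unsigned long x)
{
        /* Like the kernel's fls(): 1-based index of the highest set bit. */
        unsigned int r = 0;

        while (x) {
                x >>= 1;
                r++;
        }
        return r;
}

static unsigned int clamp_order(unsigned int order, unsigned long pages)
{
        /* Largest order that still fits into the remaining page count. */
        unsigned int max_fit = toy_fls(pages) - 1;

        return order < max_fit ? order : max_fit;
}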
|
| /drivers/gpu/drm/lib/ |
| drm_random.c |
    17  void drm_random_reorder(unsigned int *order, unsigned int count,  in drm_random_reorder()  argument
    25  swap(order[i], order[j]);  in drm_random_reorder()
    32  unsigned int *order, i;  in drm_random_order()  local
    34  order = kmalloc_array(count, sizeof(*order), GFP_KERNEL);  in drm_random_order()
    35  if (!order)  in drm_random_order()
    36  return order;  in drm_random_order()
    39  order[i] = i;  in drm_random_order()
    41  drm_random_reorder(order, count, state);  in drm_random_order()
    42  return order;  in drm_random_order()
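The drm_random_order()/drm_random_reorder() hits above show the usual recipe
for a shuffled index array: allocate count slots, fill them with 0..count-1,
then Fisher-Yates swap. A self-contained user-space sketch of the same pattern,
with rand() standing in for the driver's PRNG state (an illustration, not the
DRM helper itself):

#include <stdlib.h>

/* Return a randomly ordered array of the indices 0..count-1, or NULL on
 * allocation failure. The caller frees the result. */
static unsigned int *random_order(unsigned int count)
{
        unsigned int *order = malloc(count * sizeof(*order));
        unsigned int i;

        if (!order)
                return NULL;

        for (i = 0; i < count; i++)
                order[i] = i;

        /* Fisher-Yates: swap slot i-1 with a random slot at or before it. */
        for (i = count; i > 1; i--) {
                unsigned int j = (unsigned int)rand() % i;
                unsigned int tmp = order[i - 1];

                order[i - 1] = order[j];
                order[j] = tmp;
        }
        return order;
}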
|
| /drivers/gpu/drm/nouveau/nvkm/subdev/therm/ |
| gk104.c |
    34  const struct gk104_clkgate_engine_info *order = therm->clkgate_order;  in gk104_clkgate_enable()  local
    38  for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) {  in gk104_clkgate_enable()
    39  if (!nvkm_device_subdev(dev, order[i].type, order[i].inst))  in gk104_clkgate_enable()
    42  nvkm_mask(dev, 0x20200 + order[i].offset, 0xff00, 0x4500);  in gk104_clkgate_enable()
    50  for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) {  in gk104_clkgate_enable()
    51  if (!nvkm_device_subdev(dev, order[i].type, order[i].inst))  in gk104_clkgate_enable()
    54  nvkm_mask(dev, 0x20200 + order[i].offset, 0x00ff, 0x0045);  in gk104_clkgate_enable()
    63  const struct gk104_clkgate_engine_info *order = therm->clkgate_order;  in gk104_clkgate_fini()  local
    67  for (i = 0; order[i].type != NVKM_SUBDEV_NR; i++) {  in gk104_clkgate_fini()
    68  if (!nvkm_device_subdev(dev, order[i].type, order[i].inst))  in gk104_clkgate_fini()
    [all …]
|
| /drivers/net/ethernet/mellanox/mlx5/core/steering/sws/ |
| dr_buddy.c |
    75  unsigned int *order)  in dr_buddy_find_free_seg()  argument
    99  *order = order_iter;  in dr_buddy_find_free_seg()
    120  unsigned int order,  in mlx5dr_buddy_alloc_mem()  argument
    137  while (order_iter > order) {  in mlx5dr_buddy_alloc_mem()
    144  seg <<= order;  in mlx5dr_buddy_alloc_mem()
    151  unsigned int seg, unsigned int order)  in mlx5dr_buddy_free_mem()  argument
    153  seg >>= order;  in mlx5dr_buddy_free_mem()
    160  --buddy->num_free[order];  in mlx5dr_buddy_free_mem()
    162  ++order;  in mlx5dr_buddy_free_mem()
    164  bitmap_set(buddy->bitmap[order], seg, 1);  in mlx5dr_buddy_free_mem()
    [all …]
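The dr_buddy.c hits come from the software-steering buddy allocator: find the
lowest order at or above the request that still has a free segment, split that
block down to the requested order, and shift the segment index left by the
order on the way out (the seg <<= order at line 144). A toy, self-contained
sketch of that allocation path; the struct layout and the byte-per-flag "bitmap"
are invented for the example and are not the mlx5 data structures:

#define TOY_MAX_ORDER 8

/* Toy buddy state: free[o][s] is nonzero when segment s of order o is free;
 * the arena covers up to 1 << TOY_MAX_ORDER minimal units. */
struct toy_buddy {
        unsigned char *free[TOY_MAX_ORDER + 1];
        unsigned int num_free[TOY_MAX_ORDER + 1];
        unsigned int max_order;
};

/* Allocate 1 << order contiguous units; returns the first unit or -1. */
static int toy_buddy_alloc(struct toy_buddy *b, unsigned int order)
{
        unsigned int o, seg, nseg;

        /* Find the smallest order >= the request with a free segment. */
        for (o = order; o <= b->max_order; o++) {
                if (!b->num_free[o])
                        continue;
                nseg = 1U << (b->max_order - o);
                for (seg = 0; seg < nseg; seg++)
                        if (b->free[o][seg])
                                goto found;
        }
        return -1;

found:
        b->free[o][seg] = 0;
        b->num_free[o]--;

        /* Split down: each step frees the upper buddy one order below. */
        while (o > order) {
                o--;
                seg <<= 1;
                b->free[o][seg ^ 1] = 1;
                b->num_free[o]++;
        }

        /* Convert the segment index at this order into minimal units. */
        return (int)(seg << order);
}

The matching release path, with the buddy merge loop, is sketched after the
hws buddy.c entry further down.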
|
| /drivers/gpu/drm/i915/selftests/ |
| i915_syncmap.c |
    274  unsigned int pass, order;  in igt_syncmap_join_above()  local
    296  for (order = 0; order < 64; order += SHIFT) {  in igt_syncmap_join_above()
    335  unsigned int step, order, idx;  in igt_syncmap_join_below()  local
    345  for (order = 64 - SHIFT; order > 0; order -= SHIFT) {  in igt_syncmap_join_below()
    362  for (order = SHIFT; order < 64; order += SHIFT) {  in igt_syncmap_join_below()
    383  for (order = SHIFT; order < 64; order += SHIFT) {  in igt_syncmap_join_below()
    449  unsigned int idx, order;  in igt_syncmap_compact()  local
    462  for (order = SHIFT; order < 64; order += SHIFT) {  in igt_syncmap_compact()
    477  context, order, idx,  in igt_syncmap_compact()
    491  if (sync->height != order) {  in igt_syncmap_compact()
    [all …]
|
| i915_random.c |
    70  void i915_random_reorder(unsigned int *order, unsigned int count,  in i915_random_reorder()  argument
    73  i915_prandom_shuffle(order, sizeof(*order), count, state);  in i915_random_reorder()
    78  unsigned int *order, i;  in i915_random_order()  local
    80  order = kmalloc_array(count, sizeof(*order),  in i915_random_order()
    82  if (!order)  in i915_random_order()
    83  return order;  in i915_random_order()
    86  order[i] = i;  in i915_random_order()
    88  i915_random_reorder(order, count, state);  in i915_random_order()
    89  return order;  in i915_random_order()
|
| /drivers/net/ethernet/mellanox/mlx5/core/steering/hws/ |
| buddy.c |
    85  u32 *order)  in hws_buddy_find_free_seg()  argument
    109  *order = order_iter;  in hws_buddy_find_free_seg()
    124  while (order_iter > order) {  in mlx5hws_buddy_alloc_mem()
    131  seg <<= order;  in mlx5hws_buddy_alloc_mem()
    138  seg >>= order;  in mlx5hws_buddy_free_mem()
    140  while (test_bit(seg ^ 1, buddy->bitmap[order])) {  in mlx5hws_buddy_free_mem()
    141  bitmap_clear(buddy->bitmap[order], seg ^ 1, 1);  in mlx5hws_buddy_free_mem()
    142  --buddy->num_free[order];  in mlx5hws_buddy_free_mem()
    144  ++order;  in mlx5hws_buddy_free_mem()
    147  bitmap_set(buddy->bitmap[order], seg, 1);  in mlx5hws_buddy_free_mem()
    [all …]
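The mlx5hws_buddy_free_mem() hits show the classic merge loop on the release
path: while the buddy segment (seg ^ 1) at the current order is also free,
absorb it, move up one order, and only then mark the (possibly merged) block
free. A hedged sketch of that loop, reusing the toy_buddy types from the
allocation sketch above (again, not the driver's actual structures):

/* Free 1 << order units starting at unit `start` (which must be a multiple
 * of 1 << order), merging with the buddy (seg ^ 1) while it is free too. */
static void toy_buddy_free(struct toy_buddy *b, unsigned int start,
                           unsigned int order)
{
        unsigned int seg = start >> order;

        while (order < b->max_order && b->free[order][seg ^ 1]) {
                b->free[order][seg ^ 1] = 0;    /* absorb the buddy */
                b->num_free[order]--;
                seg >>= 1;                      /* index of the parent block */
                order++;
        }

        b->free[order][seg] = 1;
        b->num_free[order]++;
}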
|
| /drivers/gpu/drm/tests/ |
| drm_buddy_test.c |
    164  kfree(order);  in drm_test_buddy_alloc_range_bias()
    365  order = 1;  in drm_test_buddy_alloc_clear()
    505  int order, top;  in drm_test_buddy_alloc_pathological()  local
    532  for (order = top; order--;) {  in drm_test_buddy_alloc_pathological()
    567  for (order = 1; order <= max_order; order++) {  in drm_test_buddy_alloc_pathological()
    603  for (order = 0; order < max_order; order++) {  in drm_test_buddy_alloc_pessimistic()
    628  for (order = max_order; order--;) {  in drm_test_buddy_alloc_pessimistic()
    640  order = 1;  in drm_test_buddy_alloc_pessimistic()
    656  order++;  in drm_test_buddy_alloc_pessimistic()
    684  int order;  in drm_test_buddy_alloc_optimistic()  local
    [all …]
|
| /drivers/gpu/drm/ttm/tests/ |
| ttm_pool_test.c |
    14  unsigned int order;  member
    103  .order = 0,
    107  .order = 2,
    115  .order = 0,
    185  params->order));  in ttm_pool_alloc_basic()
    244  unsigned int order = 0;  in ttm_pool_alloc_order_caching_match()  local
    274  unsigned int order = 0;  in ttm_pool_alloc_caching_mismatch()  local
    306  unsigned int order = 2;  in ttm_pool_alloc_order_mismatch()  local
    342  unsigned int order = 2;  in ttm_pool_free_dma_alloc()  local
    373  unsigned int order = 2;  in ttm_pool_free_no_dma_alloc()  local
    [all …]
|
| /drivers/net/ethernet/mellanox/mlx4/ |
| mr.c |
    69  while (o > order) {  in mlx4_buddy_alloc()
    78  seg <<= order;  in mlx4_buddy_alloc()
    85  seg >>= order;  in mlx4_buddy_free()
    93  ++order;  in mlx4_buddy_free()
    97  ++buddy->num_free[order];  in mlx4_buddy_free()
    200  mtt->order = -1;  in mlx4_mtt_init()
    207  ++mtt->order;  in mlx4_mtt_init()
    245  offset, order);  in mlx4_free_mtt_range()
    253  if (mtt->order < 0)  in mlx4_mtt_cleanup()
    585  mr->mtt.order = -1;  in mlx4_mr_rereg_mem_cleanup()
    [all …]
|
| /drivers/infiniband/hw/hns/ |
| hns_roce_db.c |
    94  struct hns_roce_db *db, int order)  in hns_roce_alloc_db_from_pgdir()  argument
    99  for (o = order; o <= 1; ++o) {  in hns_roce_alloc_db_from_pgdir()
    112  if (o > order)  in hns_roce_alloc_db_from_pgdir()
    113  set_bit(i ^ 1, pgdir->bits[order]);  in hns_roce_alloc_db_from_pgdir()
    119  db->order = order;  in hns_roce_alloc_db_from_pgdir()
    125  int order)  in hns_roce_alloc_db()  argument
    133  if (!hns_roce_alloc_db_from_pgdir(pgdir, db, order))  in hns_roce_alloc_db()
    145  WARN_ON(hns_roce_alloc_db_from_pgdir(pgdir, db, order));  in hns_roce_alloc_db()
    160  o = db->order;  in hns_roce_free_db()
    163  if (db->order == 0 && test_bit(i ^ 1, db->u.pgdir->order0)) {  in hns_roce_free_db()
|
| /drivers/pci/endpoint/ |
| pci-epc-mem.c |
    25  int order;  in pci_epc_mem_get_order()  local
    31  order = fls(size);  in pci_epc_mem_get_order()
    33  order = fls64(size);  in pci_epc_mem_get_order()
    35  return order;  in pci_epc_mem_get_order()
    186  int order;  in pci_epc_mem_alloc_addr()  local
    195  order = pci_epc_mem_get_order(mem, align_size);  in pci_epc_mem_alloc_addr()
    199  order);  in pci_epc_mem_alloc_addr()
    207  pageno, order);  in pci_epc_mem_alloc_addr()
    254  int order;  in pci_epc_mem_free_addr()  local
    267  order = pci_epc_mem_get_order(mem, size);  in pci_epc_mem_free_addr()
    [all …]
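The pci_epc_mem_get_order() hits map an allocation size onto a power-of-two
order of the window's page size; that order is then fed to the allocation and
free paths at lines 199, 207 and 267. A minimal sketch of this kind of
size-to-order rounding, assuming only that page_size is a power of two (the
helper name below is made up for the example, not the PCI EPC API):

/* Smallest order such that (page_size << order) >= size. */
static unsigned int size_to_order(unsigned long size, unsigned long page_size)
{
        unsigned int order = 0;

        while ((page_size << order) < size)
                order++;

        return order;
}

For example, size_to_order(3 * 4096, 4096) returns 2, so a three-page request
reserves a four-page (order-2) slot.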
|
| /drivers/infiniband/hw/mthca/ |
| mthca_mr.c |
    43  int order;  member
    107  while (o > order) {  in mthca_buddy_alloc()
    116  seg <<= order;  in mthca_buddy_alloc()
    123  seg >>= order;  in mthca_buddy_free()
    129  --buddy->num_free[order];  in mthca_buddy_free()
    131  ++order;  in mthca_buddy_free()
    134  __set_bit(seg, buddy->bits[order]);  in mthca_buddy_free()
    135  ++buddy->num_free[order];  in mthca_buddy_free()
    198  seg + (1 << order) - 1)) {  in mthca_alloc_mtt_range()
    220  mtt->order = 0;  in __mthca_alloc_mtt()
    [all …]
|
| /drivers/staging/media/ipu3/ |
| ipu3-dmamap.c |
    55  unsigned int order = __fls(order_mask);  in imgu_dmamap_alloc_buffer()  local
    57  order_size = 1U << order;  in imgu_dmamap_alloc_buffer()
    59  gfp | high_order_gfp : gfp, order);  in imgu_dmamap_alloc_buffer()
    62  if (!order)  in imgu_dmamap_alloc_buffer()
    65  split_page(page, order);  in imgu_dmamap_alloc_buffer()
    69  __free_pages(page, order);  in imgu_dmamap_alloc_buffer()
    233  unsigned long order, base_pfn;  in imgu_dmamap_init()  local
    239  order = __ffs(IPU3_PAGE_SIZE);  in imgu_dmamap_init()
    240  base_pfn = max_t(unsigned long, 1, imgu->mmu->aperture_start >> order);  in imgu_dmamap_init()
    241  init_iova_domain(&imgu->iova_domain, 1UL << order, base_pfn);  in imgu_dmamap_init()
|
| /drivers/gpu/drm/i915/gem/ |
| i915_gem_internal.c |
    70  int order = min(fls(npages) - 1, max_order);  in i915_gem_object_get_pages_internal()  local
    74  page = alloc_pages(gfp | (order ? QUIET : MAYFAIL),  in i915_gem_object_get_pages_internal()
    75  order);  in i915_gem_object_get_pages_internal()
    78  if (!order--)  in i915_gem_object_get_pages_internal()
    82  max_order = order;  in i915_gem_object_get_pages_internal()
    85  sg_set_page(sg, page, PAGE_SIZE << order, 0);  in i915_gem_object_get_pages_internal()
    88  npages -= 1 << order;  in i915_gem_object_get_pages_internal()
|
| /drivers/hv/ |
| hv_proc.c |
    25  int order;  in hv_call_deposit_pages()  local
    54  order = 31 - __builtin_clz(num_pages);  in hv_call_deposit_pages()
    57  pages[i] = alloc_pages_node(node, GFP_KERNEL, order);  in hv_call_deposit_pages()
    60  if (!order) {  in hv_call_deposit_pages()
    65  --order;  in hv_call_deposit_pages()
    68  split_page(pages[i], order);  in hv_call_deposit_pages()
    69  counts[i] = 1 << order;  in hv_call_deposit_pages()
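hv_call_deposit_pages() covers a request of num_pages with power-of-two chunks:
31 - __builtin_clz(num_pages) is floor(log2(num_pages)), the largest order that
still fits, and the order is lowered when an allocation at that size fails.
Ignoring the real page allocation, the chunking arithmetic looks roughly like
the sketch below; try_alloc_chunk() is a hypothetical stand-in for
alloc_pages_node(), rigged to fail above order 4 so the fallback is visible:

#include <stdbool.h>
#include <stdio.h>

static bool try_alloc_chunk(unsigned int order)
{
        return order <= 4;      /* pretend larger allocations fail */
}

/* Cover num_pages with power-of-two chunks, largest first; returns the
 * number of chunks used, or -1 if even a single page cannot be had. */
static int deposit_pages(unsigned int num_pages)
{
        int chunks = 0;

        while (num_pages) {
                /* Largest order that fits: floor(log2(num_pages)). */
                unsigned int order = 31 - __builtin_clz(num_pages);

                while (!try_alloc_chunk(order)) {
                        if (!order)
                                return -1;
                        order--;
                }

                printf("chunk %d: order %u (%u pages)\n",
                       chunks, order, 1U << order);
                num_pages -= 1U << order;
                chunks++;
        }
        return chunks;
}

Called with num_pages = 37 this prints chunks of 16, 16, 4 and 1 pages (orders
4, 4, 2 and 0), which is the same largest-piece-first decomposition the hits
above show.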
|
| /drivers/atm/ |
| eni.c |
    212  for (order = 0; !(((unsigned long)start | size) & (1 << order)); order++);  in eni_put_free()
    219  list[len].order = order;  in eni_put_free()
    239  for (order = 0; (1 << order) < *size; order++)  in eni_alloc_mem()
    245  if (list[i].order == order) {  in eni_alloc_mem()
    250  else if (best_order > list[i].order && list[i].order > order) {  in eni_alloc_mem()
    276  for (order = -1; size; order++) size >>= 1;  in eni_free_mem()
    280  list[i].order == order) {  in eni_free_mem()
    282  list[i].start,start,1 << order,list[i].order,order);  in eni_free_mem()
    295  list[len].order = order;  in eni_free_mem()
    820  for (order = -1; size; order++) size >>= 1;  in open_rx_second()
    [all …]
|
| /drivers/media/test-drivers/vimc/ |
| vimc-debayer.c |
    29  enum vimc_debayer_rgb_colors order[2][2];  member
    81  .order = { { VIMC_DEBAYER_BLUE, VIMC_DEBAYER_GREEN },
    86  .order = { { VIMC_DEBAYER_GREEN, VIMC_DEBAYER_BLUE },
    91  .order = { { VIMC_DEBAYER_GREEN, VIMC_DEBAYER_RED },
    96  .order = { { VIMC_DEBAYER_RED, VIMC_DEBAYER_GREEN },
    101  .order = { { VIMC_DEBAYER_BLUE, VIMC_DEBAYER_GREEN },
    106  .order = { { VIMC_DEBAYER_GREEN, VIMC_DEBAYER_BLUE },
    111  .order = { { VIMC_DEBAYER_GREEN, VIMC_DEBAYER_RED },
    116  .order = { { VIMC_DEBAYER_RED, VIMC_DEBAYER_GREEN },
    131  .order = { { VIMC_DEBAYER_GREEN, VIMC_DEBAYER_RED },
    [all …]
|
| /drivers/s390/char/ |
| vmcp.c |
    63  int nr_pages, order;  in vmcp_response_alloc()  local
    65  order = get_order(session->bufsize);  in vmcp_response_alloc()
    72  if (order > 2)  in vmcp_response_alloc()
    79  session->response = (char *)__get_free_pages(GFP_KERNEL | __GFP_RETRY_MAYFAIL, order);  in vmcp_response_alloc()
    84  int nr_pages, order;  in vmcp_response_free()  local
    89  order = get_order(session->bufsize);  in vmcp_response_free()
    96  free_pages((unsigned long)session->response, order);  in vmcp_response_free()
|
| /drivers/tee/amdtee/ |
| shm_pool.c |
    14  unsigned int order = get_order(size);  in pool_op_alloc()  local
    22  va = __get_free_pages(GFP_KERNEL | __GFP_ZERO, order);  in pool_op_alloc()
    28  shm->size = PAGE_SIZE << order;  in pool_op_alloc()
    33  free_pages(va, order);  in pool_op_alloc()
|
| /drivers/gpu/drm/etnaviv/ |
| etnaviv_cmdbuf.c |
    87  int granule_offs, order, ret;  in etnaviv_cmdbuf_init()  local
    92  order = order_base_2(ALIGN(size, SUBALLOC_GRANULE) / SUBALLOC_GRANULE);  in etnaviv_cmdbuf_init()
    96  SUBALLOC_GRANULES, order);  in etnaviv_cmdbuf_init()
    120  int order = order_base_2(ALIGN(cmdbuf->size, SUBALLOC_GRANULE) /  in etnaviv_cmdbuf_free()  local
    129  order);  in etnaviv_cmdbuf_free()
|
| /drivers/gpu/drm/amd/amdgpu/ |
| amdgpu_gart.c |
    120  unsigned int order = get_order(adev->gart.table_size);  in amdgpu_gart_table_ram_alloc()  local
    133  p = alloc_pages(gfp_flags, order);  in amdgpu_gart_table_ram_alloc()
    138  for (x = 0; x < (1UL << order); x++)  in amdgpu_gart_table_ram_alloc()
    149  __free_pages(p, order);  in amdgpu_gart_table_ram_alloc()
    217  __free_pages(p, order);  in amdgpu_gart_table_ram_alloc()
    231  unsigned int order = get_order(adev->gart.table_size);  in amdgpu_gart_table_ram_free()  local
    246  for (x = 0; x < (1UL << order); x++)  in amdgpu_gart_table_ram_free()
    248  __free_pages(p, order);  in amdgpu_gart_table_ram_free()
|
| /drivers/iommu/ |
| iommu-pages.c |
    41  unsigned int order;  in iommu_alloc_pages_node_sz()  local
    50  order = get_order(size);  in iommu_alloc_pages_node_sz()
    59  folio = __folio_alloc_node(gfp | __GFP_ZERO, order, nid);  in iommu_alloc_pages_node_sz()
    72  pgcnt = 1UL << order;  in iommu_alloc_pages_node_sz()
|