Searched refs:alloc (Results 1 – 25 of 287) sorted by relevance

/drivers/android/
binder_alloc.c
65 return alloc->vm_start + alloc->buffer_size - buffer->user_data; in binder_alloc_buffer_size()
298 mdata->alloc = alloc; in binder_page_alloc()
341 alloc->pid, addr - alloc->vm_start); in binder_install_single_page()
353 alloc->pid, __func__, addr - alloc->vm_start, ret); in binder_install_single_page()
481 if (alloc->free_async_space >= alloc->buffer_size / 10) { in debug_low_async_space_locked()
599 alloc->pid, size, alloc->free_async_space); in binder_alloc_new_buf_locked()
764 alloc->pid, size, alloc->free_async_space); in binder_free_buf_locked()
919 alloc->pages = kvcalloc(alloc->buffer_size / PAGE_SIZE, in binder_alloc_mmap_handler()
939 alloc->free_async_space = alloc->buffer_size / 2; in binder_alloc_mmap_handler()
1024 if (alloc->mm) in binder_alloc_deferred_release()
[all …]
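
The binder hits above pair two related invariants: the mmap handler seeds free_async_space with half of the buffer (line 939), and the debug helper treats anything under a tenth of the buffer as low (line 481). Below is a minimal userspace sketch of that accounting; the names async_reserve_init and async_take are hypothetical, not binder's API.

    /*
     * Sketch of binder-style async space accounting (hypothetical names).
     * Half of the mmap'd buffer is reserved for async transactions; a
     * debug warning fires once free async space drops below 10%.
     */
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    struct async_space {
        size_t buffer_size;      /* total mmap'd buffer */
        size_t free_async_space; /* what async senders may still take */
    };

    static void async_reserve_init(struct async_space *a, size_t buffer_size)
    {
        a->buffer_size = buffer_size;
        a->free_async_space = buffer_size / 2; /* mirrors line 939 */
    }

    static bool async_take(struct async_space *a, size_t size)
    {
        if (size > a->free_async_space)
            return false; /* would overrun the async reserve */
        a->free_async_space -= size;
        if (a->free_async_space < a->buffer_size / 10) /* cf. line 481 */
            fprintf(stderr, "warning: async space low (%zu left)\n",
                    a->free_async_space);
        return true;
    }

    int main(void)
    {
        struct async_space a;

        async_reserve_init(&a, 1 << 20); /* 1 MiB buffer */
        async_take(&a, 400 << 10);       /* plenty left, no warning */
        async_take(&a, 80 << 10);        /* drops below 10%, warns */
        return 0;
    }

The up-front halving presumably keeps one-way traffic from swallowing the whole region that synchronous replies share.
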
binder_alloc.h
68 struct binder_alloc *alloc; member
132 void binder_alloc_init(struct binder_alloc *alloc);
135 void binder_alloc_vma_close(struct binder_alloc *alloc);
137 binder_alloc_prepare_to_free(struct binder_alloc *alloc,
139 void binder_alloc_free_buf(struct binder_alloc *alloc,
141 int binder_alloc_mmap_handler(struct binder_alloc *alloc,
146 struct binder_alloc *alloc);
148 struct binder_alloc *alloc);
159 guard(mutex)(&alloc->mutex); in binder_alloc_get_free_async_space()
160 return alloc->free_async_space; in binder_alloc_get_free_async_space()
[all …]
binder_trace.h
308 __entry->proc = alloc->pid;
320 TP_ARGS(alloc, page_index),
326 __entry->proc = alloc->pid;
335 TP_ARGS(alloc, page_index));
339 TP_ARGS(alloc, page_index));
343 TP_ARGS(alloc, page_index));
347 TP_ARGS(alloc, page_index));
351 TP_ARGS(alloc, page_index));
355 TP_ARGS(alloc, page_index));
359 TP_ARGS(alloc, page_index));
[all …]
/drivers/infiniband/hw/cxgb4/
id_table.c
51 obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last); in c4iw_id_alloc()
53 obj = find_first_zero_bit(alloc->table, alloc->max); in c4iw_id_alloc()
60 if (alloc->last >= alloc->max) in c4iw_id_alloc()
61 alloc->last = 0; in c4iw_id_alloc()
63 obj += alloc->start; in c4iw_id_alloc()
75 obj -= alloc->start; in c4iw_id_free()
85 alloc->start = start; in c4iw_id_table_alloc()
86 alloc->flags = flags; in c4iw_id_table_alloc()
90 alloc->last = 0; in c4iw_id_table_alloc()
91 alloc->max = num; in c4iw_id_table_alloc()
[all …]
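
id_table.c above is the classic round-robin bitmap allocator: scan for a zero bit starting at last (line 51), fall back to a full scan on wrap (line 53), reset last once it runs past max (lines 60-61), and bias the result by start (line 63). A self-contained sketch of the same pattern, using a plain word-array bitmap instead of the kernel's find_next_zero_bit helpers (all names hypothetical):

    /*
     * Round-robin bitmap ID allocator in the style of c4iw_id_alloc
     * (hypothetical userspace names; the kernel uses find_next_zero_bit).
     */
    #include <stdint.h>
    #include <stdio.h>

    #define MAX_IDS 128
    #define WORDS (MAX_IDS / 64)

    struct id_table {
        uint64_t bits[WORDS];
        unsigned int start; /* first ID value handed out */
        unsigned int last;  /* resume point for the next scan */
        unsigned int max;   /* number of IDs managed */
    };

    static int find_zero_from(const struct id_table *t, unsigned int from)
    {
        for (unsigned int i = from; i < t->max; i++)
            if (!(t->bits[i / 64] & (1ULL << (i % 64))))
                return (int)i;
        return -1;
    }

    static int id_alloc(struct id_table *t)
    {
        int obj = find_zero_from(t, t->last); /* resume after last, cf. line 51 */

        if (obj < 0)
            obj = find_zero_from(t, 0);       /* wrap: full rescan, cf. line 53 */
        if (obj < 0)
            return -1;                        /* table exhausted */
        t->bits[obj / 64] |= 1ULL << (obj % 64);
        t->last = (unsigned int)obj + 1;
        if (t->last >= t->max)                /* cf. lines 60-61 */
            t->last = 0;
        return obj + (int)t->start;           /* cf. line 63 */
    }

    static void id_free(struct id_table *t, int id)
    {
        unsigned int obj = (unsigned int)id - t->start; /* cf. line 75 */

        t->bits[obj / 64] &= ~(1ULL << (obj % 64));
    }

    int main(void)
    {
        struct id_table t = { .start = 100, .last = 0, .max = MAX_IDS };
        int a = id_alloc(&t), b = id_alloc(&t);

        printf("%d %d\n", a, b); /* 100 101 */
        id_free(&t, a);
        printf("%d\n", id_alloc(&t)); /* 102: the scan skips freed 100 */
        return 0;
    }

Resuming at last spreads IDs across the table instead of immediately recycling freed ones, as the sample output shows.
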
/drivers/android/tests/
binder_alloc_kunit.c
141 struct binder_alloc *alloc, in check_buffer_pages_allocated() argument
156 alloc->pages[page_index] ? in check_buffer_pages_allocated()
183 struct binder_alloc *alloc, in binder_alloc_test_free_buf() argument
217 if (alloc->pages[i]) { in binder_alloc_test_free_page()
335 struct binder_alloc *alloc, in gen_buf_sizes() argument
367 permute_frees(test, alloc, tc, runs, failures, 0, alloc->buffer_size); in gen_buf_sizes()
406 struct binder_alloc alloc; member
423 struct binder_alloc *alloc = &priv->alloc; in binder_alloc_test_mmap() local
470 binder_alloc_vma_close(alloc); in binder_alloc_test_vma_close()
486 vma->vm_private_data = alloc; in binder_alloc_test_mmap_handler()
[all …]
/drivers/infiniband/hw/mthca/
mthca_allocator.c
47 obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last); in mthca_alloc()
49 alloc->top = (alloc->top + alloc->max) & alloc->mask; in mthca_alloc()
50 obj = find_first_zero_bit(alloc->table, alloc->max); in mthca_alloc()
55 obj |= alloc->top; in mthca_alloc()
73 alloc->last = min(alloc->last, obj); in mthca_free()
74 alloc->top = (alloc->top + alloc->max) & alloc->mask; in mthca_free()
86 alloc->last = 0; in mthca_alloc_init()
87 alloc->top = 0; in mthca_alloc_init()
88 alloc->max = num; in mthca_alloc_init()
89 alloc->mask = mask; in mthca_alloc_init()
[all …]
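
mthca's allocator adds a twist to the same bitmap scheme: when the scan wraps, top advances by max under mask (line 49) and is OR'd into the returned object (line 55), so a recycled low index still produces a fresh handle. The hits don't show how mask is sized, so the sketch below assumes a power-of-two max and shows only the generation-bit rotation, not the free tracking (hypothetical names):

    /*
     * Sketch of mthca-style generation bits (hypothetical names).
     * Assumes max is a power of two and mask covers the generation bits.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct gen_alloc {
        uint32_t last; /* next low index to hand out */
        uint32_t top;  /* generation bits OR'd into results */
        uint32_t max;  /* size of the low-index space */
        uint32_t mask; /* keeps top within the handle's spare bits */
    };

    static uint32_t gen_alloc_obj(struct gen_alloc *a)
    {
        if (a->last >= a->max) {                  /* scan wrapped */
            a->last = 0;
            a->top = (a->top + a->max) & a->mask; /* cf. line 49 */
        }
        return a->last++ | a->top;                /* cf. line 55 */
    }

    int main(void)
    {
        struct gen_alloc a = { .max = 4, .mask = 0xff };

        for (int i = 0; i < 6; i++)
            printf("%u ", gen_alloc_obj(&a));
        printf("\n"); /* 0 1 2 3 4 5: indexes 0 and 1 return under top=4 */
        return 0;
    }
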
mthca_uar.c
40 uar->index = mthca_alloc(&dev->uar_table.alloc); in mthca_uar_alloc()
51 mthca_free(&dev->uar_table.alloc, uar->index); in mthca_uar_free()
58 ret = mthca_alloc_init(&dev->uar_table.alloc, in mthca_init_uar_table()
67 mthca_alloc_cleanup(&dev->uar_table.alloc); in mthca_init_uar_table()
77 mthca_alloc_cleanup(&dev->uar_table.alloc); in mthca_cleanup_uar_table()
mthca_pd.c
46 pd->pd_num = mthca_alloc(&dev->pd_table.alloc); in mthca_pd_alloc()
56 mthca_free(&dev->pd_table.alloc, pd->pd_num); in mthca_pd_alloc()
66 mthca_free(&dev->pd_table.alloc, pd->pd_num); in mthca_pd_free()
71 return mthca_alloc_init(&dev->pd_table.alloc, in mthca_init_pd_table()
80 mthca_alloc_cleanup(&dev->pd_table.alloc); in mthca_cleanup_pd_table()
mthca_dev.h
194 struct mthca_alloc alloc; member
200 struct mthca_alloc alloc; member
226 struct mthca_alloc alloc; member
239 struct mthca_alloc alloc; member
246 struct mthca_alloc alloc; member
253 struct mthca_alloc alloc; member
269 struct mthca_alloc alloc; member
274 struct mthca_alloc alloc; member
413 u32 mthca_alloc(struct mthca_alloc *alloc);
414 void mthca_free(struct mthca_alloc *alloc, u32 obj);
[all …]
mthca_mcg.c
148 index = mthca_alloc(&dev->mcg_table.alloc); in mthca_multicast_attach()
206 mthca_free(&dev->mcg_table.alloc, index); in mthca_multicast_attach()
286 mthca_free(&dev->mcg_table.alloc, amgm_index_to_free); in mthca_multicast_detach()
305 mthca_free(&dev->mcg_table.alloc, index); in mthca_multicast_detach()
320 err = mthca_alloc_init(&dev->mcg_table.alloc, in mthca_init_mcg_table()
334 mthca_alloc_cleanup(&dev->mcg_table.alloc); in mthca_cleanup_mcg_table()
mthca_av.c
172 index = mthca_alloc(&dev->av_table.alloc); in mthca_create_ah()
247 mthca_free(&dev->av_table.alloc, in mthca_destroy_ah()
333 err = mthca_alloc_init(&dev->av_table.alloc, in mthca_init_av_table()
363 mthca_alloc_cleanup(&dev->av_table.alloc); in mthca_init_av_table()
375 mthca_alloc_cleanup(&dev->av_table.alloc); in mthca_cleanup_av_table()
/drivers/gpu/drm/ttm/
ttm_pool.c
222 unsigned int num_pages = alloc->pages - alloc->caching_divide; in ttm_pool_apply_caching()
227 switch (alloc->tt_caching) { in ttm_pool_apply_caching()
236 alloc->caching_divide = alloc->pages; in ttm_pool_apply_caching()
496 if (!alloc->dma_addr) in ttm_pool_allocated_page_commit()
571 alloc, nr); in ttm_pool_restore_commit()
573 alloc->caching_divide = alloc->pages; in ttm_pool_restore_commit()
622 if (alloc->dma_addr) { in ttm_pool_page_allocated()
635 alloc->caching_divide = alloc->pages; in ttm_pool_page_allocated()
683 alloc->pages = tt->pages; in ttm_pool_alloc_state_init()
730 alloc->remaining_pages; in __ttm_pool_alloc()
[all …]
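
The ttm_pool hits revolve around a cursor: caching_divide records how many of the allocated pages already have their caching attributes applied, so each pass only touches the pages added since the last one (line 222) before advancing the cursor (lines 236, 573, 635). A small sketch of that incremental pattern (hypothetical names):

    /*
     * Sketch of the caching_divide cursor from ttm_pool.c (hypothetical
     * names): apply an attribute only to pages added since the last pass.
     */
    #include <stdio.h>

    struct alloc_state {
        unsigned int pages;          /* pages allocated so far */
        unsigned int caching_divide; /* pages already processed */
    };

    static void apply_caching(struct alloc_state *a)
    {
        unsigned int num_pages = a->pages - a->caching_divide; /* cf. line 222 */

        if (num_pages)
            printf("applying caching to %u new page(s)\n", num_pages);
        a->caching_divide = a->pages; /* advance the cursor, cf. line 236 */
    }

    int main(void)
    {
        struct alloc_state a = { 0 };

        a.pages = 4;
        apply_caching(&a); /* touches 4 new pages */
        a.pages = 6;
        apply_caching(&a); /* touches only the 2 added since */
        return 0;
    }
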
/drivers/md/bcache/
alloc.c
561 struct bkey *alloc) in pick_data_bucket() argument
577 if (!ret->sectors_free && KEY_PTRS(alloc)) { in pick_data_bucket()
579 bkey_copy(&ret->key, alloc); in pick_data_bucket()
580 bkey_init(alloc); in pick_data_bucket()
607 BKEY_PADDED(key) alloc; in bch_alloc_sectors()
617 bkey_init(&alloc.key); in bch_alloc_sectors()
620 while (!(b = pick_data_bucket(c, k, write_point, &alloc.key))) { in bch_alloc_sectors()
627 if (bch_bucket_alloc_set(c, watermark, &alloc.key, wait)) in bch_alloc_sectors()
638 if (KEY_PTRS(&alloc.key)) in bch_alloc_sectors()
639 bkey_put(c, &alloc.key); in bch_alloc_sectors()
/drivers/xen/xenbus/
xenbus_comms.c
211 void *alloc; in process_msg() member
264 state.alloc = kmalloc(len, GFP_NOIO | __GFP_HIGH); in process_msg()
265 if (!state.alloc) in process_msg()
271 state.body = state.alloc; in process_msg()
319 state.alloc = NULL; in process_msg()
325 kfree(state.alloc); in process_msg()
326 state.alloc = NULL; in process_msg()
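
The xenbus excerpt shows a tidy ownership-handoff idiom: the message body aliases state.alloc (line 271), and once the message is passed along the pointer is NULLed (line 319) so the shared cleanup's kfree (lines 325-326) is a harmless no-op rather than a double free. A userspace sketch of the idiom, with hypothetical names and malloc in place of kmalloc:

    /*
     * Sketch of the NULL-after-handoff idiom from process_msg above
     * (hypothetical names). free(NULL) is defined to do nothing, so a
     * single cleanup covers both the success and failure exits.
     */
    #include <stdlib.h>
    #include <string.h>

    struct msg_state {
        void *alloc; /* owned until handed off */
        char *body;  /* aliases alloc, cf. line 271 */
    };

    /* Consumer takes ownership (and eventually frees) on success. */
    static int deliver(char *body)
    {
        free(body);
        return 0;
    }

    static int process_msg(const char *payload, size_t len)
    {
        struct msg_state state = { 0 };
        int err;

        state.alloc = malloc(len);
        if (!state.alloc)
            return -1;
        state.body = state.alloc;
        memcpy(state.body, payload, len);

        err = deliver(state.body);
        if (err == 0)
            state.alloc = NULL; /* handed off, cf. line 319 */

        free(state.alloc); /* no-op on the success path, cf. lines 325-326 */
        return err;
    }

    int main(void)
    {
        return process_msg("hello", 5);
    }
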
/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/
engine.c
53 ret = rm->api->ce->alloc(chan, handle, class, inst, &obj->rm); in nvkm_rm_engine_obj_new()
59 ret = rm->api->nvdec->alloc(chan, handle, class, inst, &obj->rm); in nvkm_rm_engine_obj_new()
62 ret = rm->api->nvenc->alloc(chan, handle, class, inst, &obj->rm); in nvkm_rm_engine_obj_new()
65 ret = rm->api->nvjpg->alloc(chan, handle, class, inst, &obj->rm); in nvkm_rm_engine_obj_new()
68 ret = rm->api->ofa->alloc(chan, handle, class, inst, &obj->rm); in nvkm_rm_engine_obj_new()
rm.h
63 } *alloc; member
113 int (*alloc)(struct nvkm_gsp_device *, u32 handle, member
122 int (*alloc)(struct nvkm_gsp_object *chan, u32 handle, u32 class, int inst, member
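
engine.c reaches object creation through per-engine function pointers (rm->api->ce->alloc, ->nvdec->alloc, and so on), matching the alloc members declared in rm.h (lines 63, 113, 122). A stripped-down sketch of that ops-table dispatch, with invented engine names and signatures:

    /*
     * Sketch of per-engine alloc hooks in the style of rm.h/engine.c
     * (all names and engines invented for illustration).
     */
    #include <stdio.h>

    struct obj {
        int handle;
    };

    struct engine_ops {
        int (*alloc)(struct obj *o, unsigned int handle);
    };

    static int ce_alloc(struct obj *o, unsigned int handle)
    {
        o->handle = (int)handle;
        printf("ce object %u\n", handle);
        return 0;
    }

    static int nvdec_alloc(struct obj *o, unsigned int handle)
    {
        o->handle = (int)handle;
        printf("nvdec object %u\n", handle);
        return 0;
    }

    struct api {
        const struct engine_ops *ce;
        const struct engine_ops *nvdec;
    };

    int main(void)
    {
        static const struct engine_ops ce_ops = { .alloc = ce_alloc };
        static const struct engine_ops nvdec_ops = { .alloc = nvdec_alloc };
        struct api api = { .ce = &ce_ops, .nvdec = &nvdec_ops };
        struct obj o;

        /* Callers pick an engine, then go through its alloc hook,
         * as nvkm_rm_engine_obj_new does at lines 53-68 above. */
        return api.ce->alloc(&o, 1) || api.nvdec->alloc(&o, 2);
    }
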
/drivers/iommu/
io-pgtable.c
41 if (!cfg->alloc && !cfg->free) in check_custom_allocator()
47 if (!cfg->alloc || !cfg->free) in check_custom_allocator()
74 iop = fns->alloc(cfg, cookie); in alloc_io_pgtable_ops()
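
io-pgtable.c enforces an all-or-nothing rule for custom page-table allocators: neither hook set means the defaults apply (line 41), while setting only one of cfg->alloc/cfg->free is rejected (line 47). A compact sketch of the check (hypothetical config type):

    /*
     * Sketch of the both-or-neither rule from check_custom_allocator
     * (hypothetical config type).
     */
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    struct pgtable_cfg {
        void *(*alloc)(size_t size);
        void (*free)(void *p);
    };

    static bool check_custom_allocator(const struct pgtable_cfg *cfg)
    {
        if (!cfg->alloc && !cfg->free)
            return true;  /* neither set: defaults apply, cf. line 41 */
        if (!cfg->alloc || !cfg->free)
            return false; /* mismatched pair: reject, cf. line 47 */
        return true;      /* both set: use the custom allocator */
    }

    static void *stub_alloc(size_t size)
    {
        (void)size;
        return NULL; /* placeholder: only its address matters here */
    }

    int main(void)
    {
        struct pgtable_cfg none = { 0 };
        struct pgtable_cfg half = { .alloc = stub_alloc }; /* no free hook */

        printf("%d %d\n", check_custom_allocator(&none),
               check_custom_allocator(&half)); /* prints "1 0" */
        return 0;
    }
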
/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.c
164 u32 contig, alloc; in mlx5e_xsk_alloc_rx_wqes_batched() local
173 alloc = xsk_buff_alloc_batch(rq->xsk_pool, buffs + ix, wqe_bulk); in mlx5e_xsk_alloc_rx_wqes_batched()
175 alloc = xsk_buff_alloc_batch(rq->xsk_pool, buffs + ix, contig); in mlx5e_xsk_alloc_rx_wqes_batched()
176 if (likely(alloc == contig)) in mlx5e_xsk_alloc_rx_wqes_batched()
177 alloc += xsk_buff_alloc_batch(rq->xsk_pool, buffs, wqe_bulk - contig); in mlx5e_xsk_alloc_rx_wqes_batched()
180 for (i = 0; i < alloc; i++) { in mlx5e_xsk_alloc_rx_wqes_batched()
195 return alloc; in mlx5e_xsk_alloc_rx_wqes_batched()
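
The mlx5 XSK path fills a ring in at most two batch calls: one for the contiguous stretch before the ring wraps (line 175) and, only when that stretch was fully satisfied, a second from slot 0 for the remainder (lines 176-177). A sketch of the split, with a made-up pool standing in for xsk_buff_alloc_batch:

    /*
     * Sketch of the two-call ring fill from mlx5e_xsk_alloc_rx_wqes_batched
     * (pool and batch helper are made up; the split logic is the point).
     */
    #include <stdio.h>

    #define RING 8

    static unsigned int pool_avail = 10; /* pretend buffer pool */

    /* May return fewer than requested, like xsk_buff_alloc_batch. */
    static unsigned int buf_alloc_batch(int *slots, unsigned int ix,
                                        unsigned int n)
    {
        unsigned int got = n < pool_avail ? n : pool_avail;

        pool_avail -= got;
        for (unsigned int i = 0; i < got; i++)
            slots[ix + i] = 1; /* mark slot filled */
        return got;
    }

    static unsigned int fill_ring(int *slots, unsigned int ix,
                                  unsigned int wqe_bulk)
    {
        unsigned int contig = RING - ix; /* room before the ring wraps */
        unsigned int alloc;

        if (wqe_bulk <= contig)
            return buf_alloc_batch(slots, ix, wqe_bulk);

        alloc = buf_alloc_batch(slots, ix, contig);
        if (alloc == contig) /* cf. line 176: wrap only if fully satisfied */
            alloc += buf_alloc_batch(slots, 0, wqe_bulk - contig);
        return alloc;
    }

    int main(void)
    {
        int slots[RING] = { 0 };

        printf("filled %u\n", fill_ring(slots, 6, 5)); /* 2 contig + 3 wrapped */
        return 0;
    }
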
/drivers/dax/
bus.c
847 struct resource *alloc; in alloc_dev_dax_range() local
862 if (!alloc) in alloc_dev_dax_range()
868 __release_region(res, alloc->start, resource_size(alloc)); in alloc_dev_dax_range()
878 .start = alloc->start, in alloc_dev_dax_range()
879 .end = alloc->end, in alloc_dev_dax_range()
884 &alloc->start, &alloc->end); in alloc_dev_dax_range()
1029 resource_size_t alloc = 0; in dev_dax_resize() local
1067 alloc = 0; in dev_dax_resize()
1073 if (!alloc && !next && res->end < region_res->end) in dev_dax_resize()
1076 if (!alloc) in dev_dax_resize()
[all …]
/drivers/thunderbolt/
lc.c
532 u32 val, alloc; in tb_lc_dp_sink_available() local
545 alloc = val & TB_LC_SNK_ALLOCATION_SNK0_MASK; in tb_lc_dp_sink_available()
546 if (!alloc || alloc == TB_LC_SNK_ALLOCATION_SNK0_CM) in tb_lc_dp_sink_available()
549 alloc = (val & TB_LC_SNK_ALLOCATION_SNK1_MASK) >> in tb_lc_dp_sink_available()
551 if (!alloc || alloc == TB_LC_SNK_ALLOCATION_SNK1_CM) in tb_lc_dp_sink_available()
/drivers/firmware/efi/libstub/
Makefile
122 --rename-section .bss=.bss.efistub,load,alloc
139 STUBCOPY_FLAGS-$(CONFIG_ARM64) += --prefix-alloc-sections=.init \
146 STUBCOPY_FLAGS-$(CONFIG_RISCV) += --prefix-alloc-sections=.init \
152 STUBCOPY_FLAGS-$(CONFIG_LOONGARCH) += --prefix-alloc-sections=.init \
/drivers/net/ethernet/
ec_bhf.c
117 u8 *alloc; member
341 buf->alloc = dma_alloc_coherent(dev, buf->alloc_len, &buf->alloc_phys, in ec_bhf_alloc_dma_mem()
343 if (buf->alloc == NULL) { in ec_bhf_alloc_dma_mem()
349 buf->buf = buf->alloc + (buf->buf_phys - buf->alloc_phys); in ec_bhf_alloc_dma_mem()
425 dma_free_coherent(dev, priv->rx_buf.alloc_len, priv->rx_buf.alloc, in ec_bhf_open()
443 priv->tx_buf.alloc, priv->tx_buf.alloc_phys); in ec_bhf_stop()
445 priv->rx_buf.alloc, priv->rx_buf.alloc_phys); in ec_bhf_stop()
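
ec_bhf keeps two views of each DMA buffer: alloc/alloc_phys as returned by dma_alloc_coherent, and buf/buf_phys for the region the device actually uses; line 349 derives the CPU pointer by reapplying the byte offset between the two bus addresses. A userspace sketch of that offset arithmetic, with malloc standing in for the DMA allocator and the CPU address doubling as the bus address:

    /*
     * Sketch of the aligned-sub-buffer arithmetic from ec_bhf_alloc_dma_mem.
     * malloc stands in for dma_alloc_coherent and the CPU address doubles
     * as the bus address, which is enough to show the offset math.
     */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct dma_buf {
        uint8_t *alloc;       /* what the allocator returned */
        uintptr_t alloc_phys;
        uint8_t *buf;         /* aligned region the device uses */
        uintptr_t buf_phys;
    };

    static int dma_buf_init(struct dma_buf *b, size_t len, size_t align)
    {
        b->alloc = malloc(len + align); /* over-allocate to allow aligning */
        if (!b->alloc)
            return -1;
        b->alloc_phys = (uintptr_t)b->alloc;
        b->buf_phys = (b->alloc_phys + align - 1) & ~(uintptr_t)(align - 1);
        /* Reapply the same byte offset on the CPU side, cf. line 349. */
        b->buf = b->alloc + (b->buf_phys - b->alloc_phys);
        return 0;
    }

    int main(void)
    {
        struct dma_buf b;

        if (dma_buf_init(&b, 4096, 256))
            return 1;
        printf("offset %td\n", b.buf - b.alloc); /* somewhere in 0..255 */
        free(b.alloc); /* always free the original pointer, not buf */
        return 0;
    }
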
/drivers/gpu/drm/ttm/tests/
ttm_mock_manager.c
92 .alloc = ttm_mock_manager_alloc,
181 .alloc = ttm_bad_manager_alloc,
187 .alloc = ttm_busy_manager_alloc,
/drivers/gpu/host1x/
job.c
217 struct iova *alloc; in pin_job() local
244 alloc = alloc_iova(&host->iova, gather_size >> shift, in pin_job()
246 if (!alloc) { in pin_job()
251 err = iommu_map_sgtable(host->domain, iova_dma_addr(&host->iova, alloc), in pin_job()
254 __free_iova(&host->iova, alloc); in pin_job()
259 map->phys = iova_dma_addr(&host->iova, alloc); in pin_job()
cdma.c
74 struct iova *alloc; in host1x_pushbuffer_init() local
99 alloc = alloc_iova(&host1x->iova, size >> shift, in host1x_pushbuffer_init()
101 if (!alloc) { in host1x_pushbuffer_init()
106 pb->dma = iova_dma_addr(&host1x->iova, alloc); in host1x_pushbuffer_init()
127 __free_iova(&host1x->iova, alloc); in host1x_pushbuffer_init()
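
Both host1x hits follow the same IOVA discipline: reserve a range with alloc_iova, attempt the IOMMU mapping, and hand the range back with __free_iova when the mapping fails (lines 254 and 127), so failed pins never leak reservations. A sketch of the reserve/map/unwind shape, with stand-ins for the iova and iommu calls (all names hypothetical):

    /*
     * Sketch of the alloc_iova / map / __free_iova unwind pattern seen in
     * pin_job and host1x_pushbuffer_init (all names are stand-ins).
     */
    #include <stdbool.h>
    #include <stdio.h>

    struct iova_range {
        unsigned long start, len;
        bool live;
    };

    static bool iova_reserve(struct iova_range *r, unsigned long len)
    {
        r->start = 0x1000; /* pretend the allocator picked this range */
        r->len = len;
        r->live = true;
        return true;
    }

    static void iova_release(struct iova_range *r)
    {
        r->live = false;
    }

    static int iommu_map_range(struct iova_range *r, bool fail)
    {
        (void)r;
        return fail ? -1 : 0; /* simulated mapping outcome */
    }

    static int pin_buffer(unsigned long len, bool make_map_fail)
    {
        struct iova_range r;

        if (!iova_reserve(&r, len))
            return -1;
        if (iommu_map_range(&r, make_map_fail)) {
            iova_release(&r); /* unwind so the reservation isn't leaked */
            return -1;
        }
        return 0; /* mapped; caller releases via the normal teardown */
    }

    int main(void)
    {
        printf("%d %d\n", pin_buffer(4096, false), pin_buffer(4096, true));
        return 0; /* prints "0 -1" */
    }
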
