Searched refs:gem (Results 1 – 25 of 113), sorted by relevance

/drivers/gpu/drm/tegra/
gem.c
62 struct drm_gem_object *gem = &obj->gem; in tegra_bo_pin() local
96 map->size = gem->size; in tegra_bo_pin()
148 map->size = gem->size; in tegra_bo_pin()
529 dma_buf_detach(gem->dma_buf, gem->import_attach); in tegra_bo_free_object()
600 gem->size); in __tegra_gem_mmap()
650 gem->size) < 0) in tegra_gem_prime_map_dma_buf()
715 err = drm_gem_mmap_obj(gem, gem->size, vma); in tegra_gem_prime_mmap()
766 exp_info.priv = gem; in tegra_gem_prime_export()
781 return gem; in tegra_gem_prime_import()
789 return &bo->gem; in tegra_gem_prime_import()
[all …]
gem.h
56 struct drm_gem_object gem; member
73 static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem) in to_tegra_bo() argument
75 return container_of(gem, struct tegra_bo, gem); in to_tegra_bo()
90 void tegra_bo_free_object(struct drm_gem_object *gem);
96 int __tegra_gem_mmap(struct drm_gem_object *gem, struct vm_area_struct *vma);
99 struct dma_buf *tegra_gem_prime_export(struct drm_gem_object *gem,
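
The gem.h hits above illustrate the common DRM idiom of embedding a struct drm_gem_object inside the driver's buffer object and recovering the wrapper with container_of(), as to_tegra_bo() does. A minimal sketch of that idiom follows; struct my_bo and my_bo_from_gem() are hypothetical names standing in for tegra_bo and to_tegra_bo():

#include <linux/container_of.h>
#include <drm/drm_gem.h>

/* Hypothetical driver buffer object embedding the core GEM object. */
struct my_bo {
	struct drm_gem_object gem;	/* base object, kept embedded */
	void *vaddr;			/* driver-private state */
};

/* Recover the wrapper from the embedded base object. */
static inline struct my_bo *my_bo_from_gem(struct drm_gem_object *gem)
{
	return container_of(gem, struct my_bo, gem);
}
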
fb.c
121 fb->obj[i] = &planes[i]->gem; in tegra_fb_alloc()
140 struct drm_gem_object *gem; in tegra_fb_create() local
150 gem = drm_gem_object_lookup(file, cmd->handles[i]); in tegra_fb_create()
151 if (!gem) { in tegra_fb_create()
161 if (gem->size < size) { in tegra_fb_create()
163 drm_gem_object_put(gem); in tegra_fb_create()
167 planes[i] = to_tegra_bo(gem); in tegra_fb_create()
180 drm_gem_object_put(&planes[i]->gem); in tegra_fb_create()
drm.c
371 if (!gem) in tegra_gem_mmap()
374 bo = to_tegra_bo(gem); in tegra_gem_mmap()
378 drm_gem_object_put(gem); in tegra_gem_mmap()
645 if (!gem) in tegra_gem_set_tiling()
648 bo = to_tegra_bo(gem); in tegra_gem_set_tiling()
667 if (!gem) in tegra_gem_get_tiling()
670 bo = to_tegra_bo(gem); in tegra_gem_get_tiling()
709 if (!gem) in tegra_gem_set_flags()
712 bo = to_tegra_bo(gem); in tegra_gem_set_flags()
731 if (!gem) in tegra_gem_get_flags()
[all …]
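
The drm.c and fb.c hits follow the usual handle-to-object flow: drm_gem_object_lookup() resolves a userspace handle and takes a reference on the object, and drm_gem_object_put() drops that reference once the caller is done. A hedged sketch of the flow; my_gem_ioctl_example() and its parameters are made up for illustration:

#include <linux/errno.h>
#include <linux/types.h>
#include <drm/drm_device.h>
#include <drm/drm_file.h>
#include <drm/drm_gem.h>

/* Illustrative ioctl body: look up a handle, use the object, drop the reference. */
static int my_gem_ioctl_example(struct drm_device *dev, struct drm_file *file,
				u32 handle)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(file, handle);	/* takes a reference */
	if (!gem)
		return -ENOENT;

	/* ... operate on the object, e.g. validate gem->size ... */

	drm_gem_object_put(gem);			/* drop the lookup reference */
	return 0;
}
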
fbdev.c
33 err = drm_gem_mmap_obj(&bo->gem, bo->gem.size, vma); in tegra_fb_mmap()
37 return __tegra_gem_mmap(&bo->gem, vma); in tegra_fb_mmap()
105 drm_gem_object_put(&bo->gem); in tegra_fbdev_driver_fbdev_probe()
116 drm_gem_object_put(&bo->gem); in tegra_fbdev_driver_fbdev_probe()
/drivers/gpu/drm/
drm_gem_ttm_helper.c
27 const struct drm_gem_object *gem) in drm_gem_ttm_print_info() argument
66 int drm_gem_ttm_vmap(struct drm_gem_object *gem, in drm_gem_ttm_vmap() argument
69 struct ttm_buffer_object *bo = drm_gem_ttm_of_gem(gem); in drm_gem_ttm_vmap()
83 void drm_gem_ttm_vunmap(struct drm_gem_object *gem, in drm_gem_ttm_vunmap() argument
100 int drm_gem_ttm_mmap(struct drm_gem_object *gem, in drm_gem_ttm_mmap() argument
114 drm_gem_object_put(gem); in drm_gem_ttm_mmap()
139 struct drm_gem_object *gem; in drm_gem_ttm_dumb_map_offset() local
141 gem = drm_gem_object_lookup(file, handle); in drm_gem_ttm_dumb_map_offset()
142 if (!gem) in drm_gem_ttm_dumb_map_offset()
145 *offset = drm_vma_node_offset_addr(&gem->vma_node); in drm_gem_ttm_dumb_map_offset()
[all …]
drm_client.c
177 if (buffer->gem) { in drm_client_buffer_delete()
178 drm_gem_vunmap(buffer->gem, &buffer->map); in drm_client_buffer_delete()
179 drm_gem_object_put(buffer->gem); in drm_client_buffer_delete()
216 buffer->gem = obj; in drm_client_buffer_create()
250 struct drm_gem_object *gem = buffer->gem; in drm_client_buffer_vmap_local() local
254 drm_gem_lock(gem); in drm_client_buffer_vmap_local()
256 ret = drm_gem_vmap_locked(gem, map); in drm_client_buffer_vmap_local()
264 drm_gem_unlock(gem); in drm_client_buffer_vmap_local()
279 struct drm_gem_object *gem = buffer->gem; in drm_client_buffer_vunmap_local() local
282 drm_gem_vunmap_locked(gem, map); in drm_client_buffer_vunmap_local()
[all …]
drm_gem_vram_helper.c
185 struct drm_gem_object *gem; in drm_gem_vram_create() local
194 gem = dev->driver->gem_create_object(dev, size); in drm_gem_vram_create()
195 if (IS_ERR(gem)) in drm_gem_vram_create()
196 return ERR_CAST(gem); in drm_gem_vram_create()
197 gbo = drm_gem_vram_of_gem(gem); in drm_gem_vram_create()
202 gem = &gbo->bo.base; in drm_gem_vram_create()
205 if (!gem->funcs) in drm_gem_vram_create()
206 gem->funcs = &drm_gem_vram_object_funcs; in drm_gem_vram_create()
208 ret = drm_gem_object_init(dev, gem, size); in drm_gem_vram_create()
521 struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem); in drm_gem_vram_object_free()
[all …]
drm_gpuvm.c
2065 op.map.gem.obj = obj; in op_map_cb()
2066 op.map.gem.offset = offset; in op_map_cb()
2145 .gem.obj = obj, in __drm_gpuvm_sm_map()
2163 .gem.obj = obj, in __drm_gpuvm_sm_map()
2164 .gem.offset = offset, in __drm_gpuvm_sm_map()
2190 .gem.obj = obj, in __drm_gpuvm_sm_map()
2223 .gem.obj = obj, in __drm_gpuvm_sm_map()
2268 prev.gem.obj = obj; in __drm_gpuvm_sm_unmap()
2269 prev.gem.offset = offset; in __drm_gpuvm_sm_unmap()
2277 next.gem.obj = obj; in __drm_gpuvm_sm_unmap()
[all …]
drm_fb_dma_helper.c
52 struct drm_gem_object *gem; in drm_fb_dma_get_gem_obj() local
54 gem = drm_gem_fb_get_obj(fb, plane); in drm_fb_dma_get_gem_obj()
55 if (!gem) in drm_fb_dma_get_gem_obj()
58 return to_drm_gem_dma_obj(gem); in drm_fb_dma_get_gem_obj()
/drivers/gpu/drm/nova/
gem.rs
5 drm::{gem, gem::BaseObject},
19 impl gem::BaseDriverObject<gem::Object<NovaObject>> for NovaObject { impls
25 impl gem::DriverObject for NovaObject {
31 pub(crate) fn new(dev: &NovaDevice, size: usize) -> Result<ARef<gem::Object<Self>>> { in new()
38 gem::Object::new(dev, aligned_size) in new()
46 ) -> Result<ARef<gem::Object<Self>>> { in lookup_handle()
47 gem::Object::lookup_handle(file, handle) in lookup_handle()
driver.rs
3 use kernel::{auxiliary, c_str, device::Core, drm, drm::gem, drm::ioctl, prelude::*, types::ARef};
6 use crate::gem::NovaObject;
60 type Object = gem::Object<NovaObject>;
/drivers/gpu/drm/i915/
Makefile
140 gem-y += \
141 gem/i915_gem_busy.o \
149 gem/i915_gem_lmem.o \
150 gem/i915_gem_mman.o \
152 gem/i915_gem_pages.o \
153 gem/i915_gem_phys.o \
154 gem/i915_gem_pm.o \
161 gem/i915_gem_ttm.o \
165 gem/i915_gem_wait.o \
166 gem/i915_gemfs.o
[all …]
i915_sysfs.c
79 spin_lock(&i915->gem.contexts.lock); in i915_l3_read()
84 spin_unlock(&i915->gem.contexts.lock); in i915_l3_read()
112 spin_lock(&i915->gem.contexts.lock); in i915_l3_write()
125 list_for_each_entry(ctx, &i915->gem.contexts.list, link) in i915_l3_write()
128 spin_unlock(&i915->gem.contexts.lock); in i915_l3_write()
/drivers/net/ethernet/sun/
sungem.c
145 struct gem *gp = netdev_priv(dev); in _sungem_phy_read()
178 struct gem *gp = netdev_priv(dev); in _sungem_phy_write()
876 struct gem *gp = container_of(napi, struct gem, napi); in gem_poll()
927 struct gem *gp = netdev_priv(dev); in gem_interrupt()
954 struct gem *gp = netdev_priv(dev); in gem_tx_timeout()
982 struct gem *gp = netdev_priv(dev); in gem_start_xmit()
2131 struct gem *gp = netdev_priv(dev); in gem_do_start()
2170 struct gem *gp = netdev_priv(dev); in gem_do_stop()
2215 struct gem *gp = container_of(work, struct gem, reset_task); in gem_reset_task()
2262 struct gem *gp = netdev_priv(dev); in gem_open()
[all …]
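
The sungem hits rely on the netdev_priv() idiom: the driver's struct gem lives in the private area allocated together with the struct net_device, and every handler recovers it with netdev_priv(dev). A minimal sketch under that assumption; struct my_priv, my_open() and my_alloc() are illustrative names, not from sungem.c:

#include <linux/etherdevice.h>
#include <linux/netdevice.h>

/* Illustrative per-device private data, playing the role of struct gem. */
struct my_priv {
	int link_up;
};

static int my_open(struct net_device *dev)
{
	struct my_priv *priv = netdev_priv(dev);	/* private area follows net_device */

	priv->link_up = 0;
	return 0;
}

/* alloc_etherdev() reserves sizeof(struct my_priv) bytes after the net_device. */
static struct net_device *my_alloc(void)
{
	return alloc_etherdev(sizeof(struct my_priv));
}
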
/drivers/gpu/drm/nouveau/
nouveau_gem.c
477 struct drm_gem_object *gem; in validate_init() local
481 if (!gem) { in validate_init()
489 drm_gem_object_put(gem); in validate_init()
496 drm_gem_object_put(gem); in validate_init()
984 struct drm_gem_object *gem; in nouveau_gem_ioctl_cpu_prep() local
992 if (!gem) in nouveau_gem_ioctl_cpu_prep()
1007 drm_gem_object_put(gem); in nouveau_gem_ioctl_cpu_prep()
1021 if (!gem) in nouveau_gem_ioctl_cpu_fini()
1026 drm_gem_object_put(gem); in nouveau_gem_ioctl_cpu_fini()
1039 if (!gem) in nouveau_gem_ioctl_info()
[all …]
nouveau_ttm.c
192 drm->gem.vram_available >> PAGE_SHIFT); in nouveau_ttm_init_vram()
198 drm->gem.vram_available >> PAGE_SHIFT); in nouveau_ttm_init_vram()
221 unsigned long size_pages = drm->gem.gart_available >> PAGE_SHIFT; in nouveau_ttm_init_gtt()
313 drm->gem.vram_available = drm->client.device.info.ram_user; in nouveau_ttm_init()
329 drm->gem.gart_available = drm->client.vmm.vmm.limit; in nouveau_ttm_init()
331 drm->gem.gart_available = drm->agp.size; in nouveau_ttm_init()
343 NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20)); in nouveau_ttm_init()
344 NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20)); in nouveau_ttm_init()
nouveau_gem.h
11 nouveau_gem_object(struct drm_gem_object *gem) in nouveau_gem_object() argument
13 return gem ? container_of(gem, struct nouveau_bo, bo.base) : NULL; in nouveau_gem_object()
/drivers/gpu/drm/xe/display/
intel_fb_bo.c
77 struct drm_gem_object *gem = drm_gem_object_lookup(filp, mode_cmd->handles[0]); in intel_fb_bo_lookup_valid_bo() local
79 if (!gem) in intel_fb_bo_lookup_valid_bo()
82 bo = gem_to_xe_bo(gem); in intel_fb_bo_lookup_valid_bo()
87 drm_gem_object_put(gem); in intel_fb_bo_lookup_valid_bo()
91 return gem; in intel_fb_bo_lookup_valid_bo()
/drivers/gpu/drm/loongson/
lsdc_gem.c
167 mutex_lock(&ldev->gem.mutex); in lsdc_gem_object_create()
168 list_add_tail(&lbo->list, &ldev->gem.objects); in lsdc_gem_object_create()
169 mutex_unlock(&ldev->gem.mutex); in lsdc_gem_object_create()
269 mutex_init(&ldev->gem.mutex); in lsdc_gem_init()
270 INIT_LIST_HEAD(&ldev->gem.objects); in lsdc_gem_init()
281 mutex_lock(&ldev->gem.mutex); in lsdc_show_buffer_object()
285 list_for_each_entry(lbo, &ldev->gem.objects, list) { in lsdc_show_buffer_object()
296 mutex_unlock(&ldev->gem.mutex); in lsdc_show_buffer_object()
lsdc_ttm.h
47 static inline struct ttm_buffer_object *to_ttm_bo(struct drm_gem_object *gem) in to_ttm_bo() argument
49 return container_of(gem, struct ttm_buffer_object, base); in to_ttm_bo()
57 static inline struct lsdc_bo *gem_to_lsdc_bo(struct drm_gem_object *gem) in gem_to_lsdc_bo() argument
59 return container_of(gem, struct lsdc_bo, tbo.base); in gem_to_lsdc_bo()
/drivers/gpu/drm/qxl/
qxl_gem.c
68 mutex_lock(&qdev->gem.mutex); in qxl_gem_object_create()
69 list_add_tail(&qbo->list, &qdev->gem.objects); in qxl_gem_object_create()
70 mutex_unlock(&qdev->gem.mutex); in qxl_gem_object_create()
125 INIT_LIST_HEAD(&qdev->gem.objects); in qxl_gem_init()
/drivers/gpu/drm/imagination/
pvr_fw.h
34 struct pvr_gem_object *gem; member
438 return pvr_gem_object_vmap(fw_obj->gem); in pvr_fw_object_vmap()
444 pvr_gem_object_vunmap(fw_obj->gem); in pvr_fw_object_vunmap()
471 return pvr_gem_get_dma_addr(fw_obj->gem, offset, dma_addr_out); in pvr_fw_object_get_dma_addr()
488 return pvr_gem_object_size(fw_obj->gem); in pvr_fw_obj_get_object_size()
/drivers/gpu/drm/renesas/rcar-du/
rcar_du_vsp.c
266 struct drm_gem_dma_object *gem = drm_fb_dma_get_gem_obj(fb, i); in rcar_du_vsp_map_fb() local
269 if (gem->sgt) { in rcar_du_vsp_map_fb()
280 ret = sg_alloc_table(sgt, gem->sgt->orig_nents, in rcar_du_vsp_map_fb()
285 src = gem->sgt->sgl; in rcar_du_vsp_map_fb()
287 for (j = 0; j < gem->sgt->orig_nents; ++j) { in rcar_du_vsp_map_fb()
294 ret = dma_get_sgtable(rcdu->dev, sgt, gem->vaddr, in rcar_du_vsp_map_fb()
295 gem->dma_addr, gem->base.size); in rcar_du_vsp_map_fb()
/drivers/gpu/drm/radeon/
radeon_prime.c
63 mutex_lock(&rdev->gem.mutex); in radeon_gem_prime_import_sg_table()
64 list_add_tail(&bo->list, &rdev->gem.objects); in radeon_gem_prime_import_sg_table()
65 mutex_unlock(&rdev->gem.mutex); in radeon_gem_prime_import_sg_table()
