Lines matching refs: imem (nouveau nvkm nv50 instmem, nv50.c)
Each hit is the source line number, the matching line, and the enclosing function; the member/local/argument tags come from the indexer.

47 	struct nv50_instmem *imem;  member
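For orientation, a sketch of the two structures these hits live in, reconstructed from the references below; field order and the comments are assumptions, not verbatim source:

        #define nv50_instobj(p) container_of((p), struct nv50_instobj, base.memory)

        struct nv50_instmem {
                struct nvkm_instmem base;
                u64 addr;                       /* cached PRAMIN window base (lines 66/86) */
                struct list_head lru;           /* idle BAR2 mappings, eviction candidates */
        };

        struct nv50_instobj {
                struct nvkm_instobj base;
                struct nv50_instmem *imem;      /* the member hit at line 47 */
                struct nvkm_memory *ram;
                struct nvkm_vma *bar;           /* BAR2 address-space allocation */
                refcount_t maps;                /* live CPU mappings (line 200) */
                void *map;
                struct list_head lru;
        };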
59 struct nv50_instmem *imem = iobj->imem; in nv50_instobj_wr32_slow() local
60 struct nvkm_device *device = imem->base.subdev.device; in nv50_instobj_wr32_slow()
65 spin_lock_irqsave(&imem->base.lock, flags); in nv50_instobj_wr32_slow()
66 if (unlikely(imem->addr != base)) { in nv50_instobj_wr32_slow()
68 imem->addr = base; in nv50_instobj_wr32_slow()
71 spin_unlock_irqrestore(&imem->base.lock, flags); in nv50_instobj_wr32_slow()
78 struct nv50_instmem *imem = iobj->imem; in nv50_instobj_rd32_slow() local
79 struct nvkm_device *device = imem->base.subdev.device; in nv50_instobj_rd32_slow()
85 spin_lock_irqsave(&imem->base.lock, flags); in nv50_instobj_rd32_slow()
86 if (unlikely(imem->addr != base)) { in nv50_instobj_rd32_slow()
88 imem->addr = base; in nv50_instobj_rd32_slow()
91 spin_unlock_irqrestore(&imem->base.lock, flags); in nv50_instobj_rd32_slow()
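Lines 59-91 are the two slow accessors, which go through the legacy PRAMIN window rather than a direct BAR2 mapping. A sketch of the write side (the read side at 78-91 mirrors it with nvkm_rd32); the window masks and register offsets follow the usual NV50 PRAMIN layout and are assumptions rather than quoted source:

        static void
        nv50_instobj_wr32_slow(struct nvkm_memory *memory, u64 offset, u32 data)
        {
                struct nv50_instobj *iobj = nv50_instobj(memory);
                struct nv50_instmem *imem = iobj->imem;
                struct nvkm_device *device = imem->base.subdev.device;
                u64 base = (nvkm_memory_addr(iobj->ram) + offset) & 0xffffff00000ULL;
                u64 addr = (nvkm_memory_addr(iobj->ram) + offset) & 0x000000fffffULL;
                unsigned long flags;

                spin_lock_irqsave(&imem->base.lock, flags);
                if (unlikely(imem->addr != base)) {
                        /* Re-bank the 1MiB window only on a miss; imem->addr
                         * caches the currently-selected base (lines 66-68). */
                        nvkm_wr32(device, 0x001700, base >> 16);
                        imem->addr = base;
                }
                nvkm_wr32(device, 0x700000 + addr, data);
                spin_unlock_irqrestore(&imem->base.lock, flags);
        }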
122 struct nv50_instmem *imem = iobj->imem; in nv50_instobj_kmap() local
125 struct nvkm_subdev *subdev = &imem->base.subdev; in nv50_instobj_kmap()
136 mutex_unlock(&imem->base.mutex); in nv50_instobj_kmap()
141 mutex_lock(&imem->base.mutex); in nv50_instobj_kmap()
142 eobj = list_first_entry_or_null(&imem->lru, typeof(*eobj), lru); in nv50_instobj_kmap()
154 mutex_unlock(&imem->base.mutex); in nv50_instobj_kmap()
163 mutex_lock(&imem->base.mutex); in nv50_instobj_kmap()
166 mutex_unlock(&imem->base.mutex); in nv50_instobj_kmap()
168 mutex_lock(&imem->base.mutex); in nv50_instobj_kmap()
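Lines 122-168 show the locking dance in nv50_instobj_kmap(): the instmem mutex is dropped across BAR2 allocation because page-table allocation can recurse back into instmem, and exhaustion is handled by evicting idle mappings off the LRU. A condensed sketch; the debug message, error handling and final ioremap are reconstructed around the listed lines:

        static void
        nv50_instobj_kmap(struct nv50_instobj *iobj, struct nvkm_vmm *vmm)
        {
                struct nv50_instmem *imem = iobj->imem;
                struct nvkm_subdev *subdev = &imem->base.subdev;
                struct nv50_instobj *eobj;
                struct nvkm_vma *bar = NULL, *ebar;
                u64 size = nvkm_memory_size(&iobj->base.memory);
                void *emap;
                int ret;

                /* Drop the mutex while allocating BAR2 address space:
                 * page-table allocation can recurse into instmem (line 136). */
                mutex_unlock(&imem->base.mutex);
                while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) {
                        /* BAR2 exhausted: pop the least-recently-used idle
                         * mapping and retry until we succeed or run dry. */
                        mutex_lock(&imem->base.mutex);
                        eobj = list_first_entry_or_null(&imem->lru, typeof(*eobj), lru);
                        if (eobj) {
                                nvkm_debug(subdev, "evict %016llx\n",
                                           nvkm_memory_addr(&eobj->base.memory));
                                list_del_init(&eobj->lru);
                                ebar = eobj->bar; eobj->bar = NULL;
                                emap = eobj->map; eobj->map = NULL;
                        }
                        mutex_unlock(&imem->base.mutex);
                        if (!eobj)
                                break;
                        iounmap(emap);
                        nvkm_vmm_put(vmm, &ebar);
                }

                if (ret == 0)
                        ret = nvkm_memory_map(&iobj->base.memory, 0, vmm, bar, NULL, 0);

                /* Re-take the lock the caller expects held (lines 163-168);
                 * if we failed, or another thread won the race, back out. */
                mutex_lock(&imem->base.mutex);
                if (ret || iobj->bar) {
                        mutex_unlock(&imem->base.mutex);
                        nvkm_vmm_put(vmm, &bar);
                        mutex_lock(&imem->base.mutex);
                        return;
                }
                iobj->bar = bar;
                /* ioremap of the BAR2 aperture into iobj->map elided. */
        }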
194 struct nv50_instmem *imem = iobj->imem; in nv50_instobj_release() local
195 struct nvkm_subdev *subdev = &imem->base.subdev; in nv50_instobj_release()
200 if (refcount_dec_and_mutex_lock(&iobj->maps, &imem->base.mutex)) { in nv50_instobj_release()
206 list_add_tail(&iobj->lru, &imem->lru); in nv50_instobj_release()
211 mutex_unlock(&imem->base.mutex); in nv50_instobj_release()
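Lines 194-211: refcount_dec_and_mutex_lock() keeps every release except the last one lock-free, and the last one parks the now-idle mapping on the LRU tail instead of tearing it down. A sketch; the write-barrier/BAR-flush preamble is an assumption from the surrounding driver, not one of the hits:

        static void
        nv50_instobj_release(struct nvkm_memory *memory)
        {
                struct nv50_instobj *iobj = nv50_instobj(memory);
                struct nv50_instmem *imem = iobj->imem;
                struct nvkm_subdev *subdev = &imem->base.subdev;

                /* Order CPU writes before the GPU sees them (reconstructed). */
                wmb();
                nvkm_bar_flush(subdev->device->bar);

                /* Atomic fast path for all but the final unmap; the mutex is
                 * only taken when the refcount actually hits zero (line 200). */
                if (refcount_dec_and_mutex_lock(&iobj->maps, &imem->base.mutex)) {
                        /* Idle: park on the LRU instead of unmapping, so the
                         * mapping can be reused, or evicted later by kmap(). */
                        if (likely(iobj->lru.next) && iobj->map)
                                list_add_tail(&iobj->lru, &imem->lru);
                        iobj->base.memory.ptrs = NULL;
                        mutex_unlock(&imem->base.mutex);
                }
        }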
219 struct nvkm_instmem *imem = &iobj->imem->base; in nv50_instobj_acquire() local
230 mutex_lock(&imem->mutex); in nv50_instobj_acquire()
232 mutex_unlock(&imem->mutex); in nv50_instobj_acquire()
237 if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) { in nv50_instobj_acquire()
255 mutex_unlock(&imem->mutex); in nv50_instobj_acquire()
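Lines 219-255 use double-checked locking: an opportunistic refcount_inc_not_zero() outside the mutex, a re-check under it, then mapping through the BAR2 VMM. Condensed sketch; the nv50_instobj_fast/nv50_instobj_slow ptrs tables are reconstructed names:

        static void __iomem *
        nv50_instobj_acquire(struct nvkm_memory *memory)
        {
                struct nv50_instobj *iobj = nv50_instobj(memory);
                struct nvkm_instmem *imem = &iobj->imem->base;
                struct nvkm_vmm *vmm;
                void __iomem *map = NULL;

                /* Fast path: someone else already holds a mapping. */
                if (refcount_inc_not_zero(&iobj->maps))
                        return iobj->map;

                /* Slow path: re-check under the mutex, then map via BAR2. */
                mutex_lock(&imem->mutex);
                if (refcount_inc_not_zero(&iobj->maps)) {
                        mutex_unlock(&imem->mutex);
                        return iobj->map;
                }

                if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) {
                        if (!iobj->map)
                                nv50_instobj_kmap(iobj, vmm);
                        map = iobj->map;
                }

                /* First user: pull the object off the eviction LRU and pick
                 * direct (fast) or PRAMIN-window (slow) accessors. */
                if (!refcount_inc_not_zero(&iobj->maps)) {
                        if (likely(iobj->lru.next))
                                list_del_init(&iobj->lru);
                        iobj->base.memory.ptrs = map ? &nv50_instobj_fast
                                                     : &nv50_instobj_slow;
                        refcount_set(&iobj->maps, 1);
                }

                mutex_unlock(&imem->mutex);
                return map;
        }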
263 struct nvkm_instmem *imem = &iobj->imem->base; in nv50_instobj_boot() local
268 mutex_lock(&imem->mutex); in nv50_instobj_boot()
275 nvkm_instmem_boot(imem); in nv50_instobj_boot()
276 mutex_unlock(&imem->mutex); in nv50_instobj_boot()
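Lines 263-276: bootstrapped objects (such as the page tables backing BAR2 itself) are taken off the LRU for good before mapping, so they can never be evicted. A sketch under those assumptions:

        static void
        nv50_instobj_boot(struct nvkm_memory *memory, struct nvkm_vmm *vmm)
        {
                struct nv50_instobj *iobj = nv50_instobj(memory);
                struct nvkm_instmem *imem = &iobj->imem->base;

                /* Detach permanently: a NULL lru.next opts this object
                 * out of all future LRU handling. */
                mutex_lock(&imem->mutex);
                if (likely(iobj->lru.next)) {
                        list_del_init(&iobj->lru);
                        iobj->lru.next = NULL;
                }

                nv50_instobj_kmap(iobj, vmm);
                nvkm_instmem_boot(imem);
                mutex_unlock(&imem->mutex);
        }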
314 struct nvkm_instmem *imem = &iobj->imem->base; in nv50_instobj_dtor() local
318 mutex_lock(&imem->mutex); in nv50_instobj_dtor()
323 mutex_unlock(&imem->mutex); in nv50_instobj_dtor()
326 struct nvkm_vmm *vmm = nvkm_bar_bar2_vmm(imem->subdev.device); in nv50_instobj_dtor()
333 nvkm_instobj_dtor(imem, &iobj->base); in nv50_instobj_dtor()
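Lines 314-333 tear down in two phases: detach from the LRU and steal the mapping pointers under the mutex, then unmap outside it. Sketch; the iounmap() and ram-unref steps are reconstructed around the listed lines:

        static void *
        nv50_instobj_dtor(struct nvkm_memory *memory)
        {
                struct nv50_instobj *iobj = nv50_instobj(memory);
                struct nvkm_instmem *imem = &iobj->imem->base;
                struct nvkm_vma *bar;
                void *map;

                /* Detach and steal the mapping under the lock... */
                mutex_lock(&imem->mutex);
                if (likely(iobj->lru.next))
                        list_del(&iobj->lru);
                map = iobj->map;
                bar = iobj->bar;
                mutex_unlock(&imem->mutex);

                /* ...then tear it down outside of it. */
                if (map) {
                        struct nvkm_vmm *vmm = nvkm_bar_bar2_vmm(imem->subdev.device);
                        iounmap(map);
                        if (likely(vmm))        /* may be NULL during BAR teardown */
                                nvkm_vmm_put(vmm, &bar);
                }

                nvkm_memory_unref(&iobj->ram);
                nvkm_instobj_dtor(imem, &iobj->base);
                return iobj;
        }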
354 struct nv50_instmem *imem = nv50_instmem(base); in nv50_instobj_wrap() local
361 nvkm_instobj_ctor(&nv50_instobj_func, &imem->base, &iobj->base); in nv50_instobj_wrap()
362 iobj->imem = imem; in nv50_instobj_wrap()
371 nv50_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero, in nv50_instobj_new() argument
378 ret = nvkm_ram_get(imem->subdev.device, 0, 1, page, size, true, true, &ram); in nv50_instobj_new()
382 ret = nv50_instobj_wrap(imem, ram, pmemory); in nv50_instobj_new()
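Lines 354-382 split construction in two: _wrap() builds an instobj around an existing memory object, while _new() allocates VRAM first and then wraps it. A sketch; the refcount/list initialisation is assumed from the fields used elsewhere, and zeroing is assumed to be handled by the common instobj layer:

        static int
        nv50_instobj_wrap(struct nvkm_instmem *base,
                          struct nvkm_memory *memory, struct nvkm_memory **pmemory)
        {
                struct nv50_instmem *imem = nv50_instmem(base);
                struct nv50_instobj *iobj;

                if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL)))
                        return -ENOMEM;
                *pmemory = &iobj->base.memory;

                nvkm_instobj_ctor(&nv50_instobj_func, &imem->base, &iobj->base);
                iobj->imem = imem;
                refcount_set(&iobj->maps, 0);
                INIT_LIST_HEAD(&iobj->lru);
                iobj->ram = nvkm_memory_ref(memory);
                return 0;
        }

        static int
        nv50_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero,
                         struct nvkm_memory **pmemory)
        {
                u8 page = max(order_base_2(align), 12);
                struct nvkm_memory *ram;
                int ret;

                ret = nvkm_ram_get(imem->subdev.device, 0, 1, page, size,
                                   true, true, &ram);
                if (ret)
                        return ret;

                ret = nv50_instobj_wrap(imem, ram, pmemory);
                nvkm_memory_unref(&ram);        /* the instobj holds its own ref */
                return ret;
        }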
409 struct nv50_instmem *imem; in nv50_instmem_new() local
411 if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL))) in nv50_instmem_new()
413 nvkm_instmem_ctor(&nv50_instmem, device, type, inst, &imem->base); in nv50_instmem_new()
414 INIT_LIST_HEAD(&imem->lru); in nv50_instmem_new()
415 *pimem = &imem->base; in nv50_instmem_new()
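Lines 409-415 are the subdev constructor itself; a sketch, assuming the usual nvkm error-path conventions:

        int
        nv50_instmem_new(struct nvkm_device *device, enum nvkm_subdev_type type,
                         int inst, struct nvkm_instmem **pimem)
        {
                struct nv50_instmem *imem;

                if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))
                        return -ENOMEM;
                nvkm_instmem_ctor(&nv50_instmem, device, type, inst, &imem->base);
                INIT_LIST_HEAD(&imem->lru);     /* empty eviction list to start */
                *pimem = &imem->base;
                return 0;
        }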