Searched refs:userd (Results 1 – 21 of 21) sorted by relevance

/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
chan.c  278 nvkm_memory_unref(&chan->userd.mem); in nvkm_chan_del()
359 ((func->userd->bar < 0) == !userd) || in nvkm_chan_new_()
366 func->userd->bar < 0, userd, func->ramfc->ctxdma, dmaobj, in nvkm_chan_new_()
451 if (func->userd->bar < 0) { in nvkm_chan_new_()
452 if (ouserd + chan->func->userd->size >= nvkm_memory_size(userd)) { in nvkm_chan_new_()
457 ret = nvkm_memory_kmap(userd, &chan->userd.mem); in nvkm_chan_new_()
463 chan->userd.base = ouserd; in nvkm_chan_new_()
465 chan->userd.mem = nvkm_memory_ref(fifo->userd.mem); in nvkm_chan_new_()
466 chan->userd.base = chan->id * chan->func->userd->size; in nvkm_chan_new_()
469 if (chan->func->userd->clear) in nvkm_chan_new_()
[all …]
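
Note: the chan.c hits above capture how a channel's USERD slot is placed in nvkm_chan_new_(): when the per-chipset descriptor's bar field is negative, USERD lives in a client-supplied memory object at offset ouserd (bounds-checked, then kmapped); otherwise each channel gets a fixed-size slot, indexed by channel id, inside the fifo-wide USERD buffer. A minimal stand-alone sketch of that decision, using simplified stand-in types rather than the real nvkm structures:

    #include <stdbool.h>
    #include <stdint.h>

    /* Simplified stand-in for the per-chipset userd descriptor (see chan.h). */
    struct userd_desc {
        int bar;        /* < 0: the client supplies the USERD backing object */
        uint32_t size;  /* size of one channel's USERD slot */
    };

    /* Returns false on the same bounds error the chan.c excerpt guards against. */
    static bool chan_userd_base(const struct userd_desc *desc, int chan_id,
                                uint64_t ouserd, uint64_t client_mem_size,
                                uint64_t *base)
    {
        if (desc->bar < 0) {
            /* client-provided USERD: the requested slot must fit the object */
            if (ouserd + desc->size >= client_mem_size)
                return false;
            *base = ouserd;                          /* chan->userd.base = ouserd */
        } else {
            /* fifo-wide USERD: one slot per channel id */
            *base = (uint64_t)chan_id * desc->size;  /* chan->userd.base = id * size */
        }
        return true;
    }
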
gf100.c  87 const u64 userd = nvkm_memory_addr(chan->userd.mem) + chan->userd.base; in gf100_chan_ramfc_write() local
121 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x040, 0x00000000); in gf100_chan_userd_clear()
122 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x044, 0x00000000); in gf100_chan_userd_clear()
123 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x048, 0x00000000); in gf100_chan_userd_clear()
124 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x04c, 0x00000000); in gf100_chan_userd_clear()
125 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x050, 0x00000000); in gf100_chan_userd_clear()
126 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x058, 0x00000000); in gf100_chan_userd_clear()
127 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x05c, 0x00000000); in gf100_chan_userd_clear()
128 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x060, 0x00000000); in gf100_chan_userd_clear()
129 nvkm_wo32(chan->userd.mem, chan->userd.base + 0x088, 0x00000000); in gf100_chan_userd_clear()
[all …]
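
Note: gf100_chan_userd_clear() above zeroes a fixed set of word offsets inside the channel's USERD slot through nvkm_wo32(). A compact restatement of that pattern; the offset list below is only what the excerpt shows (it is truncated), and wr32() is a stand-in for the driver's accessor:

    #include <stddef.h>
    #include <stdint.h>

    /* Stand-in for nvkm_wo32(chan->userd.mem, offset, value). */
    static void wr32(void *mem, uint64_t off, uint32_t val)
    {
        (void)mem; (void)off; (void)val;
    }

    static void chan_userd_clear(void *userd_mem, uint64_t userd_base)
    {
        /* offsets visible in the gf100.c excerpt; the full list is truncated */
        static const uint32_t offs[] = {
            0x040, 0x044, 0x048, 0x04c, 0x050, 0x058, 0x05c, 0x060, 0x088,
        };
        for (size_t i = 0; i < sizeof(offs) / sizeof(offs[0]); i++)
            wr32(userd_mem, userd_base + offs[i], 0x00000000);
    }
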
uchan.c  261 if (chan->func->userd->bar < 0) in nvkm_uchan_map()
265 *addr = device->func->resource_addr(device, chan->func->userd->bar) + in nvkm_uchan_map()
266 chan->func->userd->base + chan->userd.base; in nvkm_uchan_map()
267 *size = chan->func->userd->size; in nvkm_uchan_map()
328 struct nvkm_memory *userd = NULL; in nvkm_uchan_new() local
360 userd = nvkm_umem_search(oclass->client, args->v0.huserd); in nvkm_uchan_new()
361 if (IS_ERR(userd)) { in nvkm_uchan_new()
362 ret = PTR_ERR(userd); in nvkm_uchan_new()
363 userd = NULL; in nvkm_uchan_new()
379 args->v0.length, userd, args->v0.ouserd, &uchan->chan); in nvkm_uchan_new()
[all …]
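
Note: nvkm_uchan_map() above is where a channel's USERD slot is exposed to userspace. The address handed back is the BAR's resource address plus the chipset-wide userd base plus the per-channel base; a negative bar means the client allocated USERD itself (huserd) and there is nothing to map through a BAR. A simplified sketch of that math, with stand-in names for the nvkm structures:

    #include <stdbool.h>
    #include <stdint.h>

    struct userd_desc {
        int bar;        /* BAR index, or < 0 for client-allocated USERD */
        uint64_t base;  /* chipset-wide USERD offset inside that BAR */
        uint64_t size;  /* size of one channel's USERD slot */
    };

    static bool uchan_map_userd(const struct userd_desc *desc,
                                uint64_t bar_addr,         /* resource_addr(device, desc->bar) */
                                uint64_t chan_userd_base,  /* chan->userd.base */
                                uint64_t *addr, uint64_t *size)
    {
        if (desc->bar < 0)
            return false;  /* client-allocated USERD is not BAR-mapped here */
        *addr = bar_addr + desc->base + chan_userd_base;
        *size = desc->size;
        return true;
    }
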
base.c  294 if (fifo->func->chan.func->userd->bar == 1) { in nvkm_fifo_oneinit()
298 fifo->func->chan.func->userd->size, 0, true, in nvkm_fifo_oneinit()
299 &fifo->userd.mem); in nvkm_fifo_oneinit()
303 ret = nvkm_vmm_get(bar1, 12, nvkm_memory_size(fifo->userd.mem), &fifo->userd.bar1); in nvkm_fifo_oneinit()
307 ret = nvkm_memory_map(fifo->userd.mem, 0, bar1, fifo->userd.bar1, NULL, 0); in nvkm_fifo_oneinit()
328 if (fifo->userd.bar1) in nvkm_fifo_dtor()
329 nvkm_vmm_put(nvkm_bar_bar1_vmm(engine->subdev.device), &fifo->userd.bar1); in nvkm_fifo_dtor()
330 nvkm_memory_unref(&fifo->userd.mem); in nvkm_fifo_dtor()
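
Note: the base.c hits show the lifetime of the fifo-wide USERD buffer when the chipset exposes it through BAR1: nvkm_fifo_oneinit() allocates the backing memory, reserves a BAR1 VMM range with a 12-bit (4 KiB) page shift, and maps the memory into it; nvkm_fifo_dtor() releases the VMM range and drops the memory reference in reverse order. A rough sketch of that ordering with stand-in helpers; the allocation size (one slot per channel) is an assumption, since that argument is truncated in the excerpt:

    #include <stdint.h>

    /* Stand-ins for the nvkm memory/VMM helpers used in base.c; bodies elided. */
    static int  memory_new(uint64_t size, void **mem)              { (void)size; *mem = 0; return 0; }
    static int  vmm_get(int page_shift, uint64_t size, void **vma) { (void)page_shift; (void)size; *vma = 0; return 0; }
    static int  memory_map(void *mem, void *vma)                   { (void)mem; (void)vma; return 0; }
    static void vmm_put(void **vma)                                { *vma = 0; }
    static void memory_unref(void **mem)                           { *mem = 0; }

    static int fifo_userd_oneinit(uint64_t chan_count, uint64_t slot_size,
                                  void **mem, void **bar1)
    {
        int ret;

        /* nvkm_memory_new(...): assumed to be one USERD slot per channel */
        ret = memory_new(chan_count * slot_size, mem);
        if (ret)
            return ret;

        /* nvkm_vmm_get(bar1, 12, nvkm_memory_size(mem), ...): 4 KiB pages */
        ret = vmm_get(12, chan_count * slot_size, bar1);
        if (ret)
            return ret;

        /* nvkm_memory_map(fifo->userd.mem, 0, bar1, fifo->userd.bar1, NULL, 0) */
        return memory_map(*mem, *bar1);
    }

    static void fifo_userd_dtor(void **mem, void **bar1)
    {
        if (*bar1)
            vmm_put(bar1);   /* nvkm_vmm_put(bar1_vmm, &fifo->userd.bar1) */
        memory_unref(mem);   /* nvkm_memory_unref(&fifo->userd.mem) */
    }
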
gv100.c  43 const u64 userd = nvkm_memory_addr(chan->userd.mem) + chan->userd.base; in gv100_chan_ramfc_write() local
47 nvkm_wo32(chan->inst, 0x008, lower_32_bits(userd)); in gv100_chan_ramfc_write()
48 nvkm_wo32(chan->inst, 0x00c, upper_32_bits(userd)); in gv100_chan_ramfc_write()
81 .userd = &gv100_chan_userd,
185 const u64 user = nvkm_memory_addr(chan->userd.mem) + chan->userd.base; in gv100_runl_insert_chan()
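
Note: gv100_chan_ramfc_write() above forms the channel's USERD address as the backing object's address plus the per-channel base, then writes it into the instance block as a lo/hi pair at 0x008/0x00c. A small sketch of that split; lower_32_bits()/upper_32_bits() are the kernel helpers used in the excerpt, open-coded here, and wr32() stands in for nvkm_wo32(chan->inst, ...):

    #include <stdint.h>

    static void wr32(void *inst, uint32_t off, uint32_t val)
    {
        (void)inst; (void)off; (void)val;  /* stand-in for nvkm_wo32() */
    }

    static void ramfc_write_userd(void *inst, uint64_t userd_mem_addr,
                                  uint64_t userd_base)
    {
        const uint64_t userd = userd_mem_addr + userd_base;

        wr32(inst, 0x008, (uint32_t)(userd & 0xffffffffu));  /* lower_32_bits(userd) */
        wr32(inst, 0x00c, (uint32_t)(userd >> 32));          /* upper_32_bits(userd) */
    }
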
gk104.c  84 const u64 userd = nvkm_memory_addr(chan->userd.mem) + chan->userd.base; in gk104_chan_ramfc_write() local
88 nvkm_wo32(chan->inst, 0x08, lower_32_bits(userd)); in gk104_chan_ramfc_write()
89 nvkm_wo32(chan->inst, 0x0c, upper_32_bits(userd)); in gk104_chan_ramfc_write()
124 .userd = &gk104_chan_userd,
748 if (fifo->func->chan.func->userd->bar == 1) in gk104_fifo_init()
749 nvkm_wr32(device, 0x002254, 0x10000000 | fifo->userd.bar1->addr >> 12); in gk104_fifo_init()
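
Note: the gk104.c hits repeat the same lo/hi RAMFC write at 0x08/0x0c, and gk104_fifo_init() additionally points the hardware at the BAR1 USERD block by writing its BAR1 offset, in 4 KiB units, into register 0x002254. A one-line sketch of that register value; treating 0x10000000 as a valid/enable bit is an assumption, the excerpt only shows it being ORed in:

    #include <stdint.h>

    /* 0x002254 value from gk104_fifo_init(): 0x10000000 | (bar1_addr >> 12) */
    static uint32_t gk104_reg_002254_value(uint64_t userd_bar1_addr)
    {
        return 0x10000000u | (uint32_t)(userd_bar1_addr >> 12);
    }
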
chan.h  31 } *userd; member
58 u64 offset, u64 length, struct nvkm_memory *userd, u64 userd_bar1,
nv10.c  80 .userd = &nv04_chan_userd,
gk110.c  51 .userd = &gk104_chan_userd,
gm107.c  36 .userd = &gk104_chan_userd,
nv17.c  86 .userd = &nv04_chan_userd,
nv40.c  104 .userd = &nv40_chan_userd,
tu102.c  51 .userd = &gv100_chan_userd,
g84.c  97 .userd = &nv50_chan_userd,
nv50.c  141 .userd = &nv50_chan_userd,
ga100.c  102 .userd = &gv100_chan_userd,
nv04.c  170 .userd = &nv04_chan_userd,
/linux-6.3-rc2/drivers/gpu/drm/nouveau/
nouveau_dma.c  45 val = nvif_rd32(chan->userd, chan->user_get); in READ_GET()
47 val |= (uint64_t)nvif_rd32(chan->userd, chan->user_get_hi) << 32; in READ_GET()
89 nvif_wr32(chan->userd, 0x8c, chan->dma.ib_put); in nv50_dma_push()
101 uint32_t get = nvif_rd32(chan->userd, 0x88); in nv50_dma_push_wait()
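
Note: the nouveau_dma.c hits show how the CPU side uses the mapped USERD window: READ_GET() assembles the 64-bit GET pointer from two 32-bit reads, and nv50_dma_push() writes the new IB PUT index at offset 0x8c. A sketch of the GET assembly; rd32() stands in for nvif_rd32(chan->userd, ...):

    #include <stdint.h>

    static uint32_t rd32(void *userd, uint32_t off)
    {
        (void)userd; (void)off;  /* stand-in for nvif_rd32(chan->userd, off) */
        return 0;
    }

    static uint64_t read_get(void *userd, uint32_t get_off, uint32_t get_hi_off)
    {
        uint64_t val = rd32(userd, get_off);

        /* chipsets that expose a high word extend GET to 64 bits */
        val |= (uint64_t)rd32(userd, get_hi_off) << 32;
        return val;
    }
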
nouveau_chan.h  20 struct nvif_object *userd; member
nouveau_chan.c  335 chan->userd = &chan->mem_userd.object; in nouveau_channel_ctor()
337 chan->userd = &chan->user; in nouveau_channel_ctor()
365 ret = nvif_object_map(chan->userd, NULL, 0); in nouveau_channel_init()
/linux-6.3-rc2/drivers/gpu/drm/nouveau/include/nvkm/engine/
fifo.h  25 } userd; member
69 } userd; member

Completed in 23 milliseconds