
Searched refs:engn (Results 1 – 25 of 30) sorted by relevance

/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
cgrp.c 37 struct nvkm_engn *engn = ectx->engn; in nvkm_cgrp_ectx_put() local
40 CGRP_TRACE(cgrp, "dtor ectx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_cgrp_ectx_put()
63 ectx = nvkm_list_find(ectx, &cgrp->ectxs, head, ectx->engn == engn); in nvkm_cgrp_ectx_get()
71 CGRP_TRACE(cgrp, "ctor ectx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_cgrp_ectx_get()
75 ectx->engn = engn; in nvkm_cgrp_ectx_get()
98 struct nvkm_engn *engn = vctx->ectx->engn; in nvkm_cgrp_vctx_put() local
101 CGRP_TRACE(cgrp, "dtor vctx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_cgrp_vctx_put()
128 vctx->ectx->engn == engn && vctx->vmm == chan->vmm); in nvkm_cgrp_vctx_get()
143 CGRP_TRACE(cgrp, "ctor vctx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_cgrp_vctx_get()
159 if (engn->func->bind) { in nvkm_cgrp_vctx_get()
[all …]
runl.c 41 id = engn->func->cxid(engn, &cgid); in nvkm_engn_cgrp_get()
63 struct nvkm_engn *engn; in nvkm_runl_rc() local
101 nvkm_runl_foreach_engn_cond(engn, runl, engn->func->cxid) { in nvkm_runl_rc()
326 kfree(engn); in nvkm_runl_del()
352 if (!(engn = kzalloc(sizeof(*engn), GFP_KERNEL))) in nvkm_runl_add()
355 engn->func = func; in nvkm_runl_add()
356 engn->runl = runl; in nvkm_runl_add()
357 engn->id = engi; in nvkm_runl_add()
358 engn->engine = engine; in nvkm_runl_add()
359 engn->fault = -1; in nvkm_runl_add()
[all …]
gf100.c 228 ENGN_DEBUG(engn, "triggering mmu fault on 0x%02x", engn->fault); in gf100_engn_mmu_fault_trigger()
233 nvkm_wr32(device, 0x002a30 + (engn->id * 4), 0x00000100 | engn->fault); in gf100_engn_mmu_fault_trigger()
249 u32 stat = nvkm_rd32(engn->engine->subdev.device, 0x002640 + (engn->id * 4)); in gf100_engn_status()
548 engn = nvkm_runl_find_engn(engn, runl, engn->fault == info->engine); in gf100_fifo_mmu_fault_recover()
549 if (engn) { in gf100_fifo_mmu_fault_recover()
552 engn->func->mmu_fault_triggered(engn)) { in gf100_fifo_mmu_fault_recover()
628 nvkm_runl_foreach_engn_cond(engn, runl, engm & BIT(engn->id)) { in gf100_fifo_intr_ctxsw_timeout()
630 id = engn->func->cxid(engn, &cgid); in gf100_fifo_intr_ctxsw_timeout()
653 nvkm_runl_foreach_engn_cond(engn, runl, engn->func->chsw) { in gf100_fifo_intr_sched_ctxsw()
654 if (WARN_ON(engn->fault < 0) || !engn->func->chsw(engn)) in gf100_fifo_intr_sched_ctxsw()
[all …]
uchan.c 134 struct nvkm_engn *engn; in nvkm_uchan_object_dtor() local
139 engn = uobj->cctx->vctx->ectx->engn; in nvkm_uchan_object_dtor()
140 if (engn->func->ramht_del) in nvkm_uchan_object_dtor()
159 struct nvkm_engn *engn; in nvkm_uchan_object_new() local
164 engn = nvkm_runl_find_engn(engn, cgrp->runl, engn->engine == oclass->engine); in nvkm_uchan_object_new()
165 if (WARN_ON(!engn)) in nvkm_uchan_object_new()
184 .engn = oclass->engn, in nvkm_uchan_object_new()
189 .engine = engn->engine, in nvkm_uchan_object_new()
194 if (engn->func->ramht_add) { in nvkm_uchan_object_new()
195 uobj->hash = engn->func->ramht_add(engn, uobj->oproxy.object, uobj->chan); in nvkm_uchan_object_new()
[all …]
chan.c 48 if (!engn->func->bind) in nvkm_chan_cctx_bind()
63 engn->func->bind(engn, cctx, chan); in nvkm_chan_cctx_bind()
78 struct nvkm_engn *engn = cctx->vctx->ectx->engn; in nvkm_chan_cctx_put() local
81 CHAN_TRACE(chan, "dtor cctx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_chan_cctx_put()
104 cctx->vctx->ectx->engn == engn && cctx->vctx->vmm == chan->vmm); in nvkm_chan_cctx_get()
115 CHAN_ERROR(chan, "vctx %d[%s]: %d", engn->id, engn->engine->subdev.name, ret); in nvkm_chan_cctx_get()
120 CHAN_TRACE(chan, "ctor cctx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_chan_cctx_get()
312 struct nvkm_engn *engn; in nvkm_chan_get_inst() local
316 nvkm_runl_foreach_engn(engn, runl) { in nvkm_chan_get_inst()
333 struct nvkm_engn *engn; in nvkm_chan_get_chid() local
[all …]
g98.c 39 nvkm_runl_add(runl, 1, fifo->func->engn, NVKM_ENGINE_GR, 0); in g98_fifo_runl_ctor()
40 nvkm_runl_add(runl, 2, fifo->func->engn, NVKM_ENGINE_MSPPP, 0); in g98_fifo_runl_ctor()
41 nvkm_runl_add(runl, 3, fifo->func->engn, NVKM_ENGINE_CE, 0); in g98_fifo_runl_ctor()
42 nvkm_runl_add(runl, 4, fifo->func->engn, NVKM_ENGINE_MSPDEC, 0); in g98_fifo_runl_ctor()
43 nvkm_runl_add(runl, 5, fifo->func->engn, NVKM_ENGINE_SEC, 0); in g98_fifo_runl_ctor()
44 nvkm_runl_add(runl, 6, fifo->func->engn, NVKM_ENGINE_MSVLD, 0); in g98_fifo_runl_ctor()
59 .engn = &g84_engn,
base.c 42 struct nvkm_engn *engn; in nvkm_fifo_ctxsw_in_progress() local
45 nvkm_runl_foreach_engn(engn, runl) { in nvkm_fifo_ctxsw_in_progress()
46 if (engn->engine == engine) in nvkm_fifo_ctxsw_in_progress()
47 return engn->func->chsw ? engn->func->chsw(engn) : false; in nvkm_fifo_ctxsw_in_progress()
105 oclass->engn = &fifo->func->cgrp.user; in nvkm_fifo_class_get()
173 struct nvkm_engn *engn; in nvkm_fifo_info() local
191 nvkm_runl_foreach_engn(engn, runl) { in nvkm_fifo_info()
193 switch (engn->engine->subdev.type) { in nvkm_fifo_info()
245 struct nvkm_engn *engn; in nvkm_fifo_oneinit() local
270 nvkm_runl_foreach_engn(engn, runl) { in nvkm_fifo_oneinit()
[all …]
g84.c 106 g84_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in g84_ectx_bind() argument
113 switch (engn->engine->subdev.type) { in g84_ectx_bind()
130 save = nvkm_mask(device, 0x002520, 0x0000003f, BIT(engn->id - 1)); in g84_ectx_bind()
200 nvkm_runl_add(runl, 1, fifo->func->engn, NVKM_ENGINE_GR, 0); in g84_fifo_runl_ctor()
201 nvkm_runl_add(runl, 2, fifo->func->engn, NVKM_ENGINE_MPEG, 0); in g84_fifo_runl_ctor()
202 nvkm_runl_add(runl, 3, fifo->func->engn, NVKM_ENGINE_ME, 0); in g84_fifo_runl_ctor()
203 nvkm_runl_add(runl, 4, fifo->func->engn, NVKM_ENGINE_VP, 0); in g84_fifo_runl_ctor()
204 nvkm_runl_add(runl, 5, fifo->func->engn, NVKM_ENGINE_CIPHER, 0); in g84_fifo_runl_ctor()
205 nvkm_runl_add(runl, 6, fifo->func->engn, NVKM_ENGINE_BSP, 0); in g84_fifo_runl_ctor()
220 .engn = &g84_engn,
tu102.c 136 tu102_fifo_intr_ctxsw_timeout_info(struct nvkm_engn *engn, u32 info) in tu102_fifo_intr_ctxsw_timeout_info() argument
138 struct nvkm_runl *runl = engn->runl; in tu102_fifo_intr_ctxsw_timeout_info()
143 ENGN_DEBUG(engn, "CTXSW_TIMEOUT %08x", info); in tu102_fifo_intr_ctxsw_timeout_info()
172 struct nvkm_engn *engn; in tu102_fifo_intr_ctxsw_timeout() local
177 nvkm_runl_foreach_engn_cond(engn, runl, engm & BIT(engn->id)) { in tu102_fifo_intr_ctxsw_timeout()
178 info = nvkm_rd32(device, 0x003200 + (engn->id * 4)); in tu102_fifo_intr_ctxsw_timeout()
179 tu102_fifo_intr_ctxsw_timeout_info(engn, info); in tu102_fifo_intr_ctxsw_timeout()
275 .engn = &gv100_engn,
ga100.c 125 ga100_engn_cxid(struct nvkm_engn *engn, bool *cgid) in ga100_engn_cxid() argument
127 struct nvkm_runl *runl = engn->runl; in ga100_engn_cxid()
129 u32 stat = nvkm_rd32(device, runl->addr + 0x200 + engn->id * 0x40); in ga100_engn_cxid()
131 ENGN_DEBUG(engn, "status %08x", stat); in ga100_engn_cxid()
140 if (nvkm_engine_chsw_load(engn->engine)) in ga100_engn_cxid()
323 struct nvkm_engn *engn; in ga100_runl_intr() local
336 nvkm_runl_foreach_engn_cond(engn, runl, stat & BIT(engn->id)) { in ga100_runl_intr()
339 tu102_fifo_intr_ctxsw_timeout_info(engn, info); in ga100_runl_intr()
341 nvkm_wr32(device, runl->addr + 0x100, BIT(engn->id)); in ga100_runl_intr()
342 stat &= ~BIT(engn->id); in ga100_runl_intr()
[all …]
gv100.c 92 gv100_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in gv100_ectx_bind() argument
117 gv100_ectx_ce_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in gv100_ectx_ce_bind() argument
129 gv100_ectx_ce_ctor(struct nvkm_engn *engn, struct nvkm_vctx *vctx) in gv100_ectx_ce_ctor() argument
459 struct nvkm_engn *engn; in gv100_fifo_intr_ctxsw_timeout() local
462 nvkm_runl_foreach_engn_cond(engn, runl, engm & BIT(engn->id)) in gv100_fifo_intr_ctxsw_timeout()
463 nvkm_runl_rc_engn(runl, engn); in gv100_fifo_intr_ctxsw_timeout()
481 .engn = &gv100_engn,
runl.h 108 #define nvkm_runl_find_engn(engn,runl,cond) nvkm_list_find(engn, &(runl)->engns, head, (cond)) argument
113 #define nvkm_runl_foreach_engn(engn,runl) list_for_each_entry((engn), &(runl)->engns, head) argument
114 #define nvkm_runl_foreach_engn_cond(engn,runl,cond) \ argument
115 nvkm_list_foreach(engn, &(runl)->engns, head, (cond))
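The runl.h hits above are the engine-list helpers that most of the fifo hits in this search build on: nvkm_runl_find_engn() looks up one engine on a runlist by condition, and the foreach macros walk the runlist's engns list. As a minimal, hedged sketch of the usage pattern (the function name below is made up for illustration; it simply mirrors the gv100.c ctxsw-timeout hit elsewhere in these results, and assumes the usual in-directory #include "runl.h"):

	/* Illustrative only -- not a function from the tree. */
	#include "runl.h"	/* struct nvkm_runl, struct nvkm_engn, foreach macros */

	static void
	example_ctxsw_timeout_recover(struct nvkm_runl *runl, u32 engm)
	{
		struct nvkm_engn *engn;

		/* Visit only the engines whose bit is set in the timeout mask,
		 * and ask the runlist code to recover each one, as the
		 * gv100/tu102 handlers in these results do.
		 */
		nvkm_runl_foreach_engn_cond(engn, runl, engm & BIT(engn->id))
			nvkm_runl_rc_engn(runl, engn);
	}

The same foreach-with-condition shape appears in the gf100.c, gk104.c and ga100.c hits, just with different per-engine predicates.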
gk104.c 139 switch (engn->engine->subdev.type) { in gk104_ectx_bind()
153 if (!engn->engine->subdev.inst) in gk104_ectx_bind()
178 gk104_ectx_ctor(struct nvkm_engn *engn, struct nvkm_vctx *vctx) in gk104_ectx_ctor() argument
206 u32 stat = nvkm_rd32(engn->runl->fifo->engine.subdev.device, 0x002640 + (engn->id * 0x08)); in gk104_engn_status()
221 if (nvkm_engine_chsw_load(engn->engine)) in gk104_engn_status()
245 gk104_engn_cxid(struct nvkm_engn *engn, bool *cgid) in gk104_engn_cxid() argument
249 gk104_engn_status(engn, &status); in gk104_engn_cxid()
259 gk104_engn_chsw(struct nvkm_engn *engn) in gk104_engn_chsw() argument
263 gk104_engn_status(engn, &status); in gk104_engn_chsw()
791 func = fifo->func->engn; in gk104_fifo_runl_ctor()
[all …]
nv40.c 111 nv40_eobj_ramht_add(struct nvkm_engn *engn, struct nvkm_object *eobj, struct nvkm_chan *chan) in nv40_eobj_ramht_add() argument
115 u32 context = chan->id << 23 | engn->id << 20; in nv40_eobj_ramht_add()
125 nv40_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in nv40_ectx_bind() argument
133 switch (engn->engine->subdev.type) { in nv40_ectx_bind()
241 .engn = &nv40_engn,
nv50.c 42 nv50_eobj_ramht_add(struct nvkm_engn *engn, struct nvkm_object *eobj, struct nvkm_chan *chan) in nv50_eobj_ramht_add() argument
44 return nvkm_ramht_insert(chan->ramht, eobj, 0, 4, eobj->handle, engn->id << 20); in nv50_eobj_ramht_add()
150 nv50_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in nv50_ectx_bind() argument
157 switch (engn->engine->subdev.type) { in nv50_ectx_bind()
387 .engn = &nv50_engn,
nv04.c 192 nv04_eobj_ramht_add(struct nvkm_engn *engn, struct nvkm_object *eobj, struct nvkm_chan *chan) in nv04_eobj_ramht_add() argument
196 u32 context = 0x80000000 | chan->id << 24 | engn->id << 16; in nv04_eobj_ramht_add()
504 nvkm_runl_add(runl, 1, fifo->func->engn , NVKM_ENGINE_GR, 0); in nv04_fifo_runl_ctor()
505 nvkm_runl_add(runl, 2, fifo->func->engn , NVKM_ENGINE_MPEG, 0); /* NV31- */ in nv04_fifo_runl_ctor()
532 .engn = &nv04_engn,
ga102.c 34 .engn = &ga100_engn,
gk20a.c 41 .engn = &gk104_engn,
gm200.c 55 .engn = &gk104_engn,
gk208.c 66 .engn = &gk104_engn,
nv10.c 102 .engn = &nv04_engn,
cgrp.h 20 struct nvkm_engn *engn; member
/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvkm/engine/sw/
base.c 54 const struct nvkm_sw_chan_sclass *sclass = oclass->engn; in nvkm_sw_oclass_new()
66 oclass->engn = &sw->func->sclass[index]; in nvkm_sw_oclass_get()
/linux-6.3-rc2/drivers/gpu/drm/nouveau/include/nvkm/core/
oclass.h 22 const void *engn; member
/linux-6.3-rc2/drivers/gpu/drm/nouveau/nvkm/engine/dma/
base.c 74 sclass->engn = oclass; in nvkm_dma_oclass_base_get()

Completed in 27 milliseconds
