| /drivers/gpu/drm/nouveau/nvkm/engine/fifo/ |
| cgrp.c |
    181  nvkm_chid_put(runl->cgid, cgrp->id, &cgrp->lock);  in nvkm_cgrp_del()
    185  kfree(cgrp);  in nvkm_cgrp_del()
    193  if (!cgrp)  in nvkm_cgrp_unref()
    203  if (cgrp)  in nvkm_cgrp_ref()
    206  return cgrp;  in nvkm_cgrp_ref()
    214  if (!cgrp)  in nvkm_cgrp_put()
    227  if (!(cgrp = *pcgrp = kmalloc(sizeof(*cgrp), GFP_KERNEL)))  in nvkm_cgrp_new()
    230  cgrp->func = runl->fifo->func->cgrp.func;  in nvkm_cgrp_new()
    231  strscpy(cgrp->name, name, sizeof(cgrp->name));  in nvkm_cgrp_new()
    234  cgrp->hw = hw;  in nvkm_cgrp_new()
    [all …]
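The cgrp.c hits outline the channel-group lifecycle: nvkm_cgrp_new() allocates the group, records whether it is hardware-backed, and copies its name; nvkm_cgrp_ref()/nvkm_cgrp_put()/nvkm_cgrp_unref() manage references; nvkm_cgrp_del() returns the group id to the runlist's id allocator and frees the structure. Below is a minimal userspace sketch of the allocate/name/free pattern those lines show; the demo_ names and types are hypothetical stand-ins, and the real locking, id allocator and refcounting are omitted.

```c
/* Illustrative stand-in only: demo_cgrp and demo_cgrp_new()/demo_cgrp_del()
 * mirror the nvkm_cgrp_new()/nvkm_cgrp_del() lines listed above; they are
 * not the nouveau definitions. */
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

struct demo_cgrp {
	char name[64];
	int id;
	bool hw;        /* hardware-backed group (real channel group) or not */
};

static int demo_cgrp_new(const char *name, bool hw, struct demo_cgrp **pcgrp)
{
	struct demo_cgrp *cgrp;

	if (!(cgrp = *pcgrp = calloc(1, sizeof(*cgrp))))       /* mirrors the kmalloc() check */
		return -ENOMEM;

	snprintf(cgrp->name, sizeof(cgrp->name), "%s", name);  /* strscpy() analogue */
	cgrp->hw = hw;
	return 0;
}

static void demo_cgrp_del(struct demo_cgrp *cgrp)
{
	/* the real nvkm_cgrp_del() also returns cgrp->id to the runlist's
	 * id allocator via nvkm_chid_put() before freeing */
	free(cgrp);
}

int main(void)
{
	struct demo_cgrp *cgrp;

	if (!demo_cgrp_new("grp0", true, &cgrp)) {
		printf("%s hw=%d\n", cgrp->name, cgrp->hw);
		demo_cgrp_del(cgrp);
	}
	return 0;
}
```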
|
| chan.c |
    43   struct nvkm_cgrp *cgrp = chan->cgrp;  in nvkm_chan_cctx_bind() [local]
    55   if (cgrp->hw)  in nvkm_chan_cctx_bind()
    65   if (cgrp->hw)  in nvkm_chan_cctx_bind()
    95   struct nvkm_cgrp *cgrp = chan->cgrp;  in nvkm_chan_cctx_get() [local]
    165  struct nvkm_cgrp *cgrp = chan->cgrp;  in nvkm_chan_remove_locked() [local]
    197  struct nvkm_cgrp *cgrp = chan->cgrp;  in nvkm_chan_insert() [local]
    210  list_add_tail(&cgrp->head, &cgrp->runl->cgrps);  in nvkm_chan_insert()
    391  if (!cgrp) {  in nvkm_chan_new_()
    398  cgrp = chan->cgrp;  in nvkm_chan_new_()
    400  if (cgrp->runl != runl || cgrp->vmm != vmm) {  in nvkm_chan_new_()
    [all …]
|
| runl.c |
    48   cgrp = chan->cgrp;  in nvkm_engn_cgrp_get()
    53   WARN_ON(!cgrp);  in nvkm_engn_cgrp_get()
    54   return cgrp;  in nvkm_engn_cgrp_get()
    103  if (!cgrp) {  in nvkm_runl_rc()
    157  struct nvkm_cgrp *cgrp;  in nvkm_runl_rc_engn() [local]
    162  if (!cgrp) {  in nvkm_runl_rc_engn()
    167  nvkm_runl_rc_cgrp(cgrp);  in nvkm_runl_rc_engn()
    231  struct nvkm_cgrp *cgrp;  in nvkm_runl_cgrp_get_cgid() [local]
    236  cgrp = cgid->data[id];  in nvkm_runl_cgrp_get_cgid()
    237  if (likely(cgrp)) {  in nvkm_runl_cgrp_get_cgid()
    [all …]
|
| gk110.c |
    38   struct nvkm_cgrp *cgrp = chan->cgrp;  in gk110_chan_preempt() [local]
    40   if (cgrp->hw) {  in gk110_chan_preempt()
    41   cgrp->func->preempt(cgrp);  in gk110_chan_preempt()
    61   gk110_cgrp_preempt(struct nvkm_cgrp *cgrp)  in gk110_cgrp_preempt() [argument]
    63   nvkm_wr32(cgrp->runl->fifo->engine.subdev.device, 0x002634, 0x01000000 | cgrp->id);  in gk110_cgrp_preempt()
    72   gk110_runl_insert_cgrp(struct nvkm_cgrp *cgrp, struct nvkm_memory *memory, u64 offset)  in gk110_runl_insert_cgrp() [argument]
    74   nvkm_wo32(memory, offset + 0, (cgrp->chan_nr << 26) | (128 << 18) |  in gk110_runl_insert_cgrp()
    75   (3 << 14) | 0x00002000 | cgrp->id);  in gk110_runl_insert_cgrp()
    123  .cgrp = {{ 0, 0, KEPLER_CHANNEL_GROUP_A }, &gk110_cgrp },
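The gk110 lines show a group preempt that writes 0x01000000 | cgrp->id to register 0x002634, and the first word of a channel-group runlist entry being packed from the group's channel count and id. A standalone sketch of that word packing, using only the shifts visible above; the meaning of the fixed fields is not spelled out by the listing.

```c
#include <stdint.h>
#include <stdio.h>

/* Packs the 32-bit word written at offset + 0 in gk110_runl_insert_cgrp(),
 * exactly as the shifts appear in the hits above. */
static uint32_t gk110_cgrp_runl_word0(uint32_t chan_nr, uint32_t cgrp_id)
{
	return (chan_nr << 26) | (128 << 18) | (3 << 14) | 0x00002000 | cgrp_id;
}

int main(void)
{
	/* e.g. a group with 2 channels and id 5 */
	printf("0x%08x\n", gk110_cgrp_runl_word0(2, 5));
	return 0;
}
```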
|
| ucgrp.c |
    33   struct nvkm_cgrp *cgrp;  [member]
    40   struct nvkm_cgrp *cgrp = nvkm_ucgrp(oclass->parent)->cgrp;  in nvkm_ucgrp_chan_new() [local]
    42   return nvkm_uchan_new(cgrp->runl->fifo, cgrp, oclass, argv, argc, pobject);  in nvkm_ucgrp_chan_new()
    48   struct nvkm_cgrp *cgrp = nvkm_ucgrp(object)->cgrp;  in nvkm_ucgrp_sclass() [local]
    49   struct nvkm_fifo *fifo = cgrp->runl->fifo;  in nvkm_ucgrp_sclass()
    70   nvkm_cgrp_unref(&ucgrp->cgrp);  in nvkm_ucgrp_dtor()
    115  ret = nvkm_cgrp_new(runl, args->v0.name, vmm, true, &ucgrp->cgrp);  in nvkm_ucgrp_new()
    120  args->v0.cgid = ucgrp->cgrp->id;  in nvkm_ucgrp_new()
|
| tu102.c |
    37   return (chan->cgrp->runl->id << 16) | chan->id;  in tu102_chan_doorbell_handle()
    43   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in tu102_chan_start()
    140  struct nvkm_cgrp *cgrp;  in tu102_fifo_intr_ctxsw_timeout_info() [local]
    151  cgrp = nvkm_runl_cgrp_get_cgid(runl, info & 0x3fff0000, &flags);  in tu102_fifo_intr_ctxsw_timeout_info()
    155  cgrp = nvkm_runl_cgrp_get_cgid(runl, info & 0x00003fff, &flags);  in tu102_fifo_intr_ctxsw_timeout_info()
    158  cgrp = NULL;  in tu102_fifo_intr_ctxsw_timeout_info()
    162  if (!WARN_ON(!cgrp)) {  in tu102_fifo_intr_ctxsw_timeout_info()
    163  nvkm_runl_rc_cgrp(cgrp);  in tu102_fifo_intr_ctxsw_timeout_info()
    164  nvkm_cgrp_put(&cgrp, flags);  in tu102_fifo_intr_ctxsw_timeout_info()
    278  .cgrp = {{ 0, 0, KEPLER_CHANNEL_GROUP_A }, &gk110_cgrp, .force = true },
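tu102_chan_doorbell_handle() above packs the runlist id into the upper half-word and the channel id into the lower one; the same file resolves context-switch timeouts back to a channel group via nvkm_runl_cgrp_get_cgid() and runs recovery with nvkm_runl_rc_cgrp(). A standalone sketch of the doorbell packing only:

```c
#include <stdint.h>
#include <stdio.h>

/* Packing used by tu102_chan_doorbell_handle() per the hit above:
 * runlist id in the upper half-word, channel id in the lower one. */
static uint32_t tu102_doorbell_handle(uint32_t runl_id, uint32_t chan_id)
{
	return (runl_id << 16) | chan_id;
}

int main(void)
{
	printf("0x%08x\n", tu102_doorbell_handle(1, 42)); /* prints 0x0001002a */
	return 0;
}
```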
|
| nv50.c |
    50   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in nv50_chan_stop()
    58   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in nv50_chan_start()
    66   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in nv50_chan_unbind()
    152  struct nvkm_subdev *subdev = &chan->cgrp->runl->fifo->engine.subdev;  in nv50_ectx_bind()
    288  struct nvkm_cgrp *cgrp;  in nv50_runl_update() [local]
    303  nvkm_runl_foreach_cgrp(cgrp, runl) {  in nv50_runl_update()
    304  if (cgrp->hw) {  in nv50_runl_update()
    305  CGRP_TRACE(cgrp, " RAMRL+%08x: chans:%d", offset, cgrp->chan_nr);  in nv50_runl_update()
    306  runl->func->insert_cgrp(cgrp, memory, offset);  in nv50_runl_update()
    310  nvkm_cgrp_foreach_chan(chan, cgrp) {  in nv50_runl_update()
    [all …]
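nv50_runl_update() above walks every channel group on the runlist, emits a group entry first when the group is hardware-backed, and then iterates the group's channels. A structural sketch of that nesting follows, with arrays standing in for the kernel list iterators; the per-channel insert call and the fixed entry stride are illustrative assumptions, since only the group-level calls are visible in the listing.

```c
#include <stddef.h>
#include <stdint.h>

struct sketch_chan { uint32_t id; };

struct sketch_cgrp {
	int hw;                           /* hardware-backed group gets its own entry */
	uint32_t id, chan_nr;
	const struct sketch_chan *chans;  /* chan_nr entries */
};

/* Walks groups in runlist order: group entry first (when hw), then one entry
 * per channel.  Offsets advance by an assumed fixed stride. */
static uint64_t sketch_runl_build(const struct sketch_cgrp *cgrps, size_t nr,
				  uint64_t offset, uint64_t stride)
{
	for (size_t i = 0; i < nr; i++) {
		const struct sketch_cgrp *cgrp = &cgrps[i];

		if (cgrp->hw) {
			/* runl->func->insert_cgrp(cgrp, memory, offset); */
			offset += stride;
		}
		for (uint32_t c = 0; c < cgrp->chan_nr; c++) {
			/* runl->func->insert_chan(&cgrp->chans[c], memory, offset); */
			offset += stride;
		}
	}
	return offset;   /* first free offset, i.e. the runlist size */
}
```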
|
| uchan.c |
    45   struct nvkm_runl *runl = chan->cgrp->runl;  in nvkm_uchan_uevent()
    86   if (refcount_dec_and_mutex_lock(&cctx->uses, &chan->cgrp->mutex)) {  in nvkm_uchan_object_fini_1()
    91   mutex_unlock(&chan->cgrp->mutex);  in nvkm_uchan_object_fini_1()
    111  mutex_lock(&chan->cgrp->mutex);  in nvkm_uchan_object_init_0()
    124  mutex_unlock(&chan->cgrp->mutex);  in nvkm_uchan_object_init_0()
    158  struct nvkm_cgrp *cgrp = chan->cgrp;  in nvkm_uchan_object_new() [local]
    164  engn = nvkm_runl_find_engn(engn, cgrp->runl, engn->engine == oclass->engine);  in nvkm_uchan_object_new()
    210  nvkm_runl_foreach_engn(engn, chan->cgrp->runl) {  in nvkm_uchan_sclass()
    259  struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in nvkm_uchan_map()
    330  nvkm_uchan_new(struct nvkm_fifo *fifo, struct nvkm_cgrp *cgrp, const struct nvkm_oclass *oclass,  in nvkm_uchan_new() [argument]
    [all …]
|
| cgrp.h |
    67   #define nvkm_cgrp_foreach_chan(chan,cgrp) list_for_each_entry((chan), &(cgrp)->chans, head)  [argument]
    68   #define nvkm_cgrp_foreach_chan_safe(chan,ctmp,cgrp) \  [argument]
    69   list_for_each_entry_safe((chan), (ctmp), &(cgrp)->chans, head)
|
| base.c |
    78   if (oclass->engn == &fifo->func->cgrp.user)  in nvkm_fifo_class_new()
    97   const struct nvkm_fifo_func_cgrp *cgrp = &fifo->func->cgrp;  in nvkm_fifo_class_get() [local]
    102  if (cgrp->user.oclass) {  in nvkm_fifo_class_get()
    104  oclass->base = cgrp->user;  in nvkm_fifo_class_get()
    105  oclass->engn = &fifo->func->cgrp.user;  in nvkm_fifo_class_get()
|
| nv40.c |
    41   struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;  in nv40_chan_ramfc_write()
    113  struct nvkm_fifo *fifo = chan->cgrp->runl->fifo;  in nv40_eobj_ramht_add()
    127  struct nvkm_fifo *fifo = chan->cgrp->runl->fifo;  in nv40_ectx_bind()
    243  .cgrp = {{ }, &nv04_cgrp },
|
| runl.h |
    128  #define nvkm_runl_foreach_cgrp(cgrp,runl) list_for_each_entry((cgrp), &(runl)->cgrps, head)  [argument]
    129  #define nvkm_runl_foreach_cgrp_safe(cgrp,gtmp,runl) \  [argument]
    130  list_for_each_entry_safe((cgrp), (gtmp), &(runl)->cgrps, head)
|
| ga100.c |
    39   return (chan->cgrp->runl->doorbell << 16) | chan->id;  in ga100_chan_doorbell_handle()
    45   struct nvkm_runl *runl = chan->cgrp->runl;  in ga100_chan_stop()
    53   struct nvkm_runl *runl = chan->cgrp->runl;  in ga100_chan_start()
    64   struct nvkm_runl *runl = chan->cgrp->runl;  in ga100_chan_unbind()
    84   nvkm_wo32(chan->inst, 0x0f8, 0x80000000 | chan->cgrp->runl->nonstall.vector);  in ga100_chan_ramfc_write()
    110  ga100_cgrp_preempt(struct nvkm_cgrp *cgrp)  in ga100_cgrp_preempt() [argument]
    112  struct nvkm_runl *runl = cgrp->runl;  in ga100_cgrp_preempt()
    114  nvkm_wr32(runl->fifo->engine.subdev.device, runl->addr + 0x098, 0x01000000 | cgrp->id);  in ga100_cgrp_preempt()
    607  .cgrp = {{ 0, 0, KEPLER_CHANNEL_GROUP_A }, &ga100_cgrp, .force = true },
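On ga100 the doorbell handle is built from runl->doorbell rather than the runlist id, the RAMFC word at 0x0f8 carries 0x80000000 ORed with the runlist's non-stall interrupt vector, and group preemption writes 0x01000000 | cgrp->id to a per-runlist register at runl->addr + 0x098 instead of gk110's global 0x002634. A small sketch of the two 32-bit values computed in those lines; register semantics beyond the listed lines are not restated here.

```c
#include <stdint.h>
#include <stdio.h>

static uint32_t ga100_ramfc_nonstall(uint32_t vector)
{
	return 0x80000000 | vector;    /* written at chan->inst + 0x0f8 */
}

static uint32_t ga100_cgrp_preempt_val(uint32_t cgrp_id)
{
	return 0x01000000 | cgrp_id;   /* written at runl->addr + 0x098 */
}

int main(void)
{
	printf("0x%08x 0x%08x\n", ga100_ramfc_nonstall(7), ga100_cgrp_preempt_val(3));
	return 0;
}
```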
|
| nv04.c |
    43   struct nvkm_fifo *fifo = chan->cgrp->runl->fifo;  in nv04_chan_stop()
    94   struct nvkm_fifo *fifo = chan->cgrp->runl->fifo;  in nv04_chan_start()
    105  struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;  in nv04_chan_ramfc_clear()
    118  struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;  in nv04_chan_ramfc_write()
    183  struct nvkm_fifo *fifo = chan->cgrp->runl->fifo;  in nv04_eobj_ramht_del()
    194  struct nvkm_fifo *fifo = chan->cgrp->runl->fifo;  in nv04_eobj_ramht_add()
    534  .cgrp = {{ }, &nv04_cgrp },
|
| g84.c |
    37   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in g84_chan_bind()
    45   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in g84_chan_ramfc_write()
    108  struct nvkm_subdev *subdev = &chan->cgrp->runl->fifo->engine.subdev;  in g84_ectx_bind()
    222  .cgrp = {{ }, &nv04_cgrp },
|
| gv100.c |
    194  gv100_runl_insert_cgrp(struct nvkm_cgrp *cgrp, struct nvkm_memory *memory, u64 offset)  in gv100_runl_insert_cgrp() [argument]
    197  nvkm_wo32(memory, offset + 0x4, cgrp->chan_nr);  in gv100_runl_insert_cgrp()
    198  nvkm_wo32(memory, offset + 0x8, cgrp->id);  in gv100_runl_insert_cgrp()
    482  .cgrp = {{ 0, 0, KEPLER_CHANNEL_GROUP_A }, &gk110_cgrp, .force = true },
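gv100_runl_insert_cgrp() above stores the group's channel count and id as separate words at offsets 0x4 and 0x8 of the runlist entry, rather than packing them into one word as gk110 does. A minimal sketch of that layout; the word at offset 0x0 is not shown in the listing, so it is left untouched here.

```c
#include <stdint.h>

/* Fills the fields of a gv100-style channel-group runlist entry that are
 * visible in the hits above (entry viewed as 32-bit words). */
static void gv100_fill_cgrp_entry(uint32_t *entry, uint32_t chan_nr, uint32_t cgrp_id)
{
	entry[1] = chan_nr;   /* offset + 0x4 */
	entry[2] = cgrp_id;   /* offset + 0x8 */
}
```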
|
| nv10.c |
    39   struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;  in nv10_chan_ramfc_write()
    104  .cgrp = {{ }, &nv04_cgrp },
|
| chan.h |
    74   #define CHAN_PRCLI(c,l,p,f,a...) CGRP_PRINT((c)->cgrp, l, p, "%04x:[%s]"f, (c)->id, (c)->name, ##a)
    75   #define CHAN_PRINT(c,l,p,f,a...) CGRP_PRINT((c)->cgrp, l, p, "%04x:"f, (c)->id, ##a)
|
| gk104.c |
    42   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in gk104_chan_stop()
    50   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in gk104_chan_start()
    58   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in gk104_chan_unbind()
    66   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in gk104_chan_bind_inst()
    74   struct nvkm_runl *runl = chan->cgrp->runl;  in gk104_chan_bind()
    824  .cgrp = {{ }, &nv04_cgrp },
|
| nv17.c |
    40   struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;  in nv17_chan_ramfc_write()
    133  .cgrp = {{ }, &nv04_cgrp },
|
| gf100.c |
    43   nvkm_wr32(chan->cgrp->runl->fifo->engine.subdev.device, 0x002634, chan->id);  in gf100_chan_preempt()
    49   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in gf100_chan_stop()
    57   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in gf100_chan_start()
    67   struct nvkm_fifo *fifo = chan->cgrp->runl->fifo;  in gf100_chan_unbind()
    79   struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device;  in gf100_chan_bind()
    602  nvkm_runl_rc_cgrp(chan->cgrp);  in gf100_fifo_mmu_fault_recover()
    960  .cgrp = {{ }, &nv04_cgrp },
|
| gb202.c |
    13   return BIT(30) | (chan->cgrp->runl->id << 16) | chan->id;  in gb202_chan_doorbell_handle()
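gb202 keeps the tu102-style packing of runlist id and channel id but also sets bit 30 in the doorbell handle, per the single hit above. A standalone sketch of just that packing:

```c
#include <stdint.h>

/* BIT(30) from the kernel macro becomes an explicit shift here. */
static uint32_t gb202_doorbell_handle(uint32_t runl_id, uint32_t chan_id)
{
	return (1u << 30) | (runl_id << 16) | chan_id;
}
```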
|
| ga102.c |
    38   .cgrp = {{ 0, 0, KEPLER_CHANNEL_GROUP_A }, &ga100_cgrp, .force = true },
|
| gk20a.c |
    43   .cgrp = {{ }, &gk110_cgrp },
|
| gm200.c |
    57   .cgrp = {{ 0, 0, KEPLER_CHANNEL_GROUP_A }, &gk110_cgrp },
|