Lines matching refs:disp
44 struct nvkm_disp *disp = container_of(event, typeof(*disp), vblank); in nvkm_disp_vblank_fini() local
45 struct nvkm_head *head = nvkm_head_find(disp, id); in nvkm_disp_vblank_fini()
53 struct nvkm_disp *disp = container_of(event, typeof(*disp), vblank); in nvkm_disp_vblank_init() local
54 struct nvkm_head *head = nvkm_head_find(disp, id); in nvkm_disp_vblank_init()
66 nvkm_disp_vblank(struct nvkm_disp *disp, int head) in nvkm_disp_vblank() argument
68 nvkm_event_ntfy(&disp->vblank, head, NVKM_DISP_HEAD_EVENT_VBLANK); in nvkm_disp_vblank()
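The two vblank callbacks above recover the owning struct nvkm_disp from the embedded event with container_of(), then look the head up by id, and nvkm_disp_vblank() feeds notifications back into that same embedded event. A minimal standalone sketch of the embed-and-recover idiom, assuming simplified stand-in types (my_event/my_disp are illustrative names, not nouveau's):

    #include <stddef.h>
    #include <stdio.h>

    /* Simplified stand-ins for the kernel types; illustrative only. */
    struct my_event {
        int enabled;
    };

    struct my_disp {
        int index;
        struct my_event vblank;   /* event embedded in its owning object */
    };

    /* Same idea as the kernel's container_of(): step back from a pointer
     * to a member to the structure that contains it. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    /* A callback handed only the event, mirroring the shape of
     * nvkm_disp_vblank_init(). */
    static void vblank_init(struct my_event *event, int id)
    {
        struct my_disp *disp = container_of(event, struct my_disp, vblank);
        event->enabled = 1;
        printf("enable vblank for head %d on disp %d\n", id, disp->index);
    }

    int main(void)
    {
        struct my_disp disp = { .index = 0 };
        vblank_init(&disp.vblank, 3);
        return 0;
    }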
88 struct nvkm_disp *disp = nvkm_disp(oclass->engine); in nvkm_disp_class_get() local
90 oclass->base = disp->func->root; in nvkm_disp_class_get()
100 struct nvkm_disp *disp = nvkm_disp(engine); in nvkm_disp_intr() local
101 disp->func->intr(disp); in nvkm_disp_intr()
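Both nvkm_disp_class_get() and nvkm_disp_intr() defer to disp->func, the per-implementation hook table bound at construction time in nvkm_disp_new_(). A rough standalone sketch of that dispatch pattern, assuming a hypothetical my_disp_func table (the real nvkm_disp_func has many more members):

    #include <stdio.h>

    struct my_disp;

    /* Hypothetical hook table standing in for struct nvkm_disp_func. */
    struct my_disp_func {
        void (*intr)(struct my_disp *);
    };

    struct my_disp {
        const struct my_disp_func *func;   /* bound once, at construction */
    };

    /* An implementation-specific interrupt handler. */
    static void chip_intr(struct my_disp *disp)
    {
        printf("handling a display interrupt\n");
    }

    static const struct my_disp_func chip_disp_func = {
        .intr = chip_intr,
    };

    /* Generic entry point: forward to whichever implementation was bound,
     * just as nvkm_disp_intr() calls disp->func->intr(disp). */
    static void my_disp_intr(struct my_disp *disp)
    {
        disp->func->intr(disp);
    }

    int main(void)
    {
        struct my_disp disp = { .func = &chip_disp_func };
        my_disp_intr(&disp);
        return 0;
    }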
107 struct nvkm_disp *disp = nvkm_disp(engine); in nvkm_disp_fini() local
111 if (disp->func->fini) in nvkm_disp_fini()
112 disp->func->fini(disp); in nvkm_disp_fini()
114 list_for_each_entry(outp, &disp->outps, head) { in nvkm_disp_fini()
118 list_for_each_entry(conn, &disp->conns, head) { in nvkm_disp_fini()
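nvkm_disp_fini() shuts down outputs and connectors by walking the intrusive lists rooted at disp->outps and disp->conns; each object carries its own list node (its "head" member) rather than being held by an external container. A standalone sketch of that iteration, assuming a minimal hand-rolled list in place of the kernel's list_head/list_for_each_entry (my_outp is an illustrative type):

    #include <stddef.h>
    #include <stdio.h>

    /* Minimal intrusive list in the spirit of the kernel's struct list_head. */
    struct list_head { struct list_head *next, *prev; };

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    static void list_init(struct list_head *h) { h->next = h->prev = h; }

    static void list_add_tail(struct list_head *n, struct list_head *h)
    {
        n->prev = h->prev;
        n->next = h;
        h->prev->next = n;
        h->prev = n;
    }

    /* Illustrative output object: the list node is embedded in it, the way
     * nvkm_outp chains itself onto disp->outps via its "head" member. */
    struct my_outp { int index; struct list_head head; };

    int main(void)
    {
        struct list_head outps;
        struct my_outp a = { .index = 0 }, b = { .index = 1 };
        struct list_head *pos;

        list_init(&outps);
        list_add_tail(&a.head, &outps);
        list_add_tail(&b.head, &outps);

        /* Open-coded equivalent of
         * list_for_each_entry(outp, &disp->outps, head). */
        for (pos = outps.next; pos != &outps; pos = pos->next) {
            struct my_outp *outp = container_of(pos, struct my_outp, head);
            printf("fini outp %d\n", outp->index);
        }
        return 0;
    }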
128 struct nvkm_disp *disp = nvkm_disp(engine); in nvkm_disp_init() local
133 list_for_each_entry(conn, &disp->conns, head) { in nvkm_disp_init()
137 list_for_each_entry(outp, &disp->outps, head) { in nvkm_disp_init()
141 if (disp->func->init) { in nvkm_disp_init()
142 int ret = disp->func->init(disp); in nvkm_disp_init()
150 list_for_each_entry(ior, &disp->iors, head) { in nvkm_disp_init()
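The init path brings connectors up first, then outputs, then gives the implementation's optional func->init hook a chance to fail the sequence before the per-IOR loop runs. A condensed sketch of that optional-hook guard with error propagation (same illustrative my_disp types as above, re-declared so the snippet stands alone):

    #include <stdio.h>

    struct my_disp;

    struct my_disp_func {
        int (*init)(struct my_disp *);   /* optional: may be NULL */
    };

    struct my_disp {
        const struct my_disp_func *func;
    };

    /* Only call the hook if the implementation provided one, and stop on
     * failure, mirroring the func->init check in nvkm_disp_init(). */
    static int my_disp_init(struct my_disp *disp)
    {
        if (disp->func->init) {
            int ret = disp->func->init(disp);
            if (ret)
                return ret;
        }
        /* ...on success, continue with per-IOR setup... */
        return 0;
    }

    int main(void)
    {
        static const struct my_disp_func func = { .init = NULL };
        struct my_disp disp = { .func = &func };
        printf("init returned %d\n", my_disp_init(&disp));
        return 0;
    }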
160 struct nvkm_disp *disp = nvkm_disp(engine); in nvkm_disp_oneinit() local
161 struct nvkm_subdev *subdev = &disp->engine.subdev; in nvkm_disp_oneinit()
189 ret = nvkm_outp_new(disp, i, &dcbE, &outp); in nvkm_disp_oneinit()
192 ret = nvkm_dp_new(disp, i, &dcbE, &outp); in nvkm_disp_oneinit()
217 list_add_tail(&outp->head, &disp->outps); in nvkm_disp_oneinit()
222 list_for_each_entry_safe(outp, outt, &disp->outps, head) { in nvkm_disp_oneinit()
236 list_for_each_entry(pair, &disp->outps, head) { in nvkm_disp_oneinit()
256 list_for_each_entry(conn, &disp->conns, head) { in nvkm_disp_oneinit()
267 ret = nvkm_conn_new(disp, i, &connE, &outp->conn); in nvkm_disp_oneinit()
276 list_add_tail(&outp->conn->head, &disp->conns); in nvkm_disp_oneinit()
279 if (disp->func->oneinit) { in nvkm_disp_oneinit()
280 ret = disp->func->oneinit(disp); in nvkm_disp_oneinit()
288 list_for_each_entry(outp, &disp->outps, head) { in nvkm_disp_oneinit()
291 ior = nvkm_ior_find(disp, SOR, ffs(outp->info.or) - 1); in nvkm_disp_oneinit()
299 list_for_each_entry(head, &disp->heads, head) in nvkm_disp_oneinit()
302 return nvkm_event_init(&nvkm_disp_vblank_func, subdev, 1, i, &disp->vblank); in nvkm_disp_oneinit()
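Two details of the oneinit path are worth calling out. The connector-building walk over disp->outps uses list_for_each_entry_safe, the variant that exists precisely so the entry being visited can be unlinked mid-walk. And the nvkm_ior_find() lookup converts the output's OR mask into an index with ffs(outp->info.or) - 1, since ffs() reports a 1-based bit position. A quick standalone check of that conversion (the mask values below are made up for illustration):

    #include <stdio.h>
    #include <strings.h>   /* ffs() */

    int main(void)
    {
        /* Hypothetical OR masks: bit N set means "routed through SOR N". */
        unsigned int masks[] = { 0x1, 0x2, 0x4, 0x8 };
        unsigned int i;

        for (i = 0; i < sizeof(masks) / sizeof(masks[0]); i++) {
            /* ffs() is 1-based (ffs(0x4) == 3), hence the "- 1". */
            printf("mask 0x%x -> SOR %d\n", masks[i], ffs(masks[i]) - 1);
        }
        return 0;
    }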
308 struct nvkm_disp *disp = nvkm_disp(engine); in nvkm_disp_dtor() local
313 void *data = disp; in nvkm_disp_dtor()
315 nvkm_ramht_del(&disp->ramht); in nvkm_disp_dtor()
316 nvkm_gpuobj_del(&disp->inst); in nvkm_disp_dtor()
318 nvkm_event_fini(&disp->uevent); in nvkm_disp_dtor()
320 if (disp->super.wq) { in nvkm_disp_dtor()
321 destroy_workqueue(disp->super.wq); in nvkm_disp_dtor()
322 mutex_destroy(&disp->super.mutex); in nvkm_disp_dtor()
325 nvkm_event_fini(&disp->vblank); in nvkm_disp_dtor()
327 while (!list_empty(&disp->conns)) { in nvkm_disp_dtor()
328 conn = list_first_entry(&disp->conns, typeof(*conn), head); in nvkm_disp_dtor()
333 while (!list_empty(&disp->outps)) { in nvkm_disp_dtor()
334 outp = list_first_entry(&disp->outps, typeof(*outp), head); in nvkm_disp_dtor()
339 while (!list_empty(&disp->iors)) { in nvkm_disp_dtor()
340 ior = list_first_entry(&disp->iors, typeof(*ior), head); in nvkm_disp_dtor()
344 while (!list_empty(&disp->heads)) { in nvkm_disp_dtor()
345 head = list_first_entry(&disp->heads, typeof(*head), head); in nvkm_disp_dtor()
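The destructor drains each of the four lists with the same pop-front loop: take list_first_entry() while the list is non-empty and delete one object per pass, so the list and its objects go away together. A standalone sketch of that drain pattern, assuming the same minimal intrusive list as the earlier snippet (my_conn is an illustrative type):

    #include <stddef.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct list_head { struct list_head *next, *prev; };

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    static void list_init(struct list_head *h) { h->next = h->prev = h; }
    static int  list_empty(const struct list_head *h) { return h->next == h; }

    static void list_add_tail(struct list_head *n, struct list_head *h)
    {
        n->prev = h->prev;
        n->next = h;
        h->prev->next = n;
        h->prev = n;
    }

    static void list_del(struct list_head *n)
    {
        n->prev->next = n->next;
        n->next->prev = n->prev;
    }

    struct my_conn { int index; struct list_head head; };

    int main(void)
    {
        struct list_head conns;
        int i;

        list_init(&conns);
        for (i = 0; i < 3; i++) {
            struct my_conn *c = calloc(1, sizeof(*c));
            if (!c)
                return 1;
            c->index = i;
            list_add_tail(&c->head, &conns);
        }

        /* Drain loop in the shape of nvkm_disp_dtor(): pop the first entry
         * until nothing is left, freeing each object as it comes off. */
        while (!list_empty(&conns)) {
            struct my_conn *conn =
                container_of(conns.next, struct my_conn, head);
            printf("freeing conn %d\n", conn->index);
            list_del(&conn->head);
            free(conn);
        }
        return 0;
    }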
366 struct nvkm_disp *disp; in nvkm_disp_new_() local
369 if (!(disp = *pdisp = kzalloc(sizeof(**pdisp), GFP_KERNEL))) in nvkm_disp_new_()
372 disp->func = func; in nvkm_disp_new_()
373 INIT_LIST_HEAD(&disp->heads); in nvkm_disp_new_()
374 INIT_LIST_HEAD(&disp->iors); in nvkm_disp_new_()
375 INIT_LIST_HEAD(&disp->outps); in nvkm_disp_new_()
376 INIT_LIST_HEAD(&disp->conns); in nvkm_disp_new_()
377 spin_lock_init(&disp->client.lock); in nvkm_disp_new_()
379 ret = nvkm_engine_ctor(&nvkm_disp, device, type, inst, true, &disp->engine); in nvkm_disp_new_()
384 disp->super.wq = create_singlethread_workqueue("nvkm-disp"); in nvkm_disp_new_()
385 if (!disp->super.wq) in nvkm_disp_new_()
388 INIT_WORK(&disp->super.work, func->super); in nvkm_disp_new_()
389 mutex_init(&disp->super.mutex); in nvkm_disp_new_()
392 return nvkm_event_init(func->uevent, &disp->engine.subdev, 1, ARRAY_SIZE(disp->chan), in nvkm_disp_new_()
393 &disp->uevent); in nvkm_disp_new_()
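The constructor allocates the object and publishes it through the caller's pointer in a single expression, disp = *pdisp = kzalloc(...), then initialises the four lists before registering with the engine core. A standalone sketch of that allocate-and-return-through-out-parameter shape, with calloc standing in for kzalloc (my_disp_new is an illustrative name):

    #include <errno.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct my_disp {
        int inst;
    };

    /* Allocate zeroed storage, hand it back through *pdisp, and keep a local
     * alias so the rest of the constructor can keep initialising it. Mirrors
     * the kzalloc() line in nvkm_disp_new_(), with calloc standing in. */
    static int my_disp_new(int inst, struct my_disp **pdisp)
    {
        struct my_disp *disp;

        if (!(disp = *pdisp = calloc(1, sizeof(**pdisp))))
            return -ENOMEM;

        disp->inst = inst;
        return 0;
    }

    int main(void)
    {
        struct my_disp *disp = NULL;
        int ret = my_disp_new(0, &disp);

        if (ret == 0)
            printf("created disp instance %d\n", disp->inst);
        free(disp);
        return 0;
    }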