Lines matching refs:dev_priv in drivers/gpu/drm/vmwgfx/vmwgfx_irq.c (Linux kernel vmwgfx driver)

57 	struct vmw_private *dev_priv = vmw_priv(dev);  in vmw_thread_fn()  local
61 dev_priv->irqthread_pending)) { in vmw_thread_fn()
62 vmw_fences_update(dev_priv->fman); in vmw_thread_fn()
63 wake_up_all(&dev_priv->fence_queue); in vmw_thread_fn()
68 dev_priv->irqthread_pending)) { in vmw_thread_fn()
69 vmw_cmdbuf_irqthread(dev_priv->cman); in vmw_thread_fn()
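
The vmw_thread_fn() fragments above are the threaded bottom half installed by request_threaded_irq(): the hard handler records which sources fired in dev_priv->irqthread_pending, and the thread clears those bits and does the slower work (fence bookkeeping plus command-buffer processing). Below is a minimal user-space sketch of that pending-bit handoff, assuming C11 atomics; the names (PENDING_FENCE, PENDING_CMDBUF, thread_fn) are hypothetical, and a single atomic_exchange stands in for the driver's per-bit test_and_clear_bit().

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

enum { PENDING_FENCE = 1u << 0, PENDING_CMDBUF = 1u << 1 };

static atomic_uint irqthread_pending;    /* set by the hard handler */

static bool thread_fn(void)
{
        unsigned int bits = atomic_exchange(&irqthread_pending, 0);
        bool handled = false;

        if (bits & PENDING_FENCE) {      /* vmw_fences_update() + wake_up_all(&fence_queue) */
                puts("update fences, wake fence waiters");
                handled = true;
        }
        if (bits & PENDING_CMDBUF) {     /* vmw_cmdbuf_irqthread() */
                puts("process command buffers");
                handled = true;
        }
        return handled;                  /* maps to IRQ_HANDLED vs IRQ_NONE */
}

int main(void)
{
        atomic_fetch_or(&irqthread_pending, PENDING_FENCE);
        return thread_fn() ? 0 : 1;
}
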
90 struct vmw_private *dev_priv = vmw_priv(dev); in vmw_irq_handler() local
94 status = vmw_irq_status_read(dev_priv); in vmw_irq_handler()
95 masked_status = status & READ_ONCE(dev_priv->irq_mask); in vmw_irq_handler()
98 vmw_irq_status_write(dev_priv, status); in vmw_irq_handler()
104 wake_up_all(&dev_priv->fifo_queue); in vmw_irq_handler()
107 vmw_irqflag_fence_goal(dev_priv))) && in vmw_irq_handler()
108 !test_and_set_bit(VMW_IRQTHREAD_FENCE, dev_priv->irqthread_pending)) in vmw_irq_handler()
114 dev_priv->irqthread_pending)) in vmw_irq_handler()
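
vmw_irq_handler() is the hard half: it reads the device status register, masks it against the software irq_mask, acknowledges what it saw by writing the status back, wakes FIFO waiters directly, and defers fence/command-buffer work by setting a bit in irqthread_pending so the threaded handler above gets woken. The sketch below is a compile-able approximation of that read/mask/ack/defer shape; the status register is emulated by an atomic variable and every name is a stand-in rather than the driver's own, so details such as the exact early-return test differ from the real handler.

#include <stdatomic.h>
#include <stdio.h>

enum irqreturn { IRQ_NONE, IRQ_HANDLED, IRQ_WAKE_THREAD };

enum { IRQ_ANY_FENCE = 1u << 0, IRQ_FIFO_PROGRESS = 1u << 1, IRQ_CMDBUF = 1u << 2 };

static atomic_uint hw_status;                       /* stand-in for the SVGA status register */
static unsigned int irq_mask = IRQ_ANY_FENCE | IRQ_CMDBUF;
static atomic_uint irqthread_pending;

static enum irqreturn irq_handler(void)
{
        unsigned int status = atomic_load(&hw_status);
        unsigned int masked_status = status & irq_mask;
        enum irqreturn ret = IRQ_HANDLED;

        if (!status)
                return IRQ_NONE;                    /* nothing pending: not our interrupt */

        atomic_fetch_and(&hw_status, ~status);      /* ack: vmw_irq_status_write(dev_priv, status) */

        if (masked_status & IRQ_FIFO_PROGRESS)
                puts("wake fifo_queue");            /* wake_up_all(&dev_priv->fifo_queue) */

        if (masked_status & (IRQ_ANY_FENCE | IRQ_CMDBUF)) {
                atomic_fetch_or(&irqthread_pending, masked_status);
                ret = IRQ_WAKE_THREAD;              /* run the threaded bottom half */
        }
        return ret;
}

int main(void)
{
        atomic_fetch_or(&hw_status, IRQ_ANY_FENCE);
        return irq_handler() == IRQ_WAKE_THREAD ? 0 : 1;
}
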
120 static bool vmw_fifo_idle(struct vmw_private *dev_priv, uint32_t seqno) in vmw_fifo_idle() argument
123 return (vmw_read(dev_priv, SVGA_REG_BUSY) == 0); in vmw_fifo_idle()
126 bool vmw_seqno_passed(struct vmw_private *dev_priv, in vmw_seqno_passed() argument
130 u32 last_read_seqno = atomic_read_acquire(&dev_priv->last_read_seqno); in vmw_seqno_passed()
135 last_read_seqno = vmw_fences_update(dev_priv->fman); in vmw_seqno_passed()
139 if (!vmw_has_fences(dev_priv) && vmw_fifo_idle(dev_priv, seqno)) in vmw_seqno_passed()
147 ret = ((atomic_read(&dev_priv->marker_seq) - seqno) in vmw_seqno_passed()
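
vmw_seqno_passed() decides signaling with unsigned 32-bit arithmetic so it stays correct when the sequence counter wraps: the surrounding checks (several of them do not reference dev_priv and so are absent from this listing) treat a small unsigned difference as "already passed", and the final test against marker_seq catches stale seqnos that are ahead of anything ever emitted. The following self-contained illustration shows that wraparound comparison; FENCE_WRAP here is a stand-in window, not the driver's VMW_FENCE_WRAP value.

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

#define FENCE_WRAP (1u << 31)      /* stand-in "recently signaled" window */

/* True if seqno is at or behind last_read, modulo 2^32. */
static bool seqno_passed(uint32_t last_read, uint32_t seqno)
{
        return (last_read - seqno) < FENCE_WRAP;
}

int main(void)
{
        assert(seqno_passed(100u, 99u));            /* plainly older */
        assert(!seqno_passed(100u, 101u));          /* not signaled yet */
        assert(seqno_passed(5u, 0xfffffffeu));      /* still correct across the 32-bit wrap */
        return 0;
}
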
153 int vmw_fallback_wait(struct vmw_private *dev_priv, in vmw_fallback_wait() argument
160 struct vmw_fifo_state *fifo_state = dev_priv->fifo; in vmw_fallback_wait()
178 if (dev_priv->cman) { in vmw_fallback_wait()
179 ret = vmw_cmdbuf_idle(dev_priv->cman, interruptible, in vmw_fallback_wait()
189 signal_seq = atomic_read(&dev_priv->marker_seq); in vmw_fallback_wait()
193 prepare_to_wait(&dev_priv->fence_queue, &__wait, in vmw_fallback_wait()
196 if (wait_condition(dev_priv, seqno)) in vmw_fallback_wait()
221 finish_wait(&dev_priv->fence_queue, &__wait); in vmw_fallback_wait()
223 vmw_fence_write(dev_priv, signal_seq); in vmw_fallback_wait()
225 wake_up_all(&dev_priv->fence_queue); in vmw_fallback_wait()
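
vmw_fallback_wait() is the no-IRQ path: it lets the command-buffer manager drain, snapshots the last emitted seqno from marker_seq, then loops prepare_to_wait()/condition-check/finish_wait() on fence_queue until the seqno passes or the timeout lapses, finally writing the fence register and waking any other sleepers. Below is a rough pthread analogue of that wait-loop shape, with hypothetical names; the kernel version naturally uses wait queues and schedule_timeout() rather than a condition variable, and its error handling differs.

#include <pthread.h>
#include <stdbool.h>
#include <stdint.h>
#include <time.h>

static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t fence_queue = PTHREAD_COND_INITIALIZER;
static uint32_t last_read_seqno;      /* bumped by whoever notices fence progress */

/* Wait until 'seqno' has passed or roughly 'timeout_ms' elapses. */
static bool fallback_wait(uint32_t seqno, unsigned int timeout_ms)
{
        struct timespec deadline;
        int err = 0;

        clock_gettime(CLOCK_REALTIME, &deadline);
        deadline.tv_sec += timeout_ms / 1000;
        deadline.tv_nsec += (long)(timeout_ms % 1000) * 1000000L;
        if (deadline.tv_nsec >= 1000000000L) {
                deadline.tv_sec += 1;
                deadline.tv_nsec -= 1000000000L;
        }

        pthread_mutex_lock(&lock);
        while ((last_read_seqno - seqno) >= (1u << 31) && err == 0)
                err = pthread_cond_timedwait(&fence_queue, &lock, &deadline);
        pthread_mutex_unlock(&lock);

        return err == 0;              /* false on timeout */
}

int main(void)
{
        pthread_mutex_lock(&lock);
        last_read_seqno = 42;         /* pretend the device already signaled */
        pthread_mutex_unlock(&lock);
        return fallback_wait(41, 10) ? 0 : 1;
}
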
233 bool vmw_generic_waiter_add(struct vmw_private *dev_priv, in vmw_generic_waiter_add() argument
238 spin_lock(&dev_priv->waiter_lock); in vmw_generic_waiter_add()
240 vmw_irq_status_write(dev_priv, flag); in vmw_generic_waiter_add()
241 dev_priv->irq_mask |= flag; in vmw_generic_waiter_add()
242 vmw_write(dev_priv, SVGA_REG_IRQMASK, dev_priv->irq_mask); in vmw_generic_waiter_add()
245 spin_unlock(&dev_priv->waiter_lock); in vmw_generic_waiter_add()
249 bool vmw_generic_waiter_remove(struct vmw_private *dev_priv, in vmw_generic_waiter_remove() argument
254 spin_lock(&dev_priv->waiter_lock); in vmw_generic_waiter_remove()
256 dev_priv->irq_mask &= ~flag; in vmw_generic_waiter_remove()
257 vmw_write(dev_priv, SVGA_REG_IRQMASK, dev_priv->irq_mask); in vmw_generic_waiter_remove()
260 spin_unlock(&dev_priv->waiter_lock); in vmw_generic_waiter_remove()
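
vmw_generic_waiter_add()/_remove() implement counted interrupt masking: under waiter_lock, the first waiter of a given kind unmasks its flag in SVGA_REG_IRQMASK (after acking any stale pending bit in the status register), and the last waiter to leave masks it again, so the device only raises interrupts somebody is actually waiting for. Here is a compile-able pthread sketch of that first-waiter-enables / last-waiter-disables pattern; irq_mask is an ordinary variable rather than a device register, and the bool return value ("did this call change the mask") is this sketch's convention, not necessarily the driver's.

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

static pthread_mutex_t waiter_lock = PTHREAD_MUTEX_INITIALIZER;
static unsigned int irq_mask;                 /* stand-in for SVGA_REG_IRQMASK */

static bool waiter_add(unsigned int flag, int *waiter_count)
{
        bool changed = false;

        pthread_mutex_lock(&waiter_lock);
        if ((*waiter_count)++ == 0) {         /* first waiter unmasks the source */
                irq_mask |= flag;
                changed = true;
        }
        pthread_mutex_unlock(&waiter_lock);
        return changed;
}

static bool waiter_remove(unsigned int flag, int *waiter_count)
{
        bool changed = false;

        pthread_mutex_lock(&waiter_lock);
        if (--(*waiter_count) == 0) {         /* last waiter masks it again */
                irq_mask &= ~flag;
                changed = true;
        }
        pthread_mutex_unlock(&waiter_lock);
        return changed;
}

int main(void)
{
        int fence_queue_waiters = 0;

        /* Only the first add and the last remove actually touch the mask. */
        printf("%d\n", waiter_add(0x1, &fence_queue_waiters));    /* 1 */
        printf("%d\n", waiter_add(0x1, &fence_queue_waiters));    /* 0 */
        printf("%d\n", waiter_remove(0x1, &fence_queue_waiters)); /* 0 */
        printf("%d\n", waiter_remove(0x1, &fence_queue_waiters)); /* 1 */
        return 0;
}
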
264 bool vmw_seqno_waiter_add(struct vmw_private *dev_priv) in vmw_seqno_waiter_add() argument
266 return vmw_generic_waiter_add(dev_priv, SVGA_IRQFLAG_ANY_FENCE, in vmw_seqno_waiter_add()
267 &dev_priv->fence_queue_waiters); in vmw_seqno_waiter_add()
270 bool vmw_seqno_waiter_remove(struct vmw_private *dev_priv) in vmw_seqno_waiter_remove() argument
272 return vmw_generic_waiter_remove(dev_priv, SVGA_IRQFLAG_ANY_FENCE, in vmw_seqno_waiter_remove()
273 &dev_priv->fence_queue_waiters); in vmw_seqno_waiter_remove()
276 bool vmw_goal_waiter_add(struct vmw_private *dev_priv) in vmw_goal_waiter_add() argument
278 return vmw_generic_waiter_add(dev_priv, vmw_irqflag_fence_goal(dev_priv), in vmw_goal_waiter_add()
279 &dev_priv->goal_queue_waiters); in vmw_goal_waiter_add()
282 bool vmw_goal_waiter_remove(struct vmw_private *dev_priv) in vmw_goal_waiter_remove() argument
284 return vmw_generic_waiter_remove(dev_priv, vmw_irqflag_fence_goal(dev_priv), in vmw_goal_waiter_remove()
285 &dev_priv->goal_queue_waiters); in vmw_goal_waiter_remove()
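
The four wrappers above (vmw_seqno_waiter_add/_remove and vmw_goal_waiter_add/_remove) only bind a flag and a counter to the generic helpers: the seqno variants pass SVGA_IRQFLAG_ANY_FENCE together with fence_queue_waiters, while the goal variants pair goal_queue_waiters with a flag looked up at runtime through vmw_irqflag_fence_goal(dev_priv), since the fence-goal flag to use depends on the device.
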
290 struct vmw_private *dev_priv = vmw_priv(dev); in vmw_irq_preinstall() local
293 status = vmw_irq_status_read(dev_priv); in vmw_irq_preinstall()
294 vmw_irq_status_write(dev_priv, status); in vmw_irq_preinstall()
299 struct vmw_private *dev_priv = vmw_priv(dev); in vmw_irq_uninstall() local
304 if (!(dev_priv->capabilities & SVGA_CAP_IRQMASK)) in vmw_irq_uninstall()
307 vmw_write(dev_priv, SVGA_REG_IRQMASK, 0); in vmw_irq_uninstall()
309 status = vmw_irq_status_read(dev_priv); in vmw_irq_uninstall()
310 vmw_irq_status_write(dev_priv, status); in vmw_irq_uninstall()
312 for (i = 0; i < dev_priv->num_irq_vectors; ++i) in vmw_irq_uninstall()
313 free_irq(dev_priv->irqs[i], dev); in vmw_irq_uninstall()
316 dev_priv->num_irq_vectors = 0; in vmw_irq_uninstall()
325 int vmw_irq_install(struct vmw_private *dev_priv) in vmw_irq_install() argument
327 struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev); in vmw_irq_install()
328 struct drm_device *dev = &dev_priv->drm; in vmw_irq_install()
340 drm_err(&dev_priv->drm, in vmw_irq_install()
351 drm_err(&dev_priv->drm, in vmw_irq_install()
355 dev_priv->irqs[i] = ret; in vmw_irq_install()
357 ret = request_threaded_irq(dev_priv->irqs[i], vmw_irq_handler, vmw_thread_fn, in vmw_irq_install()
360 drm_err(&dev_priv->drm, in vmw_irq_install()
362 dev_priv->irqs[i], ret); in vmw_irq_install()
368 dev_priv->num_irq_vectors = i; in vmw_irq_install()
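
vmw_irq_install() allocates the PCI interrupt vectors, requests a threaded interrupt on each one (vmw_irq_handler as the hard handler, vmw_thread_fn as the thread), and records how many requests succeeded in num_irq_vectors; vmw_irq_uninstall() then masks everything via SVGA_REG_IRQMASK, acks any pending status, and frees exactly that many lines. The sketch below shows the record-and-unwind pattern those two fragments share; every helper (get_vector, request_irq_line, free_irq_line) is a hypothetical stand-in for the real PCI/IRQ APIs, and the vector limit is an arbitrary value.

#include <stdbool.h>
#include <stdio.h>

#define MAX_VECTORS 4                          /* stand-in for the driver's vector limit */

static int irqs[MAX_VECTORS];
static int num_irq_vectors;

/* Hypothetical stand-ins for pci_irq_vector(), request_threaded_irq() and free_irq(). */
static int  get_vector(int i)          { return 100 + i; }
static bool request_irq_line(int irq)  { return irq < 103; }   /* pretend the 4th request fails */
static void free_irq_line(int irq)     { printf("freed irq %d\n", irq); }

static void irq_uninstall(void)
{
        for (int i = 0; i < num_irq_vectors; ++i)
                free_irq_line(irqs[i]);
        num_irq_vectors = 0;
}

static int irq_install(void)
{
        for (int i = 0; i < MAX_VECTORS; ++i) {
                irqs[i] = get_vector(i);
                if (!request_irq_line(irqs[i])) {
                        num_irq_vectors = i;   /* only what actually succeeded ... */
                        irq_uninstall();       /* ... is unwound on the error path */
                        return -1;
                }
        }
        num_irq_vectors = MAX_VECTORS;
        return 0;
}

int main(void)
{
        return irq_install() == 0 ? 0 : 1;
}
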