Lines matching refs: dev_priv
35 bool vmw_supports_3d(struct vmw_private *dev_priv) in vmw_supports_3d() argument
38 const struct vmw_fifo_state *fifo = dev_priv->fifo; in vmw_supports_3d()
40 if (!(dev_priv->capabilities & SVGA_CAP_3D)) in vmw_supports_3d()
43 if (dev_priv->capabilities & SVGA_CAP_GBOBJECTS) { in vmw_supports_3d()
46 if (!dev_priv->has_mob) in vmw_supports_3d()
49 result = vmw_devcap_get(dev_priv, SVGA3D_DEVCAP_3D); in vmw_supports_3d()
54 if (!(dev_priv->capabilities & SVGA_CAP_EXTENDED_FIFO)) in vmw_supports_3d()
57 BUG_ON(vmw_is_svga_v3(dev_priv)); in vmw_supports_3d()
59 fifo_min = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MIN); in vmw_supports_3d()
63 hwversion = vmw_fifo_mem_read(dev_priv, in vmw_supports_3d()
76 if (dev_priv->active_display_unit == vmw_du_legacy) in vmw_supports_3d()
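The hits above span the whole of vmw_supports_3d(): the SVGA_CAP_3D capability bit, the MOB/DEVCAP path for guest-backed devices, and the extended-FIFO hwversion fallback. A minimal caller-side sketch, assuming a hypothetical helper name (only vmw_supports_3d() itself is from the driver), would gate any SVGA3D work on this check:

	/* Hypothetical helper, for illustration only. */
	static int vmw_setup_3d_path(struct vmw_private *dev_priv)
	{
		if (!vmw_supports_3d(dev_priv))
			return -ENOSYS;	/* stay on the 2D-only path */
		/* Safe to emit SVGA3D commands from here on. */
		return 0;
	}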
82 bool vmw_fifo_have_pitchlock(struct vmw_private *dev_priv) in vmw_fifo_have_pitchlock() argument
86 if (!(dev_priv->capabilities & SVGA_CAP_EXTENDED_FIFO)) in vmw_fifo_have_pitchlock()
89 caps = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_CAPABILITIES); in vmw_fifo_have_pitchlock()
96 struct vmw_fifo_state *vmw_fifo_create(struct vmw_private *dev_priv) in vmw_fifo_create() argument
102 if (!dev_priv->fifo_mem) in vmw_fifo_create()
122 if (dev_priv->capabilities & SVGA_CAP_EXTENDED_FIFO) in vmw_fifo_create()
123 min = vmw_read(dev_priv, SVGA_REG_MEM_REGS); in vmw_fifo_create()
129 vmw_fifo_mem_write(dev_priv, SVGA_FIFO_MIN, min); in vmw_fifo_create()
130 vmw_fifo_mem_write(dev_priv, SVGA_FIFO_MAX, dev_priv->fifo_mem_size); in vmw_fifo_create()
132 vmw_fifo_mem_write(dev_priv, SVGA_FIFO_NEXT_CMD, min); in vmw_fifo_create()
133 vmw_fifo_mem_write(dev_priv, SVGA_FIFO_STOP, min); in vmw_fifo_create()
134 vmw_fifo_mem_write(dev_priv, SVGA_FIFO_BUSY, 0); in vmw_fifo_create()
137 vmw_write(dev_priv, SVGA_REG_CONFIG_DONE, 1); in vmw_fifo_create()
139 max = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MAX); in vmw_fifo_create()
140 min = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MIN); in vmw_fifo_create()
141 fifo->capabilities = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_CAPABILITIES); in vmw_fifo_create()
143 drm_info(&dev_priv->drm, in vmw_fifo_create()
150 drm_warn(&dev_priv->drm, in vmw_fifo_create()
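Read in line-number order, the vmw_fifo_create() hits spell out the FIFO bring-up sequence: program SVGA_FIFO_MIN and SVGA_FIFO_MAX, point NEXT_CMD and STOP at min (an empty ring), clear BUSY, then write SVGA_REG_CONFIG_DONE to hand the FIFO to the device. A condensed sketch of that order, with the surrounding error handling omitted:

	vmw_fifo_mem_write(dev_priv, SVGA_FIFO_MIN, min);
	vmw_fifo_mem_write(dev_priv, SVGA_FIFO_MAX, dev_priv->fifo_mem_size);
	vmw_fifo_mem_write(dev_priv, SVGA_FIFO_NEXT_CMD, min);	/* producer == consumer: ring empty */
	vmw_fifo_mem_write(dev_priv, SVGA_FIFO_STOP, min);
	vmw_fifo_mem_write(dev_priv, SVGA_FIFO_BUSY, 0);
	vmw_write(dev_priv, SVGA_REG_CONFIG_DONE, 1);	/* device may now consume the FIFO */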
158 void vmw_fifo_ping_host(struct vmw_private *dev_priv, uint32_t reason) in vmw_fifo_ping_host() argument
160 u32 *fifo_mem = dev_priv->fifo_mem; in vmw_fifo_ping_host()
162 vmw_write(dev_priv, SVGA_REG_SYNC, reason); in vmw_fifo_ping_host()
166 void vmw_fifo_destroy(struct vmw_private *dev_priv) in vmw_fifo_destroy() argument
168 struct vmw_fifo_state *fifo = dev_priv->fifo; in vmw_fifo_destroy()
183 dev_priv->fifo = NULL; in vmw_fifo_destroy()
186 static bool vmw_fifo_is_full(struct vmw_private *dev_priv, uint32_t bytes) in vmw_fifo_is_full() argument
188 uint32_t max = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MAX); in vmw_fifo_is_full()
189 uint32_t next_cmd = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_NEXT_CMD); in vmw_fifo_is_full()
190 uint32_t min = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MIN); in vmw_fifo_is_full()
191 uint32_t stop = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_STOP); in vmw_fifo_is_full()
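vmw_fifo_is_full() reads the four ring pointers and decides whether reserving `bytes` more would catch up with the consumer. A sketch of the usual ring arithmetic, reconstructed from the four reads above and not shown verbatim in the hits: free space is the gap from next_cmd up to max plus the already-drained gap from min up to stop.

	/* Sketch of the fullness test implied by the reads above. */
	return ((max - next_cmd) + (stop - min) <= bytes);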
196 static int vmw_fifo_wait_noirq(struct vmw_private *dev_priv, in vmw_fifo_wait_noirq() argument
207 prepare_to_wait(&dev_priv->fifo_queue, &__wait, in vmw_fifo_wait_noirq()
210 if (!vmw_fifo_is_full(dev_priv, bytes)) in vmw_fifo_wait_noirq()
223 finish_wait(&dev_priv->fifo_queue, &__wait); in vmw_fifo_wait_noirq()
224 wake_up_all(&dev_priv->fifo_queue); in vmw_fifo_wait_noirq()
229 static int vmw_fifo_wait(struct vmw_private *dev_priv, in vmw_fifo_wait() argument
235 if (likely(!vmw_fifo_is_full(dev_priv, bytes))) in vmw_fifo_wait()
238 vmw_fifo_ping_host(dev_priv, SVGA_SYNC_FIFOFULL); in vmw_fifo_wait()
239 if (!(dev_priv->capabilities & SVGA_CAP_IRQMASK)) in vmw_fifo_wait()
240 return vmw_fifo_wait_noirq(dev_priv, bytes, in vmw_fifo_wait()
243 vmw_generic_waiter_add(dev_priv, SVGA_IRQFLAG_FIFO_PROGRESS, in vmw_fifo_wait()
244 &dev_priv->fifo_queue_waiters); in vmw_fifo_wait()
248 (dev_priv->fifo_queue, in vmw_fifo_wait()
249 !vmw_fifo_is_full(dev_priv, bytes), timeout); in vmw_fifo_wait()
252 (dev_priv->fifo_queue, in vmw_fifo_wait()
253 !vmw_fifo_is_full(dev_priv, bytes), timeout); in vmw_fifo_wait()
260 vmw_generic_waiter_remove(dev_priv, SVGA_IRQFLAG_FIFO_PROGRESS, in vmw_fifo_wait()
261 &dev_priv->fifo_queue_waiters); in vmw_fifo_wait()
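The vmw_fifo_wait() hits show the full-FIFO slow path: ping the host with SVGA_SYNC_FIFOFULL, fall back to the polling wait when SVGA_CAP_IRQMASK is absent, otherwise register a FIFO-progress waiter and block on fifo_queue. The two wait sites at source lines 248-253 differ only in interruptibility; a sketch of that split, taking `interruptible` and `timeout` to be the function's parameters:

	if (interruptible)
		ret = wait_event_interruptible_timeout(dev_priv->fifo_queue,
				!vmw_fifo_is_full(dev_priv, bytes), timeout);
	else
		ret = wait_event_timeout(dev_priv->fifo_queue,
				!vmw_fifo_is_full(dev_priv, bytes), timeout);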
276 static void *vmw_local_fifo_reserve(struct vmw_private *dev_priv, in vmw_local_fifo_reserve() argument
279 struct vmw_fifo_state *fifo_state = dev_priv->fifo; in vmw_local_fifo_reserve()
280 u32 *fifo_mem = dev_priv->fifo_mem; in vmw_local_fifo_reserve()
288 max = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MAX); in vmw_local_fifo_reserve()
289 min = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MIN); in vmw_local_fifo_reserve()
290 next_cmd = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_NEXT_CMD); in vmw_local_fifo_reserve()
301 uint32_t stop = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_STOP); in vmw_local_fifo_reserve()
310 else if (vmw_fifo_is_full(dev_priv, bytes)) { in vmw_local_fifo_reserve()
311 ret = vmw_fifo_wait(dev_priv, bytes, in vmw_local_fifo_reserve()
323 ret = vmw_fifo_wait(dev_priv, bytes, in vmw_local_fifo_reserve()
335 vmw_fifo_mem_write(dev_priv, in vmw_local_fifo_reserve()
364 void *vmw_cmd_ctx_reserve(struct vmw_private *dev_priv, uint32_t bytes, in vmw_cmd_ctx_reserve() argument
369 if (dev_priv->cman) in vmw_cmd_ctx_reserve()
370 ret = vmw_cmdbuf_reserve(dev_priv->cman, bytes, in vmw_cmd_ctx_reserve()
373 ret = vmw_local_fifo_reserve(dev_priv, bytes); in vmw_cmd_ctx_reserve()
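vmw_cmd_ctx_reserve() picks the command buffer manager when dev_priv->cman is present and falls back to the legacy FIFO otherwise; callers pair it with vmw_cmd_commit(), as the later hits show. A hedged usage sketch, where the command struct name is illustrative:

	struct my_cmd *cmd;	/* struct my_cmd is a placeholder, not a driver type */

	cmd = VMW_CMD_RESERVE(dev_priv, sizeof(*cmd));
	if (unlikely(!cmd))
		return -ENOMEM;
	/* ... fill in the SVGA command header and body ... */
	vmw_cmd_commit(dev_priv, sizeof(*cmd));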
426 static void vmw_local_fifo_commit(struct vmw_private *dev_priv, uint32_t bytes) in vmw_local_fifo_commit() argument
428 struct vmw_fifo_state *fifo_state = dev_priv->fifo; in vmw_local_fifo_commit()
429 uint32_t next_cmd = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_NEXT_CMD); in vmw_local_fifo_commit()
430 uint32_t max = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MAX); in vmw_local_fifo_commit()
431 uint32_t min = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_MIN); in vmw_local_fifo_commit()
441 vmw_fifo_res_copy(fifo_state, dev_priv, in vmw_local_fifo_commit()
444 vmw_fifo_slow_copy(fifo_state, dev_priv, in vmw_local_fifo_commit()
460 vmw_fifo_mem_write(dev_priv, SVGA_FIFO_NEXT_CMD, next_cmd); in vmw_local_fifo_commit()
464 vmw_fifo_mem_write(dev_priv, SVGA_FIFO_RESERVED, 0); in vmw_local_fifo_commit()
467 vmw_fifo_ping_host(dev_priv, SVGA_SYNC_GENERIC); in vmw_local_fifo_commit()
471 void vmw_cmd_commit(struct vmw_private *dev_priv, uint32_t bytes) in vmw_cmd_commit() argument
473 if (dev_priv->cman) in vmw_cmd_commit()
474 vmw_cmdbuf_commit(dev_priv->cman, bytes, NULL, false); in vmw_cmd_commit()
476 vmw_local_fifo_commit(dev_priv, bytes); in vmw_cmd_commit()
486 void vmw_cmd_commit_flush(struct vmw_private *dev_priv, uint32_t bytes) in vmw_cmd_commit_flush() argument
488 if (dev_priv->cman) in vmw_cmd_commit_flush()
489 vmw_cmdbuf_commit(dev_priv->cman, bytes, NULL, true); in vmw_cmd_commit_flush()
491 vmw_local_fifo_commit(dev_priv, bytes); in vmw_cmd_commit_flush()
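vmw_cmd_commit() and vmw_cmd_commit_flush() share the same dispatch as the reserve path: command buffer manager when present, legacy FIFO otherwise, with the flush variant forcing submission. The shape of that dispatch, as seen in the hits:

	if (dev_priv->cman)
		vmw_cmdbuf_commit(dev_priv->cman, bytes, NULL, flush);	/* flush == true forces submission */
	else
		vmw_local_fifo_commit(dev_priv, bytes);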
501 int vmw_cmd_flush(struct vmw_private *dev_priv, bool interruptible) in vmw_cmd_flush() argument
505 if (dev_priv->cman) in vmw_cmd_flush()
506 return vmw_cmdbuf_cur_flush(dev_priv->cman, interruptible); in vmw_cmd_flush()
511 int vmw_cmd_send_fence(struct vmw_private *dev_priv, uint32_t *seqno) in vmw_cmd_send_fence() argument
518 fm = VMW_CMD_RESERVE(dev_priv, bytes); in vmw_cmd_send_fence()
520 *seqno = atomic_read(&dev_priv->marker_seq); in vmw_cmd_send_fence()
522 (void)vmw_fallback_wait(dev_priv, false, true, *seqno, in vmw_cmd_send_fence()
528 *seqno = atomic_add_return(1, &dev_priv->marker_seq); in vmw_cmd_send_fence()
531 if (!vmw_has_fences(dev_priv)) { in vmw_cmd_send_fence()
538 vmw_cmd_commit(dev_priv, 0); in vmw_cmd_send_fence()
545 vmw_cmd_commit_flush(dev_priv, bytes); in vmw_cmd_send_fence()
546 vmw_update_seqno(dev_priv); in vmw_cmd_send_fence()
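vmw_cmd_send_fence() bumps marker_seq, emits a fence command, and returns the new sequence number through *seqno; on devices without fences it degrades to a plain commit and a fallback wait. A hedged caller sketch, where the wait helper and timeout constant are assumptions about the rest of the driver rather than anything shown above:

	uint32_t seqno;
	int ret;

	ret = vmw_cmd_send_fence(dev_priv, &seqno);
	if (unlikely(ret != 0))
		return ret;
	/* vmw_wait_seqno()/VMW_FENCE_WAIT_TIMEOUT assumed to exist elsewhere in the driver. */
	ret = vmw_wait_seqno(dev_priv, false, seqno, true, VMW_FENCE_WAIT_TIMEOUT);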
561 static int vmw_cmd_emit_dummy_legacy_query(struct vmw_private *dev_priv, in vmw_cmd_emit_dummy_legacy_query() argument
570 struct ttm_buffer_object *bo = &dev_priv->dummy_query_bo->base; in vmw_cmd_emit_dummy_legacy_query()
576 cmd = VMW_CMD_RESERVE(dev_priv, sizeof(*cmd)); in vmw_cmd_emit_dummy_legacy_query()
593 vmw_cmd_commit(dev_priv, sizeof(*cmd)); in vmw_cmd_emit_dummy_legacy_query()
607 static int vmw_cmd_emit_dummy_gb_query(struct vmw_private *dev_priv, in vmw_cmd_emit_dummy_gb_query() argument
616 struct ttm_buffer_object *bo = &dev_priv->dummy_query_bo->base; in vmw_cmd_emit_dummy_gb_query()
622 cmd = VMW_CMD_RESERVE(dev_priv, sizeof(*cmd)); in vmw_cmd_emit_dummy_gb_query()
634 vmw_cmd_commit(dev_priv, sizeof(*cmd)); in vmw_cmd_emit_dummy_gb_query()
658 int vmw_cmd_emit_dummy_query(struct vmw_private *dev_priv, in vmw_cmd_emit_dummy_query() argument
661 if (dev_priv->has_mob) in vmw_cmd_emit_dummy_query()
662 return vmw_cmd_emit_dummy_gb_query(dev_priv, cid); in vmw_cmd_emit_dummy_query()
664 return vmw_cmd_emit_dummy_legacy_query(dev_priv, cid); in vmw_cmd_emit_dummy_query()