Lines matching refs:dev_priv — struct vmw_private usage in the vmwgfx driver header (vmwgfx_drv.h)
/* dev_priv is a back-pointer member in two driver structures: */
struct vmw_private *dev_priv;	/* member (two occurrences) */
/* Register accessors: SVGA v3 uses MMIO (rmmio); older devices use an
 * index/value I/O-port pair that must be serialized with hw_lock. */
static inline void vmw_write(struct vmw_private *dev_priv,
			     unsigned int offset, uint32_t value)
{
	if (vmw_is_svga_v3(dev_priv)) {
		iowrite32(value, dev_priv->rmmio + offset);
	} else {
		spin_lock(&dev_priv->hw_lock);
		outl(offset, dev_priv->io_start + SVGA_INDEX_PORT);
		outl(value, dev_priv->io_start + SVGA_VALUE_PORT);
		spin_unlock(&dev_priv->hw_lock);
	}
}

static inline uint32_t vmw_read(struct vmw_private *dev_priv,
				unsigned int offset)
{
	uint32_t val;

	if (vmw_is_svga_v3(dev_priv)) {
		val = ioread32(dev_priv->rmmio + offset);
	} else {
		spin_lock(&dev_priv->hw_lock);
		outl(offset, dev_priv->io_start + SVGA_INDEX_PORT);
		val = inl(dev_priv->io_start + SVGA_VALUE_PORT);
		spin_unlock(&dev_priv->hw_lock);
	}
	return val;
}
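These two accessors hide the v2/v3 register-access difference from all callers. A minimal usage sketch, assuming only the functions above; vmw_toggle_traces() and the choice of SVGA_REG_TRACES are illustrative, not driver code:

static void vmw_toggle_traces(struct vmw_private *dev_priv)
{
	/* Works on both MMIO (v3) and index/value-port (v2) devices,
	 * because vmw_read()/vmw_write() select the path internally. */
	uint32_t traces = vmw_read(dev_priv, SVGA_REG_TRACES);

	vmw_write(dev_priv, SVGA_REG_TRACES, !traces);
}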
/* Shader-model capability checks; the VMW_SM_* levels are ordered. */
static inline bool has_sm4_context(const struct vmw_private *dev_priv)
{
	return (dev_priv->sm_type >= VMW_SM_4);
}

static inline bool has_sm4_1_context(const struct vmw_private *dev_priv)
{
	return (dev_priv->sm_type >= VMW_SM_4_1);
}

static inline bool has_sm5_context(const struct vmw_private *dev_priv)
{
	return (dev_priv->sm_type >= VMW_SM_5);
}

static inline bool has_gl43_context(const struct vmw_private *dev_priv)
{
	return (dev_priv->sm_type >= VMW_SM_5_1X);
}
static inline u32 vmw_max_num_uavs(struct vmw_private *dev_priv)
{
	/* Second branch assumed from the SVGA3D limits definitions. */
	return (has_gl43_context(dev_priv) ?
		SVGA3D_DX11_1_MAX_UAVIEWS : SVGA3D_MAX_UAVIEWS);
}
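Because the levels are ordered, each helper is a single comparison and higher levels imply lower ones. A hedged sketch of feature gating; vmw_report_sm_caps() is a hypothetical helper, not part of the driver:

static void vmw_report_sm_caps(struct vmw_private *dev_priv)
{
	if (has_gl43_context(dev_priv))
		DRM_INFO("GL4.3-class contexts, %u UAVs\n",
			 vmw_max_num_uavs(dev_priv));
	else if (has_sm5_context(dev_priv))
		DRM_INFO("SM5 contexts\n");
	else if (has_sm4_1_context(dev_priv))
		DRM_INFO("SM4.1 contexts\n");
	else if (has_sm4_context(dev_priv))
		DRM_INFO("SM4 contexts\n");
}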
/*
 * Declarations taking a struct vmw_private pointer. Multi-line
 * prototypes appear truncated at the line that matched; "..." marks
 * the elided parameters.
 */

/* SVGA device enable/disable */
extern void vmw_svga_enable(struct vmw_private *dev_priv);
extern void vmw_svga_disable(struct vmw_private *dev_priv);

/* Guest memory regions (GMR) */
extern int vmw_gmr_bind(struct vmw_private *dev_priv, ...);
extern void vmw_gmr_unbind(struct vmw_private *dev_priv, int gmr_id);

/* Resources and user-space object lookup */
int vmw_user_object_lookup(struct vmw_private *dev_priv, struct drm_file *filp, ...);
extern int vmw_user_stream_lookup(struct vmw_private *dev_priv, ...);
void vmw_resource_evict_all(struct vmw_private *dev_priv);
/* (dev_priv also matches a continuation line of one longer prototype here) */

/* GEM objects */
extern int vmw_gem_object_create_with_handle(struct vmw_private *dev_priv, ...);

/* FIFO and command submission */
extern struct vmw_fifo_state *vmw_fifo_create(struct vmw_private *dev_priv);
extern void vmw_fifo_destroy(struct vmw_private *dev_priv);
extern void *	/* return type assumed; elided in the listing */
vmw_cmd_ctx_reserve(struct vmw_private *dev_priv, uint32_t bytes, int ctx_id);
extern void vmw_cmd_commit(struct vmw_private *dev_priv, uint32_t bytes);
extern void vmw_cmd_commit_flush(struct vmw_private *dev_priv, uint32_t bytes);
extern int vmw_cmd_send_fence(struct vmw_private *dev_priv, uint32_t *seqno);
extern bool vmw_supports_3d(struct vmw_private *dev_priv);
extern void vmw_fifo_ping_host(struct vmw_private *dev_priv, uint32_t reason);
extern bool vmw_fifo_have_pitchlock(struct vmw_private *dev_priv);
extern int vmw_cmd_emit_dummy_query(struct vmw_private *dev_priv, ...);
extern int vmw_cmd_flush(struct vmw_private *dev_priv, ...);
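The declarations above suggest the usual reserve/commit/fence submission pattern. A sketch under stated assumptions: vmw_cmd_ctx_reserve() returning NULL on failure, and SVGA3D_INVALID_ID standing for "no DX context"; vmw_submit_example() is illustrative, not driver code:

static int vmw_submit_example(struct vmw_private *dev_priv,
			      const void *body, uint32_t bytes)
{
	uint32_t seqno;
	void *cmd = vmw_cmd_ctx_reserve(dev_priv, bytes, SVGA3D_INVALID_ID);

	if (!cmd)
		return -ENOMEM;

	memcpy(cmd, body, bytes);	 /* the SVGA command itself */
	vmw_cmd_commit(dev_priv, bytes); /* hand it to the device */

	/* Emit a fence so completion can later be waited on. */
	return vmw_cmd_send_fence(dev_priv, &seqno);
}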
/* FIFO capability queries */
static inline uint32_t vmw_fifo_caps(const struct vmw_private *dev_priv)
{
	if (!dev_priv->fifo_mem || !dev_priv->fifo)
		return 0;
	return dev_priv->fifo->capabilities;
}

static inline bool
vmw_is_cursor_bypass3_enabled(const struct vmw_private *dev_priv)
{
	return (vmw_fifo_caps(dev_priv) & SVGA_FIFO_CAP_CURSOR_BYPASS_3) != 0;
}
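The same pattern extends to any capability bit; a one-line sketch (vmw_has_fifo_cap() is hypothetical):

static bool vmw_has_fifo_cap(const struct vmw_private *dev_priv, u32 cap)
{
	/* vmw_fifo_caps() already returns 0 when no FIFO is mapped. */
	return (vmw_fifo_caps(dev_priv) & cap) != 0;
}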
/* Buffer objects and command-buffer execution (execbuf) */
int vmw_bo_create_and_populate(struct vmw_private *dev_priv, ...);
extern void __vmw_execbuf_release_pinned_bo(struct vmw_private *dev_priv, ...);
extern void vmw_execbuf_release_pinned_bo(struct vmw_private *dev_priv);
extern int vmw_execbuf_copy_fence_user(struct vmw_private *dev_priv, ...);
/* (two further matches are continuation lines where dev_priv is not the
 *  first parameter of the prototype) */
/* IRQ, seqno and waiter bookkeeping */
extern int vmw_irq_install(struct vmw_private *dev_priv);
extern bool vmw_seqno_passed(struct vmw_private *dev_priv, uint32_t seqno);
extern int vmw_fallback_wait(struct vmw_private *dev_priv, ...);
bool vmw_seqno_waiter_add(struct vmw_private *dev_priv);
bool vmw_seqno_waiter_remove(struct vmw_private *dev_priv);
bool vmw_goal_waiter_add(struct vmw_private *dev_priv);
bool vmw_goal_waiter_remove(struct vmw_private *dev_priv);
bool vmw_generic_waiter_add(struct vmw_private *dev_priv, u32 flag, ...);
bool vmw_generic_waiter_remove(struct vmw_private *dev_priv, ...);
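A hedged sketch of how the waiter bookkeeping brackets a wait; the busy loop stands in for the driver's real IRQ-driven sleep, and vmw_wait_seqno_example() is illustrative:

static void vmw_wait_seqno_example(struct vmw_private *dev_priv, u32 seqno)
{
	if (vmw_seqno_passed(dev_priv, seqno))
		return;

	/* waiter_add/remove reference-count the seqno interrupt enable. */
	vmw_seqno_waiter_add(dev_priv);
	while (!vmw_seqno_passed(dev_priv, seqno))
		cpu_relax();	/* real code sleeps on a wait queue */
	vmw_seqno_waiter_remove(dev_priv);
}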
/* Kernel modesetting (KMS) */
int vmw_kms_init(struct vmw_private *dev_priv);
int vmw_kms_close(struct vmw_private *dev_priv);
int vmw_kms_present(struct vmw_private *dev_priv, ...);

/* Video overlays */
int vmw_overlay_init(struct vmw_private *dev_priv);
int vmw_overlay_close(struct vmw_private *dev_priv);
int vmw_overlay_resume_all(struct vmw_private *dev_priv);
int vmw_overlay_pause_all(struct vmw_private *dev_priv);
int vmw_overlay_claim(struct vmw_private *dev_priv, uint32_t *out);
int vmw_overlay_unref(struct vmw_private *dev_priv, uint32_t stream_id);
int vmw_overlay_num_overlays(struct vmw_private *dev_priv);
int vmw_overlay_num_free_overlays(struct vmw_private *dev_priv);
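A minimal sketch of the overlay stream lifecycle, assuming only the claim/unref declarations above; vmw_overlay_roundtrip() is illustrative:

static int vmw_overlay_roundtrip(struct vmw_private *dev_priv)
{
	uint32_t stream_id;
	int ret = vmw_overlay_claim(dev_priv, &stream_id);

	if (ret)
		return ret;
	/* ... program and run the overlay for stream_id ... */
	return vmw_overlay_unref(dev_priv, stream_id);
}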
/* TTM memory-manager backends */
int vmw_gmrid_man_init(struct vmw_private *dev_priv, int type);
void vmw_gmrid_man_fini(struct vmw_private *dev_priv, int type);
int vmw_sys_man_init(struct vmw_private *dev_priv);
void vmw_sys_man_fini(struct vmw_private *dev_priv);
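These init/fini pairs bracket device load and unload. A sketch assuming VMW_PL_GMR as the GMR placement type; vmw_managers_init() is illustrative:

static int vmw_managers_init(struct vmw_private *dev_priv)
{
	int ret = vmw_gmrid_man_init(dev_priv, VMW_PL_GMR);

	if (ret)
		return ret;

	ret = vmw_sys_man_init(dev_priv);
	if (ret)
		vmw_gmrid_man_fini(dev_priv, VMW_PL_GMR); /* unwind */

	return ret;
}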
/* Memory objects (MOBs) and object tables */
extern int vmw_mob_bind(struct vmw_private *dev_priv, struct vmw_mob *mob, ...);
extern void vmw_mob_unbind(struct vmw_private *dev_priv, ...);
extern int vmw_otables_setup(struct vmw_private *dev_priv);
extern void vmw_otables_takedown(struct vmw_private *dev_priv);
/* Surfaces, shaders, streamoutput and command-buffer resources */
int vmw_gb_surface_define(struct vmw_private *dev_priv, ...);
extern int vmw_compat_shader_add(struct vmw_private *dev_priv, ...);
extern void vmw_dx_shader_cotable_list_scrub(struct vmw_private *dev_priv, ...);
void vmw_dx_streamoutput_cotable_list_scrub(struct vmw_private *dev_priv, ...);
extern struct vmw_cmdbuf_res_manager *	/* return type assumed */
vmw_cmdbuf_res_man_create(struct vmw_private *dev_priv);
extern struct vmw_resource *vmw_cotable_alloc(struct vmw_private *dev_priv, ...);
struct vmw_cmdbuf_man *			/* return type assumed */
vmw_cmdbuf_man_create(struct vmw_private *dev_priv);

/* mksGuestStat instrumentation */
int vmw_mksstat_get_kern_slot(pid_t pid, struct vmw_private *dev_priv);
int vmw_mksstat_remove_all(struct vmw_private *dev_priv);
/* FIFO resource accounting */
static inline void vmw_fifo_resource_inc(struct vmw_private *dev_priv)
{
	atomic_inc(&dev_priv->num_fifo_resources);
}

static inline void vmw_fifo_resource_dec(struct vmw_private *dev_priv)
{
	atomic_dec(&dev_priv->num_fifo_resources);
}
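A sketch of the intended pairing: resources that occupy device/FIFO state hold a count for their lifetime, so the driver can keep the SVGA device enabled while the count is nonzero. The create/destroy hooks below are illustrative:

static void example_resource_create(struct vmw_private *dev_priv)
{
	vmw_fifo_resource_inc(dev_priv);
	/* ... create the device-visible resource ... */
}

static void example_resource_destroy(struct vmw_private *dev_priv)
{
	/* ... destroy the device-visible resource ... */
	vmw_fifo_resource_dec(dev_priv);
}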
/* Fence accessors: v3 devices expose the fence as a register, older
 * devices keep it in FIFO memory. */
static inline u32 vmw_fence_read(struct vmw_private *dev_priv)
{
	u32 fence;

	if (vmw_is_svga_v3(dev_priv))
		fence = vmw_read(dev_priv, SVGA_REG_FENCE);
	else
		fence = vmw_fifo_mem_read(dev_priv, SVGA_FIFO_FENCE);
	return fence;
}

static inline void vmw_fence_write(struct vmw_private *dev_priv, u32 fence)
{
	BUG_ON(vmw_is_svga_v3(dev_priv));	/* no writable FIFO slot on v3 */
	vmw_fifo_mem_write(dev_priv, SVGA_FIFO_FENCE, fence);
}
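A last hedged sketch tying the fence accessors to the seqno helpers above; real completion checks must handle 32-bit wraparound, which this illustrative helper ignores:

static bool example_fence_done(struct vmw_private *dev_priv, u32 seqno)
{
	/* vmw_fence_read() returns the device's last signaled fence. */
	return vmw_fence_read(dev_priv) >= seqno;
}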