Lines matching refs:cbs

125 static int vmw_binding_emit_dirty(struct vmw_ctx_binding_state *cbs);
284 vmw_cbs_context(const struct vmw_ctx_binding_state *cbs) in vmw_cbs_context() argument
286 if (list_empty(&cbs->list)) in vmw_cbs_context()
289 return list_first_entry(&cbs->list, struct vmw_ctx_bindinfo, in vmw_cbs_context()
302 vmw_binding_loc(struct vmw_ctx_binding_state *cbs, in vmw_binding_loc() argument
308 return (struct vmw_ctx_bindinfo *)((u8 *) cbs + offset); in vmw_binding_loc()
339 void vmw_binding_add(struct vmw_ctx_binding_state *cbs, in vmw_binding_add() argument
344 vmw_binding_loc(cbs, bi->bt, shader_slot, slot); in vmw_binding_add()
352 list_add(&loc->ctx_list, &cbs->list); in vmw_binding_add()
367 void vmw_binding_cb_offset_update(struct vmw_ctx_binding_state *cbs, in vmw_binding_cb_offset_update() argument
371 vmw_binding_loc(cbs, vmw_ctx_binding_cb, shader_slot, slot); in vmw_binding_cb_offset_update()
383 void vmw_binding_add_uav_index(struct vmw_ctx_binding_state *cbs, uint32 slot, in vmw_binding_add_uav_index() argument
386 cbs->ua_views[slot].index = index; in vmw_binding_add_uav_index()
397 static void vmw_binding_transfer(struct vmw_ctx_binding_state *cbs, in vmw_binding_transfer() argument
403 ((unsigned long) cbs + offset); in vmw_binding_transfer()
413 list_add_tail(&loc->ctx_list, &cbs->list); in vmw_binding_transfer()
427 void vmw_binding_state_kill(struct vmw_ctx_binding_state *cbs) in vmw_binding_state_kill() argument
431 vmw_binding_state_scrub(cbs); in vmw_binding_state_kill()
432 list_for_each_entry_safe(entry, next, &cbs->list, ctx_list) in vmw_binding_state_kill()
445 void vmw_binding_state_scrub(struct vmw_ctx_binding_state *cbs) in vmw_binding_state_scrub() argument
449 list_for_each_entry(entry, &cbs->list, ctx_list) { in vmw_binding_state_scrub()
457 (void) vmw_binding_emit_dirty(cbs); in vmw_binding_state_scrub()
500 struct vmw_ctx_binding_state *cbs = in vmw_binding_res_list_scrub() local
503 (void) vmw_binding_emit_dirty(cbs); in vmw_binding_res_list_scrub()
542 int vmw_binding_rebind_all(struct vmw_ctx_binding_state *cbs) in vmw_binding_rebind_all() argument
547 list_for_each_entry(entry, &cbs->list, ctx_list) { in vmw_binding_rebind_all()
562 return vmw_binding_emit_dirty(cbs); in vmw_binding_rebind_all()
748 static void vmw_collect_view_ids(struct vmw_ctx_binding_state *cbs, in vmw_collect_view_ids() argument
754 cbs->bind_cmd_count = 0; in vmw_collect_view_ids()
755 cbs->bind_first_slot = 0; in vmw_collect_view_ids()
761 cbs->bind_cmd_buffer[cbs->bind_cmd_count++] = in vmw_collect_view_ids()
781 static void vmw_collect_dirty_view_ids(struct vmw_ctx_binding_state *cbs, in vmw_collect_dirty_view_ids() argument
790 cbs->bind_cmd_count = 0; in vmw_collect_dirty_view_ids()
793 cbs->bind_first_slot = i; in vmw_collect_dirty_view_ids()
797 cbs->bind_cmd_buffer[cbs->bind_cmd_count++] = in vmw_collect_dirty_view_ids()
815 static int vmw_emit_set_sr(struct vmw_ctx_binding_state *cbs, in vmw_emit_set_sr() argument
819 &cbs->per_shader[shader_slot].shader_res[0].bi; in vmw_emit_set_sr()
825 const struct vmw_resource *ctx = vmw_cbs_context(cbs); in vmw_emit_set_sr()
827 vmw_collect_dirty_view_ids(cbs, loc, in vmw_emit_set_sr()
828 cbs->per_shader[shader_slot].dirty_sr, in vmw_emit_set_sr()
830 if (cbs->bind_cmd_count == 0) in vmw_emit_set_sr()
833 view_id_size = cbs->bind_cmd_count*sizeof(uint32); in vmw_emit_set_sr()
842 cmd->body.startView = cbs->bind_first_slot; in vmw_emit_set_sr()
844 memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size); in vmw_emit_set_sr()
847 bitmap_clear(cbs->per_shader[shader_slot].dirty_sr, in vmw_emit_set_sr()
848 cbs->bind_first_slot, cbs->bind_cmd_count); in vmw_emit_set_sr()
858 static int vmw_emit_set_rt(struct vmw_ctx_binding_state *cbs) in vmw_emit_set_rt() argument
860 const struct vmw_ctx_bindinfo_view *loc = &cbs->render_targets[0]; in vmw_emit_set_rt()
866 const struct vmw_resource *ctx = vmw_cbs_context(cbs); in vmw_emit_set_rt()
868 vmw_collect_view_ids(cbs, loc, SVGA3D_DX_MAX_RENDER_TARGETS); in vmw_emit_set_rt()
869 view_id_size = cbs->bind_cmd_count*sizeof(uint32); in vmw_emit_set_rt()
878 if (cbs->ds_view.bi.ctx && !cbs->ds_view.bi.scrubbed) in vmw_emit_set_rt()
879 cmd->body.depthStencilViewId = cbs->ds_view.bi.res->id; in vmw_emit_set_rt()
883 memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size); in vmw_emit_set_rt()
905 static void vmw_collect_so_targets(struct vmw_ctx_binding_state *cbs, in vmw_collect_so_targets() argument
910 SVGA3dSoTarget *so_buffer = (SVGA3dSoTarget *) cbs->bind_cmd_buffer; in vmw_collect_so_targets()
912 cbs->bind_cmd_count = 0; in vmw_collect_so_targets()
913 cbs->bind_first_slot = 0; in vmw_collect_so_targets()
916 ++cbs->bind_cmd_count) { in vmw_collect_so_targets()
937 static int vmw_emit_set_so_target(struct vmw_ctx_binding_state *cbs) in vmw_emit_set_so_target() argument
939 const struct vmw_ctx_bindinfo_so_target *loc = &cbs->so_targets[0]; in vmw_emit_set_so_target()
945 const struct vmw_resource *ctx = vmw_cbs_context(cbs); in vmw_emit_set_so_target()
947 vmw_collect_so_targets(cbs, loc, SVGA3D_DX_MAX_SOTARGETS); in vmw_emit_set_so_target()
948 if (cbs->bind_cmd_count == 0) in vmw_emit_set_so_target()
951 so_target_size = cbs->bind_cmd_count*sizeof(SVGA3dSoTarget); in vmw_emit_set_so_target()
959 memcpy(&cmd[1], cbs->bind_cmd_buffer, so_target_size); in vmw_emit_set_so_target()
973 static int vmw_binding_emit_dirty_ps(struct vmw_ctx_binding_state *cbs) in vmw_binding_emit_dirty_ps() argument
975 struct vmw_dx_shader_bindings *sb = &cbs->per_shader[0]; in vmw_binding_emit_dirty_ps()
983 ret = vmw_emit_set_sr(cbs, i); in vmw_binding_emit_dirty_ps()
1008 static void vmw_collect_dirty_vbs(struct vmw_ctx_binding_state *cbs, in vmw_collect_dirty_vbs() argument
1016 SVGA3dVertexBuffer *vbs = (SVGA3dVertexBuffer *) &cbs->bind_cmd_buffer; in vmw_collect_dirty_vbs()
1018 cbs->bind_cmd_count = 0; in vmw_collect_dirty_vbs()
1021 cbs->bind_first_slot = i; in vmw_collect_dirty_vbs()
1034 cbs->bind_cmd_count++; in vmw_collect_dirty_vbs()
1049 static int vmw_emit_set_vb(struct vmw_ctx_binding_state *cbs) in vmw_emit_set_vb() argument
1052 &cbs->vertex_buffers[0].bi; in vmw_emit_set_vb()
1058 const struct vmw_resource *ctx = vmw_cbs_context(cbs); in vmw_emit_set_vb()
1060 vmw_collect_dirty_vbs(cbs, loc, cbs->dirty_vb, in vmw_emit_set_vb()
1062 if (cbs->bind_cmd_count == 0) in vmw_emit_set_vb()
1065 set_vb_size = cbs->bind_cmd_count*sizeof(SVGA3dVertexBuffer); in vmw_emit_set_vb()
1073 cmd->body.startBuffer = cbs->bind_first_slot; in vmw_emit_set_vb()
1075 memcpy(&cmd[1], cbs->bind_cmd_buffer, set_vb_size); in vmw_emit_set_vb()
1078 bitmap_clear(cbs->dirty_vb, in vmw_emit_set_vb()
1079 cbs->bind_first_slot, cbs->bind_cmd_count); in vmw_emit_set_vb()
1084 static int vmw_emit_set_uav(struct vmw_ctx_binding_state *cbs) in vmw_emit_set_uav() argument
1086 const struct vmw_ctx_bindinfo_view *loc = &cbs->ua_views[0].views[0]; in vmw_emit_set_uav()
1092 const struct vmw_resource *ctx = vmw_cbs_context(cbs); in vmw_emit_set_uav()
1094 vmw_collect_view_ids(cbs, loc, vmw_max_num_uavs(cbs->dev_priv)); in vmw_emit_set_uav()
1095 view_id_size = cbs->bind_cmd_count*sizeof(uint32); in vmw_emit_set_uav()
1105 cmd->body.uavSpliceIndex = cbs->ua_views[0].index; in vmw_emit_set_uav()
1107 memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size); in vmw_emit_set_uav()
1114 static int vmw_emit_set_cs_uav(struct vmw_ctx_binding_state *cbs) in vmw_emit_set_cs_uav() argument
1116 const struct vmw_ctx_bindinfo_view *loc = &cbs->ua_views[1].views[0]; in vmw_emit_set_cs_uav()
1122 const struct vmw_resource *ctx = vmw_cbs_context(cbs); in vmw_emit_set_cs_uav()
1124 vmw_collect_view_ids(cbs, loc, vmw_max_num_uavs(cbs->dev_priv)); in vmw_emit_set_cs_uav()
1125 view_id_size = cbs->bind_cmd_count*sizeof(uint32); in vmw_emit_set_cs_uav()
1135 cmd->body.startIndex = cbs->ua_views[1].index; in vmw_emit_set_cs_uav()
1137 memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size); in vmw_emit_set_cs_uav()
1154 static int vmw_binding_emit_dirty(struct vmw_ctx_binding_state *cbs) in vmw_binding_emit_dirty() argument
1159 while ((hit = find_next_bit(&cbs->dirty, VMW_BINDING_NUM_BITS, hit)) in vmw_binding_emit_dirty()
1164 ret = vmw_emit_set_rt(cbs); in vmw_binding_emit_dirty()
1167 ret = vmw_binding_emit_dirty_ps(cbs); in vmw_binding_emit_dirty()
1170 ret = vmw_emit_set_so_target(cbs); in vmw_binding_emit_dirty()
1173 ret = vmw_emit_set_vb(cbs); in vmw_binding_emit_dirty()
1176 ret = vmw_emit_set_uav(cbs); in vmw_binding_emit_dirty()
1179 ret = vmw_emit_set_cs_uav(cbs); in vmw_binding_emit_dirty()
1187 __clear_bit(hit, &cbs->dirty); in vmw_binding_emit_dirty()
1205 struct vmw_ctx_binding_state *cbs = in vmw_binding_scrub_sr() local
1208 __set_bit(biv->slot, cbs->per_shader[biv->shader_slot].dirty_sr); in vmw_binding_scrub_sr()
1210 &cbs->per_shader[biv->shader_slot].dirty); in vmw_binding_scrub_sr()
1211 __set_bit(VMW_BINDING_PS_BIT, &cbs->dirty); in vmw_binding_scrub_sr()
1225 struct vmw_ctx_binding_state *cbs = in vmw_binding_scrub_dx_rt() local
1228 __set_bit(VMW_BINDING_RT_BIT, &cbs->dirty); in vmw_binding_scrub_dx_rt()
1242 struct vmw_ctx_binding_state *cbs = in vmw_binding_scrub_so_target() local
1245 __set_bit(VMW_BINDING_SO_T_BIT, &cbs->dirty); in vmw_binding_scrub_so_target()
1261 struct vmw_ctx_binding_state *cbs = in vmw_binding_scrub_vb() local
1264 __set_bit(bivb->slot, cbs->dirty_vb); in vmw_binding_scrub_vb()
1265 __set_bit(VMW_BINDING_VB_BIT, &cbs->dirty); in vmw_binding_scrub_vb()
1309 struct vmw_ctx_binding_state *cbs = vmw_context_binding_state(bi->ctx); in vmw_binding_scrub_uav() local
1311 __set_bit(VMW_BINDING_UAV_BIT, &cbs->dirty); in vmw_binding_scrub_uav()
1317 struct vmw_ctx_binding_state *cbs = vmw_context_binding_state(bi->ctx); in vmw_binding_scrub_cs_uav() local
1319 __set_bit(VMW_BINDING_CS_UAV_BIT, &cbs->dirty); in vmw_binding_scrub_cs_uav()
1360 struct vmw_ctx_binding_state *cbs; in vmw_binding_state_alloc() local
1362 cbs = vzalloc(sizeof(*cbs)); in vmw_binding_state_alloc()
1363 if (!cbs) { in vmw_binding_state_alloc()
1367 cbs->dev_priv = dev_priv; in vmw_binding_state_alloc()
1368 INIT_LIST_HEAD(&cbs->list); in vmw_binding_state_alloc()
1370 return cbs; in vmw_binding_state_alloc()
1378 void vmw_binding_state_free(struct vmw_ctx_binding_state *cbs) in vmw_binding_state_free() argument
1380 vfree(cbs); in vmw_binding_state_free()
1392 struct list_head *vmw_binding_state_list(struct vmw_ctx_binding_state *cbs) in vmw_binding_state_list() argument
1394 return &cbs->list; in vmw_binding_state_list()
1405 void vmw_binding_state_reset(struct vmw_ctx_binding_state *cbs) in vmw_binding_state_reset() argument
1409 list_for_each_entry_safe(entry, next, &cbs->list, ctx_list) in vmw_binding_state_reset()