/*
 * Bit numbers in cbs->dirty: one bit per delayed binding-type emission,
 * consumed by vmw_binding_emit_dirty().
 */
#define VMW_BINDING_PS_BIT     1
#define VMW_BINDING_SO_BIT     2
#define VMW_BINDING_VB_BIT     3
#define VMW_BINDING_UAV_BIT    4
#define VMW_BINDING_CS_UAV_BIT 5
#define VMW_BINDING_NUM_BITS   6

#define VMW_BINDING_PS_SR_BIT  0
75
77
* @vertex_buffers: Vertex buffer bindings.
76
78
* @index_buffer: Index buffer binding.
77
79
* @per_shader: Per shader-type bindings.
80
+ * @ua_views: UAV bindings.
78
81
* @dirty: Bitmap tracking per binding-type changes that have not yet
79
82
* been emitted to the device.
80
83
* @dirty_vb: Bitmap tracking individual vertex buffer binding changes that
@@ -99,6 +102,7 @@ struct vmw_ctx_binding_state {
99
102
struct vmw_ctx_bindinfo_vb vertex_buffers [SVGA3D_DX_MAX_VERTEXBUFFERS ];
100
103
struct vmw_ctx_bindinfo_ib index_buffer ;
101
104
struct vmw_dx_shader_bindings per_shader [SVGA3D_NUM_SHADERTYPE ];
105
+ struct vmw_ctx_bindinfo_uav ua_views [VMW_MAX_UAV_BIND_TYPE ];
102
106
103
107
unsigned long dirty ;
104
108
DECLARE_BITMAP (dirty_vb , SVGA3D_DX_MAX_VERTEXBUFFERS );
@@ -121,6 +125,9 @@ static int vmw_binding_scrub_dx_shader(struct vmw_ctx_bindinfo *bi,
121
125
bool rebind );
122
126
static int vmw_binding_scrub_ib (struct vmw_ctx_bindinfo * bi , bool rebind );
123
127
static int vmw_binding_scrub_vb (struct vmw_ctx_bindinfo * bi , bool rebind );
128
+ static int vmw_binding_scrub_uav (struct vmw_ctx_bindinfo * bi , bool rebind );
129
+ static int vmw_binding_scrub_cs_uav (struct vmw_ctx_bindinfo * bi , bool rebind );
130
+
124
131
static void vmw_binding_build_asserts (void ) __attribute__ ((unused ));
125
132
126
133
typedef int (* vmw_scrub_func )(struct vmw_ctx_bindinfo * , bool );
@@ -189,6 +196,12 @@ static const size_t vmw_binding_vb_offsets[] = {
189
196
static const size_t vmw_binding_ib_offsets [] = {
190
197
offsetof(struct vmw_ctx_binding_state , index_buffer ),
191
198
};
199
+ static const size_t vmw_binding_uav_offsets [] = {
200
+ offsetof(struct vmw_ctx_binding_state , ua_views [0 ].views ),
201
+ };
202
+ static const size_t vmw_binding_cs_uav_offsets [] = {
203
+ offsetof(struct vmw_ctx_binding_state , ua_views [1 ].views ),
204
+ };
192
205
193
206
static const struct vmw_binding_info vmw_binding_infos [] = {
194
207
[vmw_ctx_binding_shader ] = {
@@ -235,6 +248,14 @@ static const struct vmw_binding_info vmw_binding_infos[] = {
235
248
.size = sizeof (struct vmw_ctx_bindinfo_ib ),
236
249
.offsets = vmw_binding_ib_offsets ,
237
250
.scrub_func = vmw_binding_scrub_ib },
251
+ [vmw_ctx_binding_uav ] = {
252
+ .size = sizeof (struct vmw_ctx_bindinfo_view ),
253
+ .offsets = vmw_binding_uav_offsets ,
254
+ .scrub_func = vmw_binding_scrub_uav },
255
+ [vmw_ctx_binding_cs_uav ] = {
256
+ .size = sizeof (struct vmw_ctx_bindinfo_view ),
257
+ .offsets = vmw_binding_cs_uav_offsets ,
258
+ .scrub_func = vmw_binding_scrub_cs_uav },
238
259
};
239
260
240
261
/**
@@ -320,6 +341,18 @@ void vmw_binding_add(struct vmw_ctx_binding_state *cbs,
320
341
INIT_LIST_HEAD (& loc -> res_list );
321
342
}
322
343
344
+ /**
345
+ * vmw_binding_add_uav_index - Add UAV index for tracking.
346
+ * @cbs: Pointer to the context binding state tracker.
347
+ * @slot: UAV type to which bind this index.
348
+ * @index: The splice index to track.
349
+ */
350
+ void vmw_binding_add_uav_index (struct vmw_ctx_binding_state * cbs , uint32 slot ,
351
+ uint32 index )
352
+ {
353
+ cbs -> ua_views [slot ].index = index ;
354
+ }
355
+
323
356
/**
324
357
* vmw_binding_transfer: Transfer a context binding tracking entry.
325
358
*
@@ -459,6 +492,10 @@ void vmw_binding_state_commit(struct vmw_ctx_binding_state *to,
459
492
vmw_binding_transfer (to , from , entry );
460
493
vmw_binding_drop (entry );
461
494
}
495
+
496
+ /* Also transfer uav splice indices */
497
+ to -> ua_views [0 ].index = from -> ua_views [0 ].index ;
498
+ to -> ua_views [1 ].index = from -> ua_views [1 ].index ;
462
499
}
463
500
464
501
/**
@@ -1014,6 +1051,66 @@ static int vmw_emit_set_vb(struct vmw_ctx_binding_state *cbs)
1014
1051
return 0 ;
1015
1052
}
1016
1053
1054
+ static int vmw_emit_set_uav (struct vmw_ctx_binding_state * cbs )
1055
+ {
1056
+ const struct vmw_ctx_bindinfo * loc = & cbs -> ua_views [0 ].views [0 ].bi ;
1057
+ struct {
1058
+ SVGA3dCmdHeader header ;
1059
+ SVGA3dCmdDXSetUAViews body ;
1060
+ } * cmd ;
1061
+ size_t cmd_size , view_id_size ;
1062
+ const struct vmw_resource * ctx = vmw_cbs_context (cbs );
1063
+
1064
+ vmw_collect_view_ids (cbs , loc , SVGA3D_MAX_UAVIEWS );
1065
+ view_id_size = cbs -> bind_cmd_count * sizeof (uint32 );
1066
+ cmd_size = sizeof (* cmd ) + view_id_size ;
1067
+ cmd = VMW_FIFO_RESERVE_DX (ctx -> dev_priv , cmd_size , ctx -> id );
1068
+ if (!cmd )
1069
+ return - ENOMEM ;
1070
+
1071
+ cmd -> header .id = SVGA_3D_CMD_DX_SET_UA_VIEWS ;
1072
+ cmd -> header .size = sizeof (cmd -> body ) + view_id_size ;
1073
+
1074
+ /* Splice index is specified user-space */
1075
+ cmd -> body .uavSpliceIndex = cbs -> ua_views [0 ].index ;
1076
+
1077
+ memcpy (& cmd [1 ], cbs -> bind_cmd_buffer , view_id_size );
1078
+
1079
+ vmw_fifo_commit (ctx -> dev_priv , cmd_size );
1080
+
1081
+ return 0 ;
1082
+ }
1083
+
1084
+ static int vmw_emit_set_cs_uav (struct vmw_ctx_binding_state * cbs )
1085
+ {
1086
+ const struct vmw_ctx_bindinfo * loc = & cbs -> ua_views [1 ].views [0 ].bi ;
1087
+ struct {
1088
+ SVGA3dCmdHeader header ;
1089
+ SVGA3dCmdDXSetCSUAViews body ;
1090
+ } * cmd ;
1091
+ size_t cmd_size , view_id_size ;
1092
+ const struct vmw_resource * ctx = vmw_cbs_context (cbs );
1093
+
1094
+ vmw_collect_view_ids (cbs , loc , SVGA3D_MAX_UAVIEWS );
1095
+ view_id_size = cbs -> bind_cmd_count * sizeof (uint32 );
1096
+ cmd_size = sizeof (* cmd ) + view_id_size ;
1097
+ cmd = VMW_FIFO_RESERVE_DX (ctx -> dev_priv , cmd_size , ctx -> id );
1098
+ if (!cmd )
1099
+ return - ENOMEM ;
1100
+
1101
+ cmd -> header .id = SVGA_3D_CMD_DX_SET_CS_UA_VIEWS ;
1102
+ cmd -> header .size = sizeof (cmd -> body ) + view_id_size ;
1103
+
1104
+ /* Start index is specified user-space */
1105
+ cmd -> body .startIndex = cbs -> ua_views [1 ].index ;
1106
+
1107
+ memcpy (& cmd [1 ], cbs -> bind_cmd_buffer , view_id_size );
1108
+
1109
+ vmw_fifo_commit (ctx -> dev_priv , cmd_size );
1110
+
1111
+ return 0 ;
1112
+ }
1113
+
1017
1114
/**
1018
1115
* vmw_binding_emit_dirty - Issue delayed binding commands
1019
1116
*
@@ -1045,6 +1142,12 @@ static int vmw_binding_emit_dirty(struct vmw_ctx_binding_state *cbs)
1045
1142
case VMW_BINDING_VB_BIT :
1046
1143
ret = vmw_emit_set_vb (cbs );
1047
1144
break ;
1145
+ case VMW_BINDING_UAV_BIT :
1146
+ ret = vmw_emit_set_uav (cbs );
1147
+ break ;
1148
+ case VMW_BINDING_CS_UAV_BIT :
1149
+ ret = vmw_emit_set_cs_uav (cbs );
1150
+ break ;
1048
1151
default :
1049
1152
BUG ();
1050
1153
}
@@ -1171,6 +1274,22 @@ static int vmw_binding_scrub_ib(struct vmw_ctx_bindinfo *bi, bool rebind)
1171
1274
return 0 ;
1172
1275
}
1173
1276
1277
+ static int vmw_binding_scrub_uav (struct vmw_ctx_bindinfo * bi , bool rebind )
1278
+ {
1279
+ struct vmw_ctx_binding_state * cbs = vmw_context_binding_state (bi -> ctx );
1280
+
1281
+ __set_bit (VMW_BINDING_UAV_BIT , & cbs -> dirty );
1282
+ return 0 ;
1283
+ }
1284
+
1285
+ static int vmw_binding_scrub_cs_uav (struct vmw_ctx_bindinfo * bi , bool rebind )
1286
+ {
1287
+ struct vmw_ctx_binding_state * cbs = vmw_context_binding_state (bi -> ctx );
1288
+
1289
+ __set_bit (VMW_BINDING_CS_UAV_BIT , & cbs -> dirty );
1290
+ return 0 ;
1291
+ }
1292
+
1174
1293
/**
1175
1294
* vmw_binding_state_alloc - Allocate a struct vmw_ctx_binding_state with
1176
1295
* memory accounting.
@@ -1257,8 +1376,8 @@ void vmw_binding_state_reset(struct vmw_ctx_binding_state *cbs)
1257
1376
* Each time a resource is put on the validation list as the result of a
1258
1377
* context binding referencing it, we need to determine whether that resource
1259
1378
* will be dirtied (written to by the GPU) as a result of the corresponding
1260
- * GPU operation. Currently rendertarget-, depth-stencil-, and
1261
- * stream-output-target bindings are capable of dirtying its resource.
1379
+ * GPU operation. Currently rendertarget-, depth-stencil-, stream-output-target
1380
+ * and unordered access view bindings are capable of dirtying its resource.
1262
1381
*
1263
1382
* Return: Whether the binding type dirties the resource its binding points to.
1264
1383
*/
@@ -1269,10 +1388,12 @@ u32 vmw_binding_dirtying(enum vmw_ctx_binding_type binding_type)
1269
1388
[vmw_ctx_binding_dx_rt ] = VMW_RES_DIRTY_SET ,
1270
1389
[vmw_ctx_binding_ds ] = VMW_RES_DIRTY_SET ,
1271
1390
[vmw_ctx_binding_so ] = VMW_RES_DIRTY_SET ,
1391
+ [vmw_ctx_binding_uav ] = VMW_RES_DIRTY_SET ,
1392
+ [vmw_ctx_binding_cs_uav ] = VMW_RES_DIRTY_SET ,
1272
1393
};
1273
1394
1274
1395
/* Review this function as new bindings are added. */
1275
- BUILD_BUG_ON (vmw_ctx_binding_max != 11 );
1396
+ BUILD_BUG_ON (vmw_ctx_binding_max != 13 );
1276
1397
return is_binding_dirtying [binding_type ];
1277
1398
}
1278
1399
0 commit comments