@@ -2816,6 +2816,16 @@ static int vmw_cmd_intra_surface_copy(struct vmw_private *dev_priv,
 				 &cmd->body.surface.sid, NULL);
 }
 
+static int vmw_cmd_sm5(struct vmw_private *dev_priv,
+		       struct vmw_sw_context *sw_context,
+		       SVGA3dCmdHeader *header)
+{
+	if (!has_sm5_context(dev_priv))
+		return -EINVAL;
+
+	return 0;
+}
+
 static int vmw_cmd_sm5_view_define(struct vmw_private *dev_priv,
 				   struct vmw_sw_context *sw_context,
 				   SVGA3dCmdHeader *header)
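Note on the new vmw_cmd_sm5 handler: it is a bare capability gate for SM5-only commands whose bodies carry no surface or resource ids that need fixing up, so there is nothing to validate beyond SM5 support (the patch later wires SVGA_3D_CMD_DX_DISPATCH to it). A minimal sketch of how such a per-command callback would be dispatched from the command checker is shown below; the bounds check, the NULL guard, and the .func field access are assumptions for illustration, not a quote of the driver's actual vmw_cmd_check().

/*
 * Illustrative sketch only: look up the validator registered for an
 * SVGA3D command id in vmw_cmd_entries[] and run it before the command
 * is accepted.  Entry layout and range checks are assumptions.
 */
static int example_check_one_command(struct vmw_private *dev_priv,
				     struct vmw_sw_context *sw_context,
				     SVGA3dCmdHeader *header)
{
	u32 idx;

	if (header->id < SVGA_3D_CMD_BASE || header->id >= SVGA_3D_CMD_MAX)
		return -EINVAL;

	idx = header->id - SVGA_3D_CMD_BASE;

	/* Entries never populated via VMW_CMD_DEF stay zeroed. */
	if (!vmw_cmd_entries[idx].func)
		return -EINVAL;

	return vmw_cmd_entries[idx].func(dev_priv, sw_context, header);
}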
@@ -2938,6 +2948,57 @@ static int vmw_cmd_set_cs_uav(struct vmw_private *dev_priv,
 	return ret;
 }
 
+static int vmw_cmd_indexed_instanced_indirect(struct vmw_private *dev_priv,
+					      struct vmw_sw_context *sw_context,
+					      SVGA3dCmdHeader *header)
+{
+	struct vmw_draw_indexed_instanced_indirect_cmd {
+		SVGA3dCmdHeader header;
+		SVGA3dCmdDXDrawIndexedInstancedIndirect body;
+	} *cmd = container_of(header, typeof(*cmd), header);
+
+	if (!has_sm5_context(dev_priv))
+		return -EINVAL;
+
+	return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+				 VMW_RES_DIRTY_NONE, user_surface_converter,
+				 &cmd->body.argsBufferSid, NULL);
+}
+
+static int vmw_cmd_instanced_indirect(struct vmw_private *dev_priv,
+				      struct vmw_sw_context *sw_context,
+				      SVGA3dCmdHeader *header)
+{
+	struct vmw_draw_instanced_indirect_cmd {
+		SVGA3dCmdHeader header;
+		SVGA3dCmdDXDrawInstancedIndirect body;
+	} *cmd = container_of(header, typeof(*cmd), header);
+
+	if (!has_sm5_context(dev_priv))
+		return -EINVAL;
+
+	return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+				 VMW_RES_DIRTY_NONE, user_surface_converter,
+				 &cmd->body.argsBufferSid, NULL);
+}
+
+static int vmw_cmd_dispatch_indirect(struct vmw_private *dev_priv,
+				     struct vmw_sw_context *sw_context,
+				     SVGA3dCmdHeader *header)
+{
+	struct vmw_dispatch_indirect_cmd {
+		SVGA3dCmdHeader header;
+		SVGA3dCmdDXDispatchIndirect body;
+	} *cmd = container_of(header, typeof(*cmd), header);
+
+	if (!has_sm5_context(dev_priv))
+		return -EINVAL;
+
+	return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
+				 VMW_RES_DIRTY_NONE, user_surface_converter,
+				 &cmd->body.argsBufferSid, NULL);
+}
+
 static int vmw_cmd_check_not_3d(struct vmw_private *dev_priv,
 				struct vmw_sw_context *sw_context,
 				void *buf, uint32_t *size)
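The three indirect handlers above differ only in the typed body laid over the command; the check itself is identical: require an SM5 context, then translate and validate the surface id of the argument buffer via vmw_cmd_res_check(). A hypothetical consolidation is sketched below purely for illustration; the helper name and the offsetof-based body access are assumptions, not part of the patch.

/*
 * Hypothetical refactor (not in the patch): one helper capturing the
 * shared "SM5 gate + argsBufferSid relocation" pattern.  Assumes the
 * typed body starts immediately after the generic SVGA3dCmdHeader.
 */
static int vmw_cmd_indirect_args_check(struct vmw_private *dev_priv,
				       struct vmw_sw_context *sw_context,
				       SVGA3dCmdHeader *header,
				       size_t args_sid_offset)
{
	/* Locate the argsBufferSid field inside the command body. */
	uint32_t *args_sid =
		(uint32_t *)((u8 *)(header + 1) + args_sid_offset);

	if (!has_sm5_context(dev_priv))
		return -EINVAL;

	return vmw_cmd_res_check(dev_priv, sw_context, vmw_res_surface,
				 VMW_RES_DIRTY_NONE, user_surface_converter,
				 args_sid, NULL);
}

Each handler would then reduce to a single call passing, e.g., offsetof(SVGA3dCmdDXDispatchIndirect, argsBufferSid), at the cost of the compile-time type checking that the explicit wrapper structs in the patch provide.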
@@ -3303,8 +3364,17 @@ static const struct vmw_cmd_entry vmw_cmd_entries[SVGA_3D_CMD_MAX] = {
 		    false, true),
 	VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_UA_VIEWS, &vmw_cmd_set_uav, true, false,
 		    true),
+	VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW_INDEXED_INSTANCED_INDIRECT,
+		    &vmw_cmd_indexed_instanced_indirect, true, false, true),
+	VMW_CMD_DEF(SVGA_3D_CMD_DX_DRAW_INSTANCED_INDIRECT,
+		    &vmw_cmd_instanced_indirect, true, false, true),
+	VMW_CMD_DEF(SVGA_3D_CMD_DX_DISPATCH, &vmw_cmd_sm5, true, false, true),
+	VMW_CMD_DEF(SVGA_3D_CMD_DX_DISPATCH_INDIRECT,
+		    &vmw_cmd_dispatch_indirect, true, false, true),
 	VMW_CMD_DEF(SVGA_3D_CMD_DX_SET_CS_UA_VIEWS, &vmw_cmd_set_cs_uav, true,
 		    false, true),
+	VMW_CMD_DEF(SVGA_3D_CMD_DX_DEFINE_DEPTHSTENCIL_VIEW_V2,
+		    &vmw_cmd_sm5_view_define, true, false, true),
 };
 
 bool vmw_cmd_describe(const void *buf, u32 *size, char const **cmd)
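For readers of the table additions above: each VMW_CMD_DEF entry pairs an SVGA3D command id with its validator plus three booleans controlling who may submit the command and in which memory mode it is accepted. The sketch below shows the entry layout this table is assumed to be built from; field names follow the driver's existing convention but are reproduced from memory, so treat them as illustrative rather than a quote of the header.

/*
 * Assumed shape of a vmw_cmd_entries[] element populated by
 * VMW_CMD_DEF(cmd, func, user_allow, gb_disable, gb_enable).
 */
struct example_cmd_entry {
	int (*func)(struct vmw_private *dev_priv,
		    struct vmw_sw_context *sw_context,
		    SVGA3dCmdHeader *header);
	bool user_allow;	/* may be submitted from user space */
	bool gb_disable;	/* valid with guest-backed objects disabled */
	bool gb_enable;		/* valid with guest-backed objects enabled */
	const char *cmd_name;	/* reported by vmw_cmd_describe() */
};

So the new entries, e.g. (&vmw_cmd_dispatch_indirect, true, false, true), mark the SM5 commands as user-submittable and valid only when guest-backed objects are enabled.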