@@ -1,11 +1,10 @@
-package x
+package workflow
 
 import (
 	"go.uber.org/cadence/internal/batch"
-	"go.uber.org/cadence/workflow"
 )
 
-var _ workflow.Future = (BatchFuture)(nil) // to ensure it's compatible
+var _ Future = (BatchFuture)(nil) // to ensure it's compatible
 
 // BatchFuture wraps a collection of futures, and provides some convenience methods for dealing with them in bulk.
 type BatchFuture interface {
@@ -22,10 +21,10 @@ type BatchFuture interface {
 	// exposed normally, but multiple ones are bundled in the same way as errors.Join.
 	// For consistency when checking individual errors, consider using `multierr.Errors(err)` in all cases,
 	// or `GetFutures()[i].Get(ctx, nil)` to get the original errors at each index.
-	Get(ctx workflow.Context, valuePtr interface{}) error
+	Get(ctx Context, valuePtr interface{}) error
 	// GetFutures returns a slice of all the wrapped futures.
 	// This slice MUST NOT be modified, but the individual futures can be used normally.
-	GetFutures() []workflow.Future
+	GetFutures() []Future
 }
 
 // NewBatchFuture creates a bounded-concurrency helper for doing bulk work in your workflow.
@@ -35,6 +34,6 @@ type BatchFuture interface {
 //
 // When NewBatchFuture is called, futures created by the factories will be started concurrently until the concurrency limit (batchSize) is reached.
 // The remaining factories will be queued and started as previous futures complete, maintaining the specified concurrency level.
-func NewBatchFuture(ctx workflow.Context, batchSize int, factories []func(ctx workflow.Context) workflow.Future) (BatchFuture, error) {
+func NewBatchFuture(ctx Context, batchSize int, factories []func(ctx Context) Future) (BatchFuture, error) {
 	return batch.NewBatchFuture(ctx, batchSize, factories)
 }
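
For reference, a minimal usage sketch of the relocated API as seen by a caller outside the workflow package (not part of this commit): the "ProcessItem" activity name, the item list, and batchWorkflow itself are illustrative assumptions, while workflow.WithActivityOptions, workflow.ActivityOptions, and workflow.ExecuteActivity are the standard Cadence client calls.

package sample

import (
	"time"

	"go.uber.org/cadence/workflow"
)

// batchWorkflow fans out one activity per item, running at most 10 at a time.
func batchWorkflow(ctx workflow.Context, items []string) error {
	ctx = workflow.WithActivityOptions(ctx, workflow.ActivityOptions{
		ScheduleToStartTimeout: time.Minute,
		StartToCloseTimeout:    time.Minute,
	})

	// Each factory starts one activity; NewBatchFuture invokes the factories
	// lazily, so no more than batchSize futures are in flight at any moment.
	factories := make([]func(ctx workflow.Context) workflow.Future, 0, len(items))
	for _, item := range items {
		item := item // capture the loop variable for the closure
		factories = append(factories, func(ctx workflow.Context) workflow.Future {
			return workflow.ExecuteActivity(ctx, "ProcessItem", item)
		})
	}

	bf, err := workflow.NewBatchFuture(ctx, 10, factories)
	if err != nil {
		return err
	}
	// Get blocks until every future completes; multiple failures are joined into one error.
	return bf.Get(ctx, nil)
}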
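
A second sketch, also illustrative, recovers per-index errors the way the Get doc comment recommends, via GetFutures rather than the joined error. It lives in the same illustrative sample package with go.uber.org/zap added to the imports; reportFailures is a hypothetical helper, while workflow.GetLogger is the standard Cadence workflow logger.

// reportFailures logs the index of every failed future. Indexing GetFutures()
// keeps positions aligned with the factories slice, unlike walking the joined error.
func reportFailures(ctx workflow.Context, bf workflow.BatchFuture) {
	if err := bf.Get(ctx, nil); err != nil {
		for i, f := range bf.GetFutures() {
			if ferr := f.Get(ctx, nil); ferr != nil {
				workflow.GetLogger(ctx).Error("batch item failed",
					zap.Int("index", i), zap.Error(ferr))
			}
		}
	}
}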