@@ -182,21 +182,26 @@ end
182
182
183
183
"""
    solve_batch(prob, alg, ::EnsembleSplitThreads, I, pmap_batch_size, kwargs...)

Split the trajectory indices `I` across the available worker processes, then
let each worker multithread over its local chunk via `thread_monte`.

Each of the `nworkers()` processes receives a contiguous slice of `I` of
length `length(I) ÷ nworkers()`; the last worker additionally absorbs the
remainder so every index is covered. Per-worker results are flattened into a
single array with `vector_batch_data_to_arr`.
"""
function solve_batch(prob, alg, ::EnsembleSplitThreads, I, pmap_batch_size, kwargs...)
    pool = CachingPool(workers())
    nw = nworkers()
    # Even chunk length; any remainder is handed to the final worker below.
    chunk = length(I) ÷ nw
    batch_data = let
        pmap(pool, 1:nw, batch_size = pmap_batch_size) do i
            lo = chunk * (i - 1) + 1
            # Last worker takes everything through the end of `I` so that
            # indices left over from the integer division are not dropped.
            hi = i == nw ? lastindex(I) : chunk * i
            thread_monte(prob, I[lo:hi], alg, i, kwargs...)
        end
    end
    _batch_data = vector_batch_data_to_arr(batch_data)
end
192
199
193
200
function thread_monte (prob,I,alg,procid,kwargs... )
194
- start = I[1 ]+ (procid- 1 )* length (I)
195
- stop = I[1 ]+ procid* length (I)- 1
196
- portion = start: stop
197
- batch_data = Vector {Any} (undef,length (portion))
201
+ batch_data = Vector {Any} (undef,length (I))
202
+ @show I
198
203
let
199
- Threads. @threads for i in portion
204
+ Threads. @threads for i in I
200
205
iter = 1
201
206
new_prob = prob. prob_func (deepcopy (prob. prob),i,iter)
202
207
rerun = true
0 commit comments