@@ -1,4 +1,5 @@
 require 'concurrent'
+require 'thread'
 
 logger = Logger.new($stderr)
 logger.level = Logger::DEBUG
@@ -12,8 +13,8 @@
   it 'executes tasks asynchronously' do
     queue = Queue.new
     value = 12
-    Concurrent.post { queue << value }
-    Concurrent.post(:io) { queue << value }
+    Concurrent.post { queue.push(value) }
+    Concurrent.post(:io) { queue.push(value) }
     expect(queue.pop).to eq value
     expect(queue.pop).to eq value
   end
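A note on the pattern above: the spec uses a Queue as its synchronization point because Queue#pop blocks until something has been pushed, so the expectations cannot run before the asynchronously posted blocks have finished. A minimal sketch of the same idea, using the Concurrent.post shortcut from the spec (the :done symbol is just an illustrative value):

    require 'concurrent'

    queue = Queue.new
    Concurrent.post { queue.push(:done) }  # block runs on a worker thread
    # pop blocks the calling thread until the posted block pushes its value,
    # which is what lets an expectation observe the asynchronous result.
    queue.pop # => :done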
@@ -38,11 +39,11 @@
   it 'scheduled execution' do
     start = Time.now.to_f
     queue = Queue.new
-    future = Concurrent.schedule(0.1) { 1 + 1 }.then { |v| queue << v << Time.now.to_f - start }
+    future = Concurrent.schedule(0.1) { 1 + 1 }.then { |v| queue.push(v); queue.push(Time.now.to_f - start); queue }
 
     expect(future.value).to eq queue
     expect(queue.pop).to eq 2
-    expect(queue.pop).to be_between(0.1, 0.15)
+    expect(queue.pop).to be_between(0.1, 0.2)
   end
 
   it 'scheduled execution in graph' do
@@ -52,12 +53,12 @@
         future { sleep 0.1; 1 }.
         schedule(0.1).
         then { |v| v + 1 }.
-        then { |v| queue << v << Time.now.to_f - start }
+        then { |v| queue.push(v); queue.push(Time.now.to_f - start); queue }
 
     future.wait!
     expect(future.value).to eq queue
     expect(queue.pop).to eq 2
-    expect(queue.pop).to be_between(0.2, 0.25)
+    expect(queue.pop).to be_between(0.2, 0.3)
   end
 end
 
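Two things are worth spelling out in the scheduled-execution hunks: the then blocks now end with an explicit queue because the value of the future returned by then is whatever its block returns, which is exactly what expect(future.value).to eq queue asserts; and the widened be_between upper bounds simply give the scheduler more slack so the timing checks are less prone to spurious failures. A minimal sketch of the chaining behaviour, using the same Concurrent.future/then/value! calls as the spec:

    require 'concurrent'

    f = Concurrent.future { 1 }.
        then { |v| v + 1 }.
        then { |v| [:wrapped, v] }

    # each `then` produces a new future whose value is its block's return value
    f.value! # => [:wrapped, 2]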
@@ -79,7 +80,8 @@
     f1 = Concurrent.future(:io) { queue.pop }
     f2 = Concurrent.future(:io) { queue.pop }
 
-    queue << 1 << 2
+    queue.push(1)
+    queue.push(2)
 
     anys = [Concurrent.any(f1, f2),
             f1 | f2,
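For the anys that follow, the hunk builds the same race two ways: Concurrent.any(f1, f2) and its operator form f1 | f2 both yield a future that completes with the result of whichever input finishes first. A minimal, timing-dependent sketch of that behaviour with the same API (the sleep and the symbols are illustrative only):

    require 'concurrent'

    slow = Concurrent.future(:io) { sleep 0.05; :slow }
    fast = Concurrent.future(:io) { :fast }

    # both spellings race the two futures; the winner's value becomes the result
    Concurrent.any(slow, fast).value # => :fast (almost always; it is a race)
    (slow | fast).value              # => :fast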
@@ -96,12 +98,11 @@
   it 'has sync and async callbacks' do
     queue = Queue.new
     future = Concurrent.future { :value } # executed on FAST_EXECUTOR pool by default
-    future.on_completion(:io) { queue << :async } # async callback overridden to execute on IO_EXECUTOR pool
-    future.on_completion! { queue << :sync } # sync callback executed right after completion in the same thread-pool
+    future.on_completion(:io) { queue.push(:async) } # async callback overridden to execute on IO_EXECUTOR pool
+    future.on_completion! { queue.push(:sync) } # sync callback executed right after completion in the same thread-pool
 
     expect(future.value).to eq :value
-    expect(queue.pop).to eq :sync
-    expect(queue.pop).to eq :async
+    expect([queue.pop, queue.pop].sort).to eq [:async, :sync]
   end
 
   it 'chains' do
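Sorting the two popped symbols is what the callback semantics actually call for: per the inline comments, on_completion! runs right after completion in the same thread pool while on_completion(:io) is dispatched to the IO_EXECUTOR pool, so nothing fixes the order in which :sync and :async land in the queue. A minimal sketch of the two callback styles with the same API (symbols are illustrative):

    require 'concurrent'

    queue  = Queue.new
    future = Concurrent.future { :value }            # FAST_EXECUTOR by default
    future.on_completion!     { queue.push(:sync) }  # runs right after completion
    future.on_completion(:io) { queue.push(:async) } # posted to the IO_EXECUTOR pool

    future.value                                     # wait for the result
    # drain both callbacks; sort because their arrival order is not guaranteed
    [queue.pop, queue.pop].sort # => [:async, :sync]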
@@ -178,7 +179,7 @@
 
   it 'has flat map' do
     f = Concurrent.future { Concurrent.future { 1 } }.flat.then(&:succ)
-    expect(f.value).to eq 2
+    expect(f.value!).to eq 2
   end
 end
 
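The value to value! switch in the flat-map example is about failure visibility: assuming the usual edge-futures semantics where value quietly returns nil for a failed future while value! re-raises the failure's reason, a broken chain now fails the spec with the underlying error instead of an opaque "expected 2, got nil". A minimal sketch of the difference, under that same assumption:

    require 'concurrent'

    failed = Concurrent.future { raise ArgumentError, 'boom' }

    failed.value  # => nil  (the failure is swallowed; an eq matcher only sees nil)
    failed.value! # raises ArgumentError, pointing at the real cause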