1 file changed: +3 −3 lines changed

@@ -126,20 +126,20 @@ async def main():
     start_time = time.time()

     # Start up a ProcMesh.
-    local_proc_mesh: ProcMesh = await this_host().spawn_procs(
+    local_proc_mesh: ProcMesh = this_host().spawn_procs(
         per_host={"procs": NUM_CRAWLERS}
     )

     # Create queues across the mesh and use slice to target the first one; we will not use the rest.
     # TODO: Once ProcMesh::slice is implemented, avoid spawning the extra ones here.
-    all_queues = await local_proc_mesh.spawn("queues", QueueActor)
+    all_queues = local_proc_mesh.spawn("queues", QueueActor)
     target_queue = all_queues.slice(procs=slice(0, 1))

     # Prime the queue with the base URL we want to crawl.
     await target_queue.insert.call_one(BASE, DEPTH)

     # Make the crawlers and pass in the queues; crawlers will just use the first one as well.
-    crawlers = await local_proc_mesh.spawn("crawlers", CrawlActor, all_queues)
+    crawlers = local_proc_mesh.spawn("crawlers", CrawlActor, all_queues)

     # Run the crawlers; display the count of documents they crawled when done.
     results = await crawlers.crawl.call()
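
The change drops the await on spawn_procs() and spawn(): these now return the ProcMesh and actor-mesh handles directly, while sending messages to actors (insert.call_one(), crawl.call()) is still awaited. Below is a minimal sketch of how the updated snippet might read end to end. The import path, the actor bodies, and the constants NUM_CRAWLERS, BASE, and DEPTH are illustrative assumptions; the real QueueActor and CrawlActor live elsewhere in this example file, so the placeholders here only mirror the endpoints the diff actually calls.

    # A minimal sketch, assuming Monarch's Actor/endpoint API.
    import asyncio
    import time

    from monarch.actor import Actor, ProcMesh, endpoint, this_host  # assumed import path

    NUM_CRAWLERS = 4                  # assumed value
    BASE = "https://example.com"      # assumed value
    DEPTH = 2                         # assumed value


    class QueueActor(Actor):
        """Holds (url, depth) work items; only the first instance is used."""

        def __init__(self) -> None:
            self.items: list[tuple[str, int]] = []

        @endpoint
        async def insert(self, url: str, depth: int) -> None:
            self.items.append((url, depth))


    class CrawlActor(Actor):
        """Drains the first queue and reports how many documents it crawled."""

        def __init__(self, queues) -> None:
            # Crawlers "just use the first one as well", per the comment in the diff.
            self.queue = queues.slice(procs=slice(0, 1))

        @endpoint
        async def crawl(self) -> int:
            crawled = 0
            # ... fetch pages, enqueue discovered links, bump `crawled` ...
            return crawled


    async def main() -> None:
        start_time = time.time()

        # spawn_procs() and spawn() return mesh handles directly -- no await.
        local_proc_mesh: ProcMesh = this_host().spawn_procs(
            per_host={"procs": NUM_CRAWLERS}
        )
        all_queues = local_proc_mesh.spawn("queues", QueueActor)
        target_queue = all_queues.slice(procs=slice(0, 1))

        # Sending messages to actors is still asynchronous, so these stay awaited.
        await target_queue.insert.call_one(BASE, DEPTH)

        crawlers = local_proc_mesh.spawn("crawlers", CrawlActor, all_queues)
        results = await crawlers.crawl.call()

        # Iterating the returned ValueMesh via .items() is an assumption here.
        total = sum(count for _, count in results.items())
        print(f"Crawled {total} documents in {time.time() - start_time:.1f}s")


    if __name__ == "__main__":
        asyncio.run(main())

The asymmetry is the point of the change: constructing a proc or actor mesh is local bookkeeping that can complete eagerly, while call() and call_one() send messages across process boundaries and remain awaitable.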