@@ -121,61 +121,62 @@ def blocking_lookup_v2_partial_proofs(filename: Path, plot_info: PlotInfo) -> Op
                     new_challenge.challenge_hash,
                     new_challenge.sp_hash,
                 )
-                try:
-                    partial_proofs = plot_info.prover.get_partial_proofs_for_challenge(sp_challenge_hash)
-                except Exception as e:
-                    self.harvester.log.error(f"Exception fetching partial proof for V2 plot {filename}. {e}")
+                partial_proofs = plot_info.prover.get_partial_proofs_for_challenge(sp_challenge_hash)
+
+                # If no partial proofs are found, return None
+                if partial_proofs is None or len(partial_proofs) == 0:
                     return None
 
-                if partial_proofs is not None and len(partial_proofs) > 0:
-                    # Get the appropriate difficulty for this plot
-                    difficulty = new_challenge.difficulty
-                    sub_slot_iters = new_challenge.sub_slot_iters
-                    if plot_info.pool_contract_puzzle_hash is not None:
-                        # Check for pool-specific difficulty
-                        for pool_difficulty in new_challenge.pool_difficulties:
-                            if pool_difficulty.pool_contract_puzzle_hash == plot_info.pool_contract_puzzle_hash:
-                                difficulty = pool_difficulty.difficulty
-                                sub_slot_iters = pool_difficulty.sub_slot_iters
-                                break
+                # Get the appropriate difficulty for this plot
+                difficulty = new_challenge.difficulty
+                sub_slot_iters = new_challenge.sub_slot_iters
+                if plot_info.pool_contract_puzzle_hash is not None:
+                    # Check for pool-specific difficulty
+                    for pool_difficulty in new_challenge.pool_difficulties:
+                        if pool_difficulty.pool_contract_puzzle_hash == plot_info.pool_contract_puzzle_hash:
+                            difficulty = pool_difficulty.difficulty
+                            sub_slot_iters = pool_difficulty.sub_slot_iters
+                            break
+
+                # Filter qualities that pass the required_iters check (same as V1 flow)
+                good_qualities = []
+                sp_interval_iters = calculate_sp_interval_iters(self.harvester.constants, sub_slot_iters)
+
+                for partial_proof in partial_proofs:
+                    quality_str = quality_for_partial_proof(partial_proof, new_challenge.challenge_hash)
+                    required_iters: uint64 = calculate_iterations_quality(
+                        self.harvester.constants,
+                        quality_str,
+                        plot_info.prover.get_size(),  # TODO: todo_v2_plots update for V2
+                        difficulty,
+                        new_challenge.sp_hash,
+                        sub_slot_iters,
+                        new_challenge.last_tx_height,
+                    )
 
-                    # Filter qualities that pass the required_iters check (same as V1 flow)
-                    good_qualities = []
-                    sp_interval_iters = calculate_sp_interval_iters(self.harvester.constants, sub_slot_iters)
+                    if required_iters < sp_interval_iters:
+                        good_qualities.append(partial_proof)
 
-                    for partial_proof in partial_proofs:
-                        quality_str = quality_for_partial_proof(partial_proof, new_challenge.challenge_hash)
-                        required_iters: uint64 = calculate_iterations_quality(
-                            self.harvester.constants,
-                            quality_str,
-                            plot_info.prover.get_size(),  # TODO: todo_v2_plots update for V2
-                            difficulty,
-                            new_challenge.sp_hash,
-                            sub_slot_iters,
-                            new_challenge.last_tx_height,
-                        )
-
-                        if required_iters < sp_interval_iters:
-                            good_qualities.append(partial_proof)
+                if len(good_qualities) == 0:
+                    return None
 
-                    if len(good_qualities) > 0:
-                        size = plot_info.prover.get_size().size_v2
-                        assert size is not None
-                        return PartialProofsData(
-                            new_challenge.challenge_hash,
-                            new_challenge.sp_hash,
-                            good_qualities[0].hex() + str(filename.resolve()),
-                            good_qualities,
-                            new_challenge.signage_point_index,
-                            size,
-                            difficulty,
-                            plot_info.pool_public_key,
-                            plot_info.pool_contract_puzzle_hash,
-                            plot_info.plot_public_key,
-                        )
+                size = plot_info.prover.get_size().size_v2
+                assert size is not None
+                return PartialProofsData(
+                    new_challenge.challenge_hash,
+                    new_challenge.sp_hash,
+                    good_qualities[0].hex() + str(filename.resolve()),
+                    good_qualities,
+                    new_challenge.signage_point_index,
+                    size,
+                    difficulty,
+                    plot_info.pool_public_key,
+                    plot_info.pool_contract_puzzle_hash,
+                    plot_info.plot_public_key,
+                )
 
                 return None
-            except Exception as e:
-                self.harvester.log.error(f"Unknown error in V2 quality lookup: {e}")
+            except Exception:
+                self.harvester.log.exception("Failed V2 partial proof lookup")
                 return None
 
        def blocking_lookup(filename: Path, plot_info: PlotInfo) -> list[tuple[bytes32, ProofOfSpace]]:
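
Review note: the restructuring above flattens the nested `try`/`except` and `if` blocks into guard clauses: return early when the prover yields no partial proofs, keep only proofs with `required_iters < sp_interval_iters`, return early again when nothing passes, and only then build `PartialProofsData`. Below is a minimal, self-contained sketch of that control flow; `fetch_partial_proofs`, `required_iters_for`, and `SP_INTERVAL_ITERS` are illustrative stand-ins, not the chia-blockchain API.

```python
from typing import Optional

# Stand-in constant and helpers -- placeholders for the real harvester machinery.
SP_INTERVAL_ITERS = 1_000_000


def fetch_partial_proofs(challenge: bytes) -> list[bytes]:
    """Placeholder for the prover lookup; may return an empty list."""
    return [bytes([i + 1]) * 32 for i in range(3)]


def required_iters_for(proof: bytes) -> int:
    """Placeholder for the quality -> required_iters computation."""
    return int.from_bytes(proof[:4], "big") % (2 * SP_INTERVAL_ITERS)


def lookup_partial_proofs(challenge: bytes) -> Optional[list[bytes]]:
    """Early-return version of the lookup: each failure path exits immediately."""
    proofs = fetch_partial_proofs(challenge)
    if not proofs:  # nothing stored for this challenge
        return None

    # Keep only proofs whose required iterations fit inside the signage-point interval.
    good = [p for p in proofs if required_iters_for(p) < SP_INTERVAL_ITERS]
    if not good:
        return None

    return good


if __name__ == "__main__":
    print(lookup_partial_proofs(b"\x00" * 32))
```

The early returns keep every failure path at a single indentation level, which is the same effect the diff achieves for the real lookup.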
@@ -331,21 +332,21 @@ async def lookup_challenge(
             # Passes the plot filter (does not check sp filter yet though, since we have not reached sp)
             # This is being executed at the beginning of the slot
             total += 1
-            if self._plot_passes_filter(try_plot_info, new_challenge):
-                if try_plot_info.prover.get_version() == PlotVersion.V2:
-                    # TODO: todo_v2_plots need to check v2 filter
-                    v2_awaitables.append(
-                        loop.run_in_executor(
-                            self.harvester.executor,
-                            blocking_lookup_v2_partial_proofs,
-                            try_plot_filename,
-                            try_plot_info,
-                        )
+            if not self._plot_passes_filter(try_plot_info, new_challenge):
+                continue
+            if try_plot_info.prover.get_version() == PlotVersion.V2:
+                v2_awaitables.append(
+                    loop.run_in_executor(
+                        self.harvester.executor,
+                        blocking_lookup_v2_partial_proofs,
+                        try_plot_filename,
+                        try_plot_info,
                     )
-                    passed += 1
-                else:
-                    passed += 1
-                    awaitables.append(lookup_challenge(try_plot_filename, try_plot_info))
+                )
+                passed += 1
+            else:
+                passed += 1
+                awaitables.append(lookup_challenge(try_plot_filename, try_plot_info))
         self.harvester.log.debug(f"new_signage_point_harvester {passed} plots passed the plot filter")
 
         # Concurrently executes all lookups on disk, to take advantage of multiple disk parallelism
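
The comment above refers to the executor fan-out in `new_signage_point_harvester`: every plot that passes the filter schedules its blocking, disk-bound lookup on a thread pool via `loop.run_in_executor`, and the resulting futures are awaited together. A rough standalone sketch of that pattern, with a stubbed blocking function and stand-in plot names rather than the harvester's real objects:

```python
import asyncio
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
from typing import Optional


def blocking_lookup(filename: Path) -> Optional[str]:
    """Stand-in for a blocking, disk-bound proof lookup; some plots yield nothing."""
    return filename.name if "keep" in filename.name else None


async def harvest(filenames: list[Path]) -> list[str]:
    loop = asyncio.get_running_loop()
    with ThreadPoolExecutor(max_workers=4) as executor:
        # Fan out every blocking lookup to the thread pool so disks are read in parallel.
        awaitables = [loop.run_in_executor(executor, blocking_lookup, f) for f in filenames]

        # Await all lookups concurrently, then drop the empty results.
        results = await asyncio.gather(*awaitables)
    return [r for r in results if r is not None]


if __name__ == "__main__":
    plots = [Path("plot-keep-1.plot"), Path("plot-skip.plot"), Path("plot-keep-2.plot")]
    print(asyncio.run(harvest(plots)))
```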