@@ -42,7 +42,7 @@ def create_complete_dag(workbench: NodesDict) -> nx.DiGraph:
4242 )
4343 if node .input_nodes :
4444 for input_node_id in node .input_nodes :
45- predecessor_node = workbench .get (NodeIDStr ( input_node_id ) )
45+ predecessor_node = workbench .get (f" { input_node_id } " )
4646 if predecessor_node :
4747 dag_graph .add_edge (str (input_node_id ), node_id )
4848
@@ -95,19 +95,18 @@ async def get_node_io_payload_cb(node_id: NodeID) -> dict[str, Any]:
9595 return result
9696
9797 computed_hash = await compute_node_hash (node_id , get_node_io_payload_cb )
98- if computed_hash != node ["run_hash" ]:
99- return True
100- return False
98+ return bool (computed_hash != node ["run_hash" ])
10199
102100
async def _compute_node_dependencies_state(graph_data, node_id) -> set[NodeID]:
    """Return the upstream node IDs of *node_id* whose outputs are not ready yet.

    Walks the node's input ports; every port that is a ``PortLink`` points at a
    predecessor node, and the predecessor is collected when
    ``_node_needs_computation`` reports it still has to run. An empty result
    means all dependencies are computed and the node is ready.
    """
    current_node = graph_data[f"{node_id}"]
    pending: set[NodeID] = set()
    # Only linked ports (PortLink) create a dependency on another node;
    # plain input values are always considered ready.
    for port in current_node.get("inputs", {}).values():
        if not isinstance(port, PortLink):
            continue
        if _node_needs_computation(graph_data, port.node_uuid):
            pending.add(port.node_uuid)
    return pending
113112
@@ -188,14 +187,14 @@ def compute_pipeline_started_timestamp(
188187 if not pipeline_dag .nodes :
189188 return None
190189 node_id_to_comp_task : dict [NodeIDStr , CompTaskAtDB ] = {
191- NodeIDStr ( f"{ task .node_id } " ) : task for task in comp_tasks
190+ f"{ task .node_id } " : task for task in comp_tasks
192191 }
193- TOMORROW = arrow .utcnow ().shift (days = 1 ).datetime
192+ tomorrow = arrow .utcnow ().shift (days = 1 ).datetime
194193 pipeline_started_at : datetime .datetime | None = min (
195- node_id_to_comp_task [node_id ].start or TOMORROW
194+ node_id_to_comp_task [node_id ].start or tomorrow
196195 for node_id in pipeline_dag .nodes
197196 )
198- if pipeline_started_at == TOMORROW :
197+ if pipeline_started_at == tomorrow :
199198 pipeline_started_at = None
200199 return pipeline_started_at
201200
@@ -206,13 +205,13 @@ def compute_pipeline_stopped_timestamp(
206205 if not pipeline_dag .nodes :
207206 return None
208207 node_id_to_comp_task : dict [NodeIDStr , CompTaskAtDB ] = {
209- NodeIDStr ( f"{ task .node_id } " ) : task for task in comp_tasks
208+ f"{ task .node_id } " : task for task in comp_tasks
210209 }
211- TOMORROW = arrow .utcnow ().shift (days = 1 ).datetime
210+ tomorrow = arrow .utcnow ().shift (days = 1 ).datetime
212211 pipeline_stopped_at : datetime .datetime | None = max (
213- node_id_to_comp_task [node_id ].end or TOMORROW for node_id in pipeline_dag .nodes
212+ node_id_to_comp_task [node_id ].end or tomorrow for node_id in pipeline_dag .nodes
214213 )
215- if pipeline_stopped_at == TOMORROW :
214+ if pipeline_stopped_at == tomorrow :
216215 pipeline_stopped_at = None
217216 return pipeline_stopped_at
218217
@@ -227,15 +226,15 @@ async def compute_pipeline_details(
227226
228227 # NOTE: the latest progress is available in comp_tasks only
229228 node_id_to_comp_task : dict [NodeIDStr , CompTaskAtDB ] = {
230- NodeIDStr ( f"{ task .node_id } " ) : task for task in comp_tasks
229+ f"{ task .node_id } " : task for task in comp_tasks
231230 }
232231 pipeline_progress = None
233232 if len (pipeline_dag .nodes ) > 0 :
234-
235233 pipeline_progress = sum (
236234 (node_id_to_comp_task [node_id ].progress or 0 ) / len (pipeline_dag .nodes )
237235 for node_id in pipeline_dag .nodes
238- if node_id_to_comp_task [node_id ].progress is not None
236+ if node_id in node_id_to_comp_task
237+ and node_id_to_comp_task [node_id ].progress is not None
239238 )
240239 pipeline_progress = max (0.0 , min (pipeline_progress , 1.0 ))
241240
@@ -246,10 +245,15 @@ async def compute_pipeline_details(
246245 node_id : NodeState (
247246 modified = node_data .get (kNODE_MODIFIED_STATE , False ),
248247 dependencies = node_data .get (kNODE_DEPENDENCIES_TO_COMPUTE , set ()),
249- current_status = node_id_to_comp_task [node_id ].state ,
248+ current_status = (
249+ node_id_to_comp_task [node_id ].state
250+ if node_id in node_id_to_comp_task
251+ else RunningState .UNKNOWN
252+ ),
250253 progress = (
251254 node_id_to_comp_task [node_id ].progress
252- if node_id_to_comp_task [node_id ].progress is not None
255+ if node_id in node_id_to_comp_task
256+ and node_id_to_comp_task [node_id ].progress is not None
253257 else None
254258 ),
255259 )
@@ -261,12 +265,13 @@ async def compute_pipeline_details(
261265
def find_computational_node_cycles(dag: nx.DiGraph) -> list[list[str]]:
    """Return every simple cycle of *dag* that contains at least one
    computational node (such cycles are currently forbidden in a pipeline).

    Each returned cycle is a deep copy of the node-id list produced by
    networkx, so callers may mutate it freely.
    """
    offending_cycles: list[list[str]] = []
    for candidate in nx.algorithms.cycles.simple_cycles(dag):
        # A cycle is only a problem when it goes through a COMPUTATIONAL node.
        contains_computational = any(
            dag.nodes[member]["node_class"] is NodeClass.COMPUTATIONAL
            for member in candidate
        )
        if contains_computational:
            offending_cycles.append(deepcopy(candidate))
    return offending_cycles
0 commit comments