Skip to content

Commit 7ab68fd

Browse files
authored
Merge pull request #113 from firstbatchxyz/erhant/update-workflows-for-error
updated workflows with error handling, added new model
2 parents aac2150 + 6238437 commit 7ab68fd

File tree

6 files changed

+26
-20
lines changed

6 files changed

+26
-20
lines changed

Cargo.lock

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "dkn-compute"
3-
version = "0.1.6"
3+
version = "0.1.7"
44
edition = "2021"
55
license = "Apache-2.0"
66
readme = "README.md"
@@ -46,7 +46,7 @@ sha3 = "0.10.8"
4646
fastbloom-rs = "0.5.9"
4747

4848
# workflows
49-
ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "d6b2e1e" }
49+
ollama-workflows = { git = "https://github.com/andthattoo/ollama-workflows", rev = "ba038f7" }
5050

5151
# peer-to-peer
5252
libp2p = { git = "https://github.com/anilaltuner/rust-libp2p.git", rev = "3c55e95", features = [

docs/NODE_GUIDE.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -136,6 +136,7 @@ Based on the resources of your machine, you must decide which models that you wi
136136
- `phi3:14b-medium-128k-instruct-q4_1`
137137
- `phi3:3.8b`
138138
- `llama3.1:latest`
139+
- `llama3.1:8b-instruct-q8_0`
139140
- `phi3.5:3.8b`
140141
- `phi3.5:3.8b-mini-instruct-fp16`
141142

src/config/ollama.rs

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -210,11 +210,14 @@ impl OllamaConfig {
210210
log::warn!("Ignoring model {}: Workflow timed out", model);
211211
},
212212
result = executor.execute(None, workflow, &mut memory) => {
213-
if result.is_empty() {
214-
log::warn!("Ignoring model {}: Workflow returned empty result", model);
215-
} else {
216-
log::info!("Accepting model {}", model);
217-
return true;
213+
match result {
214+
Ok(_) => {
215+
log::info!("Accepting model {}", model);
216+
return true;
217+
}
218+
Err(e) => {
219+
log::warn!("Ignoring model {}: Workflow failed with error {}", model, e);
220+
}
218221
}
219222
}
220223
};
@@ -292,6 +295,6 @@ mod tests {
292295
let mut memory = ProgramMemory::new();
293296

294297
let result = exe.execute(None, workflow, &mut memory).await;
295-
println!("Result: {}", result);
298+
println!("Result: {}", result.unwrap());
296299
}
297300
}

src/handlers/workflow.rs

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ impl ComputeHandler for WorkflowHandler {
7676
.get_any_matching_model(task.input.model)?;
7777
log::info!("Using model {} for task {}", model, task.task_id);
7878

79-
// execute workflow with cancellation
79+
// prepare workflow executor
8080
let executor = if model_provider == ModelProvider::Ollama {
8181
Executor::new_at(
8282
model,
@@ -91,26 +91,28 @@ impl ComputeHandler for WorkflowHandler {
9191
.input
9292
.prompt
9393
.map(|prompt| Entry::try_value_or_str(&prompt));
94-
let result: Option<String>;
94+
95+
// execute workflow with cancellation
96+
let result: String;
9597
tokio::select! {
9698
_ = node.cancellation.cancelled() => {
9799
log::info!("Received cancellation, quitting all tasks.");
98100
return Ok(MessageAcceptance::Accept)
99101
},
100102
exec_result = executor.execute(entry.as_ref(), task.input.workflow, &mut memory) => {
101-
if exec_result.is_empty() {
102-
return Err(format!("Got empty string result for task {}", task.task_id).into());
103-
} else {
104-
result = Some(exec_result);
103+
match exec_result {
104+
Ok(exec_result) => {
105+
result = exec_result;
106+
}
107+
Err(e) => {
108+
return Err(format!("Workflow failed with error {}", e).into());
109+
}
105110
}
106111
}
107112
}
108-
let result = result.ok_or::<String>(format!("No result for task {}", task.task_id))?;
109113

110114
// publish the result
111115
node.send_result(result_topic, &task.public_key, &task.task_id, result)?;
112-
113-
// accept message, someone else may be included in the filter
114116
Ok(MessageAcceptance::Accept)
115117
}
116118
}

start.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -182,7 +182,7 @@ handle_ollama_env() {
182182

183183
# if there is no ollama model given, do not add any ollama compose profile
184184
ollama_needed=false
185-
ollama_models="nous-hermes2theta-llama3-8b phi3:medium phi3:medium-128k phi3:3.8b phi3.5 llama3.1:latest"
185+
ollama_models="nous-hermes2theta-llama3-8b phi3:medium phi3:medium-128k phi3:3.8b phi3.5 llama3.1:latest llama3.1:8b-instruct-q8_0"
186186
for m in $(echo "$DKN_MODELS" | tr ',' ' '); do
187187
case " $ollama_models " in
188188
*" $m "*) ollama_needed=true; break;;

0 commit comments

Comments (0)