 use sysinfo::{CpuRefreshKind, RefreshKind, System, MINIMUM_CPU_UPDATE_INTERVAL};

 #[tokio::main]
 async fn main() {
-    // initialize logger
-    env_logger::init();
+    #[cfg(feature = "profiling")]
+    {
+        // initialize logger
+        env_logger::init();

-    let cfg = DriaWorkflowsConfig::new_from_csv("finalend/hermes-3-llama-3.1:8b-q8_0,phi3:14b-medium-4k-instruct-q4_1,phi3:14b-medium-128k-instruct-q4_1,phi3.5:3.8b,phi3.5:3.8b-mini-instruct-fp16,gemma2:9b-instruct-q8_0,gemma2:9b-instruct-fp16,llama3.1:latest,llama3.1:8b-instruct-q8_0,llama3.1:8b-instruct-fp16,llama3.1:70b-instruct-q4_0,llama3.1:70b-instruct-q8_0,llama3.2:1b,llama3.2:3b,qwen2.5:7b-instruct-q5_0,qwen2.5:7b-instruct-fp16,qwen2.5:32b-instruct-fp16,qwen2.5-coder:1.5b,qwen2.5-coder:7b-instruct,llama3.2:3b,qwen2.5-coder:7b-instruct-q8_0,qwen2.5-coder:7b-instruct-fp16,deepseek-coder:6.7b,mixtral:8x7b");
-    let config = OllamaConfig::default();
-    let ollama = Ollama::new(config.host, config.port);
+        let models = vec![
+            Model::NousTheta,
+            Model::Phi3Medium,
+            Model::Phi3Medium128k,
+            Model::Phi3_5Mini,
+            Model::Phi3_5MiniFp16,
+            Model::Gemma2_9B,
+            Model::Gemma2_9BFp16,
+            Model::Llama3_1_8B,
+            Model::Llama3_1_8Bq8,
+            Model::Llama3_1_8Bf16,
+            Model::Llama3_1_8BTextQ4KM,
+            Model::Llama3_1_8BTextQ8,
+            Model::Llama3_1_70B,
+            Model::Llama3_1_70Bq8,
+            Model::Llama3_1_70BTextQ4KM,
+            Model::Llama3_2_1B,
+            Model::Llama3_2_3B,
+            Model::Llama3_2_1BTextQ4KM,
+            Model::Qwen2_5_7B,
+            Model::Qwen2_5_7Bf16,
+            Model::Qwen2_5_32Bf16,
+            Model::Qwen2_5Coder1_5B,
+            Model::Qwen2_5coder7B,
+            Model::Qwen2_5coder7Bq8,
+            Model::Qwen2_5coder7Bf16,
+            Model::DeepSeekCoder6_7B,
+            Model::Mixtral8_7b,
+            Model::GPT4Turbo,
+            Model::GPT4o,
+            Model::GPT4oMini,
+            Model::O1Preview,
+            Model::O1Mini,
+            Model::Gemini15ProExp0827,
+            Model::Gemini15Pro,
+            Model::Gemini15Flash,
+            Model::Gemini10Pro,
+            Model::Gemma2_2bIt,
+            Model::Gemma2_27bIt,
+        ];

-    log::info!("Starting...");
-    // ensure that all lists of CPUs and processes are filled
-    let mut system = System::new_all();
-    // update all information of the system
-    system.refresh_all();
+        let cfg = DriaWorkflowsConfig::new(models);
+        let config = OllamaConfig::default();
+        let ollama = Ollama::new(config.host, config.port);
+        log::info!("Starting...");
+        // ensure that all lists of CPUs and processes are filled
+        let mut system = System::new_all();
+        // update all information of the system
+        system.refresh_all();

-    log::debug!("Getting system information...");
-    let brand = system.cpus()[0].brand().to_string();
-    let os_name = System::name().unwrap_or_else(|| "Unknown".to_string());
-    let os_version = System::long_os_version().unwrap_or_else(|| "Unknown".to_string());
-    let cpu_usage = system.global_cpu_usage();
-    let total_memory = system.total_memory();
-    let used_memory = system.used_memory();
+        log::debug!("Getting system information...");
+        let brand = system.cpus()[0].brand().to_string();
+        let os_name = System::name().unwrap_or_else(|| "Unknown".to_string());
+        let os_version = System::long_os_version().unwrap_or_else(|| "Unknown".to_string());
+        let cpu_usage = system.global_cpu_usage();
+        let total_memory = system.total_memory();
+        let used_memory = system.used_memory();

-    for (_, model) in cfg.models {
-        log::info!("Pulling model: {}", model);
+        for (_, model) in cfg.models {
+            log::info!("Pulling model: {}", model);

-        // pull model
-        match ollama.pull_model(model.to_string(), false).await {
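
With this change the entire body of main sits behind #[cfg(feature = "profiling")], so the code above only compiles when that Cargo feature is enabled (for example with cargo run --features profiling, assuming the feature is declared in the crate's Cargo.toml), and the hard-coded CSV model list is replaced by an explicit Vec<Model> passed to DriaWorkflowsConfig::new.

One subtlety the imports hint at: in sysinfo, CPU usage is computed from the delta between two refreshes, so a single refresh_all() followed immediately by global_cpu_usage() tends to report a near-zero value. Below is a minimal, standalone sketch of the usual sampling pattern; it is not part of this commit and assumes a sysinfo release where refresh_cpu_usage and global_cpu_usage are available, which the global_cpu_usage() call in the diff suggests.

use sysinfo::{System, MINIMUM_CPU_UPDATE_INTERVAL};

fn sample_global_cpu_usage() -> f32 {
    // `new_all` performs an initial refresh of CPUs, memory, processes, ...
    let mut system = System::new_all();

    // CPU usage is a delta between two measurements, so wait at least the
    // minimum update interval and refresh the CPU data once more.
    std::thread::sleep(MINIMUM_CPU_UPDATE_INTERVAL);
    system.refresh_cpu_usage();

    // Average usage over all cores, in percent.
    system.global_cpu_usage()
}

The CpuRefreshKind and RefreshKind imports in the diff would allow an even narrower, CPU-only refresh via System::new_with_specifics, though they are not used in the visible hunk.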
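
The visible part of the diff ends at the match on ollama.pull_model(...). In ollama-rs, the non-streaming pull_model(model_name, allow_insecure) returns a Result, so the match typically just splits the success and error cases. The sketch below is illustrative only: pull_one is a hypothetical helper, and the log messages and error handling are assumptions rather than this commit's actual match arms.

use ollama_rs::Ollama;

// Hypothetical helper, not part of this commit.
async fn pull_one(ollama: &Ollama, model_name: &str) {
    match ollama.pull_model(model_name.to_string(), false).await {
        // The pull finished; the returned status is ignored here.
        Ok(_) => log::info!("Pulled model: {}", model_name),
        // Log the failure so the caller can decide whether to continue.
        Err(err) => log::error!("Failed to pull {}: {}", model_name, err),
    }
}

Surfacing a failed pull matters here, since the loop in the diff pulls every model in cfg.models up front.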