Skip to content

Commit 0405b22

Browse files
committed
lint fixes, added spec docs
1 parent 87c1ddb commit 0405b22

File tree

9 files changed

+20
-10
lines changed

9 files changed

+20
-10
lines changed

README.md

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ Compute nodes can technically do any arbitrary task, from computing the square r
4040

4141
## Requirements
4242

43-
The compute node is a very lightweight process, with a few MBs of memory usage and an image size of less than ~65 MB. If you are using Ollama, you will need enough memory to run large models locally, which depends on the size of the model you are willing to use.
43+
### Software
4444

4545
You need the following applications to run compute node:
4646

@@ -56,6 +56,16 @@ You need the following applications to run compute node:
5656
> which docker
5757
> ```
5858
59+
### Hardware
60+
61+
**For overall specifications about required CPU and RAM, please refer to [dkn-node-specs](https://github.com/firstbatchxyz/dkn-node-specs).**
62+
63+
In general, if you are using Ollama you will need enough memory to run large models locally, which depends on the size of the model you are willing to use. If you are in a memory-constrained environment, you can opt to use OpenAI models instead.
64+
65+
> [!NOTE]
66+
>
67+
> The compute node is a lightweight process, but you may see increased memory & CPU usage during the initial testing phases, caused by various protocol-level operations as the network grows in size.
68+
5969
## Setup
6070
6171
To be able to run a node, we need to make a few simple preparations. Follow the steps below one by one.

src/config/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ impl DriaComputeNodeConfig {
157157

158158
// update good models
159159
if good_models.is_empty() {
160-
return Err("No good models found, please check logs for errors.".into());
160+
Err("No good models found, please check logs for errors.".into())
161161
} else {
162162
self.model_config.models = good_models;
163163
Ok(())

src/config/ollama.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,7 @@ impl OllamaConfig {
141141
// otherwise, give error
142142
log::error!("Please download missing model with: ollama pull {}", model);
143143
log::error!("Or, set OLLAMA_AUTO_PULL=true to pull automatically.");
144-
return Err("Required model not pulled in Ollama.".into());
144+
Err("Required model not pulled in Ollama.".into())
145145
}
146146
}
147147

@@ -219,6 +219,6 @@ impl OllamaConfig {
219219
}
220220
};
221221

222-
return false;
222+
false
223223
}
224224
}

src/config/openai.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -105,6 +105,7 @@ mod tests {
105105
use super::*;
106106

107107
#[tokio::test]
108+
#[ignore = "requires OpenAI API key"]
108109
async fn test_openai_check() {
109110
let config = OpenAIConfig::new();
110111
let res = config.check(vec![]).await;

src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ pub(crate) mod utils;
1010

1111
/// Crate version of the compute node.
1212
/// This value is attached within the published messages.
13-
pub const DRIA_COMPUTE_NODE_VERSION: &'static str = env!("CARGO_PKG_VERSION");
13+
pub const DRIA_COMPUTE_NODE_VERSION: &str = env!("CARGO_PKG_VERSION");
1414

1515
pub use config::DriaComputeNodeConfig;
1616
pub use node::DriaComputeNode;

src/main.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,6 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
4242
tokio::select! {
4343
_ = service_check_token.cancelled() => {
4444
log::info!("Service check cancelled.");
45-
return;
4645
}
4746
result = config_clone.check_services() => {
4847
if let Err(err) = result {

src/node.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,7 @@ impl DriaComputeNode {
134134
// handle message w.r.t topic
135135
if std::matches!(topic_str, PINGPONG_LISTEN_TOPIC | WORKFLOW_LISTEN_TOPIC) {
136136
// ensure that the message is from a valid source (origin)
137-
let source_peer_id = match message.source.clone() {
137+
let source_peer_id = match message.source {
138138
Some(peer) => peer,
139139
None => {
140140
log::warn!("Received {} message from {} without source.", topic_str, peer_id);

src/p2p/client.rs

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ impl P2PClient {
123123

124124
Ok(Self {
125125
swarm,
126-
version: Version::parse(&DRIA_COMPUTE_NODE_VERSION).unwrap(),
126+
version: Version::parse(DRIA_COMPUTE_NODE_VERSION).unwrap(),
127127
peer_count: (0, 0),
128128
peer_last_refreshed: Instant::now(),
129129
})
@@ -272,8 +272,7 @@ impl P2PClient {
272272
.iter()
273273
.find(|p| p.to_string().starts_with("/dria/kad/"))
274274
{
275-
let protocol_ok =
276-
self.check_version_with_prefix(&kad_protocol.to_string(), "/dria/kad/");
275+
let protocol_ok = self.check_version_with_prefix(kad_protocol.as_ref(), "/dria/kad/");
277276

278277
// if it matches our protocol, add it to the Kademlia routing table
279278
if protocol_ok {

src/utils/crypto.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -120,6 +120,7 @@ mod tests {
120120
}
121121

122122
#[test]
123+
#[ignore = "run only with profiler if wanted"]
123124
fn test_memory_usage() {
124125
let secret_key =
125126
SecretKey::parse_slice(DUMMY_KEY).expect("Should parse private key slice.");

0 commit comments

Comments
 (0)