
Commit 553a553

Update Cargo.toml
1 parent ce92256 commit 553a553

File tree

1 file changed: +83 -5 lines changed


Cargo.toml

Lines changed: 83 additions & 5 deletions
@@ -1,12 +1,90 @@
-<<<<<<< HEAD
-[package]\nname = "neural-nexus-core"\nversion = "0.1.0"\nedition = "2021"
-=======
 [package]
 name = "neural-nexus-core"
 version = "0.1.0"
 edition = "2021"
+authors = ["Neural Nexus Team <team@neuralnexus.dev>"]
+description = "Distributed AI platform for edge computing with neuromorphic inference"
+license = "Apache-2.0"
+repository = "https://github.com/mechmind-dwv/core-system"
+keywords = ["edge-computing", "ai", "neuromorphic", "distributed", "inference"]
+categories = ["science", "network-programming", "embedded"]
+
+[workspace]
+members = [
+    "core/inference_engine",
+    "core/distributed",
+    "core/neuromorphic",
+    "edge_modules/raspberry_pi",
+    "edge_modules/jetson"
+]
 
 [dependencies]
+# Core async runtime
 tokio = { version = "1.0", features = ["full"] }
-prost = "0.12"
->>>>>>> 5a764be7 (feat: Initial Neural Nexus core structure)
+async-trait = "0.1"
+
+# Serialization
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+bincode = "1.3"
+
+# Networking & Communication
+tonic = "0.10"      # gRPC
+prost = "0.12"      # Protocol Buffers
+rumqttc = "0.24"    # MQTT client
+hyper = "0.14"
+
+# Machine Learning
+candle-core = "0.4" # Rust ML framework
+ort = "1.16"        # ONNX Runtime bindings
+
+# Monitoring & Metrics
+prometheus = "0.13"
+tracing = "0.1"
+tracing-subscriber = "0.3"
+
+# Utilities
+anyhow = "1.0"
+thiserror = "1.0"
+uuid = { version = "1.0", features = ["v4"] }
+chrono = { version = "0.4", features = ["serde"] }
+
+# Hardware specific
+rppal = { version = "0.14", optional = true }           # Raspberry Pi
+jetson-inference = { version = "0.1", optional = true } # NVIDIA Jetson
+
+[dev-dependencies]
+criterion = "0.5"   # Benchmarking
+mockall = "0.12"    # Mocking
+tokio-test = "0.4"
+
+[features]
+default = ["inference", "distributed"]
+inference = []
+distributed = []
+neuromorphic = []
+raspberry-pi = ["rppal"]
+jetson = ["jetson-inference"]
+gpu-acceleration = []
+
+[profile.release]
+lto = true
+codegen-units = 1
+panic = "abort"
+strip = true
+
+[profile.dev]
+debug = true
+opt-level = 0
+
+[[bin]]
+name = "neural-nexus-node"
+path = "src/bin/node.rs"
+
+[[bin]]
+name = "neural-nexus-orchestrator"
+path = "src/bin/orchestrator.rs"
+
+[[bench]]
+name = "inference_benchmark"
+harness = false
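
The raspberry-pi and jetson features only enable the optional rppal and jetson-inference dependencies; the crate still has to gate hardware-specific code paths itself with cfg attributes. A minimal sketch of that gating follows; the module and function names are purely illustrative and are not part of this commit.

// Illustrative only: selecting hardware-specific code at compile time.
// Building with `cargo build --features raspberry-pi` takes the first branch.
#[cfg(feature = "raspberry-pi")]
mod target {
    // Compiled only when the `raspberry-pi` feature (and therefore the
    // optional `rppal` dependency) is enabled.
    pub fn describe() -> &'static str {
        "Raspberry Pi edge node"
    }
}

#[cfg(not(feature = "raspberry-pi"))]
mod target {
    // Fallback for generic builds without hardware-specific features.
    pub fn describe() -> &'static str {
        "generic node"
    }
}

fn main() {
    println!("build target: {}", target::describe());
}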
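
The two [[bin]] targets point at src/bin/node.rs and src/bin/orchestrator.rs, which are not included in this diff, so the crate will not build until those files exist. A placeholder for src/bin/node.rs that compiles against the declared tokio, tracing, and tracing-subscriber dependencies could look like this (the log message and structure are assumptions, not repository code):

// Hypothetical placeholder for src/bin/node.rs: an async entry point
// backed by the tokio runtime declared in Cargo.toml.
#[tokio::main]
async fn main() {
    // Install a basic subscriber so `tracing` events are printed to stdout.
    tracing_subscriber::fmt::init();
    tracing::info!("neural-nexus-node starting");
}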
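
Because the [[bench]] target sets harness = false, Cargo does not generate a test harness for it, and the benchmark file (by Cargo convention, benches/inference_benchmark.rs) must supply its own main, typically via criterion's macros. A sketch under that assumption, where the benchmarked function is a dummy stand-in rather than the project's real inference path:

// Sketch of benches/inference_benchmark.rs for a `harness = false` target.
use criterion::{criterion_group, criterion_main, Criterion};
use std::hint::black_box;

// Dummy workload standing in for the real inference code, which is not
// part of this commit.
fn dummy_inference(input: u64) -> u64 {
    (0..input).map(|x| x.wrapping_mul(31)).sum()
}

fn inference_benchmark(c: &mut Criterion) {
    c.bench_function("dummy_inference", |b| {
        b.iter(|| dummy_inference(black_box(1_000)))
    });
}

criterion_group!(benches, inference_benchmark);
criterion_main!(benches);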
