diff --git a/Cargo.lock b/Cargo.lock index 7e2cda41..9190d622 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -302,9 +302,31 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "bytecheck" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2" +dependencies = [ + "bytecheck_derive", + "ptr_meta 0.1.4", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] [[package]] name = "bytemuck" @@ -1166,6 +1188,8 @@ dependencies = [ "itertools", "kdam", "lazy_static", + "libloading", + "native-tls", "pest", "pest_derive", "polars", @@ -1705,6 +1729,16 @@ version = "0.2.172" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" +[[package]] +name = "libloading" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" +dependencies = [ + "cfg-if", + "windows-targets 0.53.0", +] + [[package]] name = "libm" version = 
"0.2.15" @@ -2874,18 +2910,36 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fe9e76f66d3f9606f44e45598d155cb13ecf09f4a28199e48daf8c8fc937ea90" dependencies = [ - "ptr_meta_derive", + "ptr_meta_derive 0.3.0", ] [[package]] name = "ptr_meta_derive" -version = "0.3.0" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca414edb151b4c8d125c12566ab0d74dc9cdba36fb80eb7b848c15f495fd32d1" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 1.0.109", +] + +[[package]] +name = "query-container" +version = "0.1.0" +dependencies = [ + "chrono", + "dirs 5.0.1", + "get_routes", + "heed3", + "helixdb", + "inventory", + "rand 0.9.1", + "serde", + "serde_json", + "sonic-rs", + "tokio", + "uuid", ] [[package]] @@ -3593,9 +3647,9 @@ dependencies = [ [[package]] name = "sonic-rs" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93070f7e7c0d7ec7d08406b1b407234af30420320fd854f304029e3c6db4a899" +checksum = "7be54789747a46a8b1eb7b2c9cb0879cd7559a5f71bfff950868369f6868b9ad" dependencies = [ "ahash", "bumpalo", diff --git a/Cargo.toml b/Cargo.toml index b4bd313d..96c1aad9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,8 @@ members = [ "helix-container", "helix-macros", "helix-cli", - "hql-tests" + "query-container", + "hql-tests", ] resolver = "2" diff --git a/helix-cli/src/main.rs b/helix-cli/src/main.rs index e9d1eb35..2c853f11 100644 --- a/helix-cli/src/main.rs +++ b/helix-cli/src/main.rs @@ -4,13 +4,13 @@ use crate::{ types::*, utils::*, }; +use clap::Parser; use 
helix_db::{helix_engine::graph_core::config::Config, utils::styled_string::StyledString}; use spinners::{Spinner, Spinners}; -use clap::Parser; use std::{ fmt::Write, fs::{self, OpenOptions, read_to_string}, - io::{Write as iWrite}, + io::Write as iWrite, path::{Path, PathBuf}, process::Command, }; @@ -67,13 +67,16 @@ async fn main() -> Result<(), ()> { match instance_manager.start_instance(&command.cluster.unwrap(), None) { Ok(instance) => { sp.stop_with_message(format!( - "{}", - "Successfully started Helix instance".green().bold() + "{}", + "Successfully started Helix instance".green().bold() )); print_instance(&instance); } Err(e) => { - sp.stop_with_message(format!("{}", "Failed to start instance".red().bold())); + sp.stop_with_message(format!( + "{}", + "Failed to start instance".red().bold() + )); println!("└── {} {}", "Error:".red().bold(), e); return Err(()); } @@ -88,7 +91,6 @@ async fn main() -> Result<(), ()> { Some(port) => port, None => 6969, }; - let path = get_cfg_deploy_path(command.path.clone()); let files = match check_and_read_files(&path) { @@ -109,8 +111,8 @@ async fn main() -> Result<(), ()> { Err(_) => return Err(()), }; - if command.cluster.is_some() && - (command.path.is_some() || Path::new(&format!("./{}", DB_DIR)).is_dir()) + if command.cluster.is_some() + && (command.path.is_some() || Path::new(&format!("./{}", DB_DIR)).is_dir()) { match redeploy_helix(command.cluster.unwrap(), code) { Ok(_) => {} @@ -120,8 +122,8 @@ async fn main() -> Result<(), ()> { } // -- helix deploy -- - if command.cluster.is_none() && - (command.path.is_some() || Path::new(&format!("./{}", DB_DIR)).is_dir()) + if command.cluster.is_none() + && (command.path.is_some() || Path::new(&format!("./{}", DB_DIR)).is_dir()) { let port = match find_available_port(start_port) { Some(port) => { @@ -159,9 +161,11 @@ async fn main() -> Result<(), ()> { Err(_) => return Err(()), } } else { - println!("{}", + println!( + "{}", "Need to pass in a cluster id when redeploying a 
remote instance!" - .red().bold() + .red() + .bold() ); return Err(()); } @@ -175,8 +179,8 @@ async fn main() -> Result<(), ()> { println!( "{}", "Helix is not installed. Please run `helix install` first." - .red() - .bold() + .red() + .bold() ); return Err(()); } @@ -205,7 +209,8 @@ async fn main() -> Result<(), ()> { let local_cli_version = match get_cli_version() { Ok(val) => val, Err(e) => { - println!("{} {}", + println!( + "{} {}", "Failed fetching the local cli version".red().bold(), e ); @@ -215,7 +220,8 @@ async fn main() -> Result<(), ()> { let local_helix_version = match get_crate_version(&repo_path) { Ok(val) => val, Err(e) => { - println!("{} {}", + println!( + "{} {}", "Failed fetching the local db version".red().bold(), e ); @@ -244,15 +250,19 @@ async fn main() -> Result<(), ()> { println!( "{} {}", "Error while reseting installed helix-db version:" - .red() - .bold(), + .red() + .bold(), e ); return Err(()); } } - match Command::new("git").arg("pull").current_dir(&repo_path).output() { + match Command::new("git") + .arg("pull") + .current_dir(&repo_path) + .output() + { Ok(_) => println!( "{}", "New helix-db version successfully pulled!".green().bold() @@ -271,8 +281,8 @@ async fn main() -> Result<(), ()> { Ok(_) => println!( "{}", "New helix-cli version successfully installed!" 
- .green() - .bold() + .green() + .bold() ), Err(e) => { println!( @@ -346,8 +356,8 @@ async fn main() -> Result<(), ()> { let mut generated_rust_code = String::new(); match write!(&mut generated_rust_code, "{}", analyzed_source) { Ok(_) => sp.stop_with_message(format!( - "{}", - "Successfully transpiled queries".green().bold() + "{}", + "Successfully transpiled queries".green().bold() )), Err(e) => { println!("{}", "Failed to transpile queries".red().bold()); @@ -438,11 +448,11 @@ async fn main() -> Result<(), ()> { let repo_path = { // check if helix repo exists let home_dir = match dirs::home_dir() { - Some(dir) => dir, - None => { - println!("{}", "Could not determine home directory".red().bold()); - return Err(()); - } + Some(dir) => dir, + None => { + println!("{}", "Could not determine home directory".red().bold()); + return Err(()); + } }; home_dir.join(".helix/repo") }; @@ -481,9 +491,7 @@ async fn main() -> Result<(), ()> { runner.arg("clone"); runner.arg("https://github.com/HelixDB/helix-db.git"); if command.dev { - runner - .arg("--branch") - .arg("dev"); + runner.arg("--branch").arg("dev"); } runner.current_dir(&repo_path); @@ -567,7 +575,7 @@ async fn main() -> Result<(), ()> { println!(); } } - Err(e) => println!("{} {}", "Failed to list instances:".red().bold(), e) + Err(e) => println!("{} {}", "Failed to list instances:".red().bold(), e), } } @@ -611,8 +619,9 @@ async fn main() -> Result<(), ()> { cluster_id ) } - Ok(true) => - println!("{} {}", "Stopped instance".green().bold(), cluster_id), + Ok(true) => { + println!("{} {}", "Stopped instance".green().bold(), cluster_id) + } Err(e) => println!("{} {}", "Failed to stop instance:".red().bold(), e), } } else { @@ -739,46 +748,47 @@ async fn main() -> Result<(), ()> { } } - CommandType::Version => { - match check_helix_installation() { - Some(_) => { - let repo_path = { - let home_dir = match dirs::home_dir() { - Some(dir) => dir, - None => { - println!("{}", - "helix-db: not installed (could not 
determine home directory)" - .red().bold() + CommandType::Version => match check_helix_installation() { + Some(_) => { + let repo_path = { + let home_dir = match dirs::home_dir() { + Some(dir) => dir, + None => { + println!( + "{}", + "helix-db: not installed (could not determine home directory)" + .red() + .bold() + ); + return Err(()); + } + }; + home_dir.join(".helix/repo/helix-db/helix-db") + }; + + match get_crate_version(repo_path) { + Ok(local_db_version) => { + let local_cli_version = match get_cli_version() { + Ok(val) => val, + Err(e) => { + println!( + "{} {}", + "Error while fetching the local cli version!".red().bold(), + e ); return Err(()); } }; - home_dir.join(".helix/repo/helix-db/helix-db") - }; - - match get_crate_version(repo_path) { - Ok(local_db_version) => { - let local_cli_version = match get_cli_version() { - Ok(val) => val, - Err(e) => { - println!("{} {}", - "Error while fetching the local cli version!".red().bold(), - e - ); - return Err(()); - } - }; - println!( - "helix-cli version: {}, helix-db version: {}", - local_cli_version, local_db_version - ); - } - Err(_) => println!("helix-db: installed but version could not be determined"), + println!( + "helix-cli version: {}, helix-db version: {}", + local_cli_version, local_db_version + ); } + Err(_) => println!("helix-db: installed but version could not be determined"), } - None => println!("helix-db: not installed (run 'helix install' to install)"), } - } + None => println!("helix-db: not installed (run 'helix install' to install)"), + }, CommandType::Visualize(command) => { let instance_manager = InstanceManager::new().unwrap(); @@ -844,7 +854,9 @@ async fn main() -> Result<(), ()> { .open(cred_path) .unwrap(); - if let Err(e) = cred_file.write_all(&format!("helix_user_id={user_id}\nhelix_user_key={key}").into_bytes()) { + if let Err(e) = cred_file + .write_all(&format!("helix_user_id={user_id}\nhelix_user_key={key}").into_bytes()) + { println!( "Got error when writing key: {}\nYou're 
key is: {}", e.to_string().red(), @@ -870,4 +882,3 @@ async fn main() -> Result<(), ()> { Ok(()) } - diff --git a/helix-container/Cargo.toml b/helix-container/Cargo.toml index 727d9689..230f77b1 100644 --- a/helix-container/Cargo.toml +++ b/helix-container/Cargo.toml @@ -19,12 +19,12 @@ uuid = { version = "1.12.1", features = ["std", "v4", "v6", "fast-rng"] } heed3 = "0.22.0" async-trait = "0.1" -[profile.release] -strip = "debuginfo" -lto = true -opt-level = 3 -codegen-units = 1 -panic = "abort" +# [profile.release] +# strip = "debuginfo" +# lto = true +# opt-level = 3 +# codegen-units = 1 +# panic = "abort" [features] dev = ["helix-db/dev"] diff --git a/helix-container/src/main.rs b/helix-container/src/main.rs index 496d39a8..481c6c18 100644 --- a/helix-container/src/main.rs +++ b/helix-container/src/main.rs @@ -55,7 +55,7 @@ async fn main() { submission.0.name ); let handler = &submission.0; - let func: HandlerFn = Arc::new(move |input, response| (handler.func)(input, response)); + let func: HandlerFn = Arc::new(handler.func); ( ("POST".to_string(), format!("/{}", handler.name.to_string())), func, diff --git a/helix-db/.cargo/config.toml b/helix-db/.cargo/config.toml new file mode 100644 index 00000000..52e8b726 --- /dev/null +++ b/helix-db/.cargo/config.toml @@ -0,0 +1,2 @@ +[build] +rustflags = ["-C", "link-args=-rdynamic"] diff --git a/helix-db/src/helix_gateway/router/dynamic.rs b/helix-db/src/helix_gateway/router/dynamic.rs new file mode 100644 index 00000000..15753020 --- /dev/null +++ b/helix-db/src/helix_gateway/router/dynamic.rs @@ -0,0 +1,64 @@ +use libloading::{self, Library, Symbol}; +use std::{ + collections::HashMap, + error::Error, + ops::Deref, + path::{Path, PathBuf}, + sync::Arc, +}; + +use crate::{ + helix_engine::types::GraphError, + helix_gateway::router::{ + router::{HandlerFn, HandlerInput, HelixRouter}, + QueryHandler, + }, + protocol::response::Response, +}; + +#[derive(Clone)] +pub struct DynHandler { + // holding this guarentees that 
the Symbol is still valid + _source: Arc, + func: extern "Rust" fn(&HandlerInput, &mut Response) -> Result<(), GraphError>, +} + +impl QueryHandler for DynHandler { + fn handle(&self, input: &HandlerInput, response: &mut Response) -> Result<(), GraphError> { + (self.func)(input, response) + } +} + +type DynQueryFn = extern "Rust" fn(&HandlerInput, &mut Response) -> Result<(), GraphError>; +type GetQueryFn = extern "Rust" fn() -> Vec<(String, DynQueryFn)>; + +pub struct Plugin { + lib: Arc, +} + +impl Plugin { + /// SAFETY: This must be called with a path to Helix query dynamic library, compiled with the same version of Rust as the main database + pub unsafe fn open(lib_path: impl AsRef) -> Result> { + let lib = Library::new(lib_path.as_ref())?; + Ok(Plugin { lib: Arc::new(lib) }) + } + + pub fn get_queries(&self) -> Result, Box> { + // SAFETY: If a valid file was opened it will have a get_queries function of this type + let get_fn: Symbol = unsafe { self.lib.get(b"get_queries")? }; + + let queries = get_fn(); + + let mut acc: HashMap<(String, String), HandlerFn> = HashMap::new(); + + for (n, func) in queries.into_iter() { + let handler = DynHandler { + _source: self.lib.clone(), + func, + }; + + acc.insert(("POST".to_string(), format!("/{n}")), Arc::new(handler)); + } + Ok(acc) + } +} diff --git a/helix-db/src/helix_gateway/router/mod.rs b/helix-db/src/helix_gateway/router/mod.rs index fcddb5c6..80c3c3f4 100644 --- a/helix-db/src/helix_gateway/router/mod.rs +++ b/helix-db/src/helix_gateway/router/mod.rs @@ -1 +1,18 @@ +use crate::{ + helix_engine::types::GraphError, + helix_gateway::router::router::{BasicHandlerFn, HandlerInput}, + protocol::response::Response, +}; + +pub mod dynamic; pub mod router; + +pub trait QueryHandler: Send + Sync { + fn handle(&self, input: &HandlerInput, response: &mut Response) -> Result<(), GraphError>; +} + +impl QueryHandler for BasicHandlerFn { + fn handle(&self, input: &HandlerInput, response: &mut Response) -> Result<(), 
GraphError> { + self(input, response) + } +} diff --git a/helix-db/src/helix_gateway/router/router.rs b/helix-db/src/helix_gateway/router/router.rs index ac27e3d2..b68568c7 100644 --- a/helix-db/src/helix_gateway/router/router.rs +++ b/helix-db/src/helix_gateway/router/router.rs @@ -7,12 +7,17 @@ // returns response +use tokio::sync::RwLock; + use crate::{ helix_engine::{graph_core::graph_core::HelixGraphEngine, types::GraphError}, - helix_gateway::mcp::mcp::{MCPHandlerFn, MCPToolInput}, + helix_gateway::{ + mcp::mcp::{MCPHandlerFn, MCPToolInput}, + router::{dynamic::Plugin, QueryHandler}, + }, }; use core::fmt; -use std::{collections::HashMap, sync::Arc}; +use std::{collections::HashMap, ffi::OsStr, os::unix::ffi::OsStrExt, sync::Arc}; use crate::protocol::{request::Request, response::Response}; @@ -25,8 +30,7 @@ pub struct HandlerInput { pub type BasicHandlerFn = fn(&HandlerInput, &mut Response) -> Result<(), GraphError>; // thread safe type for multi threaded use -pub type HandlerFn = - Arc Result<(), GraphError> + Send + Sync>; +pub type HandlerFn = Arc; #[derive(Clone, Debug)] pub struct HandlerSubmission(pub Handler); @@ -45,12 +49,19 @@ impl Handler { inventory::collect!(HandlerSubmission); +impl HandlerSubmission { + pub fn collect_linked_handlers() -> Vec<&'static HandlerSubmission> { + let submissions: Vec<_> = inventory::iter::.into_iter().collect(); + submissions + } +} + /// Router for handling requests and MCP requests /// /// Standard Routes and MCP Routes are stored in a HashMap with the method and path as the key pub struct HelixRouter { /// Method+Path => Function - pub routes: HashMap<(String, String), HandlerFn>, + pub routes: RwLock>, pub mcp_routes: HashMap<(String, String), MCPHandlerFn>, } @@ -69,15 +80,17 @@ impl HelixRouter { None => HashMap::new(), }; Self { - routes: rts, + routes: RwLock::new(rts), mcp_routes: mcp_rts, } } /// Add a route to the router - pub fn add_route(&mut self, method: &str, path: &str, handler: BasicHandlerFn) { + 
pub async fn add_route(&mut self, method: &str, path: &str, handler: HandlerFn) { self.routes - .insert((method.to_uppercase(), path.to_string()), Arc::new(handler)); + .write() + .await + .insert((method.to_uppercase(), path.to_string()), handler); } /// Handle a request by finding the appropriate handler and executing it @@ -92,7 +105,7 @@ impl HelixRouter { /// /// * `Ok(())` if the request was handled successfully /// * `Err(RouterError)` if there was an error handling the request - pub fn handle( + pub async fn handle( &self, graph_access: Arc, request: Request, @@ -100,12 +113,25 @@ impl HelixRouter { ) -> Result<(), GraphError> { let route_key = (request.method.clone(), request.path.clone()); - if let Some(handler) = self.routes.get(&route_key) { + if route_key.0 == "PATCH" { + let body = OsStr::from_bytes(&request.body); + + let plugin = unsafe { Plugin::open(body) }.unwrap(); + + let qs = plugin.get_queries().unwrap(); + + *self.routes.write().await = qs; + + response.status = 200; + return Ok(()); + } + + if let Some(handler) = self.routes.read().await.get(&route_key) { let input = HandlerInput { request, graph: Arc::clone(&graph_access), }; - return handler(&input, response); + return handler.handle(&input, response); } if let Some(mcp_handler) = self.mcp_routes.get(&route_key) { diff --git a/helix-db/src/helix_gateway/thread_pool/thread_pool.rs b/helix-db/src/helix_gateway/thread_pool/thread_pool.rs index bb389992..53cc3b22 100644 --- a/helix-db/src/helix_gateway/thread_pool/thread_pool.rs +++ b/helix-db/src/helix_gateway/thread_pool/thread_pool.rs @@ -7,17 +7,16 @@ use crate::helix_gateway::router::router::{HelixRouter, RouterError}; use crate::protocol::request::Request; use crate::protocol::response::Response; - extern crate tokio; use tokio::net::TcpStream; /// Worker for handling requests -/// +/// /// A worker is a thread that handles requests -/// +/// /// It receives a connection from the thread pool and handles the request -/// +/// /// It 
sends the response back to the client pub struct Worker { pub id: usize, @@ -26,7 +25,7 @@ pub struct Worker { impl Worker { /// Creates a new worker - /// + /// /// It receives a connection from the thread pool and handles the request /// It sends the response back to the client fn new( @@ -54,7 +53,10 @@ impl Worker { }; let mut response = Response::new(); - if let Err(e) = router.handle(Arc::clone(&graph_access), request, &mut response) { + if let Err(e) = router + .handle(Arc::clone(&graph_access), request, &mut response) + .await + { eprintln!("Error handling request: {:?}", e); response.status = 500; response.body = format!("\n{:?}", e).into_bytes(); @@ -104,11 +106,15 @@ impl ThreadPool { let (tx, rx) = flume::bounded::(1000); // TODO: make this configurable let mut workers = Vec::with_capacity(size); for id in 0..size { - workers.push(Worker::new(id, Arc::clone(&graph), Arc::clone(&router), rx.clone())); + workers.push(Worker::new( + id, + Arc::clone(&graph), + Arc::clone(&router), + rx.clone(), + )); } println!("Thread pool initialized with {} workers", workers.len()); - Ok(ThreadPool { sender: tx, num_unused_workers: Mutex::new(size), @@ -116,4 +122,4 @@ impl ThreadPool { workers, }) } -} \ No newline at end of file +} diff --git a/helix-db/src/helixc/analyzer/methods/object_validation.rs b/helix-db/src/helixc/analyzer/methods/object_validation.rs index c2f51b39..6e8fd6b4 100644 --- a/helix-db/src/helixc/analyzer/methods/object_validation.rs +++ b/helix-db/src/helixc/analyzer/methods/object_validation.rs @@ -6,8 +6,8 @@ use crate::helixc::{ methods::{infer_expr_type::infer_expr_type, traversal_validation::validate_traversal}, types::Type, utils::{ - FieldLookup, Variable, VariableAccess, gen_property_access, is_valid_identifier, - type_in_scope, validate_field_name_existence_for_item_type, + gen_property_access, is_valid_identifier, type_in_scope, + validate_field_name_existence_for_item_type, FieldLookup, Variable, VariableAccess, }, }, generator::{ diff 
--git a/helix-db/src/helixc/analyzer/methods/schema_methods.rs b/helix-db/src/helixc/analyzer/methods/schema_methods.rs index 96d69a09..2f2e658e 100644 --- a/helix-db/src/helixc/analyzer/methods/schema_methods.rs +++ b/helix-db/src/helixc/analyzer/methods/schema_methods.rs @@ -19,8 +19,7 @@ pub(crate) fn build_field_lookups<'a>( .map(|n| { ( n.name.1.as_str(), - n - .fields + n.fields .iter() .map(|f| (f.name.as_str(), Cow::Borrowed(f))) .collect::>>(), diff --git a/helix-db/src/helixc/generator/utils.rs b/helix-db/src/helixc/generator/utils.rs index f1fbb599..6be02c64 100644 --- a/helix-db/src/helixc/generator/utils.rs +++ b/helix-db/src/helixc/generator/utils.rs @@ -332,72 +332,7 @@ pub fn write_headers() -> String { // None // } - - -use heed3::RoTxn; -use helix_macros::{handler, tool_call, mcp_handler}; -use helix_db::{ - helix_engine::{ - graph_core::{ - config::{Config, GraphConfig, VectorConfig}, - ops::{ - bm25::search_bm25::SearchBM25Adapter, - g::G, - in_::{in_::InAdapter, in_e::InEdgesAdapter, to_n::ToNAdapter, to_v::ToVAdapter}, - out::{ - from_n::FromNAdapter, from_v::FromVAdapter, out::OutAdapter, out_e::OutEdgesAdapter, - }, - source::{ - add_e::{AddEAdapter, EdgeType}, - add_n::AddNAdapter, - e_from_id::EFromIdAdapter, - e_from_type::EFromTypeAdapter, - n_from_id::NFromIdAdapter, - n_from_index::NFromIndexAdapter, - n_from_type::NFromTypeAdapter, - }, - tr_val::{Traversable, TraversalVal}, - util::{ - dedup::DedupAdapter, drop::Drop, exist::Exist, filter_mut::FilterMut, - filter_ref::FilterRefAdapter, map::MapAdapter, paths::ShortestPathAdapter, - props::PropsAdapter, range::RangeAdapter, update::UpdateAdapter, - }, - vectors::{ - brute_force_search::BruteForceSearchVAdapter, insert::InsertVAdapter, - search::SearchVAdapter, - }, - } - }, - types::GraphError, - vector_core::vector::HVector, - }, - helix_gateway::{ - embedding_providers::embedding_providers::{EmbeddingModel, get_embedding_model}, - router::router::HandlerInput, - 
mcp::mcp::{MCPHandlerSubmission, MCPToolInput, MCPHandler} - }, - node_matches, props, embed, - field_remapping, identifier_remapping, - traversal_remapping, exclude_field, value_remapping, - protocol::{ - remapping::{Remapping, RemappingMap, ResponseRemapping}, - response::Response, - return_values::ReturnValue, - value::Value, - format::Format, - }, - utils::{ - count::Count, - filterable::Filterable, - id::ID, - items::{Edge, Node}, - }, -}; -use sonic_rs::{Deserialize, Serialize}; -use std::collections::{HashMap, HashSet}; -use std::sync::Arc; -use std::time::Instant; -use chrono::{DateTime, Utc}; +use helix_db::helixc::prelude::*; "# .to_string() } diff --git a/helix-db/src/helixc/mod.rs b/helix-db/src/helixc/mod.rs index a4a92f94..b04d60b1 100644 --- a/helix-db/src/helixc/mod.rs +++ b/helix-db/src/helixc/mod.rs @@ -1,3 +1,4 @@ pub mod analyzer; pub mod generator; pub mod parser; +pub mod prelude; diff --git a/helix-db/src/helixc/prelude.rs b/helix-db/src/helixc/prelude.rs new file mode 100644 index 00000000..9e963d6f --- /dev/null +++ b/helix-db/src/helixc/prelude.rs @@ -0,0 +1,66 @@ +pub use chrono::{DateTime, Utc}; +pub use heed3::RoTxn; +pub use helix_db::{ + embed, exclude_field, field_remapping, + helix_engine::{ + graph_core::{ + config::{Config, GraphConfig, VectorConfig}, + ops::{ + bm25::search_bm25::SearchBM25Adapter, + g::G, + in_::{in_::InAdapter, in_e::InEdgesAdapter, to_n::ToNAdapter, to_v::ToVAdapter}, + out::{ + from_n::FromNAdapter, from_v::FromVAdapter, out::OutAdapter, + out_e::OutEdgesAdapter, + }, + source::{ + add_e::{AddEAdapter, EdgeType}, + add_n::AddNAdapter, + e_from_id::EFromIdAdapter, + e_from_type::EFromTypeAdapter, + n_from_id::NFromIdAdapter, + n_from_index::NFromIndexAdapter, + n_from_type::NFromTypeAdapter, + }, + tr_val::{Traversable, TraversalVal}, + util::{ + dedup::DedupAdapter, drop::Drop, exist::Exist, filter_mut::FilterMut, + filter_ref::FilterRefAdapter, map::MapAdapter, paths::ShortestPathAdapter, + 
props::PropsAdapter, range::RangeAdapter, update::UpdateAdapter, + }, + vectors::{ + brute_force_search::BruteForceSearchVAdapter, insert::InsertVAdapter, + search::SearchVAdapter, + }, + }, + }, + types::GraphError, + vector_core::vector::HVector, + }, + helix_gateway::{ + embedding_providers::embedding_providers::{get_embedding_model, EmbeddingModel}, + mcp::mcp::{MCPHandler, MCPHandlerSubmission, MCPToolInput}, + router::router::HandlerInput, + }, + identifier_remapping, node_matches, props, + protocol::{ + format::Format, + remapping::{Remapping, RemappingMap, ResponseRemapping}, + response::Response, + return_values::ReturnValue, + value::Value, + }, + traversal_remapping, + utils::{ + count::Count, + filterable::Filterable, + id::ID, + items::{Edge, Node}, + }, + value_remapping, +}; +pub use helix_macros::{handler, mcp_handler, tool_call}; +pub use sonic_rs::{Deserialize, Serialize}; +pub use std::collections::{HashMap, HashSet}; +pub use std::sync::Arc; +pub use std::time::Instant; diff --git a/helix-db/src/utils/filterable.rs b/helix-db/src/utils/filterable.rs index b0379981..d0a8e69d 100644 --- a/helix-db/src/utils/filterable.rs +++ b/helix-db/src/utils/filterable.rs @@ -2,13 +2,8 @@ use std::{borrow::Cow, collections::HashMap}; use crate::{ helix_engine::types::GraphError, - utils::{ - items::{Edge, Node}, - }, - protocol:: { - return_values::ReturnValue, - value::Value, - }, + protocol::{return_values::ReturnValue, value::Value}, + utils::items::{Edge, Node}, }; #[derive(Debug, Clone)] @@ -46,7 +41,7 @@ pub trait Filterable { fn properties_ref(&self) -> &Option>; - fn check_property(&self, key: &str) -> Result, GraphError>; + fn check_property(&self, key: &str) -> Result, GraphError>; fn find_property<'a>( &'a self, @@ -123,17 +118,18 @@ impl Filterable for Node { } #[inline(always)] - fn check_property(&self, key: &str) -> Result, GraphError> { + fn check_property(&self, key: &str) -> Result, GraphError> { match key { "id" => 
Ok(Cow::Owned(Value::from(self.uuid()))), "label" => Ok(Cow::Owned(Value::from(self.label.to_string()))), - _ => match &self.properties { + _ => match &self.properties { Some(properties) => properties .get(key) .ok_or(GraphError::ConversionError(format!( "Property {} not found", key - ))).map(|v| Cow::Borrowed(v)), + ))) + .map(|v| Cow::Borrowed(v)), None => Err(GraphError::ConversionError(format!( "Property {} not found", key @@ -229,27 +225,28 @@ impl Filterable for Edge { } #[inline(always)] - fn check_property(&self, key: &str) -> Result, GraphError> { + fn check_property(&self, key: &str) -> Result, GraphError> { match key { "id" => Ok(Cow::Owned(Value::from(self.uuid()))), "label" => Ok(Cow::Owned(Value::from(self.label.to_string()))), "from_node" => Ok(Cow::Owned(Value::from(self.from_node_uuid()))), "to_node" => Ok(Cow::Owned(Value::from(self.to_node_uuid()))), - _ => match &self.properties { + _ => match &self.properties { Some(properties) => properties .get(key) .ok_or(GraphError::ConversionError(format!( "Property {} not found", key - ))).map(|v| Cow::Borrowed(v)), + ))) + .map(|v| Cow::Borrowed(v)), None => Err(GraphError::ConversionError(format!( "Property {} not found", key ))), - } + }, } - } - + } + #[inline(always)] fn find_property<'a>( &'a self, diff --git a/query-container/.cargo/config.toml b/query-container/.cargo/config.toml new file mode 100644 index 00000000..f06ff793 --- /dev/null +++ b/query-container/.cargo/config.toml @@ -0,0 +1,2 @@ +[build] +rustflags = ["-C", "link-arg=-Wl,-undefined,dynamic_lookup"] diff --git a/query-container/Cargo.toml b/query-container/Cargo.toml new file mode 100644 index 00000000..b66ec9e0 --- /dev/null +++ b/query-container/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "query-container" +version = "0.1.0" +edition = "2024" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +helixdb = { path = "../helixdb" } +get_routes = { path = "../get_routes" } +inventory = "0.3.16" +rand = "0.9.1" +dirs = "5.0.1" 
+chrono = { version = "0.4.41", features = ["serde"] } +serde = { version = "1.0", features = ["derive"] } +sonic-rs = "0.5.0" +tokio = { version = "1.44.2", features = ["full"] } +serde_json = "1.0.140" +uuid = { version = "1.12.1", features = ["std", "v4", "v6", "fast-rng"] } +heed3 = "0.22.0" diff --git a/query-container/src/lib.rs b/query-container/src/lib.rs new file mode 100644 index 00000000..7be8b6b2 --- /dev/null +++ b/query-container/src/lib.rs @@ -0,0 +1,26 @@ +use helixdb::{ + helix_engine::types::GraphError, + helix_gateway::router::router::{HandlerInput, HandlerSubmission}, + protocol::response::Response, +}; + +mod query; + +type DynQueryFn = fn(&HandlerInput, &mut Response) -> Result<(), GraphError>; + +#[unsafe(no_mangle)] +pub extern "Rust" fn get_queries() -> Vec<(String, DynQueryFn)> { + println!("get_queries called!!!!\n\n\n"); + let submissions = HandlerSubmission::collect_linked_handlers() + .into_iter() + .collect::>(); + + println!("got {} submissions", submissions.len()); + + let ret = submissions + .into_iter() + .map(|hs| (hs.0.name.to_owned(), hs.0.func)) + .collect(); + + ret +} diff --git a/query-container/src/query.rs b/query-container/src/query.rs new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/query-container/src/query.rs @@ -0,0 +1 @@ +