diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index ff674b36..b28ed745 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -9,7 +9,7 @@ on:
 
 jobs:
   test:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-latest-16
     timeout-minutes: 10
     steps:
diff --git a/Cargo.lock b/Cargo.lock
index 1f3aafc1..7a84fdf7 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -286,6 +286,7 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
 name = "codegen-sdk-analyzer"
 version = "0.1.0"
 dependencies = [
+ "ambassador",
  "anyhow",
  "codegen-sdk-ast",
  "codegen-sdk-common",
@@ -307,10 +308,12 @@ dependencies = [
  "crossbeam-channel",
  "dashmap",
  "env_logger",
+ "glob",
  "indicatif",
  "indicatif-log-bridge",
  "log",
  "notify-debouncer-mini",
+ "rayon",
  "salsa",
  "test-log",
 ]
@@ -319,6 +322,7 @@ dependencies = [
 name = "codegen-sdk-ast"
 version = "0.1.0"
 dependencies = [
+ "ambassador",
  "codegen-sdk-common",
  "codegen-sdk-cst",
  "codegen-sdk-typescript",
@@ -397,13 +401,8 @@ dependencies = [
  "codegen-sdk-ast",
  "codegen-sdk-common",
  "codegen-sdk-typescript",
- "crossbeam",
- "crossbeam-channel",
  "env_logger",
- "glob",
- "indicatif",
  "log",
- "rayon",
  "rkyv",
  "salsa",
  "sysinfo",
@@ -709,6 +708,8 @@ dependencies = [
  "log",
  "salsa",
  "subenum",
+ "tempfile",
+ "test-log",
  "tree-sitter",
 ]
@@ -784,19 +785,6 @@ dependencies = [
  "libc",
 ]
-[[package]]
-name = "crossbeam"
-version = "0.8.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8"
-dependencies = [
- "crossbeam-channel",
- "crossbeam-deque",
- "crossbeam-epoch",
- "crossbeam-queue",
- "crossbeam-utils",
-]
-
 [[package]]
 name = "crossbeam-channel"
 version = "0.5.14"
diff --git a/Cargo.toml b/Cargo.toml
index 1530e93e..7099a554 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,15 +13,10 @@ codegen-sdk-common = { workspace = true}
 anyhow = { workspace = true}
 salsa = { workspace = true}
 codegen-sdk-typescript = { workspace = true}
-crossbeam = "0.8.4"
-glob = "0.3.2"
 env_logger = { workspace = true }
 log = { workspace = true }
-rayon = { workspace = true}
 sysinfo = "0.33.1"
 rkyv.workspace = true
-indicatif = { workspace = true }
-crossbeam-channel = { workspace = true }
 [features]
 python = [ "codegen-sdk-analyzer/python"] # TODO: Add python support
 typescript = [ "codegen-sdk-analyzer/typescript"]
@@ -38,7 +33,7 @@ yaml = [ "codegen-sdk-analyzer/yaml"]
 toml = [ "codegen-sdk-analyzer/toml"]
 serialization = ["codegen-sdk-common/serialization", "codegen-sdk-analyzer/serialization"]
 stable = ["json", "toml", "typescript", "tsx", "jsx", "go", "python", "yaml", "java", "ruby", "rust", "javascript", "markdown"]
-default = ["json", "toml", "typescript"]
+default = []
 [dev-dependencies]
 test-log = { workspace = true }
 [workspace]
diff --git a/README.md b/README.md
index e1791f5d..4f861fae 100644
--- a/README.md
+++ b/README.md
@@ -32,9 +32,9 @@ rustup toolchain install nightly
 ### Installing tools
 
 ```bash
-cargo install cargo-binstall -y
-cargo binstall cargo-nextest -y
-cargo binstall cargo-insta -y
+cargo install cargo-binstall
+cargo binstall --no-confirm cargo-nextest
+cargo binstall --no-confirm cargo-insta
 ```
 
 ### Building the project
diff --git a/codegen-sdk-analyzer/Cargo.toml b/codegen-sdk-analyzer/Cargo.toml
index 1dd1d17a..6589df18 100644
--- a/codegen-sdk-analyzer/Cargo.toml
+++ b/codegen-sdk-analyzer/Cargo.toml
@@ -30,6 +30,9 @@ log = { workspace = true }
 indicatif-log-bridge = {workspace = true}
 indicatif = {workspace = true}
 crossbeam-channel = { workspace = true }
+glob = "0.3.2"
+rayon = { workspace = true }
+ambassador = { workspace = true }
 [features]
 python = [ "codegen-sdk-python"] # TODO: Add python support
 typescript = [ "codegen-sdk-typescript"]
@@ -45,7 +48,7 @@ markdown = [ "codegen-sdk-markdown"]
 yaml = [ "codegen-sdk-yaml"]
 toml = [ "codegen-sdk-toml"]
 serialization = ["codegen-sdk-common/serialization"]
-default = ["json", "toml", "typescript"]
+default = ["json"]
 [dev-dependencies]
 test-log = { workspace = true }
diff --git a/codegen-sdk-analyzer/src/codebase.rs b/codegen-sdk-analyzer/src/codebase.rs
new file mode 100644
index 00000000..3a933fb4
--- /dev/null
+++ b/codegen-sdk-analyzer/src/codebase.rs
@@ -0,0 +1,103 @@
+use std::path::PathBuf;
+
+use anyhow::Context;
+use codegen_sdk_ast::Input;
+#[cfg(feature = "serialization")]
+use codegen_sdk_common::serialization::Cache;
+use discovery::FilesToParse;
+use notify_debouncer_mini::DebounceEventResult;
+use salsa::Setter;
+
+use crate::{
+    ParsedFile,
+    database::{CodegenDatabase, Db},
+    parser::parse_file,
+};
+mod discovery;
+mod parser;
+pub struct Codebase {
+    db: CodegenDatabase,
+    root: PathBuf,
+    rx: crossbeam_channel::Receiver<DebounceEventResult>,
+    #[cfg(feature = "serialization")]
+    cache: Cache,
+}
+
+impl Codebase {
+    pub fn new(root: PathBuf) -> Self {
+        let (tx, rx) = crossbeam_channel::unbounded();
+        let mut db = CodegenDatabase::new(tx);
+        db.watch_dir(PathBuf::from(&root)).unwrap();
+        let codebase = Self { db, root, rx };
+        codebase.sync();
+        codebase
+    }
+    pub fn check_update(&mut self) -> anyhow::Result<()> {
+        for event in self.rx.recv()?.unwrap() {
+            match event.path.canonicalize() {
+                Ok(path) => {
+                    log::info!("File changed: {}", path.display());
+                    let file = match self.db.files.get(&path) {
+                        Some(file) => *file,
+                        None => continue,
+                    };
+                    // `path` has changed, so read it and update the contents to match.
+                    // This creates a new revision and causes the incremental algorithm
+                    // to kick in, just like any other update to a salsa input.
+                    let contents = std::fs::read_to_string(path)
+                        .with_context(|| format!("Failed to read file {}", event.path.display()))?;
+                    let input = Input::new(&self.db, contents);
+                    file.set_contents(&mut self.db).to(input);
+                }
+                Err(e) => {
+                    log::error!(
+                        "Failed to canonicalize path {}: {}",
+                        event.path.display(),
+                        e
+                    );
+                }
+            }
+        }
+        Ok(())
+    }
+    pub fn get_file(&self, path: PathBuf) -> Option<&ParsedFile<'_>> {
+        let file = self.db.files.get(&path);
+        if let Some(file) = file {
+            return parse_file(&self.db, file.clone()).file(&self.db).as_ref();
+        }
+        None
+    }
+    fn discover(&self) -> FilesToParse {
+        discovery::collect_files(&self.db, &self.root)
+    }
+    pub fn files(&self) -> Vec<&ParsedFile<'_>> {
+        let mut files = Vec::new();
+        for file in self.discover().files(&self.db) {
+            if let Some(file) = self.get_file(file.path(&self.db)) {
+                files.push(file);
+            }
+        }
+        files
+    }
+    pub fn errors(&self) -> Vec<()> {
+        let mut errors = Vec::new();
+        for file in self.discover().files(&self.db) {
+            if self.get_file(file.path(&self.db)).is_none() {
+                errors.push(());
+            }
+        }
+        errors
+    }
+    pub fn sync(&self) {
+        let files = self.discover();
+        parser::parse_files(
+            &self.db,
+            #[cfg(feature = "serialization")]
+            &self.cache,
+            files,
+        )
+    }
+    pub fn db(&self) -> &CodegenDatabase {
+        &self.db
+    }
+}
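Reviewer note: `Codebase` is now the single entry point that replaces the hand-rolled database/watcher/channel wiring removed from `src/main.rs` later in this diff. A minimal sketch of how a caller drives it (based on the updated `main.rs`; the project path is a placeholder):

```rust
use std::path::PathBuf;

use codegen_sdk_analyzer::Codebase;

fn main() -> anyhow::Result<()> {
    // Discovers, watches, and parses every supported file under the root.
    let mut codebase = Codebase::new(PathBuf::from("./my-project"));
    println!("parsed {} files", codebase.files().len());
    loop {
        // Blocks until the file watcher reports a change, then updates the
        // corresponding salsa input so dependent queries recompute lazily.
        codebase.check_update()?;
        println!("parsed {} files", codebase.files().len());
    }
}
```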
diff --git a/src/discovery.rs b/codegen-sdk-analyzer/src/codebase/discovery.rs
similarity index 62%
rename from src/discovery.rs
rename to codegen-sdk-analyzer/src/codebase/discovery.rs
index 58b2623f..9c2a7b4b 100644
--- a/src/discovery.rs
+++ b/codegen-sdk-analyzer/src/codebase/discovery.rs
@@ -1,8 +1,11 @@
-use codegen_sdk_analyzer::{CodegenDatabase, Db};
+use std::path::PathBuf;
+
 use codegen_sdk_ast::*;
 #[cfg(feature = "serialization")]
 use codegen_sdk_common::serialize::Cache;
 use glob::glob;
+
+use crate::database::{CodegenDatabase, Db};
 #[salsa::input]
 pub struct FilesToParse {
     pub files: Vec<File>,
 }
@@ -17,18 +20,25 @@ pub fn log_languages() {
     }
 }
-pub fn collect_files(db: &CodegenDatabase, dir: String) -> FilesToParse {
+pub fn collect_files(db: &CodegenDatabase, dir: &PathBuf) -> FilesToParse {
     let mut files = Vec::new();
     for language in LANGUAGES.iter() {
         for extension in language.file_extensions.iter() {
-            files.extend(glob(&format!("{dir}**/*.{}", extension)).unwrap());
+            files.extend(
+                glob(&format!(
+                    "{dir}/**/*.{extension}",
+                    extension = extension,
+                    dir = dir.display()
+                ))
+                .unwrap(),
+            );
         }
     }
     let files = files
         .into_iter()
         .filter_map(|file| file.ok())
-        .filter(|file| !file.is_dir())
+        .filter(|file| !file.is_dir() && !file.is_symlink())
         .map(|file| db.input(file).unwrap())
         .collect();
     FilesToParse::new(db, files)
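Note on the pattern change: `dir` is now a `PathBuf`, and `Path::display()` never emits a trailing separator, so the glob must carry the `/` itself (the hunk above uses `{dir}/**/*.{extension}` for that reason). A standalone sketch of the same logic, assuming only the `glob` crate:

```rust
use std::path::{Path, PathBuf};

use glob::glob;

fn files_with_extension(dir: &Path, extension: &str) -> Vec<PathBuf> {
    // "src" + "/**/*.ts" — without the explicit '/', the pattern would
    // become "src**/*.ts" and match sibling directories like "src-old".
    let pattern = format!("{}/**/*.{}", dir.display(), extension);
    glob(&pattern)
        .expect("invalid glob pattern")
        .filter_map(Result::ok)
        .filter(|path| !path.is_dir() && !path.is_symlink())
        .collect()
}
```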
file); -} -#[salsa::tracked] -fn parse_files_par(db: &dyn Db, files: FilesToParse) { +use super::discovery::{FilesToParse, log_languages}; +use crate::{ + ParsedFile, + database::{CodegenDatabase, Db}, + parser::parse_file, +}; +fn execute_op_with_progress( + db: &Database, + files: FilesToParse, + name: &str, + op: fn(&Database, File) -> T, +) -> Vec { let multi = db.multi_progress(); let style = ProgressStyle::with_template( "[{elapsed_precise}] {wide_bar} {msg} [{per_sec}] [estimated time remaining: {eta}]", @@ -25,44 +23,62 @@ fn parse_files_par(db: &dyn Db, files: FilesToParse) { let pg = multi.add( ProgressBar::new(files.files(db).len() as u64) .with_style(style) - .with_message("Parsing Files"), + .with_message(name.to_string()), ); let inputs = files .files(db) .into_iter() - .map(|file| (&pg, file)) + .map(|file| (&pg, file, op)) .collect::>(); - let _: Vec<()> = salsa::par_map(db, inputs, |db, input| { - let (pg, file) = input; - parse_file( + let results: Vec = salsa::par_map(db, inputs, move |db, input| { + let (pg, file, op) = input; + let res = op( db, #[cfg(feature = "serialization")] &cache, file, ); pg.inc(1); - () + res }); pg.finish(); multi.remove(&pg); + results +} +#[salsa::tracked] +fn parse_files_par(db: &dyn Db, files: FilesToParse) { + let _: Vec<_> = execute_op_with_progress(db, files, "Parsing Files", |db, file| { + parse_file(db, file); + }); +} +#[salsa::tracked] +fn parse_files_definitions_par(db: &dyn Db, files: FilesToParse) { + let _: Vec<_> = execute_op_with_progress(db, files, "Parsing Definitions", |db, file| { + let file = parse_file(db, file); + if let Some(parsed) = file.file(db) { + #[cfg(feature = "typescript")] + if let ParsedFile::Typescript(parsed) = parsed { + parsed.definitions(db); + parsed.references(db); + } + } + () + }); } pub fn parse_files<'db>( db: &'db CodegenDatabase, #[cfg(feature = "serialization")] cache: &'db Cache, - dir: String, -) -> (FilesToParse, Vec) { + files_to_parse: FilesToParse, +) -> () { rayon::ThreadPoolBuilder::new() .stack_size(1024 * 1024 * 1024 * 10) .build_global() .unwrap(); - let (tx, rx) = crossbeam::channel::unbounded(); - let mut errors = Vec::new(); log_languages(); #[cfg(feature = "serialization")] let cache = Cache::new().unwrap(); #[cfg(feature = "serialization")] let cached = get_cached_count(&cache, &files_to_parse); - let files_to_parse = collect_files(db, dir); log::info!("Parsing {} files", files_to_parse.files(db).len()); parse_files_par( db, @@ -70,11 +86,13 @@ pub fn parse_files<'db>( &cache, files_to_parse, ); - drop(tx); + log::info!("Parsing definitions"); + parse_files_definitions_par( + db, + #[cfg(feature = "serialization")] + &cache, + files_to_parse, + ); #[cfg(feature = "serialization")] report_cached_count(cached, &files_to_parse.files(db)); - for e in rx.iter() { - errors.push(e); - } - (files_to_parse, errors) } diff --git a/codegen-sdk-analyzer/src/lib.rs b/codegen-sdk-analyzer/src/lib.rs index e0d5af4c..f6e05d95 100644 --- a/codegen-sdk-analyzer/src/lib.rs +++ b/codegen-sdk-analyzer/src/lib.rs @@ -2,5 +2,6 @@ mod database; mod parser; mod progress; -pub use database::{CodegenDatabase, Db}; -pub use parser::{Parsed, ParsedFile, parse_file}; +pub use parser::{Parsed, ParsedFile}; +mod codebase; +pub use codebase::Codebase; diff --git a/codegen-sdk-ast-generator/src/generator.rs b/codegen-sdk-ast-generator/src/generator.rs index 39864d0f..8e7c660d 100644 --- a/codegen-sdk-ast-generator/src/generator.rs +++ b/codegen-sdk-ast-generator/src/generator.rs @@ -2,38 +2,55 @@ use 
diff --git a/codegen-sdk-analyzer/src/lib.rs b/codegen-sdk-analyzer/src/lib.rs
index e0d5af4c..f6e05d95 100644
--- a/codegen-sdk-analyzer/src/lib.rs
+++ b/codegen-sdk-analyzer/src/lib.rs
@@ -2,5 +2,6 @@ mod database;
 mod parser;
 mod progress;
-pub use database::{CodegenDatabase, Db};
-pub use parser::{Parsed, ParsedFile, parse_file};
+pub use parser::{Parsed, ParsedFile};
+mod codebase;
+pub use codebase::Codebase;
diff --git a/codegen-sdk-ast-generator/src/generator.rs b/codegen-sdk-ast-generator/src/generator.rs
index 39864d0f..8e7c660d 100644
--- a/codegen-sdk-ast-generator/src/generator.rs
+++ b/codegen-sdk-ast-generator/src/generator.rs
@@ -2,38 +2,55 @@ use codegen_sdk_common::language::Language;
 use proc_macro2::TokenStream;
 use quote::{format_ident, quote};
 fn get_definitions_impl(language: &Language) -> TokenStream {
+    let language_struct_name = format_ident!("{}File", language.struct_name);
     if !language.tag_query.contains("@definition") {
         return quote! {
-            pub fn definitions(self, _db: &'db dyn salsa::Database) -> (){
+
+            impl<'db> codegen_sdk_ast::Definitions<'db> for #language_struct_name<'db> {
+                type Definitions = ();
+                fn definitions(self, _db: &'db dyn salsa::Database) -> Self::Definitions {
+                    ()
                 }
+            }
         };
     }
     quote! {
         #[salsa::tracked]
-        pub fn definitions(self, db: &'db dyn salsa::Database) -> Definitions<'db> {
-            let mut definitions = Definitions::default();
-            if let Some(program) = self.node(db) {
-                definitions = definitions.visit_by_val_infallible(&program);
+        impl<'db> codegen_sdk_ast::Definitions<'db> for #language_struct_name<'db> {
+            type Definitions = Definitions<'db>;
+            fn definitions(self, db: &'db dyn salsa::Database) -> Self::Definitions {
+                let mut definitions = Definitions::default();
+                if let Some(program) = self.node(db) {
+                    definitions = definitions.visit_by_val_infallible(&program);
+                }
+                definitions
             }
-            definitions
         }
     }
 }
 fn get_references_impl(language: &Language) -> TokenStream {
+    let language_struct_name = format_ident!("{}File", language.struct_name);
     if !language.tag_query.contains("@reference") {
         return quote! {
-            pub fn references(self, _db: &'db dyn salsa::Database) -> (){
+            impl<'db> codegen_sdk_ast::References<'db> for #language_struct_name<'db> {
+                type References = ();
+                fn references(self, _db: &'db dyn salsa::Database) -> Self::References {
+                    ()
+                }
             }
         };
     }
     quote! {
         #[salsa::tracked]
-        pub fn references(self, db: &'db dyn salsa::Database) -> References<'db> {
-            let mut references = References::default();
+        impl<'db> codegen_sdk_ast::References<'db> for #language_struct_name<'db> {
+            type References = References<'db>;
+            fn references(self, db: &'db dyn salsa::Database) -> Self::References {
+                let mut references = References::default();
                 if let Some(program) = self.node(db) {
                     references = references.visit_by_val_infallible(&program);
                 }
-            references
+                references
+            }
         }
     }
 }
@@ -67,11 +84,8 @@ pub fn generate_ast(language: &Language) -> anyhow::Result<TokenStream> {
     }
-    #[salsa::tracked]
-    impl<'db> #language_struct_name<'db> {
-        #definitions_impl
-        #references_impl
-    }
+    #definitions_impl
+    #references_impl
     // impl<'db> HasNode for {language_struct_name}File<'db> {
     //     type Node = {language_name}::{root_node_name}<'db>;
     //     fn node(&self) -> &Self::Node {
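For a language whose tag query has no `@definition` captures, the new template therefore expands to an empty trait impl rather than an inherent method — roughly the following, with a hypothetical `JsonFile` standing in for the generated `#language_struct_name`:

```rust
impl<'db> codegen_sdk_ast::Definitions<'db> for JsonFile<'db> {
    type Definitions = ();
    fn definitions(self, _db: &'db dyn salsa::Database) -> Self::Definitions {
        ()
    }
}
```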
diff --git a/codegen-sdk-ast/Cargo.toml b/codegen-sdk-ast/Cargo.toml
index 01e083f3..1e3e28e5 100644
--- a/codegen-sdk-ast/Cargo.toml
+++ b/codegen-sdk-ast/Cargo.toml
@@ -7,6 +7,7 @@ edition = "2024"
 codegen-sdk-cst = { workspace = true}
 codegen-sdk-common = { workspace = true }
 salsa = { workspace = true }
+ambassador = {workspace = true}
 [dev-dependencies]
 test-log = { workspace = true }
 tempfile = { workspace = true }
diff --git a/codegen-sdk-ast/src/lib.rs b/codegen-sdk-ast/src/lib.rs
index 153579a9..f0fd4679 100644
--- a/codegen-sdk-ast/src/lib.rs
+++ b/codegen-sdk-ast/src/lib.rs
@@ -1,5 +1,6 @@
 #![recursion_limit = "512"]
 pub mod input;
+use ambassador::delegatable_trait;
 use codegen_sdk_common::File;
 pub use codegen_sdk_common::language::LANGUAGES;
 pub use codegen_sdk_cst::*;
@@ -11,3 +12,23 @@ impl<T: File> Named for T {
         self.path().file_name().unwrap().to_str().unwrap()
     }
 }
+#[delegatable_trait]
+pub trait Definitions<'db> {
+    type Definitions;
+    fn definitions(self, db: &'db dyn salsa::Database) -> Self::Definitions;
+}
+#[delegatable_trait]
+pub trait References<'db> {
+    type References;
+    fn references(self, db: &'db dyn salsa::Database) -> Self::References;
+}
+#[delegatable_trait]
+pub trait FileExt<'db>: References<'db> + Definitions<'db> + Clone {
+    fn precompute(self, db: &'db dyn salsa::Database)
+    where
+        Self: Sized,
+    {
+        self.clone().definitions(db);
+        self.references(db);
+    }
+}
diff --git a/codegen-sdk-macros/src/lib.rs b/codegen-sdk-macros/src/lib.rs
index 960946f3..3db7f56e 100644
--- a/codegen-sdk-macros/src/lib.rs
+++ b/codegen-sdk-macros/src/lib.rs
@@ -55,6 +55,15 @@ pub fn languages_ast(_item: TokenStream) -> TokenStream {
     }
     let enum_output: TokenStream = quote! {
         #[derive(Debug, Clone, Eq, PartialEq, Hash, salsa::Update)]
+        // #[delegate(
+        //     codegen_sdk_ast::Definitions<'db>
+        // )]
+        // #[delegate(
+        //     codegen_sdk_ast::References<'db>
+        // )]
+        // #[delegate(
+        //     codegen_sdk_ast::FileExt<'db>
+        // )]
         pub enum ParsedFile<'db> {
             #(#output)*
         }
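The `#[delegate(...)]` attributes above are still commented out, but they show where this is headed: once every variant implements the new traits, `ambassador` can derive the per-variant dispatch. A simplified, lifetime-free sketch of that mechanism (toy types, not the real `ParsedFile`):

```rust
use ambassador::{Delegate, delegatable_trait};

#[delegatable_trait]
trait Describe {
    fn describe(&self) -> &'static str;
}

struct TypescriptFile;
impl Describe for TypescriptFile {
    fn describe(&self) -> &'static str {
        "typescript"
    }
}

struct JsonFile;
impl Describe for JsonFile {
    fn describe(&self) -> &'static str {
        "json"
    }
}

// The derive expands to a `match` that forwards the call to whichever
// variant is active, replacing hand-written dispatch on the enum.
#[derive(Delegate)]
#[delegate(Describe)]
enum ParsedFile {
    Typescript(TypescriptFile),
    Json(JsonFile),
}

fn main() {
    let file = ParsedFile::Typescript(TypescriptFile);
    assert_eq!(file.describe(), "typescript");
}
```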
diff --git a/languages/codegen-sdk-typescript/Cargo.toml b/languages/codegen-sdk-typescript/Cargo.toml
index bcbaface..c4e0ad0f 100644
--- a/languages/codegen-sdk-typescript/Cargo.toml
+++ b/languages/codegen-sdk-typescript/Cargo.toml
@@ -22,5 +22,9 @@ codegen-sdk-ast-generator = { workspace = true }
 codegen-sdk-common = { workspace = true, features = ["typescript"] }
 env_logger = { workspace = true }
 log = { workspace = true }
+[dev-dependencies]
+test-log = { workspace = true }
+tempfile = {workspace = true}
+
 [features]
 serialization = ["codegen-sdk-common/serialization"]
diff --git a/codegen-sdk-ast/tests/test_typescript.rs b/languages/codegen-sdk-typescript/tests/test_typescript.rs
similarity index 97%
rename from codegen-sdk-ast/tests/test_typescript.rs
rename to languages/codegen-sdk-typescript/tests/test_typescript.rs
index bd700a64..1ab0d82c 100644
--- a/codegen-sdk-ast/tests/test_typescript.rs
+++ b/languages/codegen-sdk-typescript/tests/test_typescript.rs
@@ -1,6 +1,7 @@
 #![recursion_limit = "512"]
 use std::path::PathBuf;
 
+use codegen_sdk_ast::Definitions;
 fn write_to_temp_file(content: &str, temp_dir: &tempfile::TempDir) -> PathBuf {
     let file_path = temp_dir.path().join("test.ts");
     std::fs::write(&file_path, content).unwrap();
diff --git a/src/lib.rs b/src/lib.rs
index 26844d2e..1a71d857 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,4 @@
 #![recursion_limit = "2048"]
 #[cfg(feature = "serialization")]
 mod cache;
-pub mod discovery;
-pub mod parser;
 pub mod system;
diff --git a/src/main.rs b/src/main.rs
index 71cd9b2f..5703ca14 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,28 +1,24 @@
 #![recursion_limit = "2048"]
 use std::{path::PathBuf, time::Instant};
-use anyhow::Context;
 use clap::Parser;
-use codegen_sdk_analyzer::{CodegenDatabase, Db, ParsedFile, parse_file};
-use codegen_sdk_ast::Input;
+use codegen_sdk_analyzer::{Codebase, ParsedFile};
+use codegen_sdk_ast::Definitions;
 #[cfg(feature = "serialization")]
 use codegen_sdk_common::serialize::Cache;
-use codegen_sdk_core::{discovery::FilesToParse, parser::parse_files, system::get_memory};
-use salsa::Setter;
+use codegen_sdk_core::system::get_memory;
 #[derive(Debug, Parser)]
 struct Args {
     input: String,
 }
-#[salsa::tracked]
-fn get_total_definitions(
-    db: &dyn Db,
-    files_to_parse: FilesToParse,
-) -> Vec<(usize, usize, usize, usize, usize)> {
-    salsa::par_map(db, files_to_parse.files(db), |db, file| {
-        let parsed = parse_file(db, file);
-        if let Some(parsed) = parsed.file(db) {
+fn get_total_definitions(codebase: &Codebase) -> Vec<(usize, usize, usize, usize, usize)> {
+    codebase
+        .files()
+        .into_iter()
+        .map(|parsed| {
+            #[cfg(feature = "typescript")]
             if let ParsedFile::Typescript(file) = parsed {
-                let definitions = file.definitions(db);
+                let definitions = file.definitions(codebase.db());
                 return (
                     definitions.classes.len(),
                     definitions.functions.len(),
@@ -31,19 +27,18 @@
                     definitions.modules.len(),
                 );
             }
-        }
-        (0, 0, 0, 0, 0)
-    })
+            (0, 0, 0, 0, 0)
+        })
+        .collect()
 }
 #[cfg(feature = "typescript")]
-fn print_definitions(db: &CodegenDatabase, files_to_parse: &FilesToParse) {
+fn print_definitions(codebase: &Codebase) {
     let mut total_classes = 0;
     let mut total_functions = 0;
     let mut total_interfaces = 0;
     let mut total_methods = 0;
     let mut total_modules = 0;
-    let new_files = FilesToParse::new(db, files_to_parse.files(db).clone());
-    let definitions = get_total_definitions(db, new_files);
+    let definitions = get_total_definitions(codebase);
     for (classes, functions, interfaces, methods, modules) in definitions {
         total_classes += classes;
         total_functions += functions;
@@ -65,32 +60,22 @@ fn main() -> anyhow::Result<()> {
     let args = Args::parse();
     let dir = args.input;
     let start = Instant::now();
-    let (tx, rx) = crossbeam_channel::unbounded();
-    let mut db = CodegenDatabase::new(tx);
-    db.watch_dir(PathBuf::from(&dir)).unwrap();
-    let (files_to_parse, errors) = parse_files(
-        &db,
-        #[cfg(feature = "serialization")]
-        &cache,
-        dir,
-    );
-    let num_errors = errors.len();
-    drop(errors);
+    let mut codebase = Codebase::new(PathBuf::from(&dir));
     let end = Instant::now();
     let duration: std::time::Duration = end.duration_since(start);
     let memory = get_memory();
     log::info!(
         "{} files parsed in {:?}.{} seconds with {} errors. Using {} MB of memory",
-        files_to_parse.files(&db).len(),
+        codebase.files().len(),
         duration.as_secs(),
         duration.subsec_millis(),
-        num_errors,
+        codebase.errors().len(),
         memory / 1024 / 1024
     );
     loop {
         // Compile the code starting at the provided input, this will read other
         // needed files using the on-demand mechanism.
-        print_definitions(&db, &files_to_parse);
+        print_definitions(&codebase);
         // let diagnostics = compile::accumulated::<Diagnostic>(&db, initial);
         // if diagnostics.is_empty() {
         //     println!("Sum is: {}", sum);
         // } else {
         //     for diagnostic in diagnostics {
         //         println!("{}", diagnostic.0);
         //     }
         // }
-
+        codebase.check_update()?;
         // Wait for file change events, the output can't change unless the
         // inputs change.
-        for event in rx.recv()?.unwrap() {
-            match event.path.canonicalize() {
-                Ok(path) => {
-                    log::info!("File changed: {}", path.display());
-                    let file = match db.files.get(&path) {
-                        Some(file) => *file,
-                        None => continue,
-                    };
-                    // `path` has changed, so read it and update the contents to match.
-                    // This creates a new revision and causes the incremental algorithm
-                    // to kick in, just like any other update to a salsa input.
-                    let contents = std::fs::read_to_string(path)
-                        .with_context(|| format!("Failed to read file {}", event.path.display()))?;
-                    let input = Input::new(&db, contents);
-                    file.set_contents(&mut db).to(input);
-                }
-                Err(e) => {
-                    log::error!(
-                        "Failed to canonicalize path {} for file {}",
-                        e,
-                        event.path.display()
-                    );
-                }
-            }
-        }
     }
 }
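One consequence of the new `Definitions`/`References` traits worth noting: callers are no longer tied to per-language inherent methods, so helpers can be written generically over any file type. A hypothetical sketch (this helper is not part of the diff; it mirrors what `FileExt::precompute` does internally):

```rust
use codegen_sdk_ast::{Definitions, References};

// Works for any language file implementing the new traits. `definitions`
// takes `self` by value, hence the `Clone` bound for the second call.
fn precompute_all<'db, T>(db: &'db dyn salsa::Database, files: Vec<T>)
where
    T: Definitions<'db> + References<'db> + Clone,
{
    for file in files {
        file.clone().definitions(db);
        file.references(db);
    }
}
```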