Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ on:

jobs:
test:
runs-on: ubuntu-latest
runs-on: ubuntu-latest-16
timeout-minutes: 10

steps:
Expand Down
24 changes: 6 additions & 18 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 1 addition & 6 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,10 @@ codegen-sdk-common = { workspace = true}
anyhow = { workspace = true}
salsa = { workspace = true}
codegen-sdk-typescript = { workspace = true}
crossbeam = "0.8.4"
glob = "0.3.2"
env_logger = { workspace = true }
log = { workspace = true }
rayon = { workspace = true}
sysinfo = "0.33.1"
rkyv.workspace = true
indicatif = { workspace = true }
crossbeam-channel = { workspace = true }
[features]
python = [ "codegen-sdk-analyzer/python"] # TODO: Add python support
typescript = [ "codegen-sdk-analyzer/typescript"]
Expand All @@ -38,7 +33,7 @@ yaml = [ "codegen-sdk-analyzer/yaml"]
toml = [ "codegen-sdk-analyzer/toml"]
serialization = ["codegen-sdk-common/serialization", "codegen-sdk-analyzer/serialization"]
stable = ["json", "toml", "typescript", "tsx", "jsx", "go", "python", "yaml", "java", "ruby", "rust", "javascript", "markdown"]
default = ["json", "toml", "typescript"]
default = []
[dev-dependencies]
test-log = { workspace = true }
[workspace]
Expand Down
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@ rustup toolchain install nightly
### Installing tools

```bash
cargo install cargo-binstall -y
cargo binstall cargo-nextest -y
cargo binstall cargo-insta -y
cargo install cargo-binstall
cargo binstall --no-confirm cargo-nextest
cargo binstall --no-confirm cargo-insta
```

### Building the project
Expand Down
5 changes: 4 additions & 1 deletion codegen-sdk-analyzer/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,9 @@ log = { workspace = true }
indicatif-log-bridge = {workspace = true}
indicatif = {workspace = true}
crossbeam-channel = { workspace = true }
glob = "0.3.2"
rayon = { workspace = true }
ambassador = { workspace = true }
[features]
python = [ "codegen-sdk-python"] # TODO: Add python support
typescript = [ "codegen-sdk-typescript"]
Expand All @@ -45,7 +48,7 @@ markdown = [ "codegen-sdk-markdown"]
yaml = [ "codegen-sdk-yaml"]
toml = [ "codegen-sdk-toml"]
serialization = ["codegen-sdk-common/serialization"]
default = ["json", "toml", "typescript"]
default = ["json"]

[dev-dependencies]
test-log = { workspace = true }
103 changes: 103 additions & 0 deletions codegen-sdk-analyzer/src/codebase.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
use std::path::PathBuf;

use anyhow::Context;
use codegen_sdk_ast::Input;
#[cfg(feature = "serialization")]
use codegen_sdk_common::serialization::Cache;
use discovery::FilesToParse;
use notify_debouncer_mini::DebounceEventResult;
use salsa::Setter;

use crate::{
ParsedFile,
database::{CodegenDatabase, Db},
parser::parse_file,
};
mod discovery;
mod parser;
/// A parsed codebase rooted at a directory on disk.
///
/// Owns the salsa database, holds the receiving end of the file-watcher
/// channel, and (with the "serialization" feature) a parse cache.
pub struct Codebase {
    // Incremental (salsa) database holding all inputs and derived parses.
    db: CodegenDatabase,
    // Directory this codebase was created from; used by `discover` for globbing.
    root: PathBuf,
    // Debounced file-change events emitted by the watcher set up in `new`.
    rx: crossbeam_channel::Receiver<DebounceEventResult>,
    #[cfg(feature = "serialization")]
    // Cache consulted by `sync` when (de)serializing parse results.
    cache: Cache,
}

impl Codebase {
pub fn new(root: PathBuf) -> Self {
let (tx, rx) = crossbeam_channel::unbounded();
let mut db = CodegenDatabase::new(tx);
db.watch_dir(PathBuf::from(&root)).unwrap();
let codebase = Self { db, root, rx };
codebase.sync();
codebase
}
pub fn check_update(&mut self) -> anyhow::Result<()> {
for event in self.rx.recv()?.unwrap() {
match event.path.canonicalize() {
Ok(path) => {
log::info!("File changed: {}", path.display());
let file = match self.db.files.get(&path) {
Some(file) => *file,
None => continue,
};
// `path` has changed, so read it and update the contents to match.
// This creates a new revision and causes the incremental algorithm
// to kick in, just like any other update to a salsa input.
let contents = std::fs::read_to_string(path)
.with_context(|| format!("Failed to read file {}", event.path.display()))?;
let input = Input::new(&self.db, contents);
file.set_contents(&mut self.db).to(input);
}
Err(e) => {
log::error!(
"Failed to canonicalize path {} for file {}",
e,
event.path.display()
);
}
}
}
Ok(())
}
pub fn get_file(&self, path: PathBuf) -> Option<&ParsedFile<'_>> {
let file = self.db.files.get(&path);
if let Some(file) = file {
return parse_file(&self.db, file.clone()).file(&self.db).as_ref();
}
None
}
fn discover(&self) -> FilesToParse {
discovery::collect_files(&self.db, &self.root)
}
pub fn files(&self) -> Vec<&ParsedFile<'_>> {
let mut files = Vec::new();
for file in self.discover().files(&self.db) {
if let Some(file) = self.get_file(file.path(&self.db)) {
files.push(file);
}
}
files
}
pub fn errors(&self) -> Vec<()> {
let mut errors = Vec::new();
for file in self.discover().files(&self.db) {
if self.get_file(file.path(&self.db)).is_none() {
errors.push(());
}
}
errors
}
pub fn sync(&self) -> () {
let files = self.discover();
parser::parse_files(
&self.db,
#[cfg(feature = "serialization")]
&self.cache,
files,
)
}
pub fn db(&self) -> &CodegenDatabase {
&self.db
}
}
18 changes: 14 additions & 4 deletions src/discovery.rs → ...en-sdk-analyzer/src/codebase/discovery.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
use codegen_sdk_analyzer::{CodegenDatabase, Db};
use std::path::PathBuf;

use codegen_sdk_ast::*;
#[cfg(feature = "serialization")]
use codegen_sdk_common::serialize::Cache;
use glob::glob;

use crate::database::{CodegenDatabase, Db};
#[salsa::input]
pub struct FilesToParse {
pub files: Vec<codegen_sdk_ast::input::File>,
Expand All @@ -17,18 +20,25 @@ pub fn log_languages() {
}
}

pub fn collect_files(db: &CodegenDatabase, dir: String) -> FilesToParse {
pub fn collect_files(db: &CodegenDatabase, dir: &PathBuf) -> FilesToParse {
let mut files = Vec::new();
for language in LANGUAGES.iter() {
for extension in language.file_extensions.iter() {
files.extend(glob(&format!("{dir}**/*.{}", extension)).unwrap());
files.extend(
glob(&format!(
"{dir}**/*.{extension}",
extension = extension,
dir = dir.display()
))
.unwrap(),
);
}
}

let files = files
.into_iter()
.filter_map(|file| file.ok())
.filter(|file| !file.is_dir())
.filter(|file| !file.is_dir() && !file.is_symlink())
.map(|file| db.input(file).unwrap())
.collect();
FilesToParse::new(db, files)
Expand Down
Loading
Loading