diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ae38f29d..ff674b36 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,6 +19,6 @@ jobs: uses: ./.github/actions/setup-environment - name: Run tests run: | - ~/.cargo/bin/cargo-nextest nextest run --workspace + ~/.cargo/bin/cargo-nextest nextest run --workspace --features stable env: CARGO_TERM_COLOR: always diff --git a/.gitignore b/.gitignore index 08e70630..11cbd479 100644 --- a/.gitignore +++ b/.gitignore @@ -60,6 +60,7 @@ src/**/*.html .ccache/ uv-*.tar.gz .venv +**.mm_profdata graph-sitter-types/out/** graph-sitter-types/typings/** coverage.json diff --git a/Cargo.lock b/Cargo.lock index a5c13d36..1f3aafc1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,18 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + [[package]] name = "aho-corasick" version = "1.1.3" @@ -26,6 +38,24 @@ dependencies = [ "memchr", ] +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "ambassador" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b27ba24e4d8a188489d5a03c7fabc167a60809a383cdb4d15feb37479cd2a48" +dependencies = [ + "itertools 0.10.5", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "anstream" version = "0.6.18" @@ -62,7 +92,7 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -73,7 +103,7 @@ checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", "once_cell", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -85,6 +115,12 @@ dependencies = [ "backtrace", ] +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + [[package]] name = "autocfg" version = "1.4.0" @@ -133,6 +169,15 @@ dependencies = [ "generic-array", ] +[[package]] +name = "boxcar" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225450ee9328e1e828319b48a89726cffc1b0ad26fd9211ad435de9fa376acae" +dependencies = [ + "loom", +] + [[package]] name = "buildid" version = "1.0.3" @@ -145,6 +190,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "bumpalo" +version = "3.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" + [[package]] name = "bytecheck" version = "0.8.1" @@ -168,12 +219,6 @@ dependencies = [ "syn 2.0.98", ] -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - [[package]] name = "bytes" version = "1.10.0" @@ -241,7 +286,31 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" name = "codegen-sdk-analyzer" version = "0.1.0" dependencies = [ + "anyhow", "codegen-sdk-ast", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-go", + "codegen-sdk-java", + "codegen-sdk-javascript", + "codegen-sdk-json", + "codegen-sdk-jsx", + "codegen-sdk-macros", + "codegen-sdk-markdown", + "codegen-sdk-python", + 
"codegen-sdk-ruby", + "codegen-sdk-rust", + "codegen-sdk-toml", + "codegen-sdk-tsx", + "codegen-sdk-typescript", + "codegen-sdk-yaml", + "crossbeam-channel", + "dashmap", + "env_logger", + "indicatif", + "indicatif-log-bridge", + "log", + "notify-debouncer-mini", "salsa", "test-log", ] @@ -250,14 +319,10 @@ dependencies = [ name = "codegen-sdk-ast" version = "0.1.0" dependencies = [ - "codegen-sdk-ast-generator", "codegen-sdk-common", "codegen-sdk-cst", - "codegen-sdk-macros", - "derive-visitor", - "env_logger", - "log", - "rayon", + "codegen-sdk-typescript", + "salsa", "tempfile", "test-log", ] @@ -270,12 +335,16 @@ dependencies = [ "codegen-sdk-common", "codegen-sdk-cst", "codegen-sdk-cst-generator", + "codegen-sdk-ts_query", + "codegen-sdk-typescript", "convert_case 0.7.1", "derive_more", "insta", "log", "proc-macro2", "quote", + "salsa", + "syn 2.0.98", "test-log", ] @@ -283,17 +352,18 @@ dependencies = [ name = "codegen-sdk-common" version = "0.1.0" dependencies = [ + "ambassador", "anyhow", "base64", "buildid", "bytes", "convert_case 0.7.1", - "enum_delegate", "lazy_static", "mockall", "phf", "prettyplease", "rkyv", + "salsa", "serde", "serde_json", "sha2", @@ -305,7 +375,6 @@ dependencies = [ "tree-sitter-java", "tree-sitter-javascript", "tree-sitter-json", - "tree-sitter-language", "tree-sitter-md", "tree-sitter-python", "tree-sitter-query", @@ -322,16 +391,21 @@ dependencies = [ name = "codegen-sdk-core" version = "0.1.0" dependencies = [ + "anyhow", "clap", "codegen-sdk-analyzer", "codegen-sdk-ast", "codegen-sdk-common", + "codegen-sdk-typescript", "crossbeam", + "crossbeam-channel", "env_logger", "glob", + "indicatif", "log", "rayon", "rkyv", + "salsa", "sysinfo", "test-log", ] @@ -342,17 +416,11 @@ version = "0.1.0" dependencies = [ "bytes", "codegen-sdk-common", - "codegen-sdk-cst-generator", - "codegen-sdk-macros", "convert_case 0.7.1", - "derive-visitor", - "derive_more", - "enum_delegate", - "env_logger", + "dashmap", "log", - "rayon", "rkyv", - 
"subenum", + "salsa", "tempfile", "test-log", "tree-sitter", @@ -364,11 +432,9 @@ version = "0.1.0" dependencies = [ "anyhow", "codegen-sdk-common", - "convert_case 0.7.1", "insta", "log", "mockall_double", - "prettyplease", "proc-macro2", "quote", "syn 2.0.98", @@ -378,11 +444,292 @@ dependencies = [ "tree-sitter-python", ] +[[package]] +name = "codegen-sdk-go" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-java" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-javascript" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-json" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "test-log", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-jsx" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + 
"derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + [[package]] name = "codegen-sdk-macros" version = "0.1.0" dependencies = [ "codegen-sdk-common", + "proc-macro2", + "quote", +] + +[[package]] +name = "codegen-sdk-markdown" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-python" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-ruby" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-rust" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-toml" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = 
"codegen-sdk-ts_query" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-tsx" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-typescript" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", +] + +[[package]] +name = "codegen-sdk-yaml" +version = "0.1.0" +dependencies = [ + "ambassador", + "bytes", + "codegen-sdk-ast", + "codegen-sdk-ast-generator", + "codegen-sdk-common", + "codegen-sdk-cst", + "codegen-sdk-cst-generator", + "derive_generic_visitor", + "derive_more", + "env_logger", + "log", + "salsa", + "subenum", + "tree-sitter", ] [[package]] @@ -400,14 +747,18 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "windows-sys", + "unicode-width", + "windows-sys 0.59.0", ] [[package]] name = "convert_case" -version = "0.4.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] [[package]] name = "convert_case" @@ -500,25 +851,75 @@ dependencies = [ ] [[package]] -name = "derive-visitor" -version = "0.4.0" +name = "darling" +version = "0.20.10" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d47165df83b9707cbada3216607a5d66125b6a66906de0bc1216c0669767ca9e" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ - "derive-visitor-macros", + "darling_core", + "darling_macro", ] [[package]] -name = "derive-visitor-macros" -version = "0.4.0" +name = "darling_core" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "427b39a85fecafea16b1a5f3f50437151022e35eb4fe038107f08adbf7f8def6" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ - "convert_case 0.4.0", - "itertools", + "fnv", + "ident_case", "proc-macro2", "quote", - "syn 1.0.109", + "strsim", + "syn 2.0.98", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.98", +] + +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "derive_generic_visitor" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3e1c241e4f464b614bd7650f1a7c4c0e20e5ef21564d6b916b4c51fd76f7688" +dependencies = [ + "derive_generic_visitor_macros", +] + +[[package]] +name = "derive_generic_visitor_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "885f5274163b5b1720591c0c24b34350a0b05e4774351f9fb3d13c192d8c995b" +dependencies = [ + "convert_case 0.6.0", + "darling", + "itertools 0.13.0", + "proc-macro2", + "quote", + "syn 2.0.98", ] [[package]] @@ 
-570,30 +971,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" -[[package]] -name = "enum_delegate" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8ea75f31022cba043afe037940d73684327e915f88f62478e778c3de914cd0a" -dependencies = [ - "enum_delegate_lib", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "enum_delegate_lib" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e1f6c3800b304a6be0012039e2a45a322a093539c45ab818d9e6895a39c90fe" -dependencies = [ - "proc-macro2", - "quote", - "rand", - "syn 1.0.109", -] - [[package]] name = "env_filter" version = "0.1.3" @@ -630,7 +1007,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -639,6 +1016,24 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "filetime" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" +dependencies = [ + "cfg-if", + "libc", + "libredox", + "windows-sys 0.59.0", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + [[package]] name = "fragile" version = "2.0.0" @@ -646,24 +1041,35 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] -name = "generic-array" -version = "0.14.7" +name = "fsevent-sys" 
+version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" dependencies = [ - "typenum", - "version_check", + "libc", ] [[package]] -name = "getrandom" -version = "0.2.15" +name = "generator" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "log", + "rustversion", + "windows 0.58.0", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", ] [[package]] @@ -692,9 +1098,13 @@ checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "hashbrown" -version = "0.12.3" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] [[package]] name = "hashbrown" @@ -703,12 +1113,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] -name = "heck" -version = "0.3.3" +name = "hashlink" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" dependencies = [ - 
"unicode-segmentation", + "hashbrown 0.14.5", ] [[package]] @@ -730,14 +1140,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] -name = "indexmap" -version = "1.9.3" +name = "ident_case" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", -] +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "indexmap" @@ -749,6 +1155,50 @@ dependencies = [ "hashbrown 0.15.2", ] +[[package]] +name = "indicatif" +version = "0.17.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" +dependencies = [ + "console", + "number_prefix", + "portable-atomic", + "rayon", + "unicode-width", + "web-time", +] + +[[package]] +name = "indicatif-log-bridge" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63703cf9069b85dbe6fe26e1c5230d013dee99d3559cd3d02ba39e099ef7ab02" +dependencies = [ + "indicatif", + "log", +] + +[[package]] +name = "inotify" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" +dependencies = [ + "bitflags 2.8.0", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + [[package]] name = "insta" version = "1.42.1" @@ -762,15 +1212,6 @@ dependencies = [ "similar", ] -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - [[package]] name = "is_terminal_polyfill" version = "1.70.1" @@ -779,9 +1220,18 @@ checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itertools" -version = "0.10.5" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] @@ -801,6 +1251,36 @@ dependencies = [ "libc", ] +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "kqueue" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7447f1ca1b7b563588a205fe93dea8df60fd981423a768bc1c0ded35ed147d0c" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -813,6 +1293,17 @@ version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.8.0", + "libc", + "redox_syscall", +] + [[package]] name = "linked-hash-map" version = "0.5.6" @@ -841,6 +1332,19 @@ version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +[[package]] +name = "loom" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" +dependencies = [ + "cfg-if", + "generator", + "scoped-tls", + "tracing", + "tracing-subscriber", +] + [[package]] name = "matchers" version = "0.1.0" @@ -865,6 +1369,18 @@ dependencies = [ "adler2", ] +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + [[package]] name = "mockall" version = "0.13.1" @@ -923,6 +1439,45 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "notify" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943" +dependencies = [ + "bitflags 2.8.0", + "crossbeam-channel", + "filetime", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio", + "notify-types", + "walkdir", + "windows-sys 0.59.0", +] + +[[package]] +name = "notify-debouncer-mini" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a689eb4262184d9a1727f9087cd03883ea716682ab03ed24efec57d7716dccb8" +dependencies = [ + "crossbeam-channel", + "log", + "notify", + "notify-types", + "tempfile", +] + +[[package]] +name = "notify-types" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" + [[package]] name = "ntapi" version = "0.4.1" @@ -942,6 +1497,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + [[package]] name = "object" version = "0.36.7" @@ -957,12 +1518,6 @@ version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" -[[package]] -name = "oorandom" -version = "11.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" - [[package]] name = "overload" version = "0.1.1" @@ -971,27 +1526,25 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] name = "parking_lot" -version = "0.11.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ - "instant", "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" -version = "0.8.6" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", - "instant", "libc", "redox_syscall", "smallvec", - "winapi", + "windows-targets", ] [[package]] @@ -1069,13 +1622,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] -name = "ppv-lite86" -version = "0.2.20" +name = "portable-atomic" +version = 
"1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" -dependencies = [ - "zerocopy", -] +checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" [[package]] name = "predicates" @@ -1166,18 +1716,6 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", "rand_core", ] @@ -1186,9 +1724,6 @@ name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.15", -] [[package]] name = "rayon" @@ -1212,11 +1747,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.2.16" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +checksum = "82b568323e98e49e2a0899dcee453dd679fae22d69adf9b11dd508d1549b7e2f" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.8.0", ] [[package]] @@ -1281,7 +1816,7 @@ dependencies = [ "bytecheck", "bytes", "hashbrown 0.15.2", - "indexmap 2.7.1", + "indexmap", "munge", "ptr_meta", "rancor", @@ -1310,9 +1845,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" -version = "1.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +checksum = 
"357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustix" @@ -1324,9 +1859,15 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.59.0", ] +[[package]] +name = "rustversion" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" + [[package]] name = "ryu" version = "1.0.19" @@ -1335,33 +1876,60 @@ checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "salsa" -version = "0.16.1" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b84d9f96071f3f3be0dc818eae3327625d8ebc95b58da37d6850724f31d3403" +checksum = "80e59d074084ce0a89693f021d8317cbc53d23d6502d3b3e2a3d1a7db1ceb13b" dependencies = [ - "crossbeam-utils", - "indexmap 1.9.3", - "lock_api", - "log", - "oorandom", + "arc-swap", + "boxcar", + "crossbeam-queue", + "dashmap", + "hashbrown 0.14.5", + "hashlink", + "indexmap", "parking_lot", + "rayon", "rustc-hash", + "salsa-macro-rules", "salsa-macros", "smallvec", + "tracing", ] +[[package]] +name = "salsa-macro-rules" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e354e0bdf1a23d822161e2b0f95c07846535a0e81deba77248a6ac22d19bc97" + [[package]] name = "salsa-macros" -version = "0.16.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3904a4ba0a9d0211816177fd34b04c7095443f8cdacd11175064fe541c8fe2" +checksum = "8b061c51d6c6d5d8e4459bcaa11ef18d268286c68263615d65e983071b357fd9" dependencies = [ - "heck 0.3.3", + "heck 0.5.0", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.98", + "synstructure", +] + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" 
+dependencies = [ + "winapi-util", ] +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + [[package]] name = "scopeguard" version = "1.2.0" @@ -1394,7 +1962,7 @@ version = "1.0.138" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" dependencies = [ - "indexmap 2.7.1", + "indexmap", "itoa", "memchr", "ryu", @@ -1466,8 +2034,7 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "subenum" version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f5d5dfb8556dd04017db5e318bbeac8ab2b0c67b76bf197bfb79e9b29f18ecf" +source = "git+https://github.com/mrenow/subenum?branch=main#d623bc4c0e2a8ab9bc24f255e933411f0c4c9c72" dependencies = [ "heck 0.4.1", "proc-macro2", @@ -1497,6 +2064,17 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "sysinfo" version = "0.33.1" @@ -1508,7 +2086,7 @@ dependencies = [ "memchr", "ntapi", "rayon", - "windows", + "windows 0.57.0", ] [[package]] @@ -1519,10 +2097,10 @@ checksum = "22e5a0acb1f3f55f65cc4a866c361b2fb2a0ff6366785ae6fbb5f85df07ba230" dependencies = [ "cfg-if", "fastrand", - "getrandom 0.3.1", + "getrandom", "once_cell", "rustix", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -1605,9 +2183,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", + "tracing-attributes", "tracing-core", ] +[[package]] +name = 
"tracing-attributes" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "tracing-core" version = "0.1.33" @@ -1640,6 +2230,7 @@ dependencies = [ "once_cell", "regex", "sharded-slab", + "smallvec", "thread_local", "tracing", "tracing-core", @@ -1708,9 +2299,8 @@ checksum = "38eee4db33814de3d004de9d8d825627ed3320d0989cce0dea30efaf5be4736c" [[package]] name = "tree-sitter-md" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17f968c22a01010b83fc960455ae729db08dbeb6388617d9113897cb9204b030" +version = "0.4.0" +source = "git+https://github.com/tree-sitter-grammars/tree-sitter-markdown#192407ab5a24bfc24f13332979b5e7967518754a" dependencies = [ "cc", "tree-sitter-language", @@ -1803,6 +2393,12 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -1833,6 +2429,16 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -1848,6 +2454,73 @@ dependencies = [ "wit-bindgen-rt", ] +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.98", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "winapi" version = "0.3.9" @@ -1864,6 +2537,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" @@ -1876,7 +2558,17 @@ version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" dependencies = [ - "windows-core", + "windows-core 0.57.0", + "windows-targets", +] + +[[package]] +name = "windows" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" +dependencies = [ + "windows-core 0.58.0", "windows-targets", ] @@ -1886,9 +2578,22 @@ version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" dependencies = [ - "windows-implement", - "windows-interface", - "windows-result", + "windows-implement 0.57.0", + "windows-interface 0.57.0", + "windows-result 0.1.2", + "windows-targets", +] + +[[package]] +name = "windows-core" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" +dependencies = [ + "windows-implement 0.58.0", + "windows-interface 0.58.0", + "windows-result 0.2.0", + "windows-strings", "windows-targets", ] @@ -1903,6 +2608,17 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "windows-implement" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "windows-interface" version = "0.57.0" @@ -1914,6 +2630,17 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "windows-interface" +version = "0.58.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "windows-result" version = "0.1.2" @@ -1923,6 +2650,34 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result 0.2.0", + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-sys" version = "0.59.0" @@ -2017,7 +2772,6 @@ version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ - "byteorder", "zerocopy-derive", ] diff --git a/Cargo.toml b/Cargo.toml index de6d0743..1530e93e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,4 +1,5 @@ cargo-features = ["codegen-backend"] + [package] name = "codegen-sdk-core" version = "0.1.0" @@ -9,6 +10,9 @@ clap = { version = "4.5.28", features = ["derive"] } codegen-sdk-analyzer = { path = "codegen-sdk-analyzer" } codegen-sdk-ast = { workspace = true} codegen-sdk-common = { workspace = true} +anyhow = { workspace = true} +salsa = { workspace = true} +codegen-sdk-typescript = { workspace = true} crossbeam = "0.8.4" glob = "0.3.2" env_logger = { workspace = true } @@ -16,22 +20,25 @@ log = { workspace 
= true } rayon = { workspace = true} sysinfo = "0.33.1" rkyv.workspace = true +indicatif = { workspace = true } +crossbeam-channel = { workspace = true } [features] -python = [ "codegen-sdk-ast/python"] # TODO: Add python support -typescript = [ "codegen-sdk-ast/typescript"] -tsx = [ "codegen-sdk-ast/tsx"] -jsx = [ "codegen-sdk-ast/jsx"] -javascript = [ "codegen-sdk-ast/typescript"] -json = [ "codegen-sdk-ast/json"] -java = [ "codegen-sdk-ast/java"] -ruby = [ "codegen-sdk-ast/ruby"] -rust = [ "codegen-sdk-ast/rust"] -go = [ "codegen-sdk-ast/go"] -markdown = [ "codegen-sdk-ast/markdown"] -yaml = [ "codegen-sdk-ast/yaml"] -toml = [ "codegen-sdk-ast/toml"] -ts_query = [] -default = ["json", "ts_query", "toml", "typescript"] +python = [ "codegen-sdk-analyzer/python"] # TODO: Add python support +typescript = [ "codegen-sdk-analyzer/typescript"] +tsx = [ "codegen-sdk-analyzer/tsx"] +jsx = [ "codegen-sdk-analyzer/jsx"] +javascript = [ "codegen-sdk-analyzer/javascript"] +json = [ "codegen-sdk-analyzer/json"] +java = [ "codegen-sdk-analyzer/java"] +ruby = [ "codegen-sdk-analyzer/ruby"] +rust = [ "codegen-sdk-analyzer/rust"] +go = [ "codegen-sdk-analyzer/go"] +markdown = [ "codegen-sdk-analyzer/markdown"] +yaml = [ "codegen-sdk-analyzer/yaml"] +toml = [ "codegen-sdk-analyzer/toml"] +serialization = ["codegen-sdk-common/serialization", "codegen-sdk-analyzer/serialization"] +stable = ["json", "toml", "typescript", "tsx", "jsx", "go", "python", "yaml", "java", "ruby", "rust", "javascript", "markdown"] +default = ["json", "toml", "typescript"] [dev-dependencies] test-log = { workspace = true } [workspace] @@ -40,8 +47,24 @@ members = [ "codegen-sdk-ast", "codegen-sdk-ast-generator", "codegen-sdk-common", "codegen-sdk-cst", - "codegen-sdk-cst-generator", "codegen-sdk-macros", + "codegen-sdk-cst-generator", + "codegen-sdk-macros", + "languages/codegen-sdk-json", + "languages/codegen-sdk-python", + "languages/codegen-sdk-java", + "languages/codegen-sdk-ruby", + 
"languages/codegen-sdk-rust", + "languages/codegen-sdk-go", + "languages/codegen-sdk-jsx", + "languages/codegen-sdk-tsx", + "languages/codegen-sdk-javascript", + "languages/codegen-sdk-markdown", + "languages/codegen-sdk-yaml", + "languages/codegen-sdk-toml", + "languages/codegen-sdk-ts_query", + "languages/codegen-sdk-typescript", ] +resolver = "2" [workspace.dependencies] rayon = "1.10.0" env_logger = "0.11.6" @@ -55,7 +78,7 @@ tree-sitter-java = "0.23.5" tree-sitter-ruby = "0.23.1" tree-sitter-rust = "0.23.2" tree-sitter-go = "0.23.4" -tree-sitter-md = "0.3.2" +tree-sitter-md = {git="https://github.com/tree-sitter-grammars/tree-sitter-markdown"} tree-sitter-yaml = "0.7.0" tree-sitter-toml-ng = "0.7.0" bytes = "1.10.0" @@ -65,35 +88,63 @@ serde_json = "1.0.138" anyhow = { version = "1.0.95", features = ["backtrace"] } rkyv = { version = "0.8.10", features = ["bytes-1","pointer_width_64"] } test-log = "0.2.17" -enum_delegate = "0.2.0" +ambassador = "0.4.1" mockall = "0.13.1" codegen-sdk-common = { path = "codegen-sdk-common" } codegen-sdk-cst = { path = "codegen-sdk-cst"} codegen-sdk-ast = { path = "codegen-sdk-ast" } codegen-sdk-cst-generator = { path = "codegen-sdk-cst-generator" } +codegen-sdk-ast-generator = { path = "codegen-sdk-ast-generator" } +codegen-sdk-ts_query = { path = "languages/codegen-sdk-ts_query" } +codegen-sdk-typescript = { path = "languages/codegen-sdk-typescript" } +codegen-sdk-python = { path = "languages/codegen-sdk-python" } +codegen-sdk-java = { path = "languages/codegen-sdk-java" } +codegen-sdk-ruby = { path = "languages/codegen-sdk-ruby" } +codegen-sdk-rust = { path = "languages/codegen-sdk-rust" } +codegen-sdk-go = { path = "languages/codegen-sdk-go" } +codegen-sdk-markdown = { path = "languages/codegen-sdk-markdown" } +codegen-sdk-yaml = { path = "languages/codegen-sdk-yaml" } +codegen-sdk-toml = { path = "languages/codegen-sdk-toml" } +codegen-sdk-jsx = { path = "languages/codegen-sdk-jsx" } +codegen-sdk-tsx = { path = 
"languages/codegen-sdk-tsx" } +codegen-sdk-javascript = { path = "languages/codegen-sdk-javascript" } +codegen-sdk-json = { path = "languages/codegen-sdk-json" } tempfile = "3.16.0" quote = "1.0.38" proc-macro2 = "1.0.93" -derive-visitor = "0.4.0" +derive_generic_visitor = "0.1.1" insta = "1.42.1" prettyplease = "0.2.29" syn = { version = "2.0.98", features = ["proc-macro"] } derive_more = { version = "2.0.1", features = ["debug", "display"] } - +salsa = "0.18.0" +subenum = {git = "https://github.com/mrenow/subenum", branch = "main"} +indicatif-log-bridge = "0.2.3" +indicatif = { version = "0.17.11", features = ["rayon"] } +crossbeam-channel = "0.5.11" [profile.dev] -debug = 0 -codegen-backend = "cranelift" +# codegen-backend = "cranelift" +split-debuginfo = "unpacked" [profile.dev.package] insta.opt-level = 3 similar.opt-level = 3 +syn.opt-level = 3 +convert_case.opt-level = 3 +codegen-sdk-cst-generator.opt-level = 3 +codegen-sdk-common.opt-level = 3 -[profile.test] +[profile.dev.build-override] +opt-level = 3 + +[profile.test.package."codegen-sdk-cst"] inherits = "dev" opt-level = 0 debug = 0 strip = "none" -lto = false codegen-units = 256 incremental = true -codegen-backend = "cranelift" +# codegen-backend = "cranelift" + +[profile.test] +lto = false diff --git a/codegen-sdk-analyzer/Cargo.toml b/codegen-sdk-analyzer/Cargo.toml index 5482e16f..1dd1d17a 100644 --- a/codegen-sdk-analyzer/Cargo.toml +++ b/codegen-sdk-analyzer/Cargo.toml @@ -4,7 +4,48 @@ version = "0.1.0" edition = "2024" [dependencies] -salsa = "0.16.1" +salsa = { workspace = true } +codegen-sdk-common = { workspace = true } codegen-sdk-ast = { workspace = true } +codegen-sdk-cst = { workspace = true } +codegen-sdk-python = { workspace = true, optional = true} +codegen-sdk-typescript = { workspace = true, optional = true} +codegen-sdk-tsx = { workspace = true, optional = true} +codegen-sdk-jsx = { workspace = true, optional = true} +codegen-sdk-javascript = { workspace = true, optional = true} 
+codegen-sdk-json = { workspace = true, optional = true} +codegen-sdk-java = { workspace = true, optional = true} +codegen-sdk-ruby = { workspace = true, optional = true} +codegen-sdk-rust = { workspace = true, optional = true} +codegen-sdk-go = { workspace = true, optional = true} +codegen-sdk-markdown = { workspace = true, optional = true} +codegen-sdk-yaml = { workspace = true, optional = true} +codegen-sdk-toml = { workspace = true, optional = true} +codegen-sdk-macros = { path = "../codegen-sdk-macros"} +dashmap = "6.1.0" +notify-debouncer-mini = { version = "0.6.0", features = ["macos_fsevent", "crossbeam-channel"] } +anyhow = {workspace = true} +env_logger = { workspace = true } +log = { workspace = true } +indicatif-log-bridge = {workspace = true} +indicatif = {workspace = true} +crossbeam-channel = { workspace = true } +[features] +python = [ "codegen-sdk-python"] # TODO: Add python support +typescript = [ "codegen-sdk-typescript"] +tsx = [ "codegen-sdk-tsx"] +jsx = [ "codegen-sdk-jsx"] +javascript = [ "codegen-sdk-javascript"] +json = [ "codegen-sdk-json"] +java = [ "codegen-sdk-java"] +ruby = [ "codegen-sdk-ruby"] +rust = [ "codegen-sdk-rust"] +go = [ "codegen-sdk-go"] +markdown = [ "codegen-sdk-markdown"] +yaml = [ "codegen-sdk-yaml"] +toml = [ "codegen-sdk-toml"] +serialization = ["codegen-sdk-common/serialization"] +default = ["json", "toml", "typescript"] + [dev-dependencies] test-log = { workspace = true } diff --git a/codegen-sdk-analyzer/src/database.rs b/codegen-sdk-analyzer/src/database.rs new file mode 100644 index 00000000..588c8028 --- /dev/null +++ b/codegen-sdk-analyzer/src/database.rs @@ -0,0 +1,112 @@ +use std::{ + path::PathBuf, + sync::{Arc, Mutex}, + time::Duration, +}; + +use anyhow::Context; +use codegen_sdk_ast::input::File; +use codegen_sdk_cst::Input; +use dashmap::{DashMap, mapref::entry::Entry}; +use indicatif::MultiProgress; +use notify_debouncer_mini::{ + Config, DebounceEventResult, Debouncer, new_debouncer_opt, + 
notify::{RecommendedWatcher, RecursiveMode}, +}; + +use crate::progress::get_multi_progress; +#[salsa::db] +pub trait Db: salsa::Database + Send { + fn input(&self, path: PathBuf) -> anyhow::Result; + fn multi_progress(&self) -> &MultiProgress; + fn watch_dir(&mut self, path: PathBuf) -> anyhow::Result<()>; +} +#[salsa::db] +#[derive(Clone)] +// Basic Database implementation for Query generation. This is not used for anything else. +pub struct CodegenDatabase { + storage: salsa::Storage, + pub files: DashMap, + dirs: Vec, + multi_progress: MultiProgress, + file_watcher: Arc>>, +} +fn get_watcher( + tx: crossbeam_channel::Sender, +) -> Arc>> { + let config = Config::default() + .with_batch_mode(true) + .with_timeout(Duration::from_secs(2)); + Arc::new(Mutex::new(new_debouncer_opt(config, tx).unwrap())) +} +impl CodegenDatabase { + pub fn new(tx: crossbeam_channel::Sender) -> Self { + let multi_progress = get_multi_progress(); + Self { + file_watcher: get_watcher(tx), + storage: salsa::Storage::default(), + multi_progress, + files: DashMap::new(), + dirs: Vec::new(), + } + } + fn _watch_file(&self, path: &PathBuf) -> anyhow::Result<()> { + for dir in self.dirs.iter() { + if path.starts_with(dir) { + return Ok(()); + } + } + let watcher = &mut *self.file_watcher.lock().unwrap(); + watcher + .watcher() + .watch(&path, RecursiveMode::NonRecursive) + .unwrap(); + Ok(()) + } +} +#[salsa::db] +impl salsa::Database for CodegenDatabase { + fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) { + // don't log boring events + let event = event(); + if let salsa::EventKind::WillExecute { .. 
} = event.kind { + log::debug!("{:?}", event); + } + } +} +#[salsa::db] +impl Db for CodegenDatabase { + fn watch_dir(&mut self, path: PathBuf) -> anyhow::Result<()> { + let path = path.canonicalize()?; + let watcher = &mut *self.file_watcher.lock().unwrap(); + watcher + .watcher() + .watch(&path, RecursiveMode::Recursive) + .unwrap(); + self.dirs.push(path); + Ok(()) + } + fn input(&self, path: PathBuf) -> anyhow::Result { + let path = path + .canonicalize() + .with_context(|| format!("Failed to read {}", path.display()))?; + Ok(match self.files.entry(path.clone()) { + // If the file already exists in our cache then just return it. + Entry::Occupied(entry) => *entry.get(), + // If we haven't read this file yet set up the watch, read the + // contents, store it in the cache, and return it. + Entry::Vacant(entry) => { + // Set up the watch before reading the contents to try to avoid + // race conditions. + self._watch_file(&path)?; + let contents = std::fs::read_to_string(&path) + .with_context(|| format!("Failed to read {}", path.display()))?; + let input = Input::new(self, contents); + *entry.insert(File::new(self, path, input)) + } + }) + } + fn multi_progress(&self) -> &MultiProgress { + &self.multi_progress + } +} diff --git a/codegen-sdk-analyzer/src/lib.rs b/codegen-sdk-analyzer/src/lib.rs index 8f1f58fe..e0d5af4c 100644 --- a/codegen-sdk-analyzer/src/lib.rs +++ b/codegen-sdk-analyzer/src/lib.rs @@ -1,14 +1,6 @@ -pub fn add(left: u64, right: u64) -> u64 { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test_log::test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} +#![recursion_limit = "512"] +mod database; +mod parser; +mod progress; +pub use database::{CodegenDatabase, Db}; +pub use parser::{Parsed, ParsedFile, parse_file}; diff --git a/codegen-sdk-analyzer/src/parser.rs b/codegen-sdk-analyzer/src/parser.rs new file mode 100644 index 00000000..d1dc1072 --- /dev/null +++ b/codegen-sdk-analyzer/src/parser.rs 
@@ -0,0 +1,14 @@ +use codegen_sdk_cst::CSTLanguage; +use codegen_sdk_macros::{languages_ast, parse_language}; +languages_ast!(); + +#[salsa::tracked] +pub struct Parsed<'db> { + #[return_ref] + pub file: Option>, +} +#[salsa::tracked] +pub fn parse_file(db: &dyn salsa::Database, file: codegen_sdk_ast::input::File) -> Parsed<'_> { + parse_language!(); + Parsed::new(db, None) +} diff --git a/codegen-sdk-analyzer/src/progress.rs b/codegen-sdk-analyzer/src/progress.rs new file mode 100644 index 00000000..c6b8a6ff --- /dev/null +++ b/codegen-sdk-analyzer/src/progress.rs @@ -0,0 +1,12 @@ +use indicatif::MultiProgress; +use indicatif_log_bridge::LogWrapper; + +pub fn get_multi_progress() -> MultiProgress { + let logger = + env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).build(); + let level = logger.filter(); + let multi_progress = MultiProgress::new(); + log::set_max_level(level); + LogWrapper::new(multi_progress.clone(), logger); + return multi_progress; +} diff --git a/codegen-sdk-ast-generator/Cargo.toml b/codegen-sdk-ast-generator/Cargo.toml index ffb25287..3c68177c 100644 --- a/codegen-sdk-ast-generator/Cargo.toml +++ b/codegen-sdk-ast-generator/Cargo.toml @@ -5,14 +5,18 @@ edition = "2024" [dependencies] codegen-sdk-common = { workspace = true } -codegen-sdk-cst = { workspace = true , features = ["ts_query"]} +codegen-sdk-cst = { workspace = true } anyhow = { workspace = true } quote = { workspace = true } proc-macro2 = { workspace = true } log = { workspace = true } derive_more = { workspace = true } codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ts_query = { workspace = true } convert_case = { workspace = true } +salsa = { workspace = true } +syn = { workspace = true } [dev-dependencies] test-log = { workspace = true } insta = { workspace = true } +codegen-sdk-typescript = {workspace = true} diff --git a/codegen-sdk-ast-generator/src/generator.rs b/codegen-sdk-ast-generator/src/generator.rs index 
06fd2b7b..39864d0f 100644 --- a/codegen-sdk-ast-generator/src/generator.rs +++ b/codegen-sdk-ast-generator/src/generator.rs @@ -1,49 +1,85 @@ use codegen_sdk_common::language::Language; -pub fn generate_ast(language: &Language) -> anyhow::Result { - let content = format!( - " - #[derive(Debug, Clone)] - pub struct {language_struct_name}File {{ - node: {language_name}::{root_node_name}, - path: PathBuf, - pub references: References, - pub definitions: Definitions - }} - impl File for {language_struct_name}File {{ - fn path(&self) -> &PathBuf {{ - &self.path - }} - fn parse(path: &PathBuf) -> Result {{ - log::debug!(\"Parsing {language_name} file: {{}}\", path.display()); - let ast = {language_name}::{language_struct_name}::parse_file(path)?; - let mut references = References::default(); +use proc_macro2::TokenStream; +use quote::{format_ident, quote}; +fn get_definitions_impl(language: &Language) -> TokenStream { + if !language.tag_query.contains("@definition") { + return quote! { + pub fn definitions(self, _db: &'db dyn salsa::Database) -> (){ + } + }; + } + quote! 
{ + #[salsa::tracked] + pub fn definitions(self, db: &'db dyn salsa::Database) -> Definitions<'db> { let mut definitions = Definitions::default(); - ast.drive(&mut definitions); - ast.drive(&mut references); - Ok({language_struct_name}File {{ node: ast, path: path.clone(), references, definitions }}) - }} - }} - impl HasNode for {language_struct_name}File {{ - type Node = {language_name}::{root_node_name}; - fn node(&self) -> &Self::Node {{ - &self.node - }} - }} - ", - language_struct_name = language.struct_name, - language_name = language.name(), - root_node_name = language.root_node(), - ); - // for (name, query) in language.definitions() { - // content.push_str(&format!(" - // impl {language_struct_name}File {{ - // pub fn {name}(&self) -> {language_struct_name}File {{ - // {language_struct_name}File {{ - // node: self.node.children().find(|node| node.type_name == \"{name}\").unwrap(), - // path: self.path.clone() - // }} - // }} - // ")); + if let Some(program) = self.node(db) { + definitions = definitions.visit_by_val_infallible(&program); + } + definitions + } + } +} +fn get_references_impl(language: &Language) -> TokenStream { + if !language.tag_query.contains("@reference") { + return quote! { + pub fn references(self, _db: &'db dyn salsa::Database) -> (){ + } + }; + } + quote! { + #[salsa::tracked] + pub fn references(self, db: &'db dyn salsa::Database) -> References<'db> { + let mut references = References::default(); + if let Some(program) = self.node(db) { + references = references.visit_by_val_infallible(&program); + } + references + } + } +} +pub fn generate_ast(language: &Language) -> anyhow::Result { + let language_struct_name = format_ident!("{}File", language.struct_name); + let language_name_str = language.name(); + let definitions_impl = get_definitions_impl(language); + let references_impl = get_references_impl(language); + let program_id = format_ident!("{}", language.root_node()); + let content = quote! 
{ + #[salsa::tracked] + pub struct #language_struct_name<'db> { + #[return_ref] + node: Option>, + #[id] + pub path: PathBuf, + } + // impl<'db> File for {language_struct_name}File<'db> {{ + // fn path(&self) -> &PathBuf {{ + // &self.path(db) + // }} + // }} + pub fn parse(db: &dyn salsa::Database, input: codegen_sdk_ast::input::File) -> #language_struct_name<'_> { + log::debug!("Parsing {} file: {}", input.path(db).display(), #language_name_str); + let ast = crate::cst::parse_program_raw(db, input.contents(db)); + #language_struct_name::new(db, ast, input.path(db).clone()) + } + #[salsa::tracked] + pub fn parse_query(db: &dyn salsa::Database, input: codegen_sdk_ast::input::File) -> #language_struct_name<'_> { + parse(db, input) + } + + + #[salsa::tracked] + impl<'db> #language_struct_name<'db> { + #definitions_impl + #references_impl + } + // impl<'db> HasNode for {language_struct_name}File<'db> { + // type Node = {language_name}::{root_node_name}<'db>; + // fn node(&self) -> &Self::Node { + // &self.node + // } // } + + }; + Ok(content) } diff --git a/codegen-sdk-ast-generator/src/lib.rs b/codegen-sdk-ast-generator/src/lib.rs index 5c3ff0b5..b7e858e7 100644 --- a/codegen-sdk-ast-generator/src/lib.rs +++ b/codegen-sdk-ast-generator/src/lib.rs @@ -1,26 +1,39 @@ #![feature(extend_one)] use codegen_sdk_common::{generator::format_code, language::Language}; -use quote::quote; - +use codegen_sdk_cst::CSTDatabase; +use quote::{ToTokens, quote}; mod generator; mod query; mod visitor; +use syn::parse_quote; pub fn generate_ast(language: &Language) -> anyhow::Result<()> { + let db = CSTDatabase::default(); let imports = quote! 
{ - use derive_visitor::{Visitor, Drive}; + use derive_generic_visitor::{Visitor, Drive, Visit}; use codegen_sdk_common::*; use std::path::PathBuf; use codegen_sdk_cst::CSTLanguage; }; - let mut ast = generator::generate_ast(language)?; - let definitions = visitor::generate_visitor(language, "definition"); - let references = visitor::generate_visitor(language, "reference"); - ast = imports.to_string() + &ast + &definitions.to_string() + &references.to_string(); - ast = format_code(&ast) - .unwrap_or_else(|_| panic!("Failed to format ast for {}", language.name())); + let ast = generator::generate_ast(language)?; + let definition_visitor = visitor::generate_visitor(&db, language, "definition"); + let reference_visitor = visitor::generate_visitor(&db, language, "reference"); + let ast: syn::File = parse_quote! { + #imports + #ast + #definition_visitor + #reference_visitor + }; let out_dir = std::env::var("OUT_DIR")?; - let out_file = format!("{}/{}.rs", out_dir, language.name()); + let out_file = format!("{}/{}-ast.rs", out_dir, language.name()); + std::fs::write(&out_file, ast.to_token_stream().to_string())?; + let ast = format_code(&ast).unwrap_or_else(|_| { + panic!( + "Failed to format ast for {} at {}", + language.name(), + out_file + ) + }); std::fs::write(out_file, ast)?; Ok(()) } diff --git a/codegen-sdk-ast-generator/src/query.rs b/codegen-sdk-ast-generator/src/query.rs index 455319cf..08bb1207 100644 --- a/codegen-sdk-ast-generator/src/query.rs +++ b/codegen-sdk-ast-generator/src/query.rs @@ -4,15 +4,16 @@ use codegen_sdk_common::{ CSTNode, HasChildren, Language, naming::{normalize_field_name, normalize_type_name}, }; -use codegen_sdk_cst::{CSTLanguage, ts_query}; -use codegen_sdk_cst_generator::{Field, State}; +use codegen_sdk_cst::CSTLanguage; +use codegen_sdk_cst_generator::{Config, Field, State}; +use codegen_sdk_ts_query::cst as ts_query; use derive_more::Debug; use log::{debug, info, warn}; use proc_macro2::{Ident, TokenStream}; use 
quote::{format_ident, quote}; -fn captures_for_field_definition( - node: &ts_query::FieldDefinition, -) -> impl Iterator { +fn captures_for_field_definition<'a>( + node: &ts_query::FieldDefinition<'a>, +) -> impl Iterator> { let mut captures = Vec::new(); for child in node.children() { match child { @@ -27,7 +28,9 @@ fn captures_for_field_definition( } captures.into_iter() } -fn captures_for_named_node(node: &ts_query::NamedNode) -> impl Iterator { +fn captures_for_named_node<'a>( + node: &ts_query::NamedNode<'a>, +) -> impl Iterator> { let mut captures = Vec::new(); for child in node.children() { match child { @@ -45,14 +48,21 @@ fn captures_for_named_node(node: &ts_query::NamedNode) -> impl Iterator { - node: ts_query::NamedNode, + node: ts_query::NamedNode<'a>, language: &'a Language, - state: Arc>, + pub(crate) state: Arc>, } impl<'a> Query<'a> { - pub fn from_queries(source: &str, language: &'a Language) -> BTreeMap { - let parsed = ts_query::Query::parse(source).unwrap(); - let state = Arc::new(State::new(language)); + pub fn from_queries( + db: &'a dyn salsa::Database, + source: &str, + language: &'a Language, + ) -> BTreeMap { + let parsed = ts_query::Query::parse(db, source.to_string()) + .as_ref() + .unwrap(); + let config = Config::default(); + let state = Arc::new(State::new(language, config)); let mut queries = BTreeMap::new(); for node in parsed.children() { match node { @@ -81,7 +91,7 @@ impl<'a> Query<'a> { queries } fn from_named_node( - named: &ts_query::NamedNode, + named: &ts_query::NamedNode<'a>, language: &'a Language, state: Arc>, ) -> Self { @@ -197,12 +207,11 @@ impl<'a> Query<'a> { let name = normalize_field_name(&identifier.source()); if let Some(field) = self.get_field_for_field_name(&name, struct_name) { let field_name = format_ident!("{}", name); - let new_identifier = format_ident!("field"); let normalized_struct_name = field.type_name(); let wrapped = self.get_matcher_for_definition( &normalized_struct_name, other_child.clone(), - 
&new_identifier, + &field_name, ); assert!( wrapped.to_string().len() > 0, @@ -213,12 +222,12 @@ impl<'a> Query<'a> { ); if !field.is_optional() { return quote! { - let #new_identifier = &#current_node.#field_name; + let #field_name = &*#current_node.#field_name; #wrapped }; } else { return quote! { - if let Some(field) = &#current_node.#field_name { + if let Some(#field_name) = &*#current_node.#field_name { #wrapped } }; @@ -253,6 +262,96 @@ impl<'a> Query<'a> { } matchers } + fn _get_matcher_for_named_node( + &self, + struct_name: &str, + target_name: &str, + target_kind: &str, + current_node: &Ident, + remaining_nodes: Vec>, + ) -> TokenStream { + let mut matchers = TokenStream::new(); + let mut field_matchers = TokenStream::new(); + let mut comment_variant = None; + let variants = self.state.get_variants(&format!("{}Children", target_kind)); + if variants.len() == 2 { + if variants.iter().any(|v| v.normalize() == "Comment") { + for variant in variants { + if variant.normalize() == "Comment" { + continue; + } + comment_variant = Some(variant.normalize()); + } + } + } + + for child in remaining_nodes { + if child.kind() == "field_definition" { + field_matchers.extend_one(self.get_matcher_for_definition( + &target_name, + child.into(), + current_node, + )); + } else { + let result = self.get_matcher_for_definition( + &target_name, + child.into(), + &format_ident!("child"), + ); + + if let Some(ref variant) = comment_variant { + let children = format_ident!("{}Children", target_name); + let variant = format_ident!("{}", variant); + matchers.extend_one(quote! { + + if let crate::cst::#children::#variant(#current_node) = #current_node { + #result + } + }); + } else { + matchers.extend_one(quote! { + #result + }); + } + } + } + let matchers = if matchers.is_empty() { + quote! {} + } else { + quote! 
{ + for child in #current_node.children().into_iter() { + #matchers + break; + } + } + }; + let query_source = format!( + "Code for query: {}", + &self.node().source().replace("\n", " ") // Newlines mess with quote's doc comments + ); + if matchers.is_empty() && field_matchers.is_empty() { + return quote! {}; + } + let base_matcher = quote! { + #[doc = #query_source] + #matchers + #field_matchers + }; + if struct_name == target_name { + return base_matcher; + } else { + let mut children = format_ident!("{}", struct_name); + if let Some(node) = self.state.get_node_for_struct_name(struct_name) { + children = format_ident!("{}", node.children_struct_name()); + } + let variant = format_ident!("{}", target_name); + return quote! { + if let crate::cst::#children::#variant(#current_node) = #current_node { + #base_matcher + } + }; + } + } fn get_matcher_for_named_node( &self, @@ -275,14 +374,15 @@ impl<'a> Query<'a> { let name_node = self.state.get_node_for_raw_name(&first_node.source()); if let Some(name_node) = name_node { - for child in remaining_nodes { - let result = self.get_matcher_for_definition( - &name_node.normalize_name(), - child.into(), - current_node, - ); - matchers.extend_one(result); - } + let target_name = name_node.normalize_name(); + let matcher = self._get_matcher_for_named_node( + struct_name, + &target_name, + name_node.kind(), + current_node, + remaining_nodes, + ); + matchers.extend_one(matcher); } else { let subenum = self.state.get_subenum_variants(&first_node.source()); log::info!( @@ -294,17 +394,19 @@ impl<'a> Query<'a> { if variant.normalize_name() == "Comment" { continue; } - for child in remaining_nodes.clone() { - let result = self.get_matcher_for_definition( - &variant.normalize_name(), - child.into(), - current_node, - ); - matchers.extend_one(result); - } + let matcher = self._get_matcher_for_named_node( + struct_name, + &variant.normalize_name(), + variant.kind(), + current_node, + remaining_nodes.clone(), + ); + 
matchers.extend_one(matcher); } } - matchers + quote! { + #matchers + } } fn get_default_matcher(&self) -> TokenStream { let to_append = self.executor_id(); @@ -348,31 +450,32 @@ impl<'a> Query<'a> { self.get_matchers_for_grouping(&grouping, struct_name, current_node) } ts_query::NodeTypes::Identifier(identifier) => { + // We have 2 nodes, the parent node and the identifier node let to_append = self.get_default_matcher(); - let language = format_ident!("{}", self.language.name()); let children; + // Case 1: The identifier is the same as the struct name (IE: we know this is the corrent node) + if normalize_type_name(&identifier.source(), true) == struct_name { + return to_append; + } + // Case 2: We have a node for the parent struct if let Some(node) = self.state.get_node_for_struct_name(struct_name) { children = format_ident!("{}Children", struct_name); // When there is only 1 possible child, we can use the default matcher if node.children_struct_name() != children.to_string() { - return self.get_default_matcher(); + return to_append; } } else { + // Case 3: This is a subenum // If this is a field, we may be dealing with multiple types and can't operate over all of them return self.get_default_matcher(); // TODO: Handle this case } let struct_name = format_ident!("{}", normalize_type_name(&identifier.source(), true)); quote! 
{ - if #current_node.children().into_iter().any(|child| { - if let #language::#children::#struct_name(_) = child { - true - } else { - false - } - }) { + if let crate::cst::#children::#struct_name(child) = #current_node { #to_append } + } } unhandled => { @@ -393,21 +496,31 @@ impl<'a> Query<'a> { self.node().source(), self.node().children() ); - let mut matchers = TokenStream::new(); - for child in self.node().children().into_iter().skip(1) { - let result = - self.get_matcher_for_definition(struct_name, child.into(), &format_ident!("node")); - matchers.extend_one(result); - } - matchers + let node = self.state.get_node_for_struct_name(struct_name); + let kind = if let Some(node) = node { + node.kind() + } else { + struct_name + }; + return self._get_matcher_for_named_node( + struct_name, + &struct_name, + kind, + &format_ident!("node"), + self.node().children().into_iter().skip(1).collect(), + ); } } pub trait HasQuery { - fn queries(&self) -> BTreeMap; - fn queries_with_prefix(&self, prefix: &str) -> BTreeMap>> { + fn queries<'a, 'db: 'a>(&'a self, db: &'db dyn salsa::Database) -> BTreeMap>; + fn queries_with_prefix<'a, 'db: 'a>( + &'a self, + db: &'db dyn salsa::Database, + prefix: &str, + ) -> BTreeMap>> { let mut queries = BTreeMap::new(); - for (name, query) in self.queries().into_iter() { + for (name, query) in self.queries(db).into_iter() { if name.starts_with(prefix) { let new_name = name.split(".").last().unwrap(); queries @@ -420,7 +533,21 @@ pub trait HasQuery { } } impl HasQuery for Language { - fn queries(&self) -> BTreeMap> { - Query::from_queries(&self.tag_query, self) + fn queries<'a, 'db: 'a>(&'a self, db: &'db dyn salsa::Database) -> BTreeMap> { + Query::from_queries(db, &self.tag_query, self) + } +} +#[cfg(test)] +mod tests { + use codegen_sdk_common::language::ts_query; + use codegen_sdk_cst::CSTDatabase; + + use super::*; + #[test] + fn test_query_basic() { + let database = CSTDatabase::default(); + let language = &ts_query::Query; + let 
queries = Query::from_queries(&database, "(abc) @definition.abc", language); + assert!(queries.len() > 0); } } diff --git a/codegen-sdk-ast-generator/src/snapshots/codegen_sdk_ast_generator__visitor__tests__generate_visitor.snap b/codegen-sdk-ast-generator/src/snapshots/codegen_sdk_ast_generator__visitor__tests__generate_visitor.snap index dec37b8b..d0e73fcd 100644 --- a/codegen-sdk-ast-generator/src/snapshots/codegen_sdk_ast_generator__visitor__tests__generate_visitor.snap +++ b/codegen-sdk-ast-generator/src/snapshots/codegen_sdk_ast_generator__visitor__tests__generate_visitor.snap @@ -1,53 +1,1118 @@ --- source: codegen-sdk-ast-generator/src/visitor.rs -expression: "codegen_sdk_common::generator::format_code(&visitor.to_string()).unwrap()" +expression: "codegen_sdk_common::generator::format_code_string(&visitor.to_string()).unwrap()" --- -#[derive(Visitor, Default, Debug, Clone)] -#[visitor( - typescript::AbstractClassDeclaration(enter), - typescript::FunctionSignature(enter), - typescript::InterfaceDeclaration(enter), - typescript::AbstractMethodSignature(enter), - typescript::Module(enter) +#[derive(Visitor, Visit, Debug, Clone, Eq, PartialEq, salsa::Update, Hash, Default)] +#[visit(drive(&crate::cst::AbstractClassDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::AbstractMethodSignatureChildren<'db>))] +#[visit(drive(&crate::cst::AbstractMethodSignatureName<'db>))] +#[visit(drive(&crate::cst::AbstractMethodSignatureReturnType<'db>))] +#[visit(drive(&crate::cst::AccessibilityModifier<'db>))] +#[visit(drive(&crate::cst::AddingTypeAnnotation<'db>))] +#[visit(drive(&crate::cst::AddingTypeAnnotationChildren<'db>))] +#[visit(drive(&crate::cst::AmbientDeclaration<'db>))] +#[visit(drive(&crate::cst::AmbientDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::AnonymousAbstract<'db>))] +#[visit(drive(&crate::cst::AnonymousAccessor<'db>))] +#[visit(drive(&crate::cst::AnonymousAmpersand<'db>))] +#[visit(drive(&crate::cst::AnonymousAmpersandAmpersand<'db>))] 
+#[visit(drive(&crate::cst::AnonymousAmpersandAmpersandEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousAmpersandEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousAny<'db>))] +#[visit(drive(&crate::cst::AnonymousAs<'db>))] +#[visit(drive(&crate::cst::AnonymousAssert<'db>))] +#[visit(drive(&crate::cst::AnonymousAsserts<'db>))] +#[visit(drive(&crate::cst::AnonymousAsterisk<'db>))] +#[visit(drive(&crate::cst::AnonymousAsteriskAsterisk<'db>))] +#[visit(drive(&crate::cst::AnonymousAsteriskAsteriskEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousAsteriskEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousAsync<'db>))] +#[visit(drive(&crate::cst::AnonymousAt<'db>))] +#[visit(drive(&crate::cst::AnonymousAwait<'db>))] +#[visit(drive(&crate::cst::AnonymousBacktick<'db>))] +#[visit(drive(&crate::cst::AnonymousBang<'db>))] +#[visit(drive(&crate::cst::AnonymousBangEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousBangEqualsEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousBoolean<'db>))] +#[visit(drive(&crate::cst::AnonymousBreak<'db>))] +#[visit(drive(&crate::cst::AnonymousCaret<'db>))] +#[visit(drive(&crate::cst::AnonymousCaretEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousCase<'db>))] +#[visit(drive(&crate::cst::AnonymousCatch<'db>))] +#[visit(drive(&crate::cst::AnonymousClass<'db>))] +#[visit(drive(&crate::cst::AnonymousCloseBrace<'db>))] +#[visit(drive(&crate::cst::AnonymousCloseBracket<'db>))] +#[visit(drive(&crate::cst::AnonymousCloseParen<'db>))] +#[visit(drive(&crate::cst::AnonymousColon<'db>))] +#[visit(drive(&crate::cst::AnonymousComma<'db>))] +#[visit(drive(&crate::cst::AnonymousConst<'db>))] +#[visit(drive(&crate::cst::AnonymousContinue<'db>))] +#[visit(drive(&crate::cst::AnonymousDebugger<'db>))] +#[visit(drive(&crate::cst::AnonymousDeclare<'db>))] +#[visit(drive(&crate::cst::AnonymousDefault<'db>))] +#[visit(drive(&crate::cst::AnonymousDelete<'db>))] +#[visit(drive(&crate::cst::AnonymousDo<'db>))] 
+#[visit(drive(&crate::cst::AnonymousDollarOpenBrace<'db>))] +#[visit(drive(&crate::cst::AnonymousDot<'db>))] +#[visit(drive(&crate::cst::AnonymousDotDotDot<'db>))] +#[visit(drive(&crate::cst::AnonymousDoubleQuote<'db>))] +#[visit(drive(&crate::cst::AnonymousElse<'db>))] +#[visit(drive(&crate::cst::AnonymousEnum<'db>))] +#[visit(drive(&crate::cst::AnonymousEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousEqualsEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousEqualsEqualsEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousEqualsGreaterThan<'db>))] +#[visit(drive(&crate::cst::AnonymousExport<'db>))] +#[visit(drive(&crate::cst::AnonymousExtends<'db>))] +#[visit(drive(&crate::cst::AnonymousFinally<'db>))] +#[visit(drive(&crate::cst::AnonymousFor<'db>))] +#[visit(drive(&crate::cst::AnonymousFrom<'db>))] +#[visit(drive(&crate::cst::AnonymousFunction<'db>))] +#[visit(drive(&crate::cst::AnonymousGet<'db>))] +#[visit(drive(&crate::cst::AnonymousGlobal<'db>))] +#[visit(drive(&crate::cst::AnonymousGreaterThan<'db>))] +#[visit(drive(&crate::cst::AnonymousGreaterThanEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousGreaterThanGreaterThan<'db>))] +#[visit(drive(&crate::cst::AnonymousGreaterThanGreaterThanEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousGreaterThanGreaterThanGreaterThan<'db>))] +#[visit(drive(&crate::cst::AnonymousGreaterThanGreaterThanGreaterThanEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousIf<'db>))] +#[visit(drive(&crate::cst::AnonymousImplements<'db>))] +#[visit(drive(&crate::cst::AnonymousImport<'db>))] +#[visit(drive(&crate::cst::AnonymousIn<'db>))] +#[visit(drive(&crate::cst::AnonymousInfer<'db>))] +#[visit(drive(&crate::cst::AnonymousInstanceof<'db>))] +#[visit(drive(&crate::cst::AnonymousInterface<'db>))] +#[visit(drive(&crate::cst::AnonymousIs<'db>))] +#[visit(drive(&crate::cst::AnonymousKeyof<'db>))] +#[visit(drive(&crate::cst::AnonymousLessThan<'db>))] +#[visit(drive(&crate::cst::AnonymousLessThanEquals<'db>))] 
+#[visit(drive(&crate::cst::AnonymousLessThanLessThan<'db>))] +#[visit(drive(&crate::cst::AnonymousLessThanLessThanEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousLet<'db>))] +#[visit(drive(&crate::cst::AnonymousMeta<'db>))] +#[visit(drive(&crate::cst::AnonymousMinus<'db>))] +#[visit(drive(&crate::cst::AnonymousMinusEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousMinusMinus<'db>))] +#[visit(drive(&crate::cst::AnonymousMinusQuestionMarkColon<'db>))] +#[visit(drive(&crate::cst::AnonymousModule<'db>))] +#[visit(drive(&crate::cst::AnonymousNamespace<'db>))] +#[visit(drive(&crate::cst::AnonymousNever<'db>))] +#[visit(drive(&crate::cst::AnonymousNew<'db>))] +#[visit(drive(&crate::cst::AnonymousNumber<'db>))] +#[visit(drive(&crate::cst::AnonymousObject<'db>))] +#[visit(drive(&crate::cst::AnonymousOf<'db>))] +#[visit(drive(&crate::cst::AnonymousOpenBrace<'db>))] +#[visit(drive(&crate::cst::AnonymousOpenBracePipe<'db>))] +#[visit(drive(&crate::cst::AnonymousOpenBracket<'db>))] +#[visit(drive(&crate::cst::AnonymousOpenParen<'db>))] +#[visit(drive(&crate::cst::AnonymousOverride<'db>))] +#[visit(drive(&crate::cst::AnonymousPercent<'db>))] +#[visit(drive(&crate::cst::AnonymousPercentEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousPipe<'db>))] +#[visit(drive(&crate::cst::AnonymousPipeCloseBrace<'db>))] +#[visit(drive(&crate::cst::AnonymousPipeEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousPipePipe<'db>))] +#[visit(drive(&crate::cst::AnonymousPipePipeEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousPlus<'db>))] +#[visit(drive(&crate::cst::AnonymousPlusEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousPlusPlus<'db>))] +#[visit(drive(&crate::cst::AnonymousPlusQuestionMarkColon<'db>))] +#[visit(drive(&crate::cst::AnonymousPrivate<'db>))] +#[visit(drive(&crate::cst::AnonymousProtected<'db>))] +#[visit(drive(&crate::cst::AnonymousPublic<'db>))] +#[visit(drive(&crate::cst::AnonymousQuestionMark<'db>))] +#[visit(drive(&crate::cst::AnonymousQuestionMarkColon<'db>))] 
+#[visit(drive(&crate::cst::AnonymousQuestionMarkDot<'db>))] +#[visit(drive(&crate::cst::AnonymousQuestionMarkQuestionMark<'db>))] +#[visit(drive(&crate::cst::AnonymousQuestionMarkQuestionMarkEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousReadonly<'db>))] +#[visit(drive(&crate::cst::AnonymousRequire<'db>))] +#[visit(drive(&crate::cst::AnonymousReturn<'db>))] +#[visit(drive(&crate::cst::AnonymousSatisfies<'db>))] +#[visit(drive(&crate::cst::AnonymousSemicolon<'db>))] +#[visit(drive(&crate::cst::AnonymousSet<'db>))] +#[visit(drive(&crate::cst::AnonymousSingleQuote<'db>))] +#[visit(drive(&crate::cst::AnonymousSlash<'db>))] +#[visit(drive(&crate::cst::AnonymousSlashEquals<'db>))] +#[visit(drive(&crate::cst::AnonymousStatic<'db>))] +#[visit(drive(&crate::cst::AnonymousString<'db>))] +#[visit(drive(&crate::cst::AnonymousSwitch<'db>))] +#[visit(drive(&crate::cst::AnonymousSymbol<'db>))] +#[visit(drive(&crate::cst::AnonymousTarget<'db>))] +#[visit(drive(&crate::cst::AnonymousThrow<'db>))] +#[visit(drive(&crate::cst::AnonymousTilde<'db>))] +#[visit(drive(&crate::cst::AnonymousTry<'db>))] +#[visit(drive(&crate::cst::AnonymousType<'db>))] +#[visit(drive(&crate::cst::AnonymousTypeof<'db>))] +#[visit(drive(&crate::cst::AnonymousUniqueSymbol<'db>))] +#[visit(drive(&crate::cst::AnonymousUnknown<'db>))] +#[visit(drive(&crate::cst::AnonymousUsing<'db>))] +#[visit(drive(&crate::cst::AnonymousVar<'db>))] +#[visit(drive(&crate::cst::AnonymousVoid<'db>))] +#[visit(drive(&crate::cst::AnonymousWhile<'db>))] +#[visit(drive(&crate::cst::AnonymousWith<'db>))] +#[visit(drive(&crate::cst::AnonymousYield<'db>))] +#[visit(drive(&crate::cst::Arguments<'db>))] +#[visit(drive(&crate::cst::ArgumentsChildren<'db>))] +#[visit(drive(&crate::cst::Array<'db>))] +#[visit(drive(&crate::cst::ArrayChildren<'db>))] +#[visit(drive(&crate::cst::ArrayPattern<'db>))] +#[visit(drive(&crate::cst::ArrayPatternChildren<'db>))] +#[visit(drive(&crate::cst::ArrayType<'db>))] 
+#[visit(drive(&crate::cst::ArrayTypeChildren<'db>))] +#[visit(drive(&crate::cst::ArrowFunction<'db>))] +#[visit(drive(&crate::cst::ArrowFunctionBody<'db>))] +#[visit(drive(&crate::cst::ArrowFunctionChildren<'db>))] +#[visit(drive(&crate::cst::ArrowFunctionReturnType<'db>))] +#[visit(drive(&crate::cst::AsExpression<'db>))] +#[visit(drive(&crate::cst::AsExpressionChildren<'db>))] +#[visit(drive(&crate::cst::Asserts<'db>))] +#[visit(drive(&crate::cst::AssertsAnnotation<'db>))] +#[visit(drive(&crate::cst::AssertsAnnotationChildren<'db>))] +#[visit(drive(&crate::cst::AssertsChildren<'db>))] +#[visit(drive(&crate::cst::AssignmentExpression<'db>))] +#[visit(drive(&crate::cst::AssignmentExpressionChildren<'db>))] +#[visit(drive(&crate::cst::AssignmentExpressionLeft<'db>))] +#[visit(drive(&crate::cst::AssignmentPattern<'db>))] +#[visit(drive(&crate::cst::AssignmentPatternChildren<'db>))] +#[visit(drive(&crate::cst::AugmentedAssignmentExpression<'db>))] +#[visit(drive(&crate::cst::AugmentedAssignmentExpressionChildren<'db>))] +#[visit(drive(&crate::cst::AugmentedAssignmentExpressionLeft<'db>))] +#[visit(drive(&crate::cst::AugmentedAssignmentExpressionOperator<'db>))] +#[visit(drive(&crate::cst::AwaitExpression<'db>))] +#[visit(drive(&crate::cst::AwaitExpressionChildren<'db>))] +#[visit(drive(&crate::cst::BinaryExpression<'db>))] +#[visit(drive(&crate::cst::BinaryExpressionChildren<'db>))] +#[visit(drive(&crate::cst::BinaryExpressionLeft<'db>))] +#[visit(drive(&crate::cst::BinaryExpressionOperator<'db>))] +#[visit(drive(&crate::cst::BreakStatement<'db>))] +#[visit(drive(&crate::cst::BreakStatementChildren<'db>))] +#[visit(drive(&crate::cst::CallExpression<'db>))] +#[visit(drive(&crate::cst::CallExpressionArguments<'db>))] +#[visit(drive(&crate::cst::CallExpressionChildren<'db>))] +#[visit(drive(&crate::cst::CallExpressionFunction<'db>))] +#[visit(drive(&crate::cst::CallSignature<'db>))] +#[visit(drive(&crate::cst::CallSignatureChildren<'db>))] 
+#[visit(drive(&crate::cst::CallSignatureReturnType<'db>))] +#[visit(drive(&crate::cst::CatchClause<'db>))] +#[visit(drive(&crate::cst::CatchClauseChildren<'db>))] +#[visit(drive(&crate::cst::CatchClauseParameter<'db>))] +#[visit(drive(&crate::cst::Class<'db>))] +#[visit(drive(&crate::cst::ClassBody<'db>))] +#[visit(drive(&crate::cst::ClassBodyChildren<'db>))] +#[visit(drive(&crate::cst::ClassChildren<'db>))] +#[visit(drive(&crate::cst::ClassDeclaration<'db>))] +#[visit(drive(&crate::cst::ClassDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::ClassHeritage<'db>))] +#[visit(drive(&crate::cst::ClassHeritageChildren<'db>))] +#[visit(drive(&crate::cst::ClassStaticBlock<'db>))] +#[visit(drive(&crate::cst::ClassStaticBlockChildren<'db>))] +#[visit(drive(&crate::cst::Comment<'db>))] +#[visit(drive(&crate::cst::ComputedPropertyName<'db>))] +#[visit(drive(&crate::cst::ComputedPropertyNameChildren<'db>))] +#[visit(drive(&crate::cst::ConditionalType<'db>))] +#[visit(drive(&crate::cst::ConditionalTypeChildren<'db>))] +#[visit(drive(&crate::cst::Constraint<'db>))] +#[visit(drive(&crate::cst::ConstraintChildren<'db>))] +#[visit(drive(&crate::cst::ConstructSignature<'db>))] +#[visit(drive(&crate::cst::ConstructSignatureChildren<'db>))] +#[visit(drive(&crate::cst::ConstructorType<'db>))] +#[visit(drive(&crate::cst::ConstructorTypeChildren<'db>))] +#[visit(drive(&crate::cst::ContinueStatement<'db>))] +#[visit(drive(&crate::cst::ContinueStatementChildren<'db>))] +#[visit(drive(&crate::cst::DebuggerStatement<'db>))] +#[visit(drive(&crate::cst::Declaration<'db>))] +#[visit(drive(&crate::cst::Decorator<'db>))] +#[visit(drive(&crate::cst::DecoratorChildren<'db>))] +#[visit(drive(&crate::cst::DefaultType<'db>))] +#[visit(drive(&crate::cst::DefaultTypeChildren<'db>))] +#[visit(drive(&crate::cst::DoStatement<'db>))] +#[visit(drive(&crate::cst::DoStatementChildren<'db>))] +#[visit(drive(&crate::cst::ElseClause<'db>))] +#[visit(drive(&crate::cst::ElseClauseChildren<'db>))] 
+#[visit(drive(&crate::cst::EmptyStatement<'db>))] +#[visit(drive(&crate::cst::EnumAssignment<'db>))] +#[visit(drive(&crate::cst::EnumAssignmentChildren<'db>))] +#[visit(drive(&crate::cst::EnumAssignmentName<'db>))] +#[visit(drive(&crate::cst::EnumBody<'db>))] +#[visit(drive(&crate::cst::EnumBodyChildren<'db>))] +#[visit(drive(&crate::cst::EnumBodyName<'db>))] +#[visit(drive(&crate::cst::EnumDeclaration<'db>))] +#[visit(drive(&crate::cst::EnumDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::EscapeSequence<'db>))] +#[visit(drive(&crate::cst::ExistentialType<'db>))] +#[visit(drive(&crate::cst::ExportClause<'db>))] +#[visit(drive(&crate::cst::ExportClauseChildren<'db>))] +#[visit(drive(&crate::cst::ExportSpecifier<'db>))] +#[visit(drive(&crate::cst::ExportSpecifierAlias<'db>))] +#[visit(drive(&crate::cst::ExportSpecifierChildren<'db>))] +#[visit(drive(&crate::cst::ExportSpecifierName<'db>))] +#[visit(drive(&crate::cst::ExportStatement<'db>))] +#[visit(drive(&crate::cst::ExportStatementChildren<'db>))] +#[visit(drive(&crate::cst::Expression<'db>))] +#[visit(drive(&crate::cst::ExpressionStatement<'db>))] +#[visit(drive(&crate::cst::ExpressionStatementChildren<'db>))] +#[visit(drive(&crate::cst::ExtendsClause<'db>))] +#[visit(drive(&crate::cst::ExtendsClauseChildren<'db>))] +#[visit(drive(&crate::cst::ExtendsTypeClause<'db>))] +#[visit(drive(&crate::cst::ExtendsTypeClauseChildren<'db>))] +#[visit(drive(&crate::cst::ExtendsTypeClauseType<'db>))] +#[visit(drive(&crate::cst::False<'db>))] +#[visit(drive(&crate::cst::FinallyClause<'db>))] +#[visit(drive(&crate::cst::FinallyClauseChildren<'db>))] +#[visit(drive(&crate::cst::FlowMaybeType<'db>))] +#[visit(drive(&crate::cst::FlowMaybeTypeChildren<'db>))] +#[visit(drive(&crate::cst::ForInStatement<'db>))] +#[visit(drive(&crate::cst::ForInStatementChildren<'db>))] +#[visit(drive(&crate::cst::ForInStatementKind<'db>))] +#[visit(drive(&crate::cst::ForInStatementLeft<'db>))] 
+#[visit(drive(&crate::cst::ForInStatementOperator<'db>))] +#[visit(drive(&crate::cst::ForInStatementRight<'db>))] +#[visit(drive(&crate::cst::ForStatement<'db>))] +#[visit(drive(&crate::cst::ForStatementChildren<'db>))] +#[visit(drive(&crate::cst::ForStatementCondition<'db>))] +#[visit(drive(&crate::cst::ForStatementIncrement<'db>))] +#[visit(drive(&crate::cst::ForStatementInitializer<'db>))] +#[visit(drive(&crate::cst::FormalParameters<'db>))] +#[visit(drive(&crate::cst::FormalParametersChildren<'db>))] +#[visit(drive(&crate::cst::FunctionDeclaration<'db>))] +#[visit(drive(&crate::cst::FunctionDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::FunctionDeclarationReturnType<'db>))] +#[visit(drive(&crate::cst::FunctionExpression<'db>))] +#[visit(drive(&crate::cst::FunctionExpressionChildren<'db>))] +#[visit(drive(&crate::cst::FunctionExpressionReturnType<'db>))] +#[visit(drive(&crate::cst::FunctionSignatureChildren<'db>))] +#[visit(drive(&crate::cst::FunctionSignatureReturnType<'db>))] +#[visit(drive(&crate::cst::FunctionType<'db>))] +#[visit(drive(&crate::cst::FunctionTypeChildren<'db>))] +#[visit(drive(&crate::cst::FunctionTypeReturnType<'db>))] +#[visit(drive(&crate::cst::GeneratorFunction<'db>))] +#[visit(drive(&crate::cst::GeneratorFunctionChildren<'db>))] +#[visit(drive(&crate::cst::GeneratorFunctionDeclaration<'db>))] +#[visit(drive(&crate::cst::GeneratorFunctionDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::GeneratorFunctionDeclarationReturnType<'db>))] +#[visit(drive(&crate::cst::GeneratorFunctionReturnType<'db>))] +#[visit(drive(&crate::cst::GenericType<'db>))] +#[visit(drive(&crate::cst::GenericTypeChildren<'db>))] +#[visit(drive(&crate::cst::GenericTypeName<'db>))] +#[visit(drive(&crate::cst::HashBangLine<'db>))] +#[visit(drive(&crate::cst::HtmlComment<'db>))] +#[visit(drive(&crate::cst::Identifier<'db>))] +#[visit(drive(&crate::cst::IfStatement<'db>))] +#[visit(drive(&crate::cst::IfStatementChildren<'db>))] 
+#[visit(drive(&crate::cst::ImplementsClause<'db>))] +#[visit(drive(&crate::cst::ImplementsClauseChildren<'db>))] +#[visit(drive(&crate::cst::Import<'db>))] +#[visit(drive(&crate::cst::ImportAlias<'db>))] +#[visit(drive(&crate::cst::ImportAliasChildren<'db>))] +#[visit(drive(&crate::cst::ImportAttribute<'db>))] +#[visit(drive(&crate::cst::ImportAttributeChildren<'db>))] +#[visit(drive(&crate::cst::ImportClause<'db>))] +#[visit(drive(&crate::cst::ImportClauseChildren<'db>))] +#[visit(drive(&crate::cst::ImportRequireClause<'db>))] +#[visit(drive(&crate::cst::ImportRequireClauseChildren<'db>))] +#[visit(drive(&crate::cst::ImportSpecifier<'db>))] +#[visit(drive(&crate::cst::ImportSpecifierChildren<'db>))] +#[visit(drive(&crate::cst::ImportSpecifierName<'db>))] +#[visit(drive(&crate::cst::ImportStatement<'db>))] +#[visit(drive(&crate::cst::ImportStatementChildren<'db>))] +#[visit(drive(&crate::cst::IndexSignature<'db>))] +#[visit(drive(&crate::cst::IndexSignatureChildren<'db>))] +#[visit(drive(&crate::cst::IndexSignatureSign<'db>))] +#[visit(drive(&crate::cst::IndexSignatureType<'db>))] +#[visit(drive(&crate::cst::IndexTypeQuery<'db>))] +#[visit(drive(&crate::cst::IndexTypeQueryChildren<'db>))] +#[visit(drive(&crate::cst::InferType<'db>))] +#[visit(drive(&crate::cst::InferTypeChildren<'db>))] +#[visit(drive(&crate::cst::InstantiationExpression<'db>))] +#[visit(drive(&crate::cst::InstantiationExpressionChildren<'db>))] +#[visit(drive(&crate::cst::InstantiationExpressionFunction<'db>))] +#[visit(drive(&crate::cst::InterfaceBody<'db>))] +#[visit(drive(&crate::cst::InterfaceBodyChildren<'db>))] +#[visit(drive(&crate::cst::InterfaceDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::InternalModule<'db>))] +#[visit(drive(&crate::cst::InternalModuleChildren<'db>))] +#[visit(drive(&crate::cst::InternalModuleName<'db>))] +#[visit(drive(&crate::cst::IntersectionType<'db>))] +#[visit(drive(&crate::cst::IntersectionTypeChildren<'db>))] 
+#[visit(drive(&crate::cst::LabeledStatement<'db>))] +#[visit(drive(&crate::cst::LabeledStatementChildren<'db>))] +#[visit(drive(&crate::cst::LexicalDeclaration<'db>))] +#[visit(drive(&crate::cst::LexicalDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::LexicalDeclarationKind<'db>))] +#[visit(drive(&crate::cst::LiteralType<'db>))] +#[visit(drive(&crate::cst::LiteralTypeChildren<'db>))] +#[visit(drive(&crate::cst::LookupType<'db>))] +#[visit(drive(&crate::cst::LookupTypeChildren<'db>))] +#[visit(drive(&crate::cst::MappedTypeClause<'db>))] +#[visit(drive(&crate::cst::MappedTypeClauseChildren<'db>))] +#[visit(drive(&crate::cst::MemberExpression<'db>))] +#[visit(drive(&crate::cst::MemberExpressionChildren<'db>))] +#[visit(drive(&crate::cst::MemberExpressionObject<'db>))] +#[visit(drive(&crate::cst::MemberExpressionProperty<'db>))] +#[visit(drive(&crate::cst::MetaProperty<'db>))] +#[visit(drive(&crate::cst::MethodDefinition<'db>))] +#[visit(drive(&crate::cst::MethodDefinitionChildren<'db>))] +#[visit(drive(&crate::cst::MethodDefinitionName<'db>))] +#[visit(drive(&crate::cst::MethodDefinitionReturnType<'db>))] +#[visit(drive(&crate::cst::MethodSignature<'db>))] +#[visit(drive(&crate::cst::MethodSignatureChildren<'db>))] +#[visit(drive(&crate::cst::MethodSignatureName<'db>))] +#[visit(drive(&crate::cst::MethodSignatureReturnType<'db>))] +#[visit(drive(&crate::cst::ModuleChildren<'db>))] +#[visit(drive(&crate::cst::ModuleName<'db>))] +#[visit(drive(&crate::cst::NamedImports<'db>))] +#[visit(drive(&crate::cst::NamedImportsChildren<'db>))] +#[visit(drive(&crate::cst::NamespaceExport<'db>))] +#[visit(drive(&crate::cst::NamespaceExportChildren<'db>))] +#[visit(drive(&crate::cst::NamespaceImport<'db>))] +#[visit(drive(&crate::cst::NamespaceImportChildren<'db>))] +#[visit(drive(&crate::cst::NestedIdentifier<'db>))] +#[visit(drive(&crate::cst::NestedIdentifierChildren<'db>))] +#[visit(drive(&crate::cst::NestedIdentifierObject<'db>))] 
+#[visit(drive(&crate::cst::NestedTypeIdentifier<'db>))] +#[visit(drive(&crate::cst::NestedTypeIdentifierChildren<'db>))] +#[visit(drive(&crate::cst::NestedTypeIdentifierModule<'db>))] +#[visit(drive(&crate::cst::NewExpression<'db>))] +#[visit(drive(&crate::cst::NewExpressionChildren<'db>))] +#[visit(drive(&crate::cst::NonNullExpression<'db>))] +#[visit(drive(&crate::cst::NonNullExpressionChildren<'db>))] +#[visit(drive(&crate::cst::Null<'db>))] +#[visit(drive(&crate::cst::Number<'db>))] +#[visit(drive(&crate::cst::Object<'db>))] +#[visit(drive(&crate::cst::ObjectAssignmentPattern<'db>))] +#[visit(drive(&crate::cst::ObjectAssignmentPatternChildren<'db>))] +#[visit(drive(&crate::cst::ObjectAssignmentPatternLeft<'db>))] +#[visit(drive(&crate::cst::ObjectChildren<'db>))] +#[visit(drive(&crate::cst::ObjectPattern<'db>))] +#[visit(drive(&crate::cst::ObjectPatternChildren<'db>))] +#[visit(drive(&crate::cst::ObjectType<'db>))] +#[visit(drive(&crate::cst::ObjectTypeChildren<'db>))] +#[visit(drive(&crate::cst::OmittingTypeAnnotation<'db>))] +#[visit(drive(&crate::cst::OmittingTypeAnnotationChildren<'db>))] +#[visit(drive(&crate::cst::OptingTypeAnnotation<'db>))] +#[visit(drive(&crate::cst::OptingTypeAnnotationChildren<'db>))] +#[visit(drive(&crate::cst::OptionalChain<'db>))] +#[visit(drive(&crate::cst::OptionalParameter<'db>))] +#[visit(drive(&crate::cst::OptionalParameterChildren<'db>))] +#[visit(drive(&crate::cst::OptionalParameterPattern<'db>))] +#[visit(drive(&crate::cst::OptionalType<'db>))] +#[visit(drive(&crate::cst::OptionalTypeChildren<'db>))] +#[visit(drive(&crate::cst::OverrideModifier<'db>))] +#[visit(drive(&crate::cst::Pair<'db>))] +#[visit(drive(&crate::cst::PairChildren<'db>))] +#[visit(drive(&crate::cst::PairKey<'db>))] +#[visit(drive(&crate::cst::PairPattern<'db>))] +#[visit(drive(&crate::cst::PairPatternChildren<'db>))] +#[visit(drive(&crate::cst::PairPatternKey<'db>))] +#[visit(drive(&crate::cst::PairPatternValue<'db>))] 
+#[visit(drive(&crate::cst::ParenthesizedExpression<'db>))] +#[visit(drive(&crate::cst::ParenthesizedExpressionChildren<'db>))] +#[visit(drive(&crate::cst::ParenthesizedType<'db>))] +#[visit(drive(&crate::cst::ParenthesizedTypeChildren<'db>))] +#[visit(drive(&crate::cst::Pattern<'db>))] +#[visit(drive(&crate::cst::PredefinedType<'db>))] +#[visit(drive(&crate::cst::PrimaryExpression<'db>))] +#[visit(drive(&crate::cst::PrimaryType<'db>))] +#[visit(drive(&crate::cst::PrivatePropertyIdentifier<'db>))] +#[visit(drive(&crate::cst::Program<'db>))] +#[visit(drive(&crate::cst::ProgramChildren<'db>))] +#[visit(drive(&crate::cst::PropertyIdentifier<'db>))] +#[visit(drive(&crate::cst::PropertySignature<'db>))] +#[visit(drive(&crate::cst::PropertySignatureChildren<'db>))] +#[visit(drive(&crate::cst::PropertySignatureName<'db>))] +#[visit(drive(&crate::cst::PublicFieldDefinition<'db>))] +#[visit(drive(&crate::cst::PublicFieldDefinitionChildren<'db>))] +#[visit(drive(&crate::cst::PublicFieldDefinitionName<'db>))] +#[visit(drive(&crate::cst::ReadonlyType<'db>))] +#[visit(drive(&crate::cst::ReadonlyTypeChildren<'db>))] +#[visit(drive(&crate::cst::Regex<'db>))] +#[visit(drive(&crate::cst::RegexChildren<'db>))] +#[visit(drive(&crate::cst::RegexFlags<'db>))] +#[visit(drive(&crate::cst::RegexPattern<'db>))] +#[visit(drive(&crate::cst::RequiredParameter<'db>))] +#[visit(drive(&crate::cst::RequiredParameterChildren<'db>))] +#[visit(drive(&crate::cst::RequiredParameterName<'db>))] +#[visit(drive(&crate::cst::RequiredParameterPattern<'db>))] +#[visit(drive(&crate::cst::RestPattern<'db>))] +#[visit(drive(&crate::cst::RestPatternChildren<'db>))] +#[visit(drive(&crate::cst::RestType<'db>))] +#[visit(drive(&crate::cst::RestTypeChildren<'db>))] +#[visit(drive(&crate::cst::ReturnStatement<'db>))] +#[visit(drive(&crate::cst::ReturnStatementChildren<'db>))] +#[visit(drive(&crate::cst::SatisfiesExpression<'db>))] +#[visit(drive(&crate::cst::SatisfiesExpressionChildren<'db>))] 
+#[visit(drive(&crate::cst::SequenceExpression<'db>))] +#[visit(drive(&crate::cst::SequenceExpressionChildren<'db>))] +#[visit(drive(&crate::cst::ShorthandPropertyIdentifier<'db>))] +#[visit(drive(&crate::cst::ShorthandPropertyIdentifierPattern<'db>))] +#[visit(drive(&crate::cst::SpreadElement<'db>))] +#[visit(drive(&crate::cst::SpreadElementChildren<'db>))] +#[visit(drive(&crate::cst::Statement<'db>))] +#[visit(drive(&crate::cst::StatementBlock<'db>))] +#[visit(drive(&crate::cst::StatementBlockChildren<'db>))] +#[visit(drive(&crate::cst::StatementIdentifier<'db>))] +#[visit(drive(&crate::cst::String<'db>))] +#[visit(drive(&crate::cst::StringChildren<'db>))] +#[visit(drive(&crate::cst::StringFragment<'db>))] +#[visit(drive(&crate::cst::SubscriptExpression<'db>))] +#[visit(drive(&crate::cst::SubscriptExpressionChildren<'db>))] +#[visit(drive(&crate::cst::SubscriptExpressionIndex<'db>))] +#[visit(drive(&crate::cst::Super<'db>))] +#[visit(drive(&crate::cst::SwitchBody<'db>))] +#[visit(drive(&crate::cst::SwitchBodyChildren<'db>))] +#[visit(drive(&crate::cst::SwitchCase<'db>))] +#[visit(drive(&crate::cst::SwitchCaseChildren<'db>))] +#[visit(drive(&crate::cst::SwitchCaseValue<'db>))] +#[visit(drive(&crate::cst::SwitchDefault<'db>))] +#[visit(drive(&crate::cst::SwitchDefaultChildren<'db>))] +#[visit(drive(&crate::cst::SwitchStatement<'db>))] +#[visit(drive(&crate::cst::SwitchStatementChildren<'db>))] +#[visit(drive(&crate::cst::TemplateLiteralType<'db>))] +#[visit(drive(&crate::cst::TemplateLiteralTypeChildren<'db>))] +#[visit(drive(&crate::cst::TemplateString<'db>))] +#[visit(drive(&crate::cst::TemplateStringChildren<'db>))] +#[visit(drive(&crate::cst::TemplateSubstitution<'db>))] +#[visit(drive(&crate::cst::TemplateSubstitutionChildren<'db>))] +#[visit(drive(&crate::cst::TemplateType<'db>))] +#[visit(drive(&crate::cst::TemplateTypeChildren<'db>))] +#[visit(drive(&crate::cst::TernaryExpression<'db>))] +#[visit(drive(&crate::cst::TernaryExpressionChildren<'db>))] 
+#[visit(drive(&crate::cst::This<'db>))] +#[visit(drive(&crate::cst::ThisType<'db>))] +#[visit(drive(&crate::cst::ThrowStatement<'db>))] +#[visit(drive(&crate::cst::ThrowStatementChildren<'db>))] +#[visit(drive(&crate::cst::True<'db>))] +#[visit(drive(&crate::cst::TryStatement<'db>))] +#[visit(drive(&crate::cst::TryStatementChildren<'db>))] +#[visit(drive(&crate::cst::TupleType<'db>))] +#[visit(drive(&crate::cst::TupleTypeChildren<'db>))] +#[visit(drive(&crate::cst::Type<'db>))] +#[visit(drive(&crate::cst::TypeAliasDeclaration<'db>))] +#[visit(drive(&crate::cst::TypeAliasDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::TypeAnnotation<'db>))] +#[visit(drive(&crate::cst::TypeAnnotationChildren<'db>))] +#[visit(drive(&crate::cst::TypeArguments<'db>))] +#[visit(drive(&crate::cst::TypeArgumentsChildren<'db>))] +#[visit(drive(&crate::cst::TypeAssertion<'db>))] +#[visit(drive(&crate::cst::TypeAssertionChildren<'db>))] +#[visit(drive(&crate::cst::TypeIdentifier<'db>))] +#[visit(drive(&crate::cst::TypeParameter<'db>))] +#[visit(drive(&crate::cst::TypeParameterChildren<'db>))] +#[visit(drive(&crate::cst::TypeParameters<'db>))] +#[visit(drive(&crate::cst::TypeParametersChildren<'db>))] +#[visit(drive(&crate::cst::TypePredicate<'db>))] +#[visit(drive(&crate::cst::TypePredicateAnnotation<'db>))] +#[visit(drive(&crate::cst::TypePredicateAnnotationChildren<'db>))] +#[visit(drive(&crate::cst::TypePredicateChildren<'db>))] +#[visit(drive(&crate::cst::TypePredicateName<'db>))] +#[visit(drive(&crate::cst::TypeQuery<'db>))] +#[visit(drive(&crate::cst::TypeQueryChildren<'db>))] +#[visit(drive(&crate::cst::UnaryExpression<'db>))] +#[visit(drive(&crate::cst::UnaryExpressionArgument<'db>))] +#[visit(drive(&crate::cst::UnaryExpressionChildren<'db>))] +#[visit(drive(&crate::cst::UnaryExpressionOperator<'db>))] +#[visit(drive(&crate::cst::Undefined<'db>))] +#[visit(drive(&crate::cst::UnionType<'db>))] +#[visit(drive(&crate::cst::UnionTypeChildren<'db>))] 
+#[visit(drive(&crate::cst::UpdateExpression<'db>))] +#[visit(drive(&crate::cst::UpdateExpressionChildren<'db>))] +#[visit(drive(&crate::cst::UpdateExpressionOperator<'db>))] +#[visit(drive(&crate::cst::VariableDeclaration<'db>))] +#[visit(drive(&crate::cst::VariableDeclarationChildren<'db>))] +#[visit(drive(&crate::cst::VariableDeclarator<'db>))] +#[visit(drive(&crate::cst::VariableDeclaratorChildren<'db>))] +#[visit(drive(&crate::cst::VariableDeclaratorName<'db>))] +#[visit(drive(&crate::cst::WhileStatement<'db>))] +#[visit(drive(&crate::cst::WhileStatementChildren<'db>))] +#[visit(drive(&crate::cst::WithStatement<'db>))] +#[visit(drive(&crate::cst::WithStatementChildren<'db>))] +#[visit(drive(&crate::cst::YieldExpression<'db>))] +#[visit(drive(&crate::cst::YieldExpressionChildren<'db>))] +#[visit(drive(&crate::cst::AbstractClassDeclaration<'db>))] +#[visit(drive(&crate::cst::AbstractMethodSignature<'db>))] +#[visit(drive(&crate::cst::FunctionSignature<'db>))] +#[visit(drive(&crate::cst::InterfaceDeclaration<'db>))] +#[visit(drive(&crate::cst::Module<'db>))] +#[visit(drive(crate::cst::AbstractClassDeclarationChildren<'db>))] +#[visit(drive(crate::cst::AbstractMethodSignatureChildren<'db>))] +#[visit(drive(crate::cst::AbstractMethodSignatureName<'db>))] +#[visit(drive(crate::cst::AbstractMethodSignatureReturnType<'db>))] +#[visit(drive(crate::cst::AccessibilityModifier<'db>))] +#[visit(drive(crate::cst::AddingTypeAnnotation<'db>))] +#[visit(drive(crate::cst::AddingTypeAnnotationChildren<'db>))] +#[visit(drive(crate::cst::AmbientDeclaration<'db>))] +#[visit(drive(crate::cst::AmbientDeclarationChildren<'db>))] +#[visit(drive(crate::cst::AnonymousAbstract<'db>))] +#[visit(drive(crate::cst::AnonymousAccessor<'db>))] +#[visit(drive(crate::cst::AnonymousAmpersand<'db>))] +#[visit(drive(crate::cst::AnonymousAmpersandAmpersand<'db>))] +#[visit(drive(crate::cst::AnonymousAmpersandAmpersandEquals<'db>))] +#[visit(drive(crate::cst::AnonymousAmpersandEquals<'db>))] 
+#[visit(drive(crate::cst::AnonymousAny<'db>))] +#[visit(drive(crate::cst::AnonymousAs<'db>))] +#[visit(drive(crate::cst::AnonymousAssert<'db>))] +#[visit(drive(crate::cst::AnonymousAsserts<'db>))] +#[visit(drive(crate::cst::AnonymousAsterisk<'db>))] +#[visit(drive(crate::cst::AnonymousAsteriskAsterisk<'db>))] +#[visit(drive(crate::cst::AnonymousAsteriskAsteriskEquals<'db>))] +#[visit(drive(crate::cst::AnonymousAsteriskEquals<'db>))] +#[visit(drive(crate::cst::AnonymousAsync<'db>))] +#[visit(drive(crate::cst::AnonymousAt<'db>))] +#[visit(drive(crate::cst::AnonymousAwait<'db>))] +#[visit(drive(crate::cst::AnonymousBacktick<'db>))] +#[visit(drive(crate::cst::AnonymousBang<'db>))] +#[visit(drive(crate::cst::AnonymousBangEquals<'db>))] +#[visit(drive(crate::cst::AnonymousBangEqualsEquals<'db>))] +#[visit(drive(crate::cst::AnonymousBoolean<'db>))] +#[visit(drive(crate::cst::AnonymousBreak<'db>))] +#[visit(drive(crate::cst::AnonymousCaret<'db>))] +#[visit(drive(crate::cst::AnonymousCaretEquals<'db>))] +#[visit(drive(crate::cst::AnonymousCase<'db>))] +#[visit(drive(crate::cst::AnonymousCatch<'db>))] +#[visit(drive(crate::cst::AnonymousClass<'db>))] +#[visit(drive(crate::cst::AnonymousCloseBrace<'db>))] +#[visit(drive(crate::cst::AnonymousCloseBracket<'db>))] +#[visit(drive(crate::cst::AnonymousCloseParen<'db>))] +#[visit(drive(crate::cst::AnonymousColon<'db>))] +#[visit(drive(crate::cst::AnonymousComma<'db>))] +#[visit(drive(crate::cst::AnonymousConst<'db>))] +#[visit(drive(crate::cst::AnonymousContinue<'db>))] +#[visit(drive(crate::cst::AnonymousDebugger<'db>))] +#[visit(drive(crate::cst::AnonymousDeclare<'db>))] +#[visit(drive(crate::cst::AnonymousDefault<'db>))] +#[visit(drive(crate::cst::AnonymousDelete<'db>))] +#[visit(drive(crate::cst::AnonymousDo<'db>))] +#[visit(drive(crate::cst::AnonymousDollarOpenBrace<'db>))] +#[visit(drive(crate::cst::AnonymousDot<'db>))] +#[visit(drive(crate::cst::AnonymousDotDotDot<'db>))] 
+#[visit(drive(crate::cst::AnonymousDoubleQuote<'db>))] +#[visit(drive(crate::cst::AnonymousElse<'db>))] +#[visit(drive(crate::cst::AnonymousEnum<'db>))] +#[visit(drive(crate::cst::AnonymousEquals<'db>))] +#[visit(drive(crate::cst::AnonymousEqualsEquals<'db>))] +#[visit(drive(crate::cst::AnonymousEqualsEqualsEquals<'db>))] +#[visit(drive(crate::cst::AnonymousEqualsGreaterThan<'db>))] +#[visit(drive(crate::cst::AnonymousExport<'db>))] +#[visit(drive(crate::cst::AnonymousExtends<'db>))] +#[visit(drive(crate::cst::AnonymousFinally<'db>))] +#[visit(drive(crate::cst::AnonymousFor<'db>))] +#[visit(drive(crate::cst::AnonymousFrom<'db>))] +#[visit(drive(crate::cst::AnonymousFunction<'db>))] +#[visit(drive(crate::cst::AnonymousGet<'db>))] +#[visit(drive(crate::cst::AnonymousGlobal<'db>))] +#[visit(drive(crate::cst::AnonymousGreaterThan<'db>))] +#[visit(drive(crate::cst::AnonymousGreaterThanEquals<'db>))] +#[visit(drive(crate::cst::AnonymousGreaterThanGreaterThan<'db>))] +#[visit(drive(crate::cst::AnonymousGreaterThanGreaterThanEquals<'db>))] +#[visit(drive(crate::cst::AnonymousGreaterThanGreaterThanGreaterThan<'db>))] +#[visit(drive(crate::cst::AnonymousGreaterThanGreaterThanGreaterThanEquals<'db>))] +#[visit(drive(crate::cst::AnonymousIf<'db>))] +#[visit(drive(crate::cst::AnonymousImplements<'db>))] +#[visit(drive(crate::cst::AnonymousImport<'db>))] +#[visit(drive(crate::cst::AnonymousIn<'db>))] +#[visit(drive(crate::cst::AnonymousInfer<'db>))] +#[visit(drive(crate::cst::AnonymousInstanceof<'db>))] +#[visit(drive(crate::cst::AnonymousInterface<'db>))] +#[visit(drive(crate::cst::AnonymousIs<'db>))] +#[visit(drive(crate::cst::AnonymousKeyof<'db>))] +#[visit(drive(crate::cst::AnonymousLessThan<'db>))] +#[visit(drive(crate::cst::AnonymousLessThanEquals<'db>))] +#[visit(drive(crate::cst::AnonymousLessThanLessThan<'db>))] +#[visit(drive(crate::cst::AnonymousLessThanLessThanEquals<'db>))] +#[visit(drive(crate::cst::AnonymousLet<'db>))] 
+#[visit(drive(crate::cst::AnonymousMeta<'db>))] +#[visit(drive(crate::cst::AnonymousMinus<'db>))] +#[visit(drive(crate::cst::AnonymousMinusEquals<'db>))] +#[visit(drive(crate::cst::AnonymousMinusMinus<'db>))] +#[visit(drive(crate::cst::AnonymousMinusQuestionMarkColon<'db>))] +#[visit(drive(crate::cst::AnonymousModule<'db>))] +#[visit(drive(crate::cst::AnonymousNamespace<'db>))] +#[visit(drive(crate::cst::AnonymousNever<'db>))] +#[visit(drive(crate::cst::AnonymousNew<'db>))] +#[visit(drive(crate::cst::AnonymousNumber<'db>))] +#[visit(drive(crate::cst::AnonymousObject<'db>))] +#[visit(drive(crate::cst::AnonymousOf<'db>))] +#[visit(drive(crate::cst::AnonymousOpenBrace<'db>))] +#[visit(drive(crate::cst::AnonymousOpenBracePipe<'db>))] +#[visit(drive(crate::cst::AnonymousOpenBracket<'db>))] +#[visit(drive(crate::cst::AnonymousOpenParen<'db>))] +#[visit(drive(crate::cst::AnonymousOverride<'db>))] +#[visit(drive(crate::cst::AnonymousPercent<'db>))] +#[visit(drive(crate::cst::AnonymousPercentEquals<'db>))] +#[visit(drive(crate::cst::AnonymousPipe<'db>))] +#[visit(drive(crate::cst::AnonymousPipeCloseBrace<'db>))] +#[visit(drive(crate::cst::AnonymousPipeEquals<'db>))] +#[visit(drive(crate::cst::AnonymousPipePipe<'db>))] +#[visit(drive(crate::cst::AnonymousPipePipeEquals<'db>))] +#[visit(drive(crate::cst::AnonymousPlus<'db>))] +#[visit(drive(crate::cst::AnonymousPlusEquals<'db>))] +#[visit(drive(crate::cst::AnonymousPlusPlus<'db>))] +#[visit(drive(crate::cst::AnonymousPlusQuestionMarkColon<'db>))] +#[visit(drive(crate::cst::AnonymousPrivate<'db>))] +#[visit(drive(crate::cst::AnonymousProtected<'db>))] +#[visit(drive(crate::cst::AnonymousPublic<'db>))] +#[visit(drive(crate::cst::AnonymousQuestionMark<'db>))] +#[visit(drive(crate::cst::AnonymousQuestionMarkColon<'db>))] +#[visit(drive(crate::cst::AnonymousQuestionMarkDot<'db>))] +#[visit(drive(crate::cst::AnonymousQuestionMarkQuestionMark<'db>))] +#[visit(drive(crate::cst::AnonymousQuestionMarkQuestionMarkEquals<'db>))] 
+#[visit(drive(crate::cst::AnonymousReadonly<'db>))] +#[visit(drive(crate::cst::AnonymousRequire<'db>))] +#[visit(drive(crate::cst::AnonymousReturn<'db>))] +#[visit(drive(crate::cst::AnonymousSatisfies<'db>))] +#[visit(drive(crate::cst::AnonymousSemicolon<'db>))] +#[visit(drive(crate::cst::AnonymousSet<'db>))] +#[visit(drive(crate::cst::AnonymousSingleQuote<'db>))] +#[visit(drive(crate::cst::AnonymousSlash<'db>))] +#[visit(drive(crate::cst::AnonymousSlashEquals<'db>))] +#[visit(drive(crate::cst::AnonymousStatic<'db>))] +#[visit(drive(crate::cst::AnonymousString<'db>))] +#[visit(drive(crate::cst::AnonymousSwitch<'db>))] +#[visit(drive(crate::cst::AnonymousSymbol<'db>))] +#[visit(drive(crate::cst::AnonymousTarget<'db>))] +#[visit(drive(crate::cst::AnonymousThrow<'db>))] +#[visit(drive(crate::cst::AnonymousTilde<'db>))] +#[visit(drive(crate::cst::AnonymousTry<'db>))] +#[visit(drive(crate::cst::AnonymousType<'db>))] +#[visit(drive(crate::cst::AnonymousTypeof<'db>))] +#[visit(drive(crate::cst::AnonymousUniqueSymbol<'db>))] +#[visit(drive(crate::cst::AnonymousUnknown<'db>))] +#[visit(drive(crate::cst::AnonymousUsing<'db>))] +#[visit(drive(crate::cst::AnonymousVar<'db>))] +#[visit(drive(crate::cst::AnonymousVoid<'db>))] +#[visit(drive(crate::cst::AnonymousWhile<'db>))] +#[visit(drive(crate::cst::AnonymousWith<'db>))] +#[visit(drive(crate::cst::AnonymousYield<'db>))] +#[visit(drive(crate::cst::Arguments<'db>))] +#[visit(drive(crate::cst::ArgumentsChildren<'db>))] +#[visit(drive(crate::cst::Array<'db>))] +#[visit(drive(crate::cst::ArrayChildren<'db>))] +#[visit(drive(crate::cst::ArrayPattern<'db>))] +#[visit(drive(crate::cst::ArrayPatternChildren<'db>))] +#[visit(drive(crate::cst::ArrayType<'db>))] +#[visit(drive(crate::cst::ArrayTypeChildren<'db>))] +#[visit(drive(crate::cst::ArrowFunction<'db>))] +#[visit(drive(crate::cst::ArrowFunctionBody<'db>))] +#[visit(drive(crate::cst::ArrowFunctionChildren<'db>))] +#[visit(drive(crate::cst::ArrowFunctionReturnType<'db>))] 
+#[visit(drive(crate::cst::AsExpression<'db>))] +#[visit(drive(crate::cst::AsExpressionChildren<'db>))] +#[visit(drive(crate::cst::Asserts<'db>))] +#[visit(drive(crate::cst::AssertsAnnotation<'db>))] +#[visit(drive(crate::cst::AssertsAnnotationChildren<'db>))] +#[visit(drive(crate::cst::AssertsChildren<'db>))] +#[visit(drive(crate::cst::AssignmentExpression<'db>))] +#[visit(drive(crate::cst::AssignmentExpressionChildren<'db>))] +#[visit(drive(crate::cst::AssignmentExpressionLeft<'db>))] +#[visit(drive(crate::cst::AssignmentPattern<'db>))] +#[visit(drive(crate::cst::AssignmentPatternChildren<'db>))] +#[visit(drive(crate::cst::AugmentedAssignmentExpression<'db>))] +#[visit(drive(crate::cst::AugmentedAssignmentExpressionChildren<'db>))] +#[visit(drive(crate::cst::AugmentedAssignmentExpressionLeft<'db>))] +#[visit(drive(crate::cst::AugmentedAssignmentExpressionOperator<'db>))] +#[visit(drive(crate::cst::AwaitExpression<'db>))] +#[visit(drive(crate::cst::AwaitExpressionChildren<'db>))] +#[visit(drive(crate::cst::BinaryExpression<'db>))] +#[visit(drive(crate::cst::BinaryExpressionChildren<'db>))] +#[visit(drive(crate::cst::BinaryExpressionLeft<'db>))] +#[visit(drive(crate::cst::BinaryExpressionOperator<'db>))] +#[visit(drive(crate::cst::BreakStatement<'db>))] +#[visit(drive(crate::cst::BreakStatementChildren<'db>))] +#[visit(drive(crate::cst::CallExpression<'db>))] +#[visit(drive(crate::cst::CallExpressionArguments<'db>))] +#[visit(drive(crate::cst::CallExpressionChildren<'db>))] +#[visit(drive(crate::cst::CallExpressionFunction<'db>))] +#[visit(drive(crate::cst::CallSignature<'db>))] +#[visit(drive(crate::cst::CallSignatureChildren<'db>))] +#[visit(drive(crate::cst::CallSignatureReturnType<'db>))] +#[visit(drive(crate::cst::CatchClause<'db>))] +#[visit(drive(crate::cst::CatchClauseChildren<'db>))] +#[visit(drive(crate::cst::CatchClauseParameter<'db>))] +#[visit(drive(crate::cst::Class<'db>))] +#[visit(drive(crate::cst::ClassBody<'db>))] 
+#[visit(drive(crate::cst::ClassBodyChildren<'db>))] +#[visit(drive(crate::cst::ClassChildren<'db>))] +#[visit(drive(crate::cst::ClassDeclaration<'db>))] +#[visit(drive(crate::cst::ClassDeclarationChildren<'db>))] +#[visit(drive(crate::cst::ClassHeritage<'db>))] +#[visit(drive(crate::cst::ClassHeritageChildren<'db>))] +#[visit(drive(crate::cst::ClassStaticBlock<'db>))] +#[visit(drive(crate::cst::ClassStaticBlockChildren<'db>))] +#[visit(drive(crate::cst::Comment<'db>))] +#[visit(drive(crate::cst::ComputedPropertyName<'db>))] +#[visit(drive(crate::cst::ComputedPropertyNameChildren<'db>))] +#[visit(drive(crate::cst::ConditionalType<'db>))] +#[visit(drive(crate::cst::ConditionalTypeChildren<'db>))] +#[visit(drive(crate::cst::Constraint<'db>))] +#[visit(drive(crate::cst::ConstraintChildren<'db>))] +#[visit(drive(crate::cst::ConstructSignature<'db>))] +#[visit(drive(crate::cst::ConstructSignatureChildren<'db>))] +#[visit(drive(crate::cst::ConstructorType<'db>))] +#[visit(drive(crate::cst::ConstructorTypeChildren<'db>))] +#[visit(drive(crate::cst::ContinueStatement<'db>))] +#[visit(drive(crate::cst::ContinueStatementChildren<'db>))] +#[visit(drive(crate::cst::DebuggerStatement<'db>))] +#[visit(drive(crate::cst::Declaration<'db>))] +#[visit(drive(crate::cst::Decorator<'db>))] +#[visit(drive(crate::cst::DecoratorChildren<'db>))] +#[visit(drive(crate::cst::DefaultType<'db>))] +#[visit(drive(crate::cst::DefaultTypeChildren<'db>))] +#[visit(drive(crate::cst::DoStatement<'db>))] +#[visit(drive(crate::cst::DoStatementChildren<'db>))] +#[visit(drive(crate::cst::ElseClause<'db>))] +#[visit(drive(crate::cst::ElseClauseChildren<'db>))] +#[visit(drive(crate::cst::EmptyStatement<'db>))] +#[visit(drive(crate::cst::EnumAssignment<'db>))] +#[visit(drive(crate::cst::EnumAssignmentChildren<'db>))] +#[visit(drive(crate::cst::EnumAssignmentName<'db>))] +#[visit(drive(crate::cst::EnumBody<'db>))] +#[visit(drive(crate::cst::EnumBodyChildren<'db>))] 
+#[visit(drive(crate::cst::EnumBodyName<'db>))] +#[visit(drive(crate::cst::EnumDeclaration<'db>))] +#[visit(drive(crate::cst::EnumDeclarationChildren<'db>))] +#[visit(drive(crate::cst::EscapeSequence<'db>))] +#[visit(drive(crate::cst::ExistentialType<'db>))] +#[visit(drive(crate::cst::ExportClause<'db>))] +#[visit(drive(crate::cst::ExportClauseChildren<'db>))] +#[visit(drive(crate::cst::ExportSpecifier<'db>))] +#[visit(drive(crate::cst::ExportSpecifierAlias<'db>))] +#[visit(drive(crate::cst::ExportSpecifierChildren<'db>))] +#[visit(drive(crate::cst::ExportSpecifierName<'db>))] +#[visit(drive(crate::cst::ExportStatement<'db>))] +#[visit(drive(crate::cst::ExportStatementChildren<'db>))] +#[visit(drive(crate::cst::Expression<'db>))] +#[visit(drive(crate::cst::ExpressionStatement<'db>))] +#[visit(drive(crate::cst::ExpressionStatementChildren<'db>))] +#[visit(drive(crate::cst::ExtendsClause<'db>))] +#[visit(drive(crate::cst::ExtendsClauseChildren<'db>))] +#[visit(drive(crate::cst::ExtendsTypeClause<'db>))] +#[visit(drive(crate::cst::ExtendsTypeClauseChildren<'db>))] +#[visit(drive(crate::cst::ExtendsTypeClauseType<'db>))] +#[visit(drive(crate::cst::False<'db>))] +#[visit(drive(crate::cst::FinallyClause<'db>))] +#[visit(drive(crate::cst::FinallyClauseChildren<'db>))] +#[visit(drive(crate::cst::FlowMaybeType<'db>))] +#[visit(drive(crate::cst::FlowMaybeTypeChildren<'db>))] +#[visit(drive(crate::cst::ForInStatement<'db>))] +#[visit(drive(crate::cst::ForInStatementChildren<'db>))] +#[visit(drive(crate::cst::ForInStatementKind<'db>))] +#[visit(drive(crate::cst::ForInStatementLeft<'db>))] +#[visit(drive(crate::cst::ForInStatementOperator<'db>))] +#[visit(drive(crate::cst::ForInStatementRight<'db>))] +#[visit(drive(crate::cst::ForStatement<'db>))] +#[visit(drive(crate::cst::ForStatementChildren<'db>))] +#[visit(drive(crate::cst::ForStatementCondition<'db>))] +#[visit(drive(crate::cst::ForStatementIncrement<'db>))] +#[visit(drive(crate::cst::ForStatementInitializer<'db>))] 
+#[visit(drive(crate::cst::FormalParameters<'db>))] +#[visit(drive(crate::cst::FormalParametersChildren<'db>))] +#[visit(drive(crate::cst::FunctionDeclaration<'db>))] +#[visit(drive(crate::cst::FunctionDeclarationChildren<'db>))] +#[visit(drive(crate::cst::FunctionDeclarationReturnType<'db>))] +#[visit(drive(crate::cst::FunctionExpression<'db>))] +#[visit(drive(crate::cst::FunctionExpressionChildren<'db>))] +#[visit(drive(crate::cst::FunctionExpressionReturnType<'db>))] +#[visit(drive(crate::cst::FunctionSignatureChildren<'db>))] +#[visit(drive(crate::cst::FunctionSignatureReturnType<'db>))] +#[visit(drive(crate::cst::FunctionType<'db>))] +#[visit(drive(crate::cst::FunctionTypeChildren<'db>))] +#[visit(drive(crate::cst::FunctionTypeReturnType<'db>))] +#[visit(drive(crate::cst::GeneratorFunction<'db>))] +#[visit(drive(crate::cst::GeneratorFunctionChildren<'db>))] +#[visit(drive(crate::cst::GeneratorFunctionDeclaration<'db>))] +#[visit(drive(crate::cst::GeneratorFunctionDeclarationChildren<'db>))] +#[visit(drive(crate::cst::GeneratorFunctionDeclarationReturnType<'db>))] +#[visit(drive(crate::cst::GeneratorFunctionReturnType<'db>))] +#[visit(drive(crate::cst::GenericType<'db>))] +#[visit(drive(crate::cst::GenericTypeChildren<'db>))] +#[visit(drive(crate::cst::GenericTypeName<'db>))] +#[visit(drive(crate::cst::HashBangLine<'db>))] +#[visit(drive(crate::cst::HtmlComment<'db>))] +#[visit(drive(crate::cst::Identifier<'db>))] +#[visit(drive(crate::cst::IfStatement<'db>))] +#[visit(drive(crate::cst::IfStatementChildren<'db>))] +#[visit(drive(crate::cst::ImplementsClause<'db>))] +#[visit(drive(crate::cst::ImplementsClauseChildren<'db>))] +#[visit(drive(crate::cst::Import<'db>))] +#[visit(drive(crate::cst::ImportAlias<'db>))] +#[visit(drive(crate::cst::ImportAliasChildren<'db>))] +#[visit(drive(crate::cst::ImportAttribute<'db>))] +#[visit(drive(crate::cst::ImportAttributeChildren<'db>))] +#[visit(drive(crate::cst::ImportClause<'db>))] 
+#[visit(drive(crate::cst::ImportClauseChildren<'db>))] +#[visit(drive(crate::cst::ImportRequireClause<'db>))] +#[visit(drive(crate::cst::ImportRequireClauseChildren<'db>))] +#[visit(drive(crate::cst::ImportSpecifier<'db>))] +#[visit(drive(crate::cst::ImportSpecifierChildren<'db>))] +#[visit(drive(crate::cst::ImportSpecifierName<'db>))] +#[visit(drive(crate::cst::ImportStatement<'db>))] +#[visit(drive(crate::cst::ImportStatementChildren<'db>))] +#[visit(drive(crate::cst::IndexSignature<'db>))] +#[visit(drive(crate::cst::IndexSignatureChildren<'db>))] +#[visit(drive(crate::cst::IndexSignatureSign<'db>))] +#[visit(drive(crate::cst::IndexSignatureType<'db>))] +#[visit(drive(crate::cst::IndexTypeQuery<'db>))] +#[visit(drive(crate::cst::IndexTypeQueryChildren<'db>))] +#[visit(drive(crate::cst::InferType<'db>))] +#[visit(drive(crate::cst::InferTypeChildren<'db>))] +#[visit(drive(crate::cst::InstantiationExpression<'db>))] +#[visit(drive(crate::cst::InstantiationExpressionChildren<'db>))] +#[visit(drive(crate::cst::InstantiationExpressionFunction<'db>))] +#[visit(drive(crate::cst::InterfaceBody<'db>))] +#[visit(drive(crate::cst::InterfaceBodyChildren<'db>))] +#[visit(drive(crate::cst::InterfaceDeclarationChildren<'db>))] +#[visit(drive(crate::cst::InternalModule<'db>))] +#[visit(drive(crate::cst::InternalModuleChildren<'db>))] +#[visit(drive(crate::cst::InternalModuleName<'db>))] +#[visit(drive(crate::cst::IntersectionType<'db>))] +#[visit(drive(crate::cst::IntersectionTypeChildren<'db>))] +#[visit(drive(crate::cst::LabeledStatement<'db>))] +#[visit(drive(crate::cst::LabeledStatementChildren<'db>))] +#[visit(drive(crate::cst::LexicalDeclaration<'db>))] +#[visit(drive(crate::cst::LexicalDeclarationChildren<'db>))] +#[visit(drive(crate::cst::LexicalDeclarationKind<'db>))] +#[visit(drive(crate::cst::LiteralType<'db>))] +#[visit(drive(crate::cst::LiteralTypeChildren<'db>))] +#[visit(drive(crate::cst::LookupType<'db>))] +#[visit(drive(crate::cst::LookupTypeChildren<'db>))] 
+#[visit(drive(crate::cst::MappedTypeClause<'db>))] +#[visit(drive(crate::cst::MappedTypeClauseChildren<'db>))] +#[visit(drive(crate::cst::MemberExpression<'db>))] +#[visit(drive(crate::cst::MemberExpressionChildren<'db>))] +#[visit(drive(crate::cst::MemberExpressionObject<'db>))] +#[visit(drive(crate::cst::MemberExpressionProperty<'db>))] +#[visit(drive(crate::cst::MetaProperty<'db>))] +#[visit(drive(crate::cst::MethodDefinition<'db>))] +#[visit(drive(crate::cst::MethodDefinitionChildren<'db>))] +#[visit(drive(crate::cst::MethodDefinitionName<'db>))] +#[visit(drive(crate::cst::MethodDefinitionReturnType<'db>))] +#[visit(drive(crate::cst::MethodSignature<'db>))] +#[visit(drive(crate::cst::MethodSignatureChildren<'db>))] +#[visit(drive(crate::cst::MethodSignatureName<'db>))] +#[visit(drive(crate::cst::MethodSignatureReturnType<'db>))] +#[visit(drive(crate::cst::ModuleChildren<'db>))] +#[visit(drive(crate::cst::ModuleName<'db>))] +#[visit(drive(crate::cst::NamedImports<'db>))] +#[visit(drive(crate::cst::NamedImportsChildren<'db>))] +#[visit(drive(crate::cst::NamespaceExport<'db>))] +#[visit(drive(crate::cst::NamespaceExportChildren<'db>))] +#[visit(drive(crate::cst::NamespaceImport<'db>))] +#[visit(drive(crate::cst::NamespaceImportChildren<'db>))] +#[visit(drive(crate::cst::NestedIdentifier<'db>))] +#[visit(drive(crate::cst::NestedIdentifierChildren<'db>))] +#[visit(drive(crate::cst::NestedIdentifierObject<'db>))] +#[visit(drive(crate::cst::NestedTypeIdentifier<'db>))] +#[visit(drive(crate::cst::NestedTypeIdentifierChildren<'db>))] +#[visit(drive(crate::cst::NestedTypeIdentifierModule<'db>))] +#[visit(drive(crate::cst::NewExpression<'db>))] +#[visit(drive(crate::cst::NewExpressionChildren<'db>))] +#[visit(drive(crate::cst::NonNullExpression<'db>))] +#[visit(drive(crate::cst::NonNullExpressionChildren<'db>))] +#[visit(drive(crate::cst::Null<'db>))] +#[visit(drive(crate::cst::Number<'db>))] +#[visit(drive(crate::cst::Object<'db>))] 
+#[visit(drive(crate::cst::ObjectAssignmentPattern<'db>))] +#[visit(drive(crate::cst::ObjectAssignmentPatternChildren<'db>))] +#[visit(drive(crate::cst::ObjectAssignmentPatternLeft<'db>))] +#[visit(drive(crate::cst::ObjectChildren<'db>))] +#[visit(drive(crate::cst::ObjectPattern<'db>))] +#[visit(drive(crate::cst::ObjectPatternChildren<'db>))] +#[visit(drive(crate::cst::ObjectType<'db>))] +#[visit(drive(crate::cst::ObjectTypeChildren<'db>))] +#[visit(drive(crate::cst::OmittingTypeAnnotation<'db>))] +#[visit(drive(crate::cst::OmittingTypeAnnotationChildren<'db>))] +#[visit(drive(crate::cst::OptingTypeAnnotation<'db>))] +#[visit(drive(crate::cst::OptingTypeAnnotationChildren<'db>))] +#[visit(drive(crate::cst::OptionalChain<'db>))] +#[visit(drive(crate::cst::OptionalParameter<'db>))] +#[visit(drive(crate::cst::OptionalParameterChildren<'db>))] +#[visit(drive(crate::cst::OptionalParameterPattern<'db>))] +#[visit(drive(crate::cst::OptionalType<'db>))] +#[visit(drive(crate::cst::OptionalTypeChildren<'db>))] +#[visit(drive(crate::cst::OverrideModifier<'db>))] +#[visit(drive(crate::cst::Pair<'db>))] +#[visit(drive(crate::cst::PairChildren<'db>))] +#[visit(drive(crate::cst::PairKey<'db>))] +#[visit(drive(crate::cst::PairPattern<'db>))] +#[visit(drive(crate::cst::PairPatternChildren<'db>))] +#[visit(drive(crate::cst::PairPatternKey<'db>))] +#[visit(drive(crate::cst::PairPatternValue<'db>))] +#[visit(drive(crate::cst::ParenthesizedExpression<'db>))] +#[visit(drive(crate::cst::ParenthesizedExpressionChildren<'db>))] +#[visit(drive(crate::cst::ParenthesizedType<'db>))] +#[visit(drive(crate::cst::ParenthesizedTypeChildren<'db>))] +#[visit(drive(crate::cst::Pattern<'db>))] +#[visit(drive(crate::cst::PredefinedType<'db>))] +#[visit(drive(crate::cst::PrimaryExpression<'db>))] +#[visit(drive(crate::cst::PrimaryType<'db>))] +#[visit(drive(crate::cst::PrivatePropertyIdentifier<'db>))] +#[visit(drive(crate::cst::Program<'db>))] +#[visit(drive(crate::cst::ProgramChildren<'db>))] 
+#[visit(drive(crate::cst::PropertyIdentifier<'db>))] +#[visit(drive(crate::cst::PropertySignature<'db>))] +#[visit(drive(crate::cst::PropertySignatureChildren<'db>))] +#[visit(drive(crate::cst::PropertySignatureName<'db>))] +#[visit(drive(crate::cst::PublicFieldDefinition<'db>))] +#[visit(drive(crate::cst::PublicFieldDefinitionChildren<'db>))] +#[visit(drive(crate::cst::PublicFieldDefinitionName<'db>))] +#[visit(drive(crate::cst::ReadonlyType<'db>))] +#[visit(drive(crate::cst::ReadonlyTypeChildren<'db>))] +#[visit(drive(crate::cst::Regex<'db>))] +#[visit(drive(crate::cst::RegexChildren<'db>))] +#[visit(drive(crate::cst::RegexFlags<'db>))] +#[visit(drive(crate::cst::RegexPattern<'db>))] +#[visit(drive(crate::cst::RequiredParameter<'db>))] +#[visit(drive(crate::cst::RequiredParameterChildren<'db>))] +#[visit(drive(crate::cst::RequiredParameterName<'db>))] +#[visit(drive(crate::cst::RequiredParameterPattern<'db>))] +#[visit(drive(crate::cst::RestPattern<'db>))] +#[visit(drive(crate::cst::RestPatternChildren<'db>))] +#[visit(drive(crate::cst::RestType<'db>))] +#[visit(drive(crate::cst::RestTypeChildren<'db>))] +#[visit(drive(crate::cst::ReturnStatement<'db>))] +#[visit(drive(crate::cst::ReturnStatementChildren<'db>))] +#[visit(drive(crate::cst::SatisfiesExpression<'db>))] +#[visit(drive(crate::cst::SatisfiesExpressionChildren<'db>))] +#[visit(drive(crate::cst::SequenceExpression<'db>))] +#[visit(drive(crate::cst::SequenceExpressionChildren<'db>))] +#[visit(drive(crate::cst::ShorthandPropertyIdentifier<'db>))] +#[visit(drive(crate::cst::ShorthandPropertyIdentifierPattern<'db>))] +#[visit(drive(crate::cst::SpreadElement<'db>))] +#[visit(drive(crate::cst::SpreadElementChildren<'db>))] +#[visit(drive(crate::cst::Statement<'db>))] +#[visit(drive(crate::cst::StatementBlock<'db>))] +#[visit(drive(crate::cst::StatementBlockChildren<'db>))] +#[visit(drive(crate::cst::StatementIdentifier<'db>))] +#[visit(drive(crate::cst::String<'db>))] 
+#[visit(drive(crate::cst::StringChildren<'db>))] +#[visit(drive(crate::cst::StringFragment<'db>))] +#[visit(drive(crate::cst::SubscriptExpression<'db>))] +#[visit(drive(crate::cst::SubscriptExpressionChildren<'db>))] +#[visit(drive(crate::cst::SubscriptExpressionIndex<'db>))] +#[visit(drive(crate::cst::Super<'db>))] +#[visit(drive(crate::cst::SwitchBody<'db>))] +#[visit(drive(crate::cst::SwitchBodyChildren<'db>))] +#[visit(drive(crate::cst::SwitchCase<'db>))] +#[visit(drive(crate::cst::SwitchCaseChildren<'db>))] +#[visit(drive(crate::cst::SwitchCaseValue<'db>))] +#[visit(drive(crate::cst::SwitchDefault<'db>))] +#[visit(drive(crate::cst::SwitchDefaultChildren<'db>))] +#[visit(drive(crate::cst::SwitchStatement<'db>))] +#[visit(drive(crate::cst::SwitchStatementChildren<'db>))] +#[visit(drive(crate::cst::TemplateLiteralType<'db>))] +#[visit(drive(crate::cst::TemplateLiteralTypeChildren<'db>))] +#[visit(drive(crate::cst::TemplateString<'db>))] +#[visit(drive(crate::cst::TemplateStringChildren<'db>))] +#[visit(drive(crate::cst::TemplateSubstitution<'db>))] +#[visit(drive(crate::cst::TemplateSubstitutionChildren<'db>))] +#[visit(drive(crate::cst::TemplateType<'db>))] +#[visit(drive(crate::cst::TemplateTypeChildren<'db>))] +#[visit(drive(crate::cst::TernaryExpression<'db>))] +#[visit(drive(crate::cst::TernaryExpressionChildren<'db>))] +#[visit(drive(crate::cst::This<'db>))] +#[visit(drive(crate::cst::ThisType<'db>))] +#[visit(drive(crate::cst::ThrowStatement<'db>))] +#[visit(drive(crate::cst::ThrowStatementChildren<'db>))] +#[visit(drive(crate::cst::True<'db>))] +#[visit(drive(crate::cst::TryStatement<'db>))] +#[visit(drive(crate::cst::TryStatementChildren<'db>))] +#[visit(drive(crate::cst::TupleType<'db>))] +#[visit(drive(crate::cst::TupleTypeChildren<'db>))] +#[visit(drive(crate::cst::Type<'db>))] +#[visit(drive(crate::cst::TypeAliasDeclaration<'db>))] +#[visit(drive(crate::cst::TypeAliasDeclarationChildren<'db>))] +#[visit(drive(crate::cst::TypeAnnotation<'db>))] 
+#[visit(drive(crate::cst::TypeAnnotationChildren<'db>))] +#[visit(drive(crate::cst::TypeArguments<'db>))] +#[visit(drive(crate::cst::TypeArgumentsChildren<'db>))] +#[visit(drive(crate::cst::TypeAssertion<'db>))] +#[visit(drive(crate::cst::TypeAssertionChildren<'db>))] +#[visit(drive(crate::cst::TypeIdentifier<'db>))] +#[visit(drive(crate::cst::TypeParameter<'db>))] +#[visit(drive(crate::cst::TypeParameterChildren<'db>))] +#[visit(drive(crate::cst::TypeParameters<'db>))] +#[visit(drive(crate::cst::TypeParametersChildren<'db>))] +#[visit(drive(crate::cst::TypePredicate<'db>))] +#[visit(drive(crate::cst::TypePredicateAnnotation<'db>))] +#[visit(drive(crate::cst::TypePredicateAnnotationChildren<'db>))] +#[visit(drive(crate::cst::TypePredicateChildren<'db>))] +#[visit(drive(crate::cst::TypePredicateName<'db>))] +#[visit(drive(crate::cst::TypeQuery<'db>))] +#[visit(drive(crate::cst::TypeQueryChildren<'db>))] +#[visit(drive(crate::cst::UnaryExpression<'db>))] +#[visit(drive(crate::cst::UnaryExpressionArgument<'db>))] +#[visit(drive(crate::cst::UnaryExpressionChildren<'db>))] +#[visit(drive(crate::cst::UnaryExpressionOperator<'db>))] +#[visit(drive(crate::cst::Undefined<'db>))] +#[visit(drive(crate::cst::UnionType<'db>))] +#[visit(drive(crate::cst::UnionTypeChildren<'db>))] +#[visit(drive(crate::cst::UpdateExpression<'db>))] +#[visit(drive(crate::cst::UpdateExpressionChildren<'db>))] +#[visit(drive(crate::cst::UpdateExpressionOperator<'db>))] +#[visit(drive(crate::cst::VariableDeclaration<'db>))] +#[visit(drive(crate::cst::VariableDeclarationChildren<'db>))] +#[visit(drive(crate::cst::VariableDeclarator<'db>))] +#[visit(drive(crate::cst::VariableDeclaratorChildren<'db>))] +#[visit(drive(crate::cst::VariableDeclaratorName<'db>))] +#[visit(drive(crate::cst::WhileStatement<'db>))] +#[visit(drive(crate::cst::WhileStatementChildren<'db>))] +#[visit(drive(crate::cst::WithStatement<'db>))] +#[visit(drive(crate::cst::WithStatementChildren<'db>))] 
+#[visit(drive(crate::cst::YieldExpression<'db>))] +#[visit(drive(crate::cst::YieldExpressionChildren<'db>))] +#[visit(drive(forBox))] +#[visit(drive(forVec))] +#[visit(drive(forOption))] +#[visit( + enter(AbstractClassDeclaration:crate::cst::AbstractClassDeclaration<'db>), + enter(AbstractMethodSignature:crate::cst::AbstractMethodSignature<'db>), + enter(FunctionSignature:crate::cst::FunctionSignature<'db>), + enter(InterfaceDeclaration:crate::cst::InterfaceDeclaration<'db>), + enter(Module:crate::cst::Module<'db>) )] -pub struct Definitions { - pub classes: Vec, - pub functions: Vec, - pub interfaces: Vec, - pub methods: Vec, - pub modules: Vec, +pub struct Definitions<'db> { + pub classes: Vec>, + pub functions: Vec>, + pub interfaces: Vec>, + pub methods: Vec>, + pub modules: Vec>, + phantom: std::marker::PhantomData<&'db ()>, } -impl Definitions { - fn enter_abstract_class_declaration( +impl<'db> Definitions<'db> { + fn enter_AbstractClassDeclaration( &mut self, - node: &codegen_sdk_cst::typescript::AbstractClassDeclaration, + node: &crate::cst::AbstractClassDeclaration<'db>, ) { - let field = &node.name; + ///Code for query: (abstract_class_declaration name: (type_identifier) @name) @definition.class + let name = &*node.name; self.classes.push(node.clone()); } - fn enter_abstract_method_signature( + fn enter_AbstractMethodSignature( &mut self, - node: &codegen_sdk_cst::typescript::AbstractMethodSignature, + node: &crate::cst::AbstractMethodSignature<'db>, ) { - let field = &node.name; + ///Code for query: (abstract_method_signature name: (property_identifier) @name) @definition.method + let name = &*node.name; self.methods.push(node.clone()); } - fn enter_function_signature( - &mut self, - node: &codegen_sdk_cst::typescript::FunctionSignature, - ) { - let field = &node.name; + fn enter_FunctionSignature(&mut self, node: &crate::cst::FunctionSignature<'db>) { + ///Code for query: (function_signature name: (identifier) @name) @definition.function + let name = 
&*node.name; self.functions.push(node.clone()); } - fn enter_interface_declaration( + fn enter_InterfaceDeclaration( &mut self, - node: &codegen_sdk_cst::typescript::InterfaceDeclaration, + node: &crate::cst::InterfaceDeclaration<'db>, ) { - let field = &node.name; + ///Code for query: (interface_declaration name: (type_identifier) @name) @definition.interface + let name = &*node.name; self.interfaces.push(node.clone()); } - fn enter_module(&mut self, node: &codegen_sdk_cst::typescript::Module) { - let field = &node.name; + fn enter_Module(&mut self, node: &crate::cst::Module<'db>) { + ///Code for query: (module name: (identifier) @name) @definition.module + let name = &*node.name; self.modules.push(node.clone()); } } diff --git a/codegen-sdk-ast-generator/src/visitor.rs b/codegen-sdk-ast-generator/src/visitor.rs index 935aae11..a152b481 100644 --- a/codegen-sdk-ast-generator/src/visitor.rs +++ b/codegen-sdk-ast-generator/src/visitor.rs @@ -1,7 +1,7 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; use codegen_sdk_common::{CSTNode, HasChildren, Language}; -use codegen_sdk_cst::ts_query; +use codegen_sdk_ts_query::cst as ts_query; use convert_case::{Case, Casing}; use log::info; use proc_macro2::TokenStream; @@ -9,24 +9,27 @@ use quote::{format_ident, quote}; use super::query::Query; use crate::query::HasQuery; -pub fn generate_visitor(language: &Language, name: &str) -> TokenStream { +pub fn generate_visitor<'db>( + db: &'db dyn salsa::Database, + language: &Language, + name: &str, +) -> TokenStream { log::info!( "Generating visitor for language: {} for {}", language.name(), name ); - let raw_queries = language.queries_with_prefix(&format!("{}", name)); + let raw_queries = language.queries_with_prefix(db, &format!("{}", name)); let queries: Vec<&Query> = raw_queries.values().flatten().collect(); - let language_name = format_ident!("{}", language.name()); let mut names = Vec::new(); let mut types = Vec::new(); - let mut variants = 
Vec::new(); + let mut variants = BTreeSet::new(); let mut enter_methods = BTreeMap::new(); for query in queries { names.push(query.executor_id()); types.push(format_ident!("{}", query.struct_name())); for variant in query.struct_variants() { - variants.push(format_ident!("{}", variant)); + variants.insert(format_ident!("{}", variant)); enter_methods .entry(variant) .or_insert(Vec::new()) @@ -36,7 +39,7 @@ pub fn generate_visitor(language: &Language, name: &str) -> TokenStream { let mut methods = Vec::new(); for (variant, queries) in enter_methods { let mut matchers = TokenStream::new(); - let enter = format_ident!("enter_{}", variant.to_case(Case::Snake)); + let enter = format_ident!("enter_{}", variant); let struct_name = format_ident!("{}", variant); for query in queries { matchers.extend_one(query.matcher(&variant)); @@ -54,27 +57,47 @@ pub fn generate_visitor(language: &Language, name: &str) -> TokenStream { } } methods.push(quote! { - fn #enter(&mut self, node: &codegen_sdk_cst::#language_name::#struct_name) { + fn #enter(&mut self, node: &crate::cst::#struct_name<'db>) { #matchers } }); } + let visitor = if variants.len() > 0 { + let first_query = raw_queries.values().flatten().next().unwrap(); + let state = first_query.state.clone(); + let mut nodes = BTreeSet::new(); + nodes.extend(state.get_node_struct_names()); + nodes.extend(state.get_subenum_struct_names()); + nodes = nodes.difference(&variants).cloned().collect(); + quote! { + #(#[visit(drive(&crate::cst::#nodes<'db>))])* + #(#[visit(drive(&crate::cst::#variants<'db>))])* + #(#[visit(drive(crate::cst::#nodes<'db>))])* + #[visit(drive(for Box))] + #[visit(drive(for Vec))] + #[visit(drive(for Option))] + #[visit( + #(enter(#variants:crate::cst::#variants<'db>)),* + )] + } + } else { + quote! {} + }; let name = format_ident!("{}s", name.to_case(Case::Pascal)); quote! 
{ - #[derive(Visitor, Default, Debug, Clone)] - #[visitor( - #(#language_name::#variants(enter)),* - )] - pub struct #name { - #(pub #names: Vec<#language_name::#types>),* + #[derive(Visitor, Visit, Debug, Clone, Eq, PartialEq, salsa::Update, Hash, Default)] + #visitor + pub struct #name<'db> { + #(pub #names: Vec>,)* + phantom: std::marker::PhantomData<&'db ()>, } - impl #name { + impl<'db> #name<'db> { #(#methods)* } } } -#[cfg(test)] +#[cfg(all(test))] mod tests { use codegen_sdk_common::language::typescript::Typescript; @@ -83,9 +106,10 @@ mod tests { #[test_log::test] fn test_generate_visitor() { let language = &Typescript; - let visitor = generate_visitor(language, "definition"); + let db = codegen_sdk_cst::CSTDatabase::default(); + let visitor = generate_visitor(&db, language, "definition"); insta::assert_snapshot!( - codegen_sdk_common::generator::format_code(&visitor.to_string()).unwrap() + codegen_sdk_common::generator::format_code_string(&visitor.to_string()).unwrap() ); } } diff --git a/codegen-sdk-ast/Cargo.toml b/codegen-sdk-ast/Cargo.toml index 132f1fd8..01e083f3 100644 --- a/codegen-sdk-ast/Cargo.toml +++ b/codegen-sdk-ast/Cargo.toml @@ -6,30 +6,8 @@ edition = "2024" [dependencies] codegen-sdk-cst = { workspace = true} codegen-sdk-common = { workspace = true } -derive-visitor = { workspace = true } -codegen-sdk-macros = { path = "../codegen-sdk-macros"} -log = { workspace = true } -[build-dependencies] -codegen-sdk-common = { workspace = true } -env_logger = { workspace = true } -rayon = { workspace = true } -codegen-sdk-ast-generator = { path = "../codegen-sdk-ast-generator"} +salsa = { workspace = true } [dev-dependencies] test-log = { workspace = true } tempfile = { workspace = true } -[features] -python = [ "codegen-sdk-cst/python"] -typescript = [ "codegen-sdk-cst/typescript"] -tsx = [ "codegen-sdk-cst/tsx"] -jsx = [ "codegen-sdk-cst/jsx"] -javascript = [ "codegen-sdk-cst/typescript"] -json = [ "codegen-sdk-cst/json"] -java = [ 
"codegen-sdk-cst/java"] -rust = [ "codegen-sdk-cst/rust"] -go = [ "codegen-sdk-cst/go"] -ruby = [ "codegen-sdk-cst/ruby"] -yaml = [ "codegen-sdk-cst/yaml"] -toml = [ "codegen-sdk-cst/toml"] -markdown = [ "codegen-sdk-cst/markdown"] -ts_query = [] -default = ["json", "ts_query", "typescript"] +codegen-sdk-typescript = { workspace = true } diff --git a/codegen-sdk-ast/build.rs b/codegen-sdk-ast/build.rs deleted file mode 100644 index fee4dba5..00000000 --- a/codegen-sdk-ast/build.rs +++ /dev/null @@ -1,9 +0,0 @@ -use codegen_sdk_ast_generator::generate_ast; -use codegen_sdk_common::language::LANGUAGES; -use rayon::prelude::*; -fn main() { - env_logger::init(); - LANGUAGES.par_iter().for_each(|language| { - generate_ast(language).unwrap(); - }); -} diff --git a/codegen-sdk-ast/src/input.rs b/codegen-sdk-ast/src/input.rs new file mode 100644 index 00000000..9e97d421 --- /dev/null +++ b/codegen-sdk-ast/src/input.rs @@ -0,0 +1,10 @@ +use std::path::PathBuf; + +use codegen_sdk_cst::Input; +#[salsa::input] +pub struct File { + #[id] + pub path: PathBuf, + // #[return_ref] + pub contents: Input, +} diff --git a/codegen-sdk-ast/src/lib.rs b/codegen-sdk-ast/src/lib.rs index 708a7e31..153579a9 100644 --- a/codegen-sdk-ast/src/lib.rs +++ b/codegen-sdk-ast/src/lib.rs @@ -1,9 +1,8 @@ #![recursion_limit = "512"] -#![allow(unused)] +pub mod input; use codegen_sdk_common::File; pub use codegen_sdk_common::language::LANGUAGES; pub use codegen_sdk_cst::*; -use codegen_sdk_macros::include_languages_ast; pub trait Named { fn name(&self) -> &str; } @@ -12,4 +11,3 @@ impl Named for T { self.path().file_name().unwrap().to_str().unwrap() } } -include_languages_ast!(); diff --git a/codegen-sdk-ast/tests/test_typescript.rs b/codegen-sdk-ast/tests/test_typescript.rs index 1d113494..bd700a64 100644 --- a/codegen-sdk-ast/tests/test_typescript.rs +++ b/codegen-sdk-ast/tests/test_typescript.rs @@ -1,8 +1,6 @@ #![recursion_limit = "512"] use std::path::PathBuf; -use 
codegen_sdk_ast::typescript::TypescriptFile; -use codegen_sdk_common::File; fn write_to_temp_file(content: &str, temp_dir: &tempfile::TempDir) -> PathBuf { let file_path = temp_dir.path().join("test.ts"); std::fs::write(&file_path, content).unwrap(); @@ -30,6 +28,9 @@ fn test_typescript_ast_interface() { let temp_dir = tempfile::tempdir().unwrap(); let content = "interface Test { }"; let file_path = write_to_temp_file(content, &temp_dir); - let file = TypescriptFile::parse(&file_path).unwrap(); - assert_eq!(file.definitions.interfaces.len(), 1); + let db = codegen_sdk_cst::CSTDatabase::default(); + let content = codegen_sdk_cst::Input::new(&db, content.to_string()); + let input = codegen_sdk_ast::input::File::new(&db, file_path, content); + let file = codegen_sdk_typescript::ast::parse_query(&db, input); + assert_eq!(file.definitions(&db).interfaces.len(), 1); } diff --git a/codegen-sdk-common/Cargo.toml b/codegen-sdk-common/Cargo.toml index 352acb20..b7cad729 100644 --- a/codegen-sdk-common/Cargo.toml +++ b/codegen-sdk-common/Cargo.toml @@ -24,21 +24,22 @@ serde_json = { workspace = true } anyhow = { workspace = true } convert_case = { workspace = true } tree-sitter-query = {git = "https://github.com/tree-sitter-grammars/tree-sitter-query", optional = true} -tree-sitter-language = "0.1.4" phf = { version = "0.11.3", features = ["macros"] } -rkyv = { workspace = true } +rkyv = { workspace = true } xdg = "2.5.2" base64 = "0.22.1" buildid = "1.0.3" sha2 = "0.10.8" -zstd = { version = "0.13.2", features = ["zstdmt"] } -enum_delegate = { workspace = true } +zstd = { version = "0.13.2", features = ["zstdmt"], optional = true } +ambassador = { workspace = true } mockall = { workspace = true } syn = { workspace = true } prettyplease = { workspace = true } +salsa = { workspace = true } [dev-dependencies] test-log = { workspace = true } [features] +serialization = ["dep:zstd"] python = ["dep:tree-sitter-python"] json = ["dep:tree-sitter-json"] java = 
["dep:tree-sitter-java"] diff --git a/codegen-sdk-common/src/errors.rs b/codegen-sdk-common/src/errors.rs index bc80a9e8..23d8df60 100644 --- a/codegen-sdk-common/src/errors.rs +++ b/codegen-sdk-common/src/errors.rs @@ -1,5 +1,6 @@ use std::backtrace::Backtrace; +use salsa::Accumulator; use thiserror::Error; #[derive(Debug, Error)] pub enum ParseError { @@ -31,3 +32,16 @@ pub enum ParseError { #[error("Failed to serialize: {0}")] Serialize(#[from] rkyv::rancor::Error), } +#[salsa::accumulator] +#[allow(dead_code)] // Debug impl uses them +struct AccumulatedParseError { + message: String, +} +impl ParseError { + pub fn report(self, db: &dyn salsa::Database) { + AccumulatedParseError { + message: self.to_string(), + } + .accumulate(db); + } +} diff --git a/codegen-sdk-common/src/file.rs b/codegen-sdk-common/src/file.rs index e5efb8c6..ce8328b8 100644 --- a/codegen-sdk-common/src/file.rs +++ b/codegen-sdk-common/src/file.rs @@ -1,13 +1,8 @@ use std::path::PathBuf; -use crate::ParseError; - pub trait File { fn path(&self) -> &PathBuf; fn content(&self) -> String { std::fs::read_to_string(self.path()).unwrap() } - fn parse(path: &PathBuf) -> Result - where - Self: Sized; } diff --git a/codegen-sdk-common/src/generator/format.rs b/codegen-sdk-common/src/generator/format.rs index 1877ea99..bdb46076 100644 --- a/codegen-sdk-common/src/generator/format.rs +++ b/codegen-sdk-common/src/generator/format.rs @@ -1,4 +1,7 @@ -pub fn format_code(cst: &str) -> anyhow::Result { +pub fn format_code(cst: &syn::File) -> anyhow::Result { + Ok(prettyplease::unparse(cst)) +} +pub fn format_code_string(cst: &str) -> anyhow::Result { let parsed = syn::parse_str::(cst)?; - Ok(prettyplease::unparse(&parsed)) + format_code(&parsed) } diff --git a/codegen-sdk-common/src/language.rs b/codegen-sdk-common/src/language.rs index 2d03b6ce..d7c6ffa1 100644 --- a/codegen-sdk-common/src/language.rs +++ b/codegen-sdk-common/src/language.rs @@ -1,4 +1,4 @@ -use std::num::NonZeroU16; +use std::{hash::Hash, 
num::NonZeroU16, sync::Arc}; use convert_case::{Case, Casing}; use mockall::automock; @@ -9,7 +9,7 @@ use crate::{ naming::normalize_type_name, parser::{Node, parse_node_types}, }; -#[derive(Debug)] +#[derive(Debug, Eq, PartialEq)] pub struct Language { name: &'static str, pub struct_name: &'static str, @@ -17,8 +17,14 @@ pub struct Language { pub file_extensions: &'static [&'static str], tree_sitter_language: tree_sitter::Language, pub tag_query: &'static str, - nodes: Vec, + nodes: Vec>, } +impl Hash for Language { + fn hash(&self, state: &mut H) { + self.name.hash(state); + } +} + #[automock] impl Language { pub fn new( @@ -29,7 +35,10 @@ impl Language { tree_sitter_language: tree_sitter::Language, tag_query: &'static str, ) -> anyhow::Result { - let nodes = parse_node_types(node_types)?; + let nodes = parse_node_types(node_types)? + .into_iter() + .map(|node| Arc::new(node)) + .collect(); Ok(Self { name, struct_name, @@ -45,7 +54,7 @@ impl Language { parser.set_language(&self.tree_sitter_language)?; parser.parse(content, None).ok_or(ParseError::Miscelaneous) } - pub fn nodes(&self) -> &Vec { + pub fn nodes(&self) -> &Vec> { &self.nodes } pub fn root_node(&self) -> String { @@ -71,7 +80,10 @@ impl Language { pub fn name(&self) -> &'static str { self.name } - pub fn node_for_struct_name(&self, struct_name: &str) -> Option { + pub fn struct_name(&self) -> &'static str { + self.struct_name + } + pub fn node_for_struct_name(&self, struct_name: &str) -> Option> { self.nodes .iter() .find(|node| normalize_type_name(&node.type_name, node.named) == struct_name) diff --git a/codegen-sdk-common/src/lib.rs b/codegen-sdk-common/src/lib.rs index 2b43992d..55eabdb6 100644 --- a/codegen-sdk-common/src/lib.rs +++ b/codegen-sdk-common/src/lib.rs @@ -1,4 +1,5 @@ #![feature(error_generic_member_access)] +#![feature(trivial_bounds)] mod errors; pub mod language; pub mod traits; @@ -13,6 +14,7 @@ pub mod parser; #[macro_use] extern crate lazy_static; pub mod naming; +#[cfg(feature = 
"serialization")] pub mod serialize; pub mod tree; pub use tree::{Point, Range}; diff --git a/codegen-sdk-common/src/naming.rs b/codegen-sdk-common/src/naming.rs index b04e194c..00d0d476 100644 --- a/codegen-sdk-common/src/naming.rs +++ b/codegen-sdk-common/src/naming.rs @@ -45,6 +45,12 @@ pub fn normalize_field_name(field_name: &str) -> String { if field_name == "macro" { return "r#macro".to_string(); } + if field_name == "else" { + return "r#else".to_string(); + } + if field_name == "trait" { + return "r#trait".to_string(); + } field_name.to_string() } fn get_char_mapping(c: char) -> String { @@ -70,6 +76,9 @@ pub fn normalize_string(string: &str) -> String { escaped } pub fn normalize_type_name(type_name: &str, named: bool) -> String { + if type_name == "self" { + return "SelfNode".to_string(); + } let mut cased = type_name.to_string(); if type_name.chars().any(|c| c.is_ascii_alphabetic()) { cased = cased.to_case(Case::Pascal); @@ -78,7 +87,7 @@ pub fn normalize_type_name(type_name: &str, named: bool) -> String { debug_assert!( escaped .chars() - .all(|c| c.is_ascii_lowercase() || c.is_ascii_uppercase()), + .all(|c| c.is_ascii_lowercase() || c.is_ascii_uppercase() || c.is_ascii_digit()), "Type name '{}' contains invalid characters", type_name ); diff --git a/codegen-sdk-common/src/parser.rs b/codegen-sdk-common/src/parser.rs index 499fab8d..2219e7d0 100644 --- a/codegen-sdk-common/src/parser.rs +++ b/codegen-sdk-common/src/parser.rs @@ -39,7 +39,7 @@ pub struct TypeDefinition { } impl TypeDefinition { pub fn normalize(&self) -> String { - normalize_type_name(&self.type_name, true) + normalize_type_name(&self.type_name, self.named) } } #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] diff --git a/codegen-sdk-common/src/serialize.rs b/codegen-sdk-common/src/serialize.rs index e00df702..ff731d0b 100644 --- a/codegen-sdk-common/src/serialize.rs +++ b/codegen-sdk-common/src/serialize.rs @@ -6,10 +6,24 @@ use std::{ use base64::{Engine as _, 
engine::general_purpose::URL_SAFE}; use bytes::Bytes; -use rkyv::ser::writer::IoWriter; +use rkyv::{ + Archive, Deserialize, + bytecheck::CheckBytes, + de::Pool, + from_bytes, + rancor::{Error, Strategy}, + ser::writer::IoWriter, + validation::{Validator, archive::ArchiveValidator, shared::SharedValidator}, +}; use sha2::{Digest, Sha256}; use zstd::stream::AutoFinishEncoder; - +type Writer<'a> = IoWriter< + AutoFinishEncoder< + 'a, + BufWriter, + Box, std::io::Error>) + Send>, + >, +>; use crate::ParseError; pub struct Cache { base_dir: PathBuf, @@ -33,25 +47,24 @@ impl Cache { self.base_dir .join(format!("{}/{}", self.build_id, URL_SAFE.encode(path_hash))) } - pub fn read_entry(&self, path: &PathBuf) -> Result { + fn read_entry_raw(&self, path: &PathBuf) -> Result { let file = File::open(path)?; let mut buf = Vec::new(); let mut reader = zstd::Decoder::new(BufReader::new(file))?; reader.read_to_end(&mut buf)?; Ok(Bytes::from(buf)) } - pub fn get_writer( - &self, - path: &PathBuf, - ) -> Result< - IoWriter< - AutoFinishEncoder< - BufWriter, - Box, std::io::Error>) + Send>, - >, - >, - ParseError, - > { + pub fn read_entry(&self, path: &PathBuf) -> Result + where + T::Archived: + for<'a> CheckBytes, SharedValidator>, Error>>, + T::Archived: Deserialize>, + { + let bytes = self.read_entry_raw(path)?; + let value = from_bytes::(&bytes)?; + Ok(value) + } + pub fn get_writer<'a>(&self, path: &PathBuf) -> Result, ParseError> { let file = File::create(path)?; let writer = zstd::Encoder::new(BufWriter::new(file), 1)?.auto_finish(); Ok(IoWriter::new(writer)) diff --git a/codegen-sdk-common/src/traits.rs b/codegen-sdk-common/src/traits.rs index fc063dab..5feca529 100644 --- a/codegen-sdk-common/src/traits.rs +++ b/codegen-sdk-common/src/traits.rs @@ -1,14 +1,19 @@ use std::{fmt::Debug, sync::Arc}; +use ambassador::delegatable_trait; use bytes::Bytes; use tree_sitter::{self}; use crate::{Point, errors::ParseError, tree::Range}; -pub trait FromNode: Sized { - fn 
from_node(node: tree_sitter::Node, buffer: &Arc) -> Result; +pub trait FromNode<'db>: Sized { + fn from_node( + db: &'db dyn salsa::Database, + node: tree_sitter::Node, + buffer: &Arc, + ) -> Result; } -#[enum_delegate::register] -pub trait CSTNode { +#[delegatable_trait] +pub trait CSTNode<'db> { /// Returns the byte offset where the node starts fn start_byte(&self) -> usize; @@ -16,15 +21,10 @@ pub trait CSTNode { fn end_byte(&self) -> usize; /// Returns the position where the node starts - fn start_position(&self) -> Point; + fn start_position(&self) -> Point<'db>; /// Returns the position where the node ends - fn end_position(&self) -> Point; - - /// Returns the range of positions that this node spans - fn range(&self) -> Range { - Range::new(self.start_position(), self.end_position()) - } + fn end_position(&self) -> Point<'db>; /// Returns the source text buffer for this node fn buffer(&self) -> &Bytes; @@ -68,9 +68,9 @@ pub trait CSTNode { } fn id(&self) -> usize; } -pub trait CSTNodeExt: CSTNode { +pub trait CSTNodeExt<'db>: CSTNode<'db> { /// Get the next sibling of this node in its parent - fn next_sibling>( + fn next_sibling + Clone, Parent: HasChildren<'db, Child = Child>>( &self, parent: &Parent, ) -> Option { @@ -82,7 +82,7 @@ pub trait CSTNodeExt: CSTNode { } None } - fn next_named_sibling>( + fn next_named_sibling + Clone, Parent: HasChildren<'db, Child = Child>>( &self, parent: &Parent, ) -> Option { @@ -94,7 +94,7 @@ pub trait CSTNodeExt: CSTNode { } None } - fn prev_sibling>( + fn prev_sibling + Clone, Parent: HasChildren<'db, Child = Child>>( &self, parent: &Parent, ) -> Option { @@ -107,7 +107,7 @@ pub trait CSTNodeExt: CSTNode { } None } - fn prev_named_sibling>( + fn prev_named_sibling + Clone, Parent: HasChildren<'db, Child = Child>>( &self, parent: &Parent, ) -> Option { @@ -120,53 +120,57 @@ pub trait CSTNodeExt: CSTNode { } None } -} -pub trait HasNode: Send + Debug + Clone { - type Node: CSTNode; - fn node(&self) -> &Self::Node; -} -impl 
CSTNode for T { - fn kind(&self) -> &str { - self.node().kind() - } - fn start_byte(&self) -> usize { - self.node().start_byte() - } - fn end_byte(&self) -> usize { - self.node().end_byte() - } - fn start_position(&self) -> Point { - self.node().start_position() - } - fn end_position(&self) -> Point { - self.node().end_position() - } - fn buffer(&self) -> &Bytes { - self.node().buffer() - } - fn kind_id(&self) -> u16 { - self.node().kind_id() - } - fn is_named(&self) -> bool { - self.node().is_named() - } - fn is_error(&self) -> bool { - self.node().is_error() - } - fn is_missing(&self) -> bool { - self.node().is_missing() - } - fn is_edited(&self) -> bool { - self.node().is_edited() - } - fn is_extra(&self) -> bool { - self.node().is_extra() - } - - fn id(&self) -> usize { - self.node().id() + /// Returns the range of positions that this node spans + fn range(&self, db: &'db dyn salsa::Database) -> Range<'db> { + Range::from_points(db, self.start_position(), self.end_position()) } } +// pub trait HasNode<'db>: Send + Debug + Clone { +// type Node: CSTNode<'db>; +// fn node(&self) -> &Self::Node; +// } +// impl<'db, T: HasNode<'db>> CSTNode<'db> for T { +// fn kind(&self) -> &'_ str { +// self.node().kind() +// } +// fn start_byte(&self) -> usize { +// self.node().start_byte() +// } +// fn end_byte(&self) -> usize { +// self.node().end_byte() +// } +// fn start_position(&self) -> Point<'db> { +// self.node().start_position() +// } +// fn end_position(&self) -> Point<'db> { +// self.node().end_position() +// } +// fn buffer(&self) -> &'_ Bytes { +// self.node().buffer() +// } +// fn kind_id(&self) -> u16 { +// self.node().kind_id() +// } +// fn is_named(&self) -> bool { +// self.node().is_named() +// } +// fn is_error(&self) -> bool { +// self.node().is_error() +// } +// fn is_missing(&self) -> bool { +// self.node().is_missing() +// } +// fn is_edited(&self) -> bool { +// self.node().is_edited() +// } +// fn is_extra(&self) -> bool { +// self.node().is_extra() +// 
} + +// fn id(&self) -> usize { +// self.node().id() +// } +// } // impl HasChildren for T { // type Child = ::Child; // fn child_by_field_name(&self, field_name: &str) -> Option { @@ -185,8 +189,8 @@ impl CSTNode for T { // self.node().child_count() // } // } -pub trait HasChildren { - type Child: Send + Debug + Clone + CSTNode; +pub trait HasChildren<'db> { + type Child: Send + Debug + Clone + CSTNode<'db>; /// Returns the first child with the given field name fn child_by_field_id(&self, field_id: u16) -> Option { self.children_by_field_id(field_id) @@ -238,4 +242,19 @@ pub trait HasChildren { fn child_count(&self) -> usize { self.children().len() } + fn children_by_field_types(&self, field_types: &[&str]) -> Vec { + self.children() + .into_iter() + .filter(|child| field_types.contains(&child.kind())) + .collect() + } + fn children_by_field_type(&self, field_type: &str) -> Vec { + self.children_by_field_types(&[field_type]) + } + fn child_by_field_type(&self, field_type: &str) -> Option { + self.children_by_field_type(field_type).into_iter().next() + } + fn child_by_field_types(&self, field_types: &[&str]) -> Option { + self.children_by_field_types(field_types).into_iter().next() + } } diff --git a/codegen-sdk-common/src/tree/point.rs b/codegen-sdk-common/src/tree/point.rs index 1f73a281..d282e4f4 100644 --- a/codegen-sdk-common/src/tree/point.rs +++ b/codegen-sdk-common/src/tree/point.rs @@ -1,15 +1,13 @@ use rkyv::{Archive, Deserialize, Serialize}; -#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Archive, Deserialize, Serialize)] -pub struct Point { +#[salsa::interned] +#[derive(Archive, Deserialize, Serialize)] +pub struct Point<'db> { pub row: usize, pub column: usize, } -impl From for Point { - fn from(value: tree_sitter::Point) -> Self { - Point { - row: value.row, - column: value.column, - } +impl<'db> Point<'db> { + pub fn from(db: &'db dyn salsa::Database, value: tree_sitter::Point) -> Self { + Self::new(db, value.row, value.column) } } diff 
--git a/codegen-sdk-common/src/tree/range.rs b/codegen-sdk-common/src/tree/range.rs index 1fe85c89..c1f37cfa 100644 --- a/codegen-sdk-common/src/tree/range.rs +++ b/codegen-sdk-common/src/tree/range.rs @@ -1,21 +1,21 @@ use rkyv::{Archive, Deserialize, Serialize}; use crate::Point; -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Archive, Deserialize, Serialize)] -pub struct Range { - start: Point, - end: Point, +#[salsa::interned] +#[derive(Archive, Deserialize, Serialize)] +pub struct Range<'db> { + start: Point<'db>, + end: Point<'db>, } -impl From for Range { - fn from(value: tree_sitter::Range) -> Self { - Self { - start: value.start_point.into(), - end: value.end_point.into(), - } +impl<'db> Range<'db> { + pub fn from_points(db: &'db dyn salsa::Database, start: Point<'db>, end: Point<'db>) -> Self { + Self::new(db, start, end) } -} -impl Range { - pub fn new(start: Point, end: Point) -> Self { - Self { start, end } + pub fn from_tree_sitter(db: &'db dyn salsa::Database, value: tree_sitter::Range) -> Self { + Self::from_points( + db, + Point::from(db, value.start_point), + Point::from(db, value.end_point), + ) } } diff --git a/codegen-sdk-common/src/utils.rs b/codegen-sdk-common/src/utils.rs index c0103d13..93f035c2 100644 --- a/codegen-sdk-common/src/utils.rs +++ b/codegen-sdk-common/src/utils.rs @@ -4,35 +4,38 @@ use bytes::Bytes; use tree_sitter::{self}; use crate::{ParseError, traits::FromNode}; -pub fn named_children_without_field_names( +pub fn named_children_without_field_names<'db, T: FromNode<'db>>( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result, ParseError> { let mut children = Vec::new(); for (index, child) in node.named_children(&mut node.walk()).enumerate() { if node.field_name_for_named_child(index as u32).is_none() { - children.push(T::from_node(child, buffer)?); + children.push(T::from_node(db, child, buffer)?); } } Ok(children) } -pub fn get_optional_child_by_field_name( +pub fn 
get_optional_child_by_field_name<'db, T: FromNode<'db>>( + db: &'db dyn salsa::Database, node: &tree_sitter::Node, field_name: &str, buffer: &Arc, ) -> Result, ParseError> { if let Some(child) = node.child_by_field_name(field_name) { - return Ok(Some(T::from_node(child, buffer)?)); + return Ok(Some(T::from_node(db, child, buffer)?)); } Ok(None) } -pub fn get_child_by_field_name( +pub fn get_child_by_field_name<'db, T: FromNode<'db>>( + db: &'db dyn salsa::Database, node: &tree_sitter::Node, field_name: &str, buffer: &Arc, ) -> Result { - if let Some(child) = get_optional_child_by_field_name(node, field_name, buffer)? { + if let Some(child) = get_optional_child_by_field_name(db, node, field_name, buffer)? { return Ok(child); } Err(ParseError::MissingNode { @@ -42,14 +45,15 @@ pub fn get_child_by_field_name( }) } -pub fn get_multiple_children_by_field_name( +pub fn get_multiple_children_by_field_name<'db, T: FromNode<'db>>( + db: &'db dyn salsa::Database, node: &tree_sitter::Node, field_name: &str, buffer: &Arc, ) -> Result, ParseError> { let mut children = Vec::new(); for child in node.children_by_field_name(field_name, &mut node.walk()) { - children.push(T::from_node(child, buffer)?); + children.push(T::from_node(db, child, buffer)?); } Ok(children) } diff --git a/codegen-sdk-cst-generator/Cargo.toml b/codegen-sdk-cst-generator/Cargo.toml index 22f1c3e5..aecadca1 100644 --- a/codegen-sdk-cst-generator/Cargo.toml +++ b/codegen-sdk-cst-generator/Cargo.toml @@ -4,18 +4,15 @@ version = "0.1.0" edition = "2024" [dependencies] -convert_case = { workspace = true } -prettyplease = {workspace = true} syn = { workspace = true } tree-sitter = { workspace = true } log = { workspace = true } -codegen-sdk-common = { workspace = true, features = ["python"]} +codegen-sdk-common = { workspace = true, features = ["python", "typescript"]} anyhow = { workspace = true } quote = { workspace = true } proc-macro2 = { workspace = true } tempfile = { workspace = true } mockall_double = 
"0.3.1" - [dev-dependencies] tree-sitter-python = { workspace = true } test-log = { workspace = true } diff --git a/codegen-sdk-cst-generator/src/config.rs b/codegen-sdk-cst-generator/src/config.rs new file mode 100644 index 00000000..1a77d4ae --- /dev/null +++ b/codegen-sdk-cst-generator/src/config.rs @@ -0,0 +1,4 @@ +#[derive(Default, Debug, Clone)] +pub struct Config { + pub serialize: bool, +} diff --git a/codegen-sdk-cst-generator/src/generator.rs b/codegen-sdk-cst-generator/src/generator.rs index 248f17f4..76771554 100644 --- a/codegen-sdk-cst-generator/src/generator.rs +++ b/codegen-sdk-cst-generator/src/generator.rs @@ -12,30 +12,97 @@ mod utils; use std::io::Write; use proc_macro2::TokenStream; -use quote::quote; -fn get_imports() -> TokenStream { - quote! { +use quote::{ToTokens, format_ident, quote}; +use syn::parse_quote; + +use crate::Config; +fn get_imports(config: &Config) -> TokenStream { + let mut imports = quote! { use std::sync::Arc; use tree_sitter; - use derive_more::Debug; use codegen_sdk_common::*; use subenum::subenum; use std::backtrace::Backtrace; - use bytes::Bytes; - use rkyv::{Archive, Deserialize, Serialize}; - use derive_visitor::Drive; - + use bytes::Bytes; + use derive_generic_visitor::Drive; + use ambassador::Delegate; + use codegen_sdk_cst::CSTLanguage; + }; + if config.serialize { + imports.extend_one(quote! { + use rkyv::{Archive, Deserialize, Serialize}; + }); + } + imports +} +fn get_parser(language: &Language) -> TokenStream { + let program_id = format_ident!("{}", language.root_node()); + let language_name = format_ident!("{}", language.name()); + let language_struct_name = format_ident!("{}", language.struct_name()); + let root_node = format_ident!("{}", language.root_node()); + quote! 
{ + #[salsa::tracked] + pub struct Parsed<'db> { + #[tracked] + #[return_ref] + pub program: Option<#program_id<'db>>, + } + pub fn parse_program_raw(db: &dyn salsa::Database, input: codegen_sdk_cst::Input) -> Option<#program_id<'_>> { + let buffer = Bytes::from(input.content(db).as_bytes().to_vec()); + let tree = codegen_sdk_common::language::#language_name::#language_struct_name.parse_tree_sitter(&input.content(db)); + match tree { + Ok(tree) => { + if tree.root_node().has_error() { + ParseError::SyntaxError.report(db); + None + } else { + let buffer = Arc::new(buffer); + #program_id::from_node(db, tree.root_node(), &buffer) + .map_or_else(|e| { + e.report(db); + None + }, |program| { + Some(program) + }) + } + } + Err(e) => { + e.report(db); + None + } + } + } + #[salsa::tracked] + pub fn parse_program(db: &dyn salsa::Database, input: codegen_sdk_cst::Input) -> Parsed<'_> { + Parsed::new(db, parse_program_raw(db, input)) } + pub struct #language_struct_name; + impl CSTLanguage for #language_struct_name { + type Program<'db> = #root_node<'db>; + fn language() -> &'static codegen_sdk_common::language::Language { + &codegen_sdk_common::language::#language_name::#language_struct_name + } + fn parse<'db>(db: &'db dyn salsa::Database, content: std::string::String) -> &'db Option> { + let input = codegen_sdk_cst::Input::new(db, content); + return parse_program(db, input).program(db); + } + } + } } -pub fn generate_cst(language: &Language) -> anyhow::Result { - let state = State::new(language); - let mut result = get_imports(); +pub fn generate_cst(language: &Language, config: Config) -> anyhow::Result { + let imports: TokenStream = get_imports(&config); + let state = State::new(language, config); let enums = state.get_enum(); let structs = state.get_structs(); - result.extend_one(enums); - result.extend_one(structs); - let formatted = codegen_sdk_common::generator::format_code(&result.to_string()); + let parser = get_parser(language); + let result: syn::File = 
parse_quote! { + #imports + #enums + #structs + #parser + }; + let formatted = codegen_sdk_common::generator::format_code(&result); match formatted { Ok(formatted) => return Ok(formatted), Err(e) => { @@ -44,7 +111,7 @@ pub fn generate_cst(language: &Language) -> anyhow::Result { "Failed to format CST, writing to temp file at {}", out_file.path().display() ); - out_file.write_all(result.to_string().as_bytes())?; + out_file.write_all(result.into_token_stream().to_string().as_bytes())?; out_file.keep()?; return Err(e); } diff --git a/codegen-sdk-cst-generator/src/generator/field.rs b/codegen-sdk-cst-generator/src/generator/field.rs index 898c1b5c..388db95a 100644 --- a/codegen-sdk-cst-generator/src/generator/field.rs +++ b/codegen-sdk-cst-generator/src/generator/field.rs @@ -9,26 +9,31 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote}; use super::constants::TYPE_NAME; +use crate::Config; + #[derive(Debug)] pub struct Field<'a> { - raw: &'a FieldDefinition, + raw: FieldDefinition, name: String, node_name: String, language: &'a Language, + config: Config, } impl<'a> Field<'a> { pub fn new( node_name: &str, name: &str, - raw: &'a FieldDefinition, + raw: FieldDefinition, language: &'a Language, + config: Config, ) -> Self { Self { node_name: node_name.to_string(), name: name.to_string(), raw, language, + config, } } fn field_id(&self) -> u16 { @@ -56,15 +61,15 @@ impl<'a> Field<'a> { let original_name = &self.name; if self.raw.multiple { quote! { - #field_name_ident: get_multiple_children_by_field_name(&node, #original_name, buffer)? + #field_name_ident: get_multiple_children_by_field_name(db, &node, #original_name, buffer)? } } else if !self.raw.required { quote! { - #field_name_ident: Box::new(get_optional_child_by_field_name(&node, #original_name, buffer)?) + #field_name_ident: Box::new(get_optional_child_by_field_name(db, &node, #original_name, buffer)?) } } else { quote! 
{ - #field_name_ident: Box::new(get_child_by_field_name(&node, #original_name, buffer)?) + #field_name_ident: Box::new(get_child_by_field_name(db,&node, #original_name, buffer)?) } } } @@ -156,20 +161,27 @@ impl<'a> Field<'a> { pub fn get_struct_field(&self) -> TokenStream { let field_name_ident = format_ident!("{}", self.name()); let converted_type_name = format_ident!("{}", self.type_name()); - if self.raw.multiple { + let bounds = if self.config.serialize { quote! { #[rkyv(omit_bounds)] - pub #field_name_ident: Vec<#converted_type_name> + } + } else { + quote! {} + }; + if self.raw.multiple { + quote! { + #bounds + pub #field_name_ident: Vec<#converted_type_name<'db>> } } else if !self.raw.required { quote! { - #[rkyv(omit_bounds)] - pub #field_name_ident: Box> + #bounds + pub #field_name_ident: Box>> } } else { quote! { - #[rkyv(omit_bounds)] - pub #field_name_ident: Box<#converted_type_name> + #bounds + pub #field_name_ident: Box<#converted_type_name<'db>> } } } @@ -219,7 +231,13 @@ mod tests { fn test_field_normalized_name() { let field_definition = create_test_field_definition("test_type", false, true); let language = get_language_no_nodes(); - let field = Field::new("node", "field", &field_definition, &language); + let field = Field::new( + "node", + "field", + field_definition, + &language, + Config::default(), + ); assert_eq!(field.normalized_name(), "Field"); } @@ -231,7 +249,13 @@ mod tests { true, ); let language = get_language_no_nodes(); - let field = Field::new("test_node", "test_field", &field_definition, &language); + let field = Field::new( + "test_node", + "test_field", + field_definition.clone(), + &language, + Config::default(), + ); assert_eq!( field.types(), field_definition.types.iter().collect::>() @@ -246,7 +270,13 @@ mod tests { true, ); let language = get_language_no_nodes(); - let field = Field::new("Node", "field", &field_definition, &language); + let field = Field::new( + "Node", + "field", + field_definition, + &language, + 
Config::default(), + ); assert_eq!(field.type_name(), "NodeField"); } @@ -254,17 +284,35 @@ mod tests { fn test_get_struct_field() { let field_definition = create_test_field_definition("test_type", false, true); let language = get_language_no_nodes(); - let field = Field::new("test_node", "test_field", &field_definition, &language); + let field = Field::new( + "test_node", + "test_field", + field_definition, + &language, + Config::default(), + ); insta::assert_debug_snapshot!(snapshot_tokens(&field.get_struct_field())); // Test optional field let optional_definition = create_test_field_definition("test_type", false, false); - let optional_field = Field::new("test_node", "test_field", &optional_definition, &language); + let optional_field = Field::new( + "test_node", + "test_field", + optional_definition, + &language, + Config::default(), + ); insta::assert_debug_snapshot!(snapshot_tokens(&optional_field.get_struct_field())); // Test multiple field let multiple_definition = create_test_field_definition("test_type", true, true); - let multiple_field = Field::new("test_node", "test_field", &multiple_definition, &language); + let multiple_field = Field::new( + "test_node", + "test_field", + multiple_definition, + &language, + Config::default(), + ); insta::assert_debug_snapshot!(snapshot_tokens(&multiple_field.get_struct_field())); } @@ -272,17 +320,35 @@ mod tests { fn test_get_constructor_field() { let field_definition = create_test_field_definition("test_type", false, true); let language = get_language_no_nodes(); - let field = Field::new("test_node", "test_field", &field_definition, &language); + let field = Field::new( + "test_node", + "test_field", + field_definition, + &language, + Config::default(), + ); insta::assert_debug_snapshot!(snapshot_tokens(&field.get_constructor_field())); // Test optional field let optional_definition = create_test_field_definition("test_type", false, false); - let optional_field = Field::new("test_node", "test_field", 
&optional_definition, &language); + let optional_field = Field::new( + "test_node", + "test_field", + optional_definition, + &language, + Config::default(), + ); insta::assert_debug_snapshot!(snapshot_tokens(&optional_field.get_constructor_field())); // Test multiple field let multiple_definition = create_test_field_definition("test_type", true, true); - let multiple_field = Field::new("test_node", "test_field", &multiple_definition, &language); + let multiple_field = Field::new( + "test_node", + "test_field", + multiple_definition, + &language, + Config::default(), + ); insta::assert_debug_snapshot!(snapshot_tokens(&multiple_field.get_constructor_field())); } @@ -290,7 +356,13 @@ mod tests { fn test_get_children_field() { let field_definition = create_test_field_definition("test_type", false, true); let language = get_language_no_nodes(); - let field = Field::new("test_node", "test_field", &field_definition, &language); + let field = Field::new( + "test_node", + "test_field", + field_definition, + &language, + Config::default(), + ); assert_eq!( field.get_children_field(true).to_string(), @@ -299,7 +371,13 @@ mod tests { // Test optional field let optional_definition = create_test_field_definition("test_type", false, false); - let optional_field = Field::new("test_node", "test_field", &optional_definition, &language); + let optional_field = Field::new( + "test_node", + "test_field", + optional_definition, + &language, + Config::default(), + ); assert_eq!( optional_field.get_children_field(true).to_string(), @@ -311,7 +389,13 @@ mod tests { // Test multiple field let multiple_definition = create_test_field_definition("test_type", true, true); - let multiple_field = Field::new("test_node", "test_field", &multiple_definition, &language); + let multiple_field = Field::new( + "test_node", + "test_field", + multiple_definition, + &language, + Config::default(), + ); assert_eq!( multiple_field.get_children_field(true).to_string(), @@ -323,7 +407,13 @@ mod tests { fn 
test_get_children_by_field_name_field() { let field_definition = create_test_field_definition("test_type", false, true); let language = get_language_no_nodes(); - let field = Field::new("test_node", "test_field", &field_definition, &language); + let field = Field::new( + "test_node", + "test_field", + field_definition, + &language, + Config::default(), + ); assert_eq!( field.get_children_by_field_name_field(true).to_string(), @@ -332,7 +422,13 @@ mod tests { // Test optional field let optional_definition = create_test_field_definition("test_type", false, false); - let optional_field = Field::new("test_node", "test_field", &optional_definition, &language); + let optional_field = Field::new( + "test_node", + "test_field", + optional_definition, + &language, + Config::default(), + ); assert_eq!( optional_field.get_children_by_field_name_field(true).to_string(), @@ -341,7 +437,13 @@ mod tests { // Test multiple field let multiple_definition = create_test_field_definition("test_type", true, true); - let multiple_field = Field::new("test_node", "test_field", &multiple_definition, &language); + let multiple_field = Field::new( + "test_node", + "test_field", + multiple_definition, + &language, + Config::default(), + ); assert_eq!( multiple_field.get_children_by_field_name_field(true).to_string(), diff --git a/codegen-sdk-cst-generator/src/generator/node.rs b/codegen-sdk-cst-generator/src/generator/node.rs index 08d531ff..861aec3c 100644 --- a/codegen-sdk-cst-generator/src/generator/node.rs +++ b/codegen-sdk-cst-generator/src/generator/node.rs @@ -1,3 +1,5 @@ +use std::{collections::HashMap, sync::Arc}; + #[double] use codegen_sdk_common::language::Language; use codegen_sdk_common::{naming::normalize_type_name, parser::TypeDefinition}; @@ -6,21 +8,36 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote}; use super::field::Field; -use crate::generator::utils::{get_comment_type, get_serialize_bounds}; +use crate::{ + Config, + generator::utils::{get_comment_type, 
get_serialize_bounds}, +}; #[derive(Debug)] pub struct Node<'a> { - raw: &'a codegen_sdk_common::parser::Node, + raw: Arc, pub subenums: Vec, pub fields: Vec>, language: &'a Language, + config: Config, + normalized_name: String, } impl<'a> Node<'a> { - pub fn new(raw: &'a codegen_sdk_common::parser::Node, language: &'a Language) -> Self { + pub fn new( + raw: Arc, + language: &'a Language, + config: Config, + ) -> Self { let mut fields = Vec::new(); let normalized_name = normalize_type_name(&raw.type_name, raw.named); if let Some(raw_fields) = &raw.fields { for (name, field) in raw_fields.fields.iter() { - fields.push(Field::new(&normalized_name, name, field, language)); + fields.push(Field::new( + &normalized_name, + name, + field.clone(), + language, + config.clone(), + )); } } fields.sort_by_key(|f| f.normalized_name().clone()); @@ -29,6 +46,8 @@ impl<'a> Node<'a> { subenums: Vec::new(), fields, language, + config: config, + normalized_name, } } pub fn kind(&self) -> &str { @@ -38,7 +57,7 @@ impl<'a> Node<'a> { self.language.kind_id(&self.raw.type_name, self.raw.named) } pub fn normalize_name(&self) -> String { - normalize_type_name(&self.raw.type_name, self.raw.named) + self.normalized_name.clone() } pub fn type_definition(&self) -> TypeDefinition { TypeDefinition { @@ -51,21 +70,22 @@ impl<'a> Node<'a> { self.subenums.push(subenum); } } - pub fn get_enum_tokens(&self) -> TokenStream { + pub fn get_enum_tokens(&self, subenum_name_map: &HashMap) -> TokenStream { let name = format_ident!("{}", self.normalize_name()); let subenum_names = &self .subenums .iter() - .map(|s| format_ident!("{}", normalize_type_name(s, true))) + .map(|s| subenum_name_map.get(s).unwrap_or(&s)) + .map(|s| format_ident!("{}", s)) .collect::>(); if subenum_names.is_empty() { quote! { - #name(#name) + #name(#name<'db1>) } } else { quote! 
{ #[subenum(#(#subenum_names), *)] - #name(#name) + #name(#name<'db1>) } } } @@ -99,9 +119,16 @@ impl<'a> Node<'a> { fn get_children_field(&self) -> TokenStream { if self.has_children() { let children_type_name = format_ident!("{}", self.children_struct_name()); + let bounds = if self.config.serialize { + quote! { + #[rkyv(omit_bounds)] + } + } else { + quote! {} + }; quote! { - #[rkyv(omit_bounds)] - pub children: Vec<#children_type_name>, + #bounds + pub _children: Vec<#children_type_name<'db>>, } } else { quote! {} @@ -117,38 +144,40 @@ impl<'a> Node<'a> { .collect::>(); let children_field = self.get_children_field(); let name = format_ident!("{}", self.normalize_name()); - let serialize_bounds = get_serialize_bounds(); let trait_impls = self.get_trait_implementations(); + let derives = if self.config.serialize { + let serialize_bounds = get_serialize_bounds(); + quote! { + #[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive, Eq, PartialEq, salsa::Update)] + #serialize_bounds + } + } else { + quote! { + #[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] + } + }; quote! 
{ - #[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] - #serialize_bounds - pub struct #name { + #derives + pub struct #name<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, #children_field @@ -161,7 +190,7 @@ impl<'a> Node<'a> { fn get_children_constructor(&self) -> TokenStream { if self.has_children() { quote! { - children: named_children_without_field_names(node, buffer)? + _children: named_children_without_field_names(db, node, buffer)? } } else { quote! {} @@ -176,34 +205,43 @@ impl<'a> Node<'a> { constructor_fields.push(self.get_children_constructor()); quote! 
{ - impl FromNode for #name { - fn from_node(node: tree_sitter::Node, buffer: &Arc) -> Result { + impl<'db> FromNode<'db> for #name<'db> { + fn from_node(db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), #(#constructor_fields),* - }) + }) } } } } fn get_children_impl(&self) -> TokenStream { let name = format_ident!("{}", self.normalize_name()); - let children_type_name = format_ident!("{}", self.children_struct_name()); + + let children_type_name = self.children_struct_name(); + let children_type_ident = format_ident!("{}", children_type_name); + let mut children_type_generic = quote! {#children_type_ident}; + if children_type_name != "Self" { + children_type_generic = quote! {#children_type_generic<'db1>}; + } + let children_field = self.get_children_field_impl(); let children_by_field_name = self.get_children_by_field_name_impl(); let children_by_field_id = self.get_children_by_field_id_impl(); quote! { - impl HasChildren for #name { - type Child = #children_type_name; + impl<'db1> HasChildren<'db1> for #name<'db1> { + type Child = #children_type_generic; #children_field #children_by_field_name #children_by_field_id @@ -215,7 +253,7 @@ impl<'a> Node<'a> { let children_impl = self.get_children_impl(); quote! 
{ - impl CSTNode for #name { + impl<'db> CSTNode<'db> for #name<'db> { fn kind(&self) -> &str { &self._kind } @@ -225,10 +263,10 @@ impl<'a> Node<'a> { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -248,6 +286,11 @@ impl<'a> Node<'a> { } } #children_impl + impl<'db> std::hash::Hash for #name<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } + } } } fn get_children_field_impl(&self) -> TokenStream { @@ -267,7 +310,7 @@ impl<'a> Node<'a> { let children_init = if self.has_children() { quote! { - self.children.iter().cloned().collect() + self._children.iter().cloned().collect() } } else { quote! { @@ -386,13 +429,16 @@ mod tests { fn test_get_enum_tokens() { let base_node = create_test_node("test"); let language = get_language_no_nodes(); - let mut node = Node::new(&base_node, &language); - - let tokens = node.get_enum_tokens(); + let mut node = Node::new(Arc::new(base_node), &language, Config::default()); + let mut subenum_name_map = HashMap::new(); + for subenum in &node.subenums { + subenum_name_map.insert(subenum.clone(), normalize_type_name(subenum, true)); + } + let tokens = node.get_enum_tokens(&subenum_name_map); insta::assert_debug_snapshot!(snapshot_tokens(&tokens)); node.add_subenum("subenum".to_string()); - let tokens = node.get_enum_tokens(); + let tokens = node.get_enum_tokens(&subenum_name_map); insta::assert_debug_snapshot!(snapshot_tokens(&tokens)); } @@ -400,7 +446,7 @@ mod tests { fn test_get_struct_tokens_simple() { let raw_node = create_test_node("test_node"); let language = get_language_no_nodes(); - let node = Node::new(&raw_node, &language); + let node = Node::new(Arc::new(raw_node), &language, Config::default()); insta::assert_debug_snapshot!(snapshot_tokens(&node.get_struct_tokens())); } @@ -421,7 
+467,7 @@ mod tests { )], ); let language = get_language_no_nodes(); - let node = Node::new(&raw_node, &language); + let node = Node::new(Arc::new(raw_node), &language, Config::default()); insta::assert_debug_snapshot!(snapshot_tokens(&node.get_struct_tokens())); } @@ -467,7 +513,7 @@ mod tests { ); let nodes = vec![raw_node.clone()]; let language = get_language(nodes); - let node = Node::new(&raw_node, &language); + let node = Node::new(Arc::new(raw_node), &language, Config::default()); insta::assert_debug_snapshot!(snapshot_tokens(&node.get_struct_tokens())); } @@ -476,7 +522,7 @@ mod tests { let raw_node = create_test_node_with_children("test_node", vec!["child_type_a", "child_type_b"]); let language = get_language_no_nodes(); - let node = Node::new(&raw_node, &language); + let node = Node::new(Arc::new(raw_node), &language, Config::default()); insta::assert_debug_snapshot!(snapshot_tokens(&node.get_struct_tokens())); } @@ -484,7 +530,7 @@ mod tests { fn test_get_struct_tokens_with_single_child_type() { let raw_node = create_test_node_with_children("test_node", vec!["child_type"]); let language = get_language_no_nodes(); - let node = Node::new(&raw_node, &language); + let node = Node::new(Arc::new(raw_node), &language, Config::default()); insta::assert_debug_snapshot!(snapshot_tokens(&node.get_struct_tokens())); } @@ -492,7 +538,7 @@ mod tests { fn test_get_trait_implementations() { let raw_node = create_test_node("test_node"); let language = get_language_no_nodes(); - let node = Node::new(&raw_node, &language); + let node = Node::new(Arc::new(raw_node), &language, Config::default()); insta::assert_debug_snapshot!(snapshot_tokens(&node.get_trait_implementations())); } @@ -513,7 +559,7 @@ mod tests { )], ); let language = get_language_no_nodes(); - let node = Node::new(&raw_node, &language); + let node = Node::new(Arc::new(raw_node), &language, Config::default()); insta::assert_debug_snapshot!(snapshot_tokens(&node.get_children_field_impl())); } @@ -534,7 +580,7 
@@ mod tests { )], ); let language = get_language_no_nodes(); - let node = Node::new(&raw_node, &language); + let node = Node::new(Arc::new(raw_node), &language, Config::default()); insta::assert_debug_snapshot!(snapshot_tokens(&node.get_children_by_field_name_impl())); } } diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field-2.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field-2.snap index 0e445c30..8aeda198 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field-2.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field-2.snap @@ -2,4 +2,4 @@ source: codegen-sdk-cst-generator/src/generator/field.rs expression: snapshot_tokens(&optional_field.get_constructor_field()) --- -test_field : Box :: new (get_optional_child_by_field_name (& node , "test_field" , buffer) ?) +test_field : Box :: new (get_optional_child_by_field_name (db , & node , "test_field" , buffer) ?) 
diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field-3.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field-3.snap index 5b204825..d46e6cd6 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field-3.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field-3.snap @@ -2,4 +2,4 @@ source: codegen-sdk-cst-generator/src/generator/field.rs expression: snapshot_tokens(&multiple_field.get_constructor_field()) --- -test_field : get_multiple_children_by_field_name (& node , "test_field" , buffer) ? +test_field : get_multiple_children_by_field_name (db , & node , "test_field" , buffer) ? diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field.snap index 9ecc8a3a..4e6146df 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_constructor_field.snap @@ -2,4 +2,4 @@ source: codegen-sdk-cst-generator/src/generator/field.rs expression: snapshot_tokens(&field.get_constructor_field()) --- -test_field : Box :: new (get_child_by_field_name (& node , "test_field" , buffer) ?) +test_field : Box :: new (get_child_by_field_name (db , & node , "test_field" , buffer) ?) 
diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field-2.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field-2.snap index 4f671feb..9895ee7c 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field-2.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field-2.snap @@ -2,4 +2,4 @@ source: codegen-sdk-cst-generator/src/generator/field.rs expression: snapshot_tokens(&optional_field.get_struct_field()) --- -# [rkyv (omit_bounds)] pub test_field : Box < Option < TestType >> +pub test_field : Box < Option < TestType < 'db >> > diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field-3.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field-3.snap index aaaa11c1..0b1e4b9b 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field-3.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field-3.snap @@ -2,4 +2,4 @@ source: codegen-sdk-cst-generator/src/generator/field.rs expression: snapshot_tokens(&multiple_field.get_struct_field()) --- -# [rkyv (omit_bounds)] pub test_field : Vec < TestType > +pub test_field : Vec < TestType < 'db >> diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field.snap index 3b2d6114..09ab734f 100644 --- 
a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__field__tests__get_struct_field.snap @@ -2,4 +2,4 @@ source: codegen-sdk-cst-generator/src/generator/field.rs expression: snapshot_tokens(&field.get_struct_field()) --- -# [rkyv (omit_bounds)] pub test_field : Box < TestType > +pub test_field : Box < TestType < 'db >> diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_enum_tokens-2.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_enum_tokens-2.snap index 9807811f..1dd92263 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_enum_tokens-2.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_enum_tokens-2.snap @@ -2,4 +2,4 @@ source: codegen-sdk-cst-generator/src/generator/node.rs expression: snapshot_tokens(&tokens) --- -# [subenum (Subenum)] Test (Test) +# [subenum (subenum)] Test (Test < 'db1 >) diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_enum_tokens.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_enum_tokens.snap index 52740de3..94ee990b 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_enum_tokens.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_enum_tokens.snap @@ -2,4 +2,4 @@ source: codegen-sdk-cst-generator/src/generator/node.rs expression: snapshot_tokens(&tokens) --- -Test (Test) +Test (Test < 'db1 >) diff --git 
a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_complex.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_complex.snap index ccb50518..f6a95d76 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_complex.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_complex.snap @@ -2,85 +2,67 @@ source: codegen-sdk-cst-generator/src/generator/node.rs expression: snapshot_tokens(&node.get_struct_tokens()) --- -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct TestNode { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct TestNode<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - #[rkyv(omit_bounds)] - pub multiple_field: Vec, - #[rkyv(omit_bounds)] - pub optional_field: Box>, - #[rkyv(omit_bounds)] - pub required_field: Box, + pub multiple_field: Vec>, + pub optional_field: Box>>, + pub 
required_field: Box>, } -impl FromNode for TestNode { +impl<'db> FromNode<'db> for TestNode<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), multiple_field: get_multiple_children_by_field_name( + db, &node, "multiple_field", buffer, )?, optional_field: Box::new( - get_optional_child_by_field_name(&node, "optional_field", buffer)?, + get_optional_child_by_field_name(db, &node, "optional_field", buffer)?, ), required_field: Box::new( - get_child_by_field_name(&node, "required_field", buffer)?, + get_child_by_field_name(db, &node, "required_field", buffer)?, ), }) } } -impl CSTNode for TestNode { +impl<'db> CSTNode<'db> for TestNode<'db> { fn kind(&self) -> &str { &self._kind } @@ -90,10 +72,10 @@ impl CSTNode for TestNode { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -112,8 +94,8 @@ impl CSTNode for TestNode { self.id } } -impl HasChildren for TestNode { - type Child = TestNodeChildren; +impl<'db1> HasChildren<'db1> for TestNode<'db1> { + type Child = TestNodeChildren<'db1>; fn children(&self) -> Vec { let mut children: Vec<_> = vec![]; children @@ -196,3 +178,8 @@ impl HasChildren for TestNode { } } } +impl<'db> std::hash::Hash for TestNode<'db> { + fn hash(&self, state: &mut H) { + 
self.id.hash(state); + } +} diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_simple.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_simple.snap index f8be64e1..ffe3d5e9 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_simple.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_simple.snap @@ -2,59 +2,43 @@ source: codegen-sdk-cst-generator/src/generator/node.rs expression: snapshot_tokens(&node.get_struct_tokens()) --- -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct TestNode { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct TestNode<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for TestNode { +impl<'db> FromNode<'db> for TestNode<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + 
let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -63,7 +47,7 @@ impl FromNode for TestNode { }) } } -impl CSTNode for TestNode { +impl<'db> CSTNode<'db> for TestNode<'db> { fn kind(&self) -> &str { &self._kind } @@ -73,10 +57,10 @@ impl CSTNode for TestNode { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -95,7 +79,7 @@ impl CSTNode for TestNode { self.id } } -impl HasChildren for TestNode { +impl<'db1> HasChildren<'db1> for TestNode<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -111,3 +95,8 @@ impl HasChildren for TestNode { } } } +impl<'db> std::hash::Hash for TestNode<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_children.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_children.snap index cd0d621f..8b641ac5 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_children.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_children.snap @@ -2,71 +2,54 @@ source: codegen-sdk-cst-generator/src/generator/node.rs 
expression: snapshot_tokens(&node.get_struct_tokens()) --- -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct TestNode { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct TestNode<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - #[rkyv(omit_bounds)] - pub children: Vec, + pub _children: Vec>, } -impl FromNode for TestNode { +impl<'db> FromNode<'db> for TestNode<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), - children: named_children_without_field_names(node, buffer)?, + _children: named_children_without_field_names(db, node, 
buffer)?, }) } } -impl CSTNode for TestNode { +impl<'db> CSTNode<'db> for TestNode<'db> { fn kind(&self) -> &str { &self._kind } @@ -76,10 +59,10 @@ impl CSTNode for TestNode { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -98,10 +81,10 @@ impl CSTNode for TestNode { self.id } } -impl HasChildren for TestNode { - type Child = TestNodeChildren; +impl<'db1> HasChildren<'db1> for TestNode<'db1> { + type Child = TestNodeChildren<'db1>; fn children(&self) -> Vec { - let mut children: Vec<_> = self.children.iter().cloned().collect(); + let mut children: Vec<_> = self._children.iter().cloned().collect(); children.sort_by_key(|c| c.start_byte()); children } @@ -116,3 +99,8 @@ impl HasChildren for TestNode { } } } +impl<'db> std::hash::Hash for TestNode<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_fields.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_fields.snap index 0beed022..65ebeccb 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_fields.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_fields.snap @@ -2,71 +2,56 @@ source: codegen-sdk-cst-generator/src/generator/node.rs expression: snapshot_tokens(&node.get_struct_tokens()) --- -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] 
-#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct TestNode { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct TestNode<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - #[rkyv(omit_bounds)] - pub test_field: Box, + pub test_field: Box>, } -impl FromNode for TestNode { +impl<'db> FromNode<'db> for TestNode<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), - test_field: Box::new(get_child_by_field_name(&node, "test_field", buffer)?), + test_field: Box::new( + get_child_by_field_name(db, &node, "test_field", buffer)?, + ), }) } } -impl CSTNode for TestNode { +impl<'db> CSTNode<'db> for TestNode<'db> { fn kind(&self) -> &str { &self._kind } @@ -76,10 +61,10 @@ impl CSTNode for TestNode { fn end_byte(&self) -> usize { 
self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -98,8 +83,8 @@ impl CSTNode for TestNode { self.id } } -impl HasChildren for TestNode { - type Child = TestNodeChildren; +impl<'db1> HasChildren<'db1> for TestNode<'db1> { + type Child = TestNodeChildren<'db1>; fn children(&self) -> Vec { let mut children: Vec<_> = vec![]; children @@ -133,3 +118,8 @@ impl HasChildren for TestNode { } } } +impl<'db> std::hash::Hash for TestNode<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_single_child_type.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_single_child_type.snap index cd0d621f..8b641ac5 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_single_child_type.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_struct_tokens_with_single_child_type.snap @@ -2,71 +2,54 @@ source: codegen-sdk-cst-generator/src/generator/node.rs expression: snapshot_tokens(&node.get_struct_tokens()) --- -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct TestNode { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct TestNode<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] 
end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - #[rkyv(omit_bounds)] - pub children: Vec, + pub _children: Vec>, } -impl FromNode for TestNode { +impl<'db> FromNode<'db> for TestNode<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), - children: named_children_without_field_names(node, buffer)?, + _children: named_children_without_field_names(db, node, buffer)?, }) } } -impl CSTNode for TestNode { +impl<'db> CSTNode<'db> for TestNode<'db> { fn kind(&self) -> &str { &self._kind } @@ -76,10 +59,10 @@ impl CSTNode for TestNode { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -98,10 +81,10 @@ impl CSTNode for TestNode { self.id } } -impl HasChildren for TestNode { - type Child = TestNodeChildren; 
+impl<'db1> HasChildren<'db1> for TestNode<'db1> { + type Child = TestNodeChildren<'db1>; fn children(&self) -> Vec { - let mut children: Vec<_> = self.children.iter().cloned().collect(); + let mut children: Vec<_> = self._children.iter().cloned().collect(); children.sort_by_key(|c| c.start_byte()); children } @@ -116,3 +99,8 @@ impl HasChildren for TestNode { } } } +impl<'db> std::hash::Hash for TestNode<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_trait_implementations.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_trait_implementations.snap index 41dcf746..c3a6f905 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_trait_implementations.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__node__tests__get_trait_implementations.snap @@ -2,7 +2,7 @@ source: codegen-sdk-cst-generator/src/generator/node.rs expression: snapshot_tokens(&node.get_trait_implementations()) --- -impl CSTNode for TestNode { +impl<'db> CSTNode<'db> for TestNode<'db> { fn kind(&self) -> &str { &self._kind } @@ -12,10 +12,10 @@ impl CSTNode for TestNode { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -34,7 +34,7 @@ impl CSTNode for TestNode { self.id } } -impl HasChildren for TestNode { +impl<'db1> HasChildren<'db1> for TestNode<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -50,3 +50,8 @@ impl HasChildren for TestNode { } } } +impl<'db> std::hash::Hash for TestNode<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + 
} +} diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums.snap index 8c243b48..6b78a583 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums.snap @@ -2,26 +2,39 @@ source: codegen-sdk-cst-generator/src/generator/state.rs expression: snapshot_tokens(&enum_tokens) --- -#[subenum( - NodeCChildren(derive(Archive, Deserialize, Serialize)), - NodeCField(derive(Archive, Deserialize, Serialize)) -)] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { +#[subenum(NodeCChildren, NodeCField)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { #[subenum(NodeCChildren, NodeCField)] - NodeA(NodeA), + NodeA(NodeA<'db1>), #[subenum(NodeCChildren, NodeCField)] - NodeB(NodeB), - NodeC(NodeC), + NodeB(NodeB<'db1>), + NodeC(NodeC<'db1>), } -impl FromNode for NodeCChildren { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: NodeA<'db3>) -> Self { + Self::NodeA(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: NodeB<'db3>) -> Self { + Self::NodeB(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: NodeC<'db3>) -> Self { + Self::NodeC(node) + } +} +impl<'db4> FromNode<'db4> for NodeCChildren<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::NodeB(NodeB::from_node(node, buffer)?)), + 0u16 => Ok(Self::NodeB(NodeB::from_node(db, node, buffer)?)), _ => { Err(ParseError::UnexpectedNode { node_type: 
node.kind().to_string(), @@ -31,13 +44,14 @@ impl FromNode for NodeCChildren { } } } -impl FromNode for NodeCField { +impl<'db4> FromNode<'db4> for NodeCField<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::NodeB(NodeB::from_node(node, buffer)?)), + 0u16 => Ok(Self::NodeB(NodeB::from_node(db, node, buffer)?)), _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums_missing_node-2.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums_missing_node-2.snap new file mode 100644 index 00000000..d2ce3d00 --- /dev/null +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums_missing_node-2.snap @@ -0,0 +1,327 @@ +--- +source: codegen-sdk-cst-generator/src/generator/state.rs +expression: snapshot_tokens(&struct_tokens) +--- +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct AnonymousNodeA<'db> { + #[drive(skip)] + start_byte: usize, + #[drive(skip)] + end_byte: usize, + #[drive(skip)] + _kind: std::string::String, + #[drive(skip)] + start_position: Point<'db>, + #[drive(skip)] + end_position: Point<'db>, + #[drive(skip)] + buffer: Arc, + #[drive(skip)] + kind_id: u16, + #[drive(skip)] + is_error: bool, + #[drive(skip)] + named: bool, + #[drive(skip)] + id: usize, +} +impl<'db> FromNode<'db> for AnonymousNodeA<'db> { + fn from_node( + db: &'db dyn salsa::Database, + node: tree_sitter::Node, + buffer: &Arc, + ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); + Ok(Self { + start_byte: node.start_byte(), + end_byte: node.end_byte(), + _kind: node.kind().to_string(), + 
start_position: start_position, + end_position: end_position, + buffer: buffer.clone(), + kind_id: node.kind_id(), + is_error: node.is_error(), + named: node.is_named(), + id: node.id(), + }) + } +} +impl<'db> CSTNode<'db> for AnonymousNodeA<'db> { + fn kind(&self) -> &str { + &self._kind + } + fn start_byte(&self) -> usize { + self.start_byte + } + fn end_byte(&self) -> usize { + self.end_byte + } + fn start_position(&self) -> Point<'db> { + self.start_position + } + fn end_position(&self) -> Point<'db> { + self.end_position + } + fn buffer(&self) -> &Bytes { + &self.buffer + } + fn kind_id(&self) -> u16 { + self.kind_id + } + fn is_error(&self) -> bool { + self.is_error + } + fn is_named(&self) -> bool { + self.named + } + fn id(&self) -> usize { + self.id + } +} +impl<'db1> HasChildren<'db1> for AnonymousNodeA<'db1> { + type Child = Self; + fn children(&self) -> Vec { + vec![] + } + fn children_by_field_name(&self, field_name: &str) -> Vec { + match field_name { + _ => vec![], + } + } + fn children_by_field_id(&self, field_id: u16) -> Vec { + match field_id { + _ => vec![], + } + } +} +impl<'db> std::hash::Hash for AnonymousNodeA<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct NodeB<'db> { + #[drive(skip)] + start_byte: usize, + #[drive(skip)] + end_byte: usize, + #[drive(skip)] + _kind: std::string::String, + #[drive(skip)] + start_position: Point<'db>, + #[drive(skip)] + end_position: Point<'db>, + #[drive(skip)] + buffer: Arc, + #[drive(skip)] + kind_id: u16, + #[drive(skip)] + is_error: bool, + #[drive(skip)] + named: bool, + #[drive(skip)] + id: usize, +} +impl<'db> FromNode<'db> for NodeB<'db> { + fn from_node( + db: &'db dyn salsa::Database, + node: tree_sitter::Node, + buffer: &Arc, + ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); + Ok(Self { + start_byte: 
node.start_byte(), + end_byte: node.end_byte(), + _kind: node.kind().to_string(), + start_position: start_position, + end_position: end_position, + buffer: buffer.clone(), + kind_id: node.kind_id(), + is_error: node.is_error(), + named: node.is_named(), + id: node.id(), + }) + } +} +impl<'db> CSTNode<'db> for NodeB<'db> { + fn kind(&self) -> &str { + &self._kind + } + fn start_byte(&self) -> usize { + self.start_byte + } + fn end_byte(&self) -> usize { + self.end_byte + } + fn start_position(&self) -> Point<'db> { + self.start_position + } + fn end_position(&self) -> Point<'db> { + self.end_position + } + fn buffer(&self) -> &Bytes { + &self.buffer + } + fn kind_id(&self) -> u16 { + self.kind_id + } + fn is_error(&self) -> bool { + self.is_error + } + fn is_named(&self) -> bool { + self.named + } + fn id(&self) -> usize { + self.id + } +} +impl<'db1> HasChildren<'db1> for NodeB<'db1> { + type Child = Self; + fn children(&self) -> Vec { + vec![] + } + fn children_by_field_name(&self, field_name: &str) -> Vec { + match field_name { + _ => vec![], + } + } + fn children_by_field_id(&self, field_id: u16) -> Vec { + match field_id { + _ => vec![], + } + } +} +impl<'db> std::hash::Hash for NodeB<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct NodeC<'db> { + #[drive(skip)] + start_byte: usize, + #[drive(skip)] + end_byte: usize, + #[drive(skip)] + _kind: std::string::String, + #[drive(skip)] + start_position: Point<'db>, + #[drive(skip)] + end_position: Point<'db>, + #[drive(skip)] + buffer: Arc, + #[drive(skip)] + kind_id: u16, + #[drive(skip)] + is_error: bool, + #[drive(skip)] + named: bool, + #[drive(skip)] + id: usize, + pub field: Vec>, +} +impl<'db> FromNode<'db> for NodeC<'db> { + fn from_node( + db: &'db dyn salsa::Database, + node: tree_sitter::Node, + buffer: &Arc, + ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = 
Point::from(db, node.end_position()); + Ok(Self { + start_byte: node.start_byte(), + end_byte: node.end_byte(), + _kind: node.kind().to_string(), + start_position: start_position, + end_position: end_position, + buffer: buffer.clone(), + kind_id: node.kind_id(), + is_error: node.is_error(), + named: node.is_named(), + id: node.id(), + field: get_multiple_children_by_field_name(db, &node, "field", buffer)?, + }) + } +} +impl<'db> CSTNode<'db> for NodeC<'db> { + fn kind(&self) -> &str { + &self._kind + } + fn start_byte(&self) -> usize { + self.start_byte + } + fn end_byte(&self) -> usize { + self.end_byte + } + fn start_position(&self) -> Point<'db> { + self.start_position + } + fn end_position(&self) -> Point<'db> { + self.end_position + } + fn buffer(&self) -> &Bytes { + &self.buffer + } + fn kind_id(&self) -> u16 { + self.kind_id + } + fn is_error(&self) -> bool { + self.is_error + } + fn is_named(&self) -> bool { + self.named + } + fn id(&self) -> usize { + self.id + } +} +impl<'db1> HasChildren<'db1> for NodeC<'db1> { + type Child = NodeCChildren<'db1>; + fn children(&self) -> Vec { + let mut children: Vec<_> = vec![]; + children + .extend( + self + .field + .iter() + .map(|child| { + Self::Child::try_from(NodeTypes::from(child.clone())).unwrap() + }), + ); + children.sort_by_key(|c| c.start_byte()); + children + } + fn children_by_field_name(&self, field_name: &str) -> Vec { + match field_name { + "field" => { + self.field + .iter() + .map(|child| { + Self::Child::try_from(NodeTypes::from(child.clone())).unwrap() + }) + .collect() + } + _ => vec![], + } + } + fn children_by_field_id(&self, field_id: u16) -> Vec { + match field_id { + 1u16 => { + self.field + .iter() + .map(|child| { + Self::Child::try_from(NodeTypes::from(child.clone())).unwrap() + }) + .collect() + } + _ => vec![], + } + } +} +impl<'db> std::hash::Hash for NodeC<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} diff --git 
a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums_missing_node.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums_missing_node.snap new file mode 100644 index 00000000..ccc1c455 --- /dev/null +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__add_field_subenums_missing_node.snap @@ -0,0 +1,65 @@ +--- +source: codegen-sdk-cst-generator/src/generator/state.rs +expression: snapshot_tokens(&enum_tokens) +--- +#[subenum(NodeCChildren, NodeCField)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { + #[subenum(NodeCChildren, NodeCField)] + AnonymousNodeA(AnonymousNodeA<'db1>), + #[subenum(NodeCField)] + NodeB(NodeB<'db1>), + NodeC(NodeC<'db1>), +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: AnonymousNodeA<'db3>) -> Self { + Self::AnonymousNodeA(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: NodeB<'db3>) -> Self { + Self::NodeB(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: NodeC<'db3>) -> Self { + Self::NodeC(node) + } +} +impl<'db4> FromNode<'db4> for NodeCChildren<'db4> { + fn from_node( + db: &'db4 dyn salsa::Database, + node: tree_sitter::Node, + buffer: &Arc, + ) -> Result { + match node.kind_id() { + 0u16 => { + Ok(Self::AnonymousNodeA(AnonymousNodeA::from_node(db, node, buffer)?)) + } + _ => { + Err(ParseError::UnexpectedNode { + node_type: node.kind().to_string(), + backtrace: Backtrace::capture(), + }) + } + } + } +} +impl<'db4> FromNode<'db4> for NodeCField<'db4> { + fn from_node( + db: &'db4 dyn salsa::Database, + node: tree_sitter::Node, + buffer: &Arc, + ) -> Result { + match node.kind_id() { + 0u16 => Ok(Self::NodeB(NodeB::from_node(db, node, buffer)?)), + _ => { + Err(ParseError::UnexpectedNode { + node_type: 
node.kind().to_string(), + backtrace: Backtrace::capture(), + }) + } + } + } +} diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__get_enum.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__get_enum.snap index e6e7f580..fb15323e 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__get_enum.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__get_enum.snap @@ -2,8 +2,13 @@ source: codegen-sdk-cst-generator/src/generator/state.rs expression: snapshot_tokens(&enum_tokens) --- -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { - Test(Test), +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { + Test(Test<'db1>), +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: Test<'db3>) -> Self { + Self::Test(node) + } } diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__get_structs.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__get_structs.snap index 4c680f84..6274a6d6 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__get_structs.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__get_structs.snap @@ -2,59 +2,43 @@ source: codegen-sdk-cst-generator/src/generator/state.rs expression: snapshot_tokens(&struct_tokens) --- -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] 
-#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct Test { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct Test<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for Test { +impl<'db> FromNode<'db> for Test<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -63,7 +47,7 @@ impl FromNode for Test { }) } } -impl CSTNode for Test { +impl<'db> CSTNode<'db> for Test<'db> { fn kind(&self) -> &str { &self._kind } @@ -73,10 +57,10 @@ impl CSTNode for Test { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -95,7 +79,7 @@ impl CSTNode for Test { self.id } } -impl HasChildren for Test 
{ +impl<'db1> HasChildren<'db1> for Test<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -111,3 +95,8 @@ impl HasChildren for Test { } } } +impl<'db> std::hash::Hash for Test<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__parse_children.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__parse_children.snap index 4bee9ce7..6ccf3b66 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__parse_children.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__parse_children.snap @@ -2,16 +2,32 @@ source: codegen-sdk-cst-generator/src/generator/state.rs expression: snapshot_tokens(&enum_tokens) --- -#[subenum(AnonymousTestChildren(derive(Archive, Deserialize, Serialize)))] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { - AnonymousChild(AnonymousChild), - AnonymousChildTwo(AnonymousChildTwo), - AnonymousTest(AnonymousTest), +#[subenum(AnonymousTestChildren)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { + AnonymousChild(AnonymousChild<'db1>), + AnonymousChildTwo(AnonymousChildTwo<'db1>), + AnonymousTest(AnonymousTest<'db1>), } -impl FromNode for AnonymousTestChildren { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: AnonymousChild<'db3>) -> Self { + Self::AnonymousChild(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: AnonymousChildTwo<'db3>) -> Self { + Self::AnonymousChildTwo(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: AnonymousTest<'db3>) -> Self { + Self::AnonymousTest(node) + } +} +impl<'db4> FromNode<'db4> for AnonymousTestChildren<'db4> { fn from_node( + db: 
&'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { diff --git a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__parse_children_subtypes.snap b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__parse_children_subtypes.snap index ead20e35..a327e4ea 100644 --- a/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__parse_children_subtypes.snap +++ b/codegen-sdk-cst-generator/src/generator/snapshots/codegen_sdk_cst_generator__generator__state__tests__parse_children_subtypes.snap @@ -2,18 +2,26 @@ source: codegen-sdk-cst-generator/src/generator/state.rs expression: snapshot_tokens(&enum_tokens) --- -#[subenum( - AnonymousClassChildren(derive(Archive, Deserialize, Serialize)), - Definition(derive(Archive, Deserialize, Serialize)) -)] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { - AnonymousClass(AnonymousClass), - AnonymousFunction(AnonymousFunction), +#[subenum(AnonymousClassChildren, Definition)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { + AnonymousClass(AnonymousClass<'db1>), + AnonymousFunction(AnonymousFunction<'db1>), } -impl FromNode for AnonymousClassChildren { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: AnonymousClass<'db3>) -> Self { + Self::AnonymousClass(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: AnonymousFunction<'db3>) -> Self { + Self::AnonymousFunction(node) + } +} +impl<'db4> FromNode<'db4> for AnonymousClassChildren<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { @@ -27,8 +35,9 @@ impl FromNode for AnonymousClassChildren { } } } -impl FromNode for Definition { +impl<'db4> FromNode<'db4> for Definition<'db4> { fn from_node( + db: &'db4 dyn 
salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { diff --git a/codegen-sdk-cst-generator/src/generator/state.rs b/codegen-sdk-cst-generator/src/generator/state.rs index 0ff9aad2..263ea4c9 100644 --- a/codegen-sdk-cst-generator/src/generator/state.rs +++ b/codegen-sdk-cst-generator/src/generator/state.rs @@ -1,4 +1,7 @@ -use std::collections::{BTreeMap, BTreeSet, VecDeque}; +use std::{ + collections::{BTreeMap, BTreeSet, HashMap, VecDeque}, + sync::Arc, +}; #[double] use codegen_sdk_common::language::Language; @@ -6,31 +9,40 @@ use codegen_sdk_common::{naming::normalize_type_name, parser::TypeDefinition}; use mockall_double::double; use proc_macro2::TokenStream; use quote::{format_ident, quote}; +use syn::Ident; use super::node::Node; -use crate::generator::{ - constants::TYPE_NAME, - utils::{get_comment_type, get_from_node}, +use crate::{ + Config, + generator::{ + constants::TYPE_NAME, + utils::{get_comment_type, get_from_node, get_from_type}, + }, }; #[derive(Debug)] pub struct State<'a> { pub subenums: BTreeSet, + config: Config, nodes: BTreeMap>, } impl<'a> State<'a> { - pub fn new(language: &'a Language) -> Self { + pub fn new(language: &'a Language, config: Config) -> Self { let mut nodes = BTreeMap::new(); let mut subenums = BTreeSet::new(); let raw_nodes = language.nodes(); for raw_node in raw_nodes { if raw_node.subtypes.is_empty() { - let node = Node::new(raw_node, language); + let node = Node::new(raw_node.clone(), language, config.clone()); nodes.insert(node.normalize_name(), node); } else { subenums.insert(raw_node.type_name.clone()); } } - let mut ret = Self { nodes, subenums }; + let mut ret = Self { + nodes, + subenums, + config, + }; let mut subenums = VecDeque::new(); for raw_node in raw_nodes.iter().filter(|n| !n.subtypes.is_empty()) { subenums.push_back(raw_node.clone()); @@ -47,12 +59,45 @@ impl<'a> State<'a> { ret.add_subenum(&raw_node.type_name, &raw_node.subtypes.iter().collect()); } } + log::info!("Adding child 
subenums"); ret.add_child_subenums(); + log::info!("Adding missing fields"); + ret.add_missing_fields(language); log::info!("Adding field subenums"); ret.add_field_subenums(); ret } + fn add_missing_fields(&mut self, language: &'a Language) { + let mut to_insert = Vec::new(); + for node in self.nodes.values() { + for field in &node.fields { + for type_def in field.types() { + let name = type_def.normalize(); + if !self.nodes.contains_key(&name) + && !self.subenums.contains(&type_def.type_name) + { + let node = codegen_sdk_common::parser::Node { + type_name: type_def.type_name.clone(), + subtypes: vec![], + named: type_def.named, + root: false, + fields: None, + children: None, + }; + to_insert.push((name, node)); + } + } + } + } + to_insert.dedup_by_key(|(name, _)| name.clone()); + for (name, node) in to_insert { + self.nodes.insert( + name, + Node::new(Arc::new(node), language, self.config.clone()), + ); + } + } fn add_child_subenums(&mut self) { let keys = self.nodes.keys().cloned().collect::>(); for name in keys.into_iter() { @@ -127,7 +172,7 @@ impl<'a> State<'a> { variant_map.insert( node.kind_id(), quote! { - Ok(Self::#variant_name(#variant_name::from_node(node, buffer)?)) + Ok(Self::#variant_name(#variant_name::from_node(db, node, buffer)?)) }, ); } @@ -144,8 +189,13 @@ impl<'a> State<'a> { let mut enum_tokens = Vec::new(); let mut from_tokens = TokenStream::new(); let mut subenums = Vec::new(); + let mut subenum_name_map = HashMap::new(); + for name in self.subenums.iter() { + subenum_name_map.insert(name.clone(), normalize_type_name(name, true)); + } for node in self.nodes.values() { - enum_tokens.push(node.get_enum_tokens()); + enum_tokens.push(node.get_enum_tokens(&subenum_name_map)); + from_tokens.extend_one(get_from_type(&node.normalize_name())); } for subenum in self.subenums.iter() { assert!( @@ -159,8 +209,14 @@ impl<'a> State<'a> { let subenum_tokens = if !subenums.is_empty() { subenums.sort(); subenums.dedup(); - quote! 
{ - #[subenum(#(#subenums(derive(Archive, Deserialize, Serialize))),*)] + if self.config.serialize { + quote! { + #[subenum(#(#subenums(derive(Archive, Deserialize, Serialize))),*)] + } + } else { + quote! { + #[subenum(#(#subenums),*)] + } } } else { quote! {} @@ -168,9 +224,11 @@ impl<'a> State<'a> { let enum_name = format_ident!("{}", TYPE_NAME); quote! { #subenum_tokens - #[derive(Debug, Clone, Drive)] - #[enum_delegate::implement(CSTNode)] - pub enum #enum_name { + #[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] + #[delegate( + CSTNode<'db1> + )] + pub enum #enum_name<'db1> { #(#enum_tokens),* } #from_tokens @@ -204,6 +262,18 @@ impl<'a> State<'a> { } nodes } + pub fn get_node_struct_names(&self) -> Vec { + self.nodes + .values() + .map(|node| format_ident!("{}", node.normalize_name())) + .collect() + } + pub fn get_subenum_struct_names(&self) -> Vec { + self.subenums + .iter() + .map(|s| format_ident!("{}", normalize_type_name(s, true))) + .collect() + } } #[cfg(test)] mod tests { @@ -224,7 +294,7 @@ mod tests { }; let nodes = vec![node]; let language = get_language(nodes); - let state = State::new(&language); + let state = State::new(&language, Config::default()); let enum_tokens = state.get_enum(); insta::assert_debug_snapshot!(snapshot_tokens(&enum_tokens)); } @@ -269,7 +339,7 @@ mod tests { }; let nodes = vec![child, child_two, node]; let language = get_language(nodes); - let state = State::new(&language); + let state = State::new(&language, Config::default()); let enum_tokens = state.get_enum(); insta::assert_debug_snapshot!(snapshot_tokens(&enum_tokens)); } @@ -317,7 +387,7 @@ mod tests { }; let nodes = vec![definition, class, function]; let language = get_language(nodes); - let state = State::new(&language); + let state = State::new(&language, Config::default()); let enum_tokens = state.get_enum(); insta::assert_debug_snapshot!(snapshot_tokens(&enum_tokens)); } @@ -365,11 +435,53 @@ mod tests { }; let nodes = vec![node_a, 
node_b, node_c]; let language = get_language(nodes); - let state = State::new(&language); + let state = State::new(&language, Config::default()); let enum_tokens = state.get_enum(); insta::assert_debug_snapshot!(snapshot_tokens(&enum_tokens)); } #[test_log::test] + fn test_add_field_subenums_missing_node() { + let node_a = codegen_sdk_common::parser::Node { + type_name: "node_a".to_string(), + subtypes: vec![], + named: false, + root: false, + fields: None, + children: None, + }; + let field = codegen_sdk_common::parser::FieldDefinition { + types: vec![ + TypeDefinition { + type_name: "node_a".to_string(), + named: false, + }, + TypeDefinition { + type_name: "node_b".to_string(), + named: true, + }, + ], + multiple: true, + required: false, + }; + let node_c = codegen_sdk_common::parser::Node { + type_name: "node_c".to_string(), + subtypes: vec![], + named: true, + root: false, + fields: Some(codegen_sdk_common::parser::Fields { + fields: HashMap::from([("field".to_string(), field)]), + }), + children: None, + }; + let nodes = vec![node_a, node_c]; + let language = get_language(nodes); + let state = State::new(&language, Config::default()); + let enum_tokens = state.get_enum(); + let struct_tokens = state.get_structs(); + insta::assert_debug_snapshot!(snapshot_tokens(&enum_tokens)); + insta::assert_debug_snapshot!(snapshot_tokens(&struct_tokens)); + } + #[test_log::test] fn test_get_structs() { let node = codegen_sdk_common::parser::Node { type_name: "test".to_string(), @@ -381,7 +493,7 @@ mod tests { }; let nodes = vec![node]; let language = get_language(nodes); - let state = State::new(&language); + let state = State::new(&language, Config::default()); let struct_tokens = state.get_structs(); insta::assert_debug_snapshot!(snapshot_tokens(&struct_tokens)); } @@ -422,7 +534,7 @@ mod tests { }; let nodes = vec![node_a, node_b, parent]; let language = get_language(nodes); - let state = State::new(&language); + let state = State::new(&language, Config::default()); let 
variants = state.get_variants("parent"); assert_eq!( @@ -455,7 +567,7 @@ mod tests { }; let nodes = vec![node_a]; let language = get_language(nodes); - let mut state = State::new(&language); + let mut state = State::new(&language, Config::default()); state.add_subenum( "TestEnum", diff --git a/codegen-sdk-cst-generator/src/generator/utils.rs b/codegen-sdk-cst-generator/src/generator/utils.rs index 8914c90e..7db00569 100644 --- a/codegen-sdk-cst-generator/src/generator/utils.rs +++ b/codegen-sdk-cst-generator/src/generator/utils.rs @@ -4,6 +4,7 @@ use codegen_sdk_common::{naming::normalize_type_name, parser::TypeDefinition}; use proc_macro2::TokenStream; use quote::{format_ident, quote}; +use super::constants::TYPE_NAME; pub fn get_serialize_bounds() -> TokenStream { quote! { #[rkyv(serialize_bounds( @@ -19,6 +20,17 @@ pub fn get_serialize_bounds() -> TokenStream { ))] } } +pub fn get_from_type(struct_name: &str) -> TokenStream { + let name = format_ident!("{}", struct_name); + let target = format_ident!("{}", TYPE_NAME); + quote! { + impl<'db3> From<#name<'db3>> for #target<'db3> { + fn from(node: #name<'db3>) -> Self { + Self::#name(node) + } + } + } +} pub fn get_from_node( node: &str, named: bool, @@ -32,8 +44,8 @@ pub fn get_from_node( values.push(value); } quote! 
{ - impl FromNode for #node { - fn from_node(node: tree_sitter::Node, buffer: &Arc) -> Result { + impl<'db4> FromNode<'db4> for #node<'db4> { + fn from_node(db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc) -> Result { match node.kind_id() { #(#keys => #values,)* _ => Err(ParseError::UnexpectedNode { diff --git a/codegen-sdk-cst-generator/src/lib.rs b/codegen-sdk-cst-generator/src/lib.rs index 4d563d2f..3551af2d 100644 --- a/codegen-sdk-cst-generator/src/lib.rs +++ b/codegen-sdk-cst-generator/src/lib.rs @@ -1,11 +1,14 @@ #![feature(extend_one)] +mod config; mod generator; #[double] use codegen_sdk_common::language::Language; pub use generator::{Field, Node, State, generate_cst}; use mockall_double::double; -pub fn generate_cst_to_file(language: &Language) -> anyhow::Result<()> { - let cst = generator::generate_cst(language)?; + +pub use crate::config::Config; +pub fn generate_cst_to_file(language: &Language, config: Config) -> anyhow::Result<()> { + let cst = generator::generate_cst(language, config)?; let out_dir = std::env::var("OUT_DIR")?; let out_file = format!("{}/{}.rs", out_dir, language.name()); std::fs::write(out_file, cst)?; @@ -13,7 +16,7 @@ pub fn generate_cst_to_file(language: &Language) -> anyhow::Result<()> { } #[cfg(test)] mod test_util { - use std::{fmt::Debug, num::NonZeroU16}; + use std::{fmt::Debug, num::NonZeroU16, sync::Arc}; use codegen_sdk_common::{language::MockLanguage, parser::Node}; use proc_macro2::TokenStream; @@ -37,7 +40,9 @@ mod test_util { }) .return_const(idx as u16); } - language.expect_nodes().return_const(nodes); + language + .expect_nodes() + .return_const(nodes.into_iter().map(|n| Arc::new(n)).collect()); language } pub fn get_language_no_nodes() -> MockLanguage { @@ -46,10 +51,13 @@ mod test_util { language .expect_field_id() .return_const(Some(NonZeroU16::new(1).unwrap())); + language.expect_root_node().return_const("Program"); + language.expect_struct_name().return_const("Language"); + 
language.expect_name().return_const("language"); language } pub fn snapshot_string(string: &str) -> StringDebug { - let formatted = codegen_sdk_common::generator::format_code(string) + let formatted = codegen_sdk_common::generator::format_code_string(string) .unwrap_or_else(|_| string.to_string()); StringDebug { string: formatted } } diff --git a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__basic_subtypes.snap b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__basic_subtypes.snap index b9a4fc19..0fd5bd5c 100644 --- a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__basic_subtypes.snap +++ b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__basic_subtypes.snap @@ -4,29 +4,42 @@ expression: "crate::test_util::snapshot_string(&output)" --- use std::sync::Arc; use tree_sitter; -use derive_more::Debug; use codegen_sdk_common::*; use subenum::subenum; use std::backtrace::Backtrace; use bytes::Bytes; -use rkyv::{Archive, Deserialize, Serialize}; -use derive_visitor::Drive; -#[subenum(Expression(derive(Archive, Deserialize, Serialize)))] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { +use derive_generic_visitor::Drive; +use ambassador::Delegate; +use codegen_sdk_cst::CSTLanguage; +#[subenum(Expression)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { #[subenum(Expression)] - BinaryExpression(BinaryExpression), + BinaryExpression(BinaryExpression<'db1>), #[subenum(Expression)] - UnaryExpression(UnaryExpression), + UnaryExpression(UnaryExpression<'db1>), } -impl FromNode for Expression { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: BinaryExpression<'db3>) -> Self { + Self::BinaryExpression(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn 
from(node: UnaryExpression<'db3>) -> Self { + Self::UnaryExpression(node) + } +} +impl<'db4> FromNode<'db4> for Expression<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::UnaryExpression(UnaryExpression::from_node(node, buffer)?)), + 0u16 => { + Ok(Self::UnaryExpression(UnaryExpression::from_node(db, node, buffer)?)) + } _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -36,59 +49,43 @@ impl FromNode for Expression { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct BinaryExpression { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct BinaryExpression<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for BinaryExpression { +impl<'db> FromNode<'db> for BinaryExpression<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: 
node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -97,7 +94,7 @@ impl FromNode for BinaryExpression { }) } } -impl CSTNode for BinaryExpression { +impl<'db> CSTNode<'db> for BinaryExpression<'db> { fn kind(&self) -> &str { &self._kind } @@ -107,10 +104,10 @@ impl CSTNode for BinaryExpression { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -129,7 +126,7 @@ impl CSTNode for BinaryExpression { self.id } } -impl HasChildren for BinaryExpression { +impl<'db1> HasChildren<'db1> for BinaryExpression<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -145,59 +142,48 @@ impl HasChildren for BinaryExpression { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct UnaryExpression { +impl<'db> std::hash::Hash for BinaryExpression<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct UnaryExpression<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] 
+ start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for UnaryExpression { +impl<'db> FromNode<'db> for UnaryExpression<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -206,7 +192,7 @@ impl FromNode for UnaryExpression { }) } } -impl CSTNode for UnaryExpression { +impl<'db> CSTNode<'db> for UnaryExpression<'db> { fn kind(&self) -> &str { &self._kind } @@ -216,10 +202,10 @@ impl CSTNode for UnaryExpression { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -238,7 +224,7 @@ impl CSTNode for UnaryExpression { self.id } } -impl HasChildren for UnaryExpression { +impl<'db1> HasChildren<'db1> for UnaryExpression<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -254,3 +240,65 @@ impl HasChildren for UnaryExpression { } } } +impl<'db> std::hash::Hash for UnaryExpression<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[salsa::tracked] +pub struct Parsed<'db> { + #[tracked] + #[return_ref] + pub program: Option>, 
+} +pub fn parse_program_raw( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Option> { + let buffer = Bytes::from(input.content(db).as_bytes().to_vec()); + let tree = codegen_sdk_common::language::language::Language + .parse_tree_sitter(&input.content(db)); + match tree { + Ok(tree) => { + if tree.root_node().has_error() { + ParseError::SyntaxError.report(db); + None + } else { + let buffer = Arc::new(buffer); + Program::from_node(db, tree.root_node(), &buffer) + .map_or_else( + |e| { + e.report(db); + None + }, + |program| { Some(program) }, + ) + } + } + Err(e) => { + e.report(db); + None + } + } +} +#[salsa::tracked] +pub fn parse_program( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Parsed<'_> { + Parsed::new(db, parse_program_raw(db, input)) +} +pub struct Language; +impl CSTLanguage for Language { + type Program<'db> = Program<'db>; + fn language() -> &'static codegen_sdk_common::language::Language { + &codegen_sdk_common::language::language::Language + } + fn parse<'db>( + db: &'db dyn salsa::Database, + content: std::string::String, + ) -> &'db Option> { + let input = codegen_sdk_cst::Input::new(db, content); + return parse_program(db, input).program(db); + } +} diff --git a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__deeply_nested_subtypes.snap b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__deeply_nested_subtypes.snap index a263d6bf..bf3e95c7 100644 --- a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__deeply_nested_subtypes.snap +++ b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__deeply_nested_subtypes.snap @@ -4,36 +4,52 @@ expression: "crate::test_util::snapshot_string(&output)" --- use std::sync::Arc; use tree_sitter; -use derive_more::Debug; use codegen_sdk_common::*; use subenum::subenum; use 
std::backtrace::Backtrace; use bytes::Bytes; -use rkyv::{Archive, Deserialize, Serialize}; -use derive_visitor::Drive; -#[subenum( - Declaration(derive(Archive, Deserialize, Serialize)), - FunctionDeclaration(derive(Archive, Deserialize, Serialize)), - Statement(derive(Archive, Deserialize, Serialize)) -)] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { +use derive_generic_visitor::Drive; +use ambassador::Delegate; +use codegen_sdk_cst::CSTLanguage; +#[subenum(Declaration, FunctionDeclaration, Statement)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { #[subenum(Declaration, Statement)] - ClassDeclaration(ClassDeclaration), + ClassDeclaration(ClassDeclaration<'db1>), #[subenum(Statement)] - ExpressionStatement(ExpressionStatement), + ExpressionStatement(ExpressionStatement<'db1>), #[subenum(FunctionDeclaration, Declaration, Statement)] - MethodDeclaration(MethodDeclaration), + MethodDeclaration(MethodDeclaration<'db1>), } -impl FromNode for Declaration { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: ClassDeclaration<'db3>) -> Self { + Self::ClassDeclaration(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: ExpressionStatement<'db3>) -> Self { + Self::ExpressionStatement(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: MethodDeclaration<'db3>) -> Self { + Self::MethodDeclaration(node) + } +} +impl<'db4> FromNode<'db4> for Declaration<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { 0u16 => { - Ok(Self::MethodDeclaration(MethodDeclaration::from_node(node, buffer)?)) + Ok( + Self::MethodDeclaration( + MethodDeclaration::from_node(db, node, buffer)?, + ), + ) } _ => { Err(ParseError::UnexpectedNode { @@ -44,14 +60,19 @@ impl FromNode for Declaration { } } } -impl FromNode for FunctionDeclaration { +impl<'db4> 
FromNode<'db4> for FunctionDeclaration<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { 0u16 => { - Ok(Self::MethodDeclaration(MethodDeclaration::from_node(node, buffer)?)) + Ok( + Self::MethodDeclaration( + MethodDeclaration::from_node(db, node, buffer)?, + ), + ) } _ => { Err(ParseError::UnexpectedNode { @@ -62,14 +83,19 @@ impl FromNode for FunctionDeclaration { } } } -impl FromNode for Statement { +impl<'db4> FromNode<'db4> for Statement<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { 0u16 => { - Ok(Self::MethodDeclaration(MethodDeclaration::from_node(node, buffer)?)) + Ok( + Self::MethodDeclaration( + MethodDeclaration::from_node(db, node, buffer)?, + ), + ) } _ => { Err(ParseError::UnexpectedNode { @@ -80,59 +106,43 @@ impl FromNode for Statement { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct ClassDeclaration { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct ClassDeclaration<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - 
#[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for ClassDeclaration { +impl<'db> FromNode<'db> for ClassDeclaration<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -141,7 +151,7 @@ impl FromNode for ClassDeclaration { }) } } -impl CSTNode for ClassDeclaration { +impl<'db> CSTNode<'db> for ClassDeclaration<'db> { fn kind(&self) -> &str { &self._kind } @@ -151,10 +161,10 @@ impl CSTNode for ClassDeclaration { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -173,7 +183,7 @@ impl CSTNode for ClassDeclaration { self.id } } -impl HasChildren for ClassDeclaration { +impl<'db1> HasChildren<'db1> for ClassDeclaration<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -189,59 +199,48 @@ impl HasChildren for ClassDeclaration { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct ExpressionStatement { +impl<'db> std::hash::Hash for ClassDeclaration<'db> { + fn hash(&self, state: &mut H) { + 
self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct ExpressionStatement<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for ExpressionStatement { +impl<'db> FromNode<'db> for ExpressionStatement<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -250,7 +249,7 @@ impl FromNode for ExpressionStatement { }) } } -impl CSTNode for ExpressionStatement { +impl<'db> CSTNode<'db> for ExpressionStatement<'db> { fn kind(&self) -> &str { &self._kind } @@ -260,10 +259,10 @@ impl CSTNode for ExpressionStatement { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -282,7 +281,7 @@ impl CSTNode for ExpressionStatement { self.id } } 
-impl HasChildren for ExpressionStatement { +impl<'db1> HasChildren<'db1> for ExpressionStatement<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -298,59 +297,48 @@ impl HasChildren for ExpressionStatement { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct MethodDeclaration { +impl<'db> std::hash::Hash for ExpressionStatement<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct MethodDeclaration<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for MethodDeclaration { +impl<'db> FromNode<'db> for MethodDeclaration<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + 
end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -359,7 +347,7 @@ impl FromNode for MethodDeclaration { }) } } -impl CSTNode for MethodDeclaration { +impl<'db> CSTNode<'db> for MethodDeclaration<'db> { fn kind(&self) -> &str { &self._kind } @@ -369,10 +357,10 @@ impl CSTNode for MethodDeclaration { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -391,7 +379,7 @@ impl CSTNode for MethodDeclaration { self.id } } -impl HasChildren for MethodDeclaration { +impl<'db1> HasChildren<'db1> for MethodDeclaration<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -407,3 +395,65 @@ impl HasChildren for MethodDeclaration { } } } +impl<'db> std::hash::Hash for MethodDeclaration<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[salsa::tracked] +pub struct Parsed<'db> { + #[tracked] + #[return_ref] + pub program: Option>, +} +pub fn parse_program_raw( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Option> { + let buffer = Bytes::from(input.content(db).as_bytes().to_vec()); + let tree = codegen_sdk_common::language::language::Language + .parse_tree_sitter(&input.content(db)); + match tree { + Ok(tree) => { + if tree.root_node().has_error() { + ParseError::SyntaxError.report(db); + None + } else { + let buffer = Arc::new(buffer); + Program::from_node(db, tree.root_node(), &buffer) + .map_or_else( + |e| { + e.report(db); + None + }, + |program| { Some(program) }, + ) + } + } + Err(e) => { + e.report(db); + None + } + } +} +#[salsa::tracked] +pub fn parse_program( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Parsed<'_> { + Parsed::new(db, parse_program_raw(db, input)) +} +pub struct Language; +impl CSTLanguage for Language 
{ + type Program<'db> = Program<'db>; + fn language() -> &'static codegen_sdk_common::language::Language { + &codegen_sdk_common::language::language::Language + } + fn parse<'db>( + db: &'db dyn salsa::Database, + content: std::string::String, + ) -> &'db Option> { + let input = codegen_sdk_cst::Input::new(db, content); + return parse_program(db, input).program(db); + } +} diff --git a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__subtypes_with_fields.snap b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__subtypes_with_fields.snap index 63c037a3..cb0a44f6 100644 --- a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__subtypes_with_fields.snap +++ b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes__subtypes_with_fields.snap @@ -4,32 +4,40 @@ expression: "crate::test_util::snapshot_string(&output)" --- use std::sync::Arc; use tree_sitter; -use derive_more::Debug; use codegen_sdk_common::*; use subenum::subenum; use std::backtrace::Backtrace; use bytes::Bytes; -use rkyv::{Archive, Deserialize, Serialize}; -use derive_visitor::Drive; -#[subenum( - BinaryExpressionChildren(derive(Archive, Deserialize, Serialize)), - Expression(derive(Archive, Deserialize, Serialize)) -)] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { +use derive_generic_visitor::Drive; +use ambassador::Delegate; +use codegen_sdk_cst::CSTLanguage; +#[subenum(BinaryExpressionChildren, Expression)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { #[subenum(Expression, BinaryExpressionChildren)] - BinaryExpression(BinaryExpression), + BinaryExpression(BinaryExpression<'db1>), #[subenum(Expression, BinaryExpressionChildren)] - Literal(Literal), + Literal(Literal<'db1>), } -impl FromNode for 
BinaryExpressionChildren { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: BinaryExpression<'db3>) -> Self { + Self::BinaryExpression(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: Literal<'db3>) -> Self { + Self::Literal(node) + } +} +impl<'db4> FromNode<'db4> for BinaryExpressionChildren<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::Literal(Literal::from_node(node, buffer)?)), + 0u16 => Ok(Self::Literal(Literal::from_node(db, node, buffer)?)), _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -39,13 +47,14 @@ impl FromNode for BinaryExpressionChildren { } } } -impl FromNode for Expression { +impl<'db4> FromNode<'db4> for Expression<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::Literal(Literal::from_node(node, buffer)?)), + 0u16 => Ok(Self::Literal(Literal::from_node(db, node, buffer)?)), _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -55,74 +64,56 @@ impl FromNode for Expression { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct BinaryExpression { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct BinaryExpression<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, 
#[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - #[rkyv(omit_bounds)] - pub left: Box, - #[rkyv(omit_bounds)] - pub right: Box, + pub left: Box>, + pub right: Box>, } -impl FromNode for BinaryExpression { +impl<'db> FromNode<'db> for BinaryExpression<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), - left: Box::new(get_child_by_field_name(&node, "left", buffer)?), - right: Box::new(get_child_by_field_name(&node, "right", buffer)?), + left: Box::new(get_child_by_field_name(db, &node, "left", buffer)?), + right: Box::new(get_child_by_field_name(db, &node, "right", buffer)?), }) } } -impl CSTNode for BinaryExpression { +impl<'db> CSTNode<'db> for BinaryExpression<'db> { fn kind(&self) -> &str { &self._kind } @@ -132,10 +123,10 @@ impl CSTNode for BinaryExpression { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -154,8 +145,8 @@ impl CSTNode for BinaryExpression { self.id } } -impl HasChildren for BinaryExpression { - type Child = 
BinaryExpressionChildren; +impl<'db1> HasChildren<'db1> for BinaryExpression<'db1> { + type Child = BinaryExpressionChildren<'db1>; fn children(&self) -> Vec { let mut children: Vec<_> = vec![]; children @@ -206,59 +197,48 @@ impl HasChildren for BinaryExpression { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct Literal { +impl<'db> std::hash::Hash for BinaryExpression<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct Literal<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for Literal { +impl<'db> FromNode<'db> for Literal<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + 
end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -267,7 +247,7 @@ impl FromNode for Literal { }) } } -impl CSTNode for Literal { +impl<'db> CSTNode<'db> for Literal<'db> { fn kind(&self) -> &str { &self._kind } @@ -277,10 +257,10 @@ impl CSTNode for Literal { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -299,7 +279,7 @@ impl CSTNode for Literal { self.id } } -impl HasChildren for Literal { +impl<'db1> HasChildren<'db1> for Literal<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -315,3 +295,65 @@ impl HasChildren for Literal { } } } +impl<'db> std::hash::Hash for Literal<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[salsa::tracked] +pub struct Parsed<'db> { + #[tracked] + #[return_ref] + pub program: Option>, +} +pub fn parse_program_raw( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Option> { + let buffer = Bytes::from(input.content(db).as_bytes().to_vec()); + let tree = codegen_sdk_common::language::language::Language + .parse_tree_sitter(&input.content(db)); + match tree { + Ok(tree) => { + if tree.root_node().has_error() { + ParseError::SyntaxError.report(db); + None + } else { + let buffer = Arc::new(buffer); + Program::from_node(db, tree.root_node(), &buffer) + .map_or_else( + |e| { + e.report(db); + None + }, + |program| { Some(program) }, + ) + } + } + Err(e) => { + e.report(db); + None + } + } +} +#[salsa::tracked] +pub fn parse_program( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Parsed<'_> { + Parsed::new(db, parse_program_raw(db, input)) +} +pub struct Language; +impl CSTLanguage for Language { + type Program<'db> = Program<'db>; + fn language() -> &'static 
codegen_sdk_common::language::Language { + &codegen_sdk_common::language::language::Language + } + fn parse<'db>( + db: &'db dyn salsa::Database, + content: std::string::String, + ) -> &'db Option> { + let input = codegen_sdk_cst::Input::new(db, content); + return parse_program(db, input).program(db); + } +} diff --git a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_children__subtypes_with_children.snap b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_children__subtypes_with_children.snap index de597a69..d970f6be 100644 --- a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_children__subtypes_with_children.snap +++ b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_children__subtypes_with_children.snap @@ -4,35 +4,49 @@ expression: "crate::test_util::snapshot_string(&output)" --- use std::sync::Arc; use tree_sitter; -use derive_more::Debug; use codegen_sdk_common::*; use subenum::subenum; use std::backtrace::Backtrace; use bytes::Bytes; -use rkyv::{Archive, Deserialize, Serialize}; -use derive_visitor::Drive; -#[subenum( - BlockChildren(derive(Archive, Deserialize, Serialize)), - IfStatementChildren(derive(Archive, Deserialize, Serialize)), - Statement(derive(Archive, Deserialize, Serialize)) -)] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { +use derive_generic_visitor::Drive; +use ambassador::Delegate; +use codegen_sdk_cst::CSTLanguage; +#[subenum(BlockChildren, IfStatementChildren, Statement)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { #[subenum(IfStatementChildren)] - Block(Block), + Block(Block<'db1>), #[subenum(Statement, BlockChildren)] - IfStatement(IfStatement), + IfStatement(IfStatement<'db1>), #[subenum(Statement, BlockChildren)] - 
ReturnStatement(ReturnStatement), + ReturnStatement(ReturnStatement<'db1>), } -impl FromNode for BlockChildren { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: Block<'db3>) -> Self { + Self::Block(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: IfStatement<'db3>) -> Self { + Self::IfStatement(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: ReturnStatement<'db3>) -> Self { + Self::ReturnStatement(node) + } +} +impl<'db4> FromNode<'db4> for BlockChildren<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::ReturnStatement(ReturnStatement::from_node(node, buffer)?)), + 0u16 => { + Ok(Self::ReturnStatement(ReturnStatement::from_node(db, node, buffer)?)) + } _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -42,13 +56,14 @@ impl FromNode for BlockChildren { } } } -impl FromNode for IfStatementChildren { +impl<'db4> FromNode<'db4> for IfStatementChildren<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::Block(Block::from_node(node, buffer)?)), + 0u16 => Ok(Self::Block(Block::from_node(db, node, buffer)?)), _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -58,13 +73,16 @@ impl FromNode for IfStatementChildren { } } } -impl FromNode for Statement { +impl<'db4> FromNode<'db4> for Statement<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::ReturnStatement(ReturnStatement::from_node(node, buffer)?)), + 0u16 => { + Ok(Self::ReturnStatement(ReturnStatement::from_node(db, node, buffer)?)) + } _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -74,71 +92,54 @@ impl FromNode for Statement { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, 
Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct Block { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct Block<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - #[rkyv(omit_bounds)] - pub children: Vec, + pub _children: Vec>, } -impl FromNode for Block { +impl<'db> FromNode<'db> for Block<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), - children: named_children_without_field_names(node, buffer)?, + _children: named_children_without_field_names(db, node, buffer)?, }) } } -impl CSTNode for Block { +impl<'db> CSTNode<'db> for Block<'db> { fn kind(&self) -> &str { &self._kind } @@ -148,10 
+149,10 @@ impl CSTNode for Block { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -170,10 +171,10 @@ impl CSTNode for Block { self.id } } -impl HasChildren for Block { - type Child = BlockChildren; +impl<'db1> HasChildren<'db1> for Block<'db1> { + type Child = BlockChildren<'db1>; fn children(&self) -> Vec { - let mut children: Vec<_> = self.children.iter().cloned().collect(); + let mut children: Vec<_> = self._children.iter().cloned().collect(); children.sort_by_key(|c| c.start_byte()); children } @@ -188,71 +189,59 @@ impl HasChildren for Block { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct IfStatement { +impl<'db> std::hash::Hash for Block<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct IfStatement<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - 
#[rkyv(omit_bounds)] - pub children: Vec, + pub _children: Vec>, } -impl FromNode for IfStatement { +impl<'db> FromNode<'db> for IfStatement<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), - children: named_children_without_field_names(node, buffer)?, + _children: named_children_without_field_names(db, node, buffer)?, }) } } -impl CSTNode for IfStatement { +impl<'db> CSTNode<'db> for IfStatement<'db> { fn kind(&self) -> &str { &self._kind } @@ -262,10 +251,10 @@ impl CSTNode for IfStatement { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -284,10 +273,10 @@ impl CSTNode for IfStatement { self.id } } -impl HasChildren for IfStatement { - type Child = IfStatementChildren; +impl<'db1> HasChildren<'db1> for IfStatement<'db1> { + type Child = IfStatementChildren<'db1>; fn children(&self) -> Vec { - let mut children: Vec<_> = self.children.iter().cloned().collect(); + let mut children: Vec<_> = self._children.iter().cloned().collect(); children.sort_by_key(|c| c.start_byte()); children } @@ -302,59 +291,48 @@ impl HasChildren for IfStatement { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - 
__S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct ReturnStatement { +impl<'db> std::hash::Hash for IfStatement<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct ReturnStatement<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for ReturnStatement { +impl<'db> FromNode<'db> for ReturnStatement<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -363,7 +341,7 @@ impl FromNode for ReturnStatement { }) } } -impl CSTNode for ReturnStatement { +impl<'db> CSTNode<'db> for ReturnStatement<'db> { fn kind(&self) -> &str { &self._kind } @@ -373,10 +351,10 @@ impl CSTNode for ReturnStatement { fn end_byte(&self) -> usize { self.end_byte } - 
fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -395,7 +373,7 @@ impl CSTNode for ReturnStatement { self.id } } -impl HasChildren for ReturnStatement { +impl<'db1> HasChildren<'db1> for ReturnStatement<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -411,3 +389,65 @@ impl HasChildren for ReturnStatement { } } } +impl<'db> std::hash::Hash for ReturnStatement<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[salsa::tracked] +pub struct Parsed<'db> { + #[tracked] + #[return_ref] + pub program: Option>, +} +pub fn parse_program_raw( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Option> { + let buffer = Bytes::from(input.content(db).as_bytes().to_vec()); + let tree = codegen_sdk_common::language::language::Language + .parse_tree_sitter(&input.content(db)); + match tree { + Ok(tree) => { + if tree.root_node().has_error() { + ParseError::SyntaxError.report(db); + None + } else { + let buffer = Arc::new(buffer); + Program::from_node(db, tree.root_node(), &buffer) + .map_or_else( + |e| { + e.report(db); + None + }, + |program| { Some(program) }, + ) + } + } + Err(e) => { + e.report(db); + None + } + } +} +#[salsa::tracked] +pub fn parse_program( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Parsed<'_> { + Parsed::new(db, parse_program_raw(db, input)) +} +pub struct Language; +impl CSTLanguage for Language { + type Program<'db> = Program<'db>; + fn language() -> &'static codegen_sdk_common::language::Language { + &codegen_sdk_common::language::language::Language + } + fn parse<'db>( + db: &'db dyn salsa::Database, + content: std::string::String, + ) -> &'db Option> { + let input = codegen_sdk_cst::Input::new(db, content); + return parse_program(db, input).program(db); + } +} diff --git 
a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_multiple_inheritance__multiple_inheritance.snap b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_multiple_inheritance__multiple_inheritance.snap index 4da044e0..37173890 100644 --- a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_multiple_inheritance__multiple_inheritance.snap +++ b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_multiple_inheritance__multiple_inheritance.snap @@ -4,30 +4,33 @@ expression: "crate::test_util::snapshot_string(&output)" --- use std::sync::Arc; use tree_sitter; -use derive_more::Debug; use codegen_sdk_common::*; use subenum::subenum; use std::backtrace::Backtrace; use bytes::Bytes; -use rkyv::{Archive, Deserialize, Serialize}; -use derive_visitor::Drive; -#[subenum( - ClassMember(derive(Archive, Deserialize, Serialize)), - Declaration(derive(Archive, Deserialize, Serialize)) -)] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { +use derive_generic_visitor::Drive; +use ambassador::Delegate; +use codegen_sdk_cst::CSTLanguage; +#[subenum(ClassMember, Declaration)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { #[subenum(Declaration, ClassMember)] - ClassMethod(ClassMethod), + ClassMethod(ClassMethod<'db1>), } -impl FromNode for ClassMember { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: ClassMethod<'db3>) -> Self { + Self::ClassMethod(node) + } +} +impl<'db4> FromNode<'db4> for ClassMember<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::ClassMethod(ClassMethod::from_node(node, buffer)?)), + 0u16 => Ok(Self::ClassMethod(ClassMethod::from_node(db, node, buffer)?)), _ => { 
Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -37,13 +40,14 @@ impl FromNode for ClassMember { } } } -impl FromNode for Declaration { +impl<'db4> FromNode<'db4> for Declaration<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::ClassMethod(ClassMethod::from_node(node, buffer)?)), + 0u16 => Ok(Self::ClassMethod(ClassMethod::from_node(db, node, buffer)?)), _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -53,59 +57,43 @@ impl FromNode for Declaration { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct ClassMethod { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct ClassMethod<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, } -impl FromNode for ClassMethod { +impl<'db> FromNode<'db> for ClassMethod<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, 
node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), @@ -114,7 +102,7 @@ impl FromNode for ClassMethod { }) } } -impl CSTNode for ClassMethod { +impl<'db> CSTNode<'db> for ClassMethod<'db> { fn kind(&self) -> &str { &self._kind } @@ -124,10 +112,10 @@ impl CSTNode for ClassMethod { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -146,7 +134,7 @@ impl CSTNode for ClassMethod { self.id } } -impl HasChildren for ClassMethod { +impl<'db1> HasChildren<'db1> for ClassMethod<'db1> { type Child = Self; fn children(&self) -> Vec { vec![] @@ -162,3 +150,65 @@ impl HasChildren for ClassMethod { } } } +impl<'db> std::hash::Hash for ClassMethod<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[salsa::tracked] +pub struct Parsed<'db> { + #[tracked] + #[return_ref] + pub program: Option>, +} +pub fn parse_program_raw( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Option> { + let buffer = Bytes::from(input.content(db).as_bytes().to_vec()); + let tree = codegen_sdk_common::language::language::Language + .parse_tree_sitter(&input.content(db)); + match tree { + Ok(tree) => { + if tree.root_node().has_error() { + ParseError::SyntaxError.report(db); + None + } else { + let buffer = Arc::new(buffer); + Program::from_node(db, tree.root_node(), &buffer) + .map_or_else( + |e| { + e.report(db); + None + }, + |program| { Some(program) }, + ) + } + } + Err(e) => { + e.report(db); + None + } + } +} +#[salsa::tracked] +pub fn 
parse_program( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Parsed<'_> { + Parsed::new(db, parse_program_raw(db, input)) +} +pub struct Language; +impl CSTLanguage for Language { + type Program<'db> = Program<'db>; + fn language() -> &'static codegen_sdk_common::language::Language { + &codegen_sdk_common::language::language::Language + } + fn parse<'db>( + db: &'db dyn salsa::Database, + content: std::string::String, + ) -> &'db Option> { + let input = codegen_sdk_cst::Input::new(db, content); + return parse_program(db, input).program(db); + } +} diff --git a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_recursive__recursive_subtypes.snap b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_recursive__recursive_subtypes.snap index 51b6edbc..78afa809 100644 --- a/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_recursive__recursive_subtypes.snap +++ b/codegen-sdk-cst-generator/src/tests/snapshots/codegen_sdk_cst_generator__tests__test_subtypes_recursive__recursive_subtypes.snap @@ -4,33 +4,42 @@ expression: "crate::test_util::snapshot_string(&output)" --- use std::sync::Arc; use tree_sitter; -use derive_more::Debug; use codegen_sdk_common::*; use subenum::subenum; use std::backtrace::Backtrace; use bytes::Bytes; -use rkyv::{Archive, Deserialize, Serialize}; -use derive_visitor::Drive; -#[subenum( - BinaryExpressionChildren(derive(Archive, Deserialize, Serialize)), - CallExpressionChildren(derive(Archive, Deserialize, Serialize)), - Expression(derive(Archive, Deserialize, Serialize)) -)] -#[derive(Debug, Clone, Drive)] -#[enum_delegate::implement(CSTNode)] -pub enum NodeTypes { +use derive_generic_visitor::Drive; +use ambassador::Delegate; +use codegen_sdk_cst::CSTLanguage; +#[subenum(BinaryExpressionChildren, CallExpressionChildren, Expression)] +#[derive(Debug, Clone, Eq, PartialEq, Drive, Hash, salsa::Update, 
Delegate)] +#[delegate(CSTNode<'db1>)] +pub enum NodeTypes<'db1> { #[subenum(Expression, BinaryExpressionChildren, CallExpressionChildren)] - BinaryExpression(BinaryExpression), + BinaryExpression(BinaryExpression<'db1>), #[subenum(Expression, BinaryExpressionChildren, CallExpressionChildren)] - CallExpression(CallExpression), + CallExpression(CallExpression<'db1>), } -impl FromNode for BinaryExpressionChildren { +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: BinaryExpression<'db3>) -> Self { + Self::BinaryExpression(node) + } +} +impl<'db3> From> for NodeTypes<'db3> { + fn from(node: CallExpression<'db3>) -> Self { + Self::CallExpression(node) + } +} +impl<'db4> FromNode<'db4> for BinaryExpressionChildren<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::CallExpression(CallExpression::from_node(node, buffer)?)), + 0u16 => { + Ok(Self::CallExpression(CallExpression::from_node(db, node, buffer)?)) + } _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -40,13 +49,16 @@ impl FromNode for BinaryExpressionChildren { } } } -impl FromNode for CallExpressionChildren { +impl<'db4> FromNode<'db4> for CallExpressionChildren<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::CallExpression(CallExpression::from_node(node, buffer)?)), + 0u16 => { + Ok(Self::CallExpression(CallExpression::from_node(db, node, buffer)?)) + } _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -56,13 +68,16 @@ impl FromNode for CallExpressionChildren { } } } -impl FromNode for Expression { +impl<'db4> FromNode<'db4> for Expression<'db4> { fn from_node( + db: &'db4 dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { match node.kind_id() { - 0u16 => Ok(Self::CallExpression(CallExpression::from_node(node, buffer)?)), + 
0u16 => { + Ok(Self::CallExpression(CallExpression::from_node(db, node, buffer)?)) + } _ => { Err(ParseError::UnexpectedNode { node_type: node.kind().to_string(), @@ -72,74 +87,56 @@ impl FromNode for Expression { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct BinaryExpression { +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct BinaryExpression<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - #[rkyv(omit_bounds)] - pub left: Box, - #[rkyv(omit_bounds)] - pub right: Box, + pub left: Box>, + pub right: Box>, } -impl FromNode for BinaryExpression { +impl<'db> FromNode<'db> for BinaryExpression<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, 
buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), - left: Box::new(get_child_by_field_name(&node, "left", buffer)?), - right: Box::new(get_child_by_field_name(&node, "right", buffer)?), + left: Box::new(get_child_by_field_name(db, &node, "left", buffer)?), + right: Box::new(get_child_by_field_name(db, &node, "right", buffer)?), }) } } -impl CSTNode for BinaryExpression { +impl<'db> CSTNode<'db> for BinaryExpression<'db> { fn kind(&self) -> &str { &self._kind } @@ -149,10 +146,10 @@ impl CSTNode for BinaryExpression { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } fn buffer(&self) -> &Bytes { @@ -171,8 +168,8 @@ impl CSTNode for BinaryExpression { self.id } } -impl HasChildren for BinaryExpression { - type Child = BinaryExpressionChildren; +impl<'db1> HasChildren<'db1> for BinaryExpression<'db1> { + type Child = BinaryExpressionChildren<'db1>; fn children(&self) -> Vec { let mut children: Vec<_> = vec![]; children @@ -223,74 +220,61 @@ impl HasChildren for BinaryExpression { } } } -#[derive(Debug, Clone, Deserialize, Archive, Serialize, Drive)] -#[rkyv( - serialize_bounds( - __S:rkyv::ser::Writer+rkyv::ser::Allocator, - __S::Error:rkyv::rancor::Source, - ) -)] -#[rkyv(deserialize_bounds(__D::Error:rkyv::rancor::Source))] -#[rkyv( - bytecheck( - bounds(__C:rkyv::validation::ArchiveContext, __C::Error:rkyv::rancor::Source) - ) -)] -pub struct CallExpression { +impl<'db> std::hash::Hash for BinaryExpression<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[derive(Debug, Clone, Drive, Eq, PartialEq, salsa::Update)] +pub struct CallExpression<'db> { #[drive(skip)] start_byte: usize, #[drive(skip)] end_byte: usize, #[drive(skip)] _kind: std::string::String, - #[debug("[{},{}]", 
start_position.row, start_position.column)] #[drive(skip)] - start_position: Point, - #[debug("[{},{}]", end_position.row, end_position.column)] + start_position: Point<'db>, #[drive(skip)] - end_position: Point, - #[debug(ignore)] + end_position: Point<'db>, #[drive(skip)] buffer: Arc, - #[debug(ignore)] #[drive(skip)] kind_id: u16, - #[debug(ignore)] #[drive(skip)] is_error: bool, - #[debug(ignore)] #[drive(skip)] named: bool, - #[debug(ignore)] #[drive(skip)] id: usize, - #[rkyv(omit_bounds)] - pub children: Vec, - #[rkyv(omit_bounds)] - pub callee: Box, + pub _children: Vec>, + pub callee: Box>, } -impl FromNode for CallExpression { +impl<'db> FromNode<'db> for CallExpression<'db> { fn from_node( + db: &'db dyn salsa::Database, node: tree_sitter::Node, buffer: &Arc, ) -> Result { + let start_position = Point::from(db, node.start_position()); + let end_position = Point::from(db, node.end_position()); Ok(Self { start_byte: node.start_byte(), end_byte: node.end_byte(), _kind: node.kind().to_string(), - start_position: node.start_position().into(), - end_position: node.end_position().into(), + start_position: start_position, + end_position: end_position, buffer: buffer.clone(), kind_id: node.kind_id(), is_error: node.is_error(), named: node.is_named(), id: node.id(), - callee: Box::new(get_child_by_field_name(&node, "callee", buffer)?), - children: named_children_without_field_names(node, buffer)?, + callee: Box::new(get_child_by_field_name(db, &node, "callee", buffer)?), + _children: named_children_without_field_names(db, node, buffer)?, }) } } -impl CSTNode for CallExpression { +impl<'db> CSTNode<'db> for CallExpression<'db> { fn kind(&self) -> &str { &self._kind } @@ -300,10 +284,10 @@ impl CSTNode for CallExpression { fn end_byte(&self) -> usize { self.end_byte } - fn start_position(&self) -> Point { + fn start_position(&self) -> Point<'db> { self.start_position } - fn end_position(&self) -> Point { + fn end_position(&self) -> Point<'db> { self.end_position } 
fn buffer(&self) -> &Bytes { @@ -322,10 +306,10 @@ impl CSTNode for CallExpression { self.id } } -impl HasChildren for CallExpression { - type Child = CallExpressionChildren; +impl<'db1> HasChildren<'db1> for CallExpression<'db1> { + type Child = CallExpressionChildren<'db1>; fn children(&self) -> Vec { - let mut children: Vec<_> = self.children.iter().cloned().collect(); + let mut children: Vec<_> = self._children.iter().cloned().collect(); children .push( Self::Child::try_from(NodeTypes::from(self.callee.as_ref().clone())) @@ -357,3 +341,65 @@ impl HasChildren for CallExpression { } } } +impl<'db> std::hash::Hash for CallExpression<'db> { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } +} +#[salsa::tracked] +pub struct Parsed<'db> { + #[tracked] + #[return_ref] + pub program: Option>, +} +pub fn parse_program_raw( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Option> { + let buffer = Bytes::from(input.content(db).as_bytes().to_vec()); + let tree = codegen_sdk_common::language::language::Language + .parse_tree_sitter(&input.content(db)); + match tree { + Ok(tree) => { + if tree.root_node().has_error() { + ParseError::SyntaxError.report(db); + None + } else { + let buffer = Arc::new(buffer); + Program::from_node(db, tree.root_node(), &buffer) + .map_or_else( + |e| { + e.report(db); + None + }, + |program| { Some(program) }, + ) + } + } + Err(e) => { + e.report(db); + None + } + } +} +#[salsa::tracked] +pub fn parse_program( + db: &dyn salsa::Database, + input: codegen_sdk_cst::Input, +) -> Parsed<'_> { + Parsed::new(db, parse_program_raw(db, input)) +} +pub struct Language; +impl CSTLanguage for Language { + type Program<'db> = Program<'db>; + fn language() -> &'static codegen_sdk_common::language::Language { + &codegen_sdk_common::language::language::Language + } + fn parse<'db>( + db: &'db dyn salsa::Database, + content: std::string::String, + ) -> &'db Option> { + let input = codegen_sdk_cst::Input::new(db, content); + return 
parse_program(db, input).program(db); + } +} diff --git a/codegen-sdk-cst-generator/src/tests/test_subtypes.rs b/codegen-sdk-cst-generator/src/tests/test_subtypes.rs index fe7a341a..2e923d27 100644 --- a/codegen-sdk-cst-generator/src/tests/test_subtypes.rs +++ b/codegen-sdk-cst-generator/src/tests/test_subtypes.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use codegen_sdk_common::parser::{Fields, Node, TypeDefinition}; -use crate::{generate_cst, test_util::get_language}; +use crate::{Config, generate_cst, test_util::get_language}; #[test_log::test] fn test_basic_subtypes() { @@ -44,7 +44,7 @@ fn test_basic_subtypes() { ]; let language = get_language(nodes); - let output = generate_cst(&language).unwrap(); + let output = generate_cst(&language, Config::default()).unwrap(); insta::assert_debug_snapshot!(crate::test_util::snapshot_string(&output)); } @@ -112,7 +112,7 @@ fn test_subtypes_with_fields() { ]; let language = get_language(nodes); - let output = generate_cst(&language).unwrap(); + let output = generate_cst(&language, Config::default()).unwrap(); insta::assert_debug_snapshot!(crate::test_util::snapshot_string(&output)); } @@ -194,6 +194,6 @@ fn test_deeply_nested_subtypes() { }, ]; let language = get_language(nodes); - let output = generate_cst(&language).unwrap(); + let output = generate_cst(&language, Config::default()).unwrap(); insta::assert_debug_snapshot!(crate::test_util::snapshot_string(&output)); } diff --git a/codegen-sdk-cst-generator/src/tests/test_subtypes_children.rs b/codegen-sdk-cst-generator/src/tests/test_subtypes_children.rs index c90f4a42..128a4079 100644 --- a/codegen-sdk-cst-generator/src/tests/test_subtypes_children.rs +++ b/codegen-sdk-cst-generator/src/tests/test_subtypes_children.rs @@ -1,6 +1,6 @@ use codegen_sdk_common::parser::{Children, Node, TypeDefinition}; -use crate::{generate_cst, test_util::get_language}; +use crate::{Config, generate_cst, test_util::get_language}; #[test_log::test] fn test_subtypes_with_children() { @@ 
-65,6 +65,6 @@ fn test_subtypes_with_children() { }, ]; let language = get_language(nodes); - let output = generate_cst(&language).unwrap(); + let output = generate_cst(&language, Config::default()).unwrap(); insta::assert_debug_snapshot!(crate::test_util::snapshot_string(&output)); } diff --git a/codegen-sdk-cst-generator/src/tests/test_subtypes_multiple_inheritance.rs b/codegen-sdk-cst-generator/src/tests/test_subtypes_multiple_inheritance.rs index 7eae7750..46768348 100644 --- a/codegen-sdk-cst-generator/src/tests/test_subtypes_multiple_inheritance.rs +++ b/codegen-sdk-cst-generator/src/tests/test_subtypes_multiple_inheritance.rs @@ -1,6 +1,6 @@ use codegen_sdk_common::parser::{Node, TypeDefinition}; -use crate::{generate_cst, test_util::get_language}; +use crate::{Config, generate_cst, test_util::get_language}; #[test_log::test] fn test_multiple_inheritance() { @@ -40,6 +40,6 @@ fn test_multiple_inheritance() { ]; let language = get_language(nodes); - let output = generate_cst(&language).unwrap(); + let output = generate_cst(&language, Config::default()).unwrap(); insta::assert_debug_snapshot!(crate::test_util::snapshot_string(&output)); } diff --git a/codegen-sdk-cst-generator/src/tests/test_subtypes_recursive.rs b/codegen-sdk-cst-generator/src/tests/test_subtypes_recursive.rs index b278edb2..37cddfd9 100644 --- a/codegen-sdk-cst-generator/src/tests/test_subtypes_recursive.rs +++ b/codegen-sdk-cst-generator/src/tests/test_subtypes_recursive.rs @@ -2,8 +2,7 @@ use std::collections::HashMap; use codegen_sdk_common::parser::{Children, Fields, Node, TypeDefinition}; -use crate::{generate_cst, test_util::get_language}; - +use crate::{Config, generate_cst, test_util::get_language}; #[test_log::test] fn test_recursive_subtypes() { let nodes = vec![ @@ -88,6 +87,6 @@ fn test_recursive_subtypes() { ]; let language = get_language(nodes); - let output = generate_cst(&language).unwrap(); + let output = generate_cst(&language, Config::default()).unwrap(); 
insta::assert_debug_snapshot!(crate::test_util::snapshot_string(&output)); } diff --git a/codegen-sdk-cst-generator/tests/test_python.rs b/codegen-sdk-cst-generator/tests/test_python.rs index 8080ca8d..ec43b9b3 100644 --- a/codegen-sdk-cst-generator/tests/test_python.rs +++ b/codegen-sdk-cst-generator/tests/test_python.rs @@ -1,9 +1,9 @@ use codegen_sdk_common::language::python::Python; -use codegen_sdk_cst_generator::generate_cst; +use codegen_sdk_cst_generator::{Config, generate_cst}; #[test_log::test] fn test_generate_cst() { let language = &Python; - let cst = generate_cst(&language).unwrap(); + let cst = generate_cst(&language, Config::default()).unwrap(); log::info!("{}", cst); } diff --git a/codegen-sdk-cst-generator/tests/test_typescript.rs b/codegen-sdk-cst-generator/tests/test_typescript.rs new file mode 100644 index 00000000..2b3208a0 --- /dev/null +++ b/codegen-sdk-cst-generator/tests/test_typescript.rs @@ -0,0 +1,9 @@ +use codegen_sdk_common::language::typescript::Typescript; +use codegen_sdk_cst_generator::{Config, generate_cst}; + +#[test_log::test] +fn test_generate_cst() { + let language = &Typescript; + let cst = generate_cst(&language, Config::default()).unwrap(); + log::info!("{}", cst); +} diff --git a/codegen-sdk-cst/Cargo.toml b/codegen-sdk-cst/Cargo.toml index 03514832..78b1ea56 100644 --- a/codegen-sdk-cst/Cargo.toml +++ b/codegen-sdk-cst/Cargo.toml @@ -6,38 +6,15 @@ edition = "2024" [dependencies] tree-sitter = { workspace = true } bytes = { workspace = true } -codegen-sdk-common = { workspace = true } -codegen-sdk-macros = { path = "../codegen-sdk-macros" } -derive_more = { workspace = true } +codegen-sdk-common = { path = "../codegen-sdk-common" } convert_case = { workspace = true } rkyv = { workspace = true } -subenum = "1.1.2" -log = { workspace = true } -enum_delegate = { workspace = true } -derive-visitor = { workspace = true } -[build-dependencies] -codegen-sdk-cst-generator = { path = "../codegen-sdk-cst-generator"} 
-codegen-sdk-common = { workspace = true } -rayon = { workspace = true } -env_logger = { workspace = true } log = { workspace = true } +salsa = { workspace = true } +dashmap = "6.1.0" [dev-dependencies] tempfile = { workspace = true } test-log = { workspace = true } [features] -python = [ "codegen-sdk-common/python"] -typescript = [ "codegen-sdk-common/typescript"] -tsx = [ "codegen-sdk-common/typescript"] -jsx = [ "codegen-sdk-common/typescript"] -javascript = [ "codegen-sdk-common/typescript"] -json = [ "codegen-sdk-common/json"] -java = [ "codegen-sdk-common/java"] -rust = [ "codegen-sdk-common/rust"] -go = [ "codegen-sdk-common/go"] -ruby = [ "codegen-sdk-common/ruby"] -yaml = [ "codegen-sdk-common/yaml"] -toml = [ "codegen-sdk-common/toml"] -markdown = [ "codegen-sdk-common/markdown"] -ts_query = [ "codegen-sdk-common/ts_query"] -default = ["json", "ts_query", "toml"] +serialization = ["codegen-sdk-common/serialization"] diff --git a/codegen-sdk-cst/build.rs b/codegen-sdk-cst/build.rs deleted file mode 100644 index 3fd0669f..00000000 --- a/codegen-sdk-cst/build.rs +++ /dev/null @@ -1,12 +0,0 @@ -use codegen_sdk_common::language::LANGUAGES; -use codegen_sdk_cst_generator::generate_cst_to_file; -use rayon::prelude::*; -fn main() { - env_logger::init(); - println!("cargo:rerun-if-changed=build.rs"); - LANGUAGES.par_iter().for_each(|language| { - generate_cst_to_file(language).unwrap_or_else(|e| { - log::error!("Error generating CST for {}: {}", language.name(), e); - }); - }); -} diff --git a/codegen-sdk-cst/src/database.rs b/codegen-sdk-cst/src/database.rs new file mode 100644 index 00000000..de19d043 --- /dev/null +++ b/codegen-sdk-cst/src/database.rs @@ -0,0 +1,15 @@ +use std::{any::Any, path::PathBuf, sync::Arc}; + +use dashmap::{DashMap, mapref::entry::Entry}; + +use crate::Input; +#[salsa::db] +#[derive(Default, Clone)] +// Basic Database implementation for Query generation. This is not used for anything else. 
+pub struct CSTDatabase { + storage: salsa::Storage, +} +#[salsa::db] +impl salsa::Database for CSTDatabase { + fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {} +} diff --git a/codegen-sdk-cst/src/input.rs b/codegen-sdk-cst/src/input.rs new file mode 100644 index 00000000..658de06a --- /dev/null +++ b/codegen-sdk-cst/src/input.rs @@ -0,0 +1,5 @@ +use std::path::PathBuf; +#[salsa::input] +pub struct Input { + pub content: String, +} diff --git a/codegen-sdk-cst/src/language.rs b/codegen-sdk-cst/src/language.rs index 598c6f8d..3a9d8718 100644 --- a/codegen-sdk-cst/src/language.rs +++ b/codegen-sdk-cst/src/language.rs @@ -6,23 +6,45 @@ use codegen_sdk_common::{ language::Language, traits::{CSTNode, FromNode}, }; + pub trait CSTLanguage { - type Program: CSTNode + FromNode + Send; + type Program<'db1>: CSTNode<'db1> + FromNode<'db1> + Send; fn language() -> &'static Language; - fn parse(content: &str) -> Result { - let buffer = Bytes::from(content.as_bytes().to_vec()); - let tree = Self::language().parse_tree_sitter(content)?; - if tree.root_node().has_error() { - Err(ParseError::SyntaxError) - } else { - let buffer = Arc::new(buffer); - Self::Program::from_node(tree.root_node(), &buffer) + fn parse<'db>(db: &'db dyn salsa::Database, content: String) + -> &'db Option>; + fn parse_file_from_cache<'db>( + db: &'db dyn salsa::Database, + file_path: &PathBuf, + #[cfg(feature = "serialization")] cache: &'db codegen_sdk_common::serialize::Cache, + ) -> Result<&'db Option>, ParseError> { + #[cfg(feature = "serialization")] + { + let serialized_path = cache.get_path(file_path); + if serialized_path.exists() { + let parsed = cache.read_entry::>(&serialized_path)?; + return Ok(Some(parsed)); + } } + Ok(&None) } - fn parse_file(file_path: &PathBuf) -> Result { + fn parse_file<'db>( + db: &'db dyn salsa::Database, + file_path: &PathBuf, + #[cfg(feature = "serialization")] cache: &'db codegen_sdk_common::serialize::Cache, + ) -> Result<&'db Self::Program<'db>, ParseError> 
{ + if let Some(parsed) = Self::parse_file_from_cache( + db, + file_path, + #[cfg(feature = "serialization")] + cache, + )? { + return Ok(parsed); + } let content = std::fs::read_to_string(file_path)?; - let parsed = Self::parse(&content)?; - Ok(parsed) + if let Some(parsed) = Self::parse(db, content) { + return Ok(parsed); + } + Err(ParseError::SyntaxError) } fn should_parse(file_path: &PathBuf) -> Result { diff --git a/codegen-sdk-cst/src/lib.rs b/codegen-sdk-cst/src/lib.rs index 0ea86380..f1c03665 100644 --- a/codegen-sdk-cst/src/lib.rs +++ b/codegen-sdk-cst/src/lib.rs @@ -1,61 +1,14 @@ #![recursion_limit = "512"] #![feature(trivial_bounds, extend_one)] -use std::path::PathBuf; +#![allow(unused)] -use codegen_sdk_common::{ParseError, serialize::Cache, traits::CSTNode}; -use codegen_sdk_macros::{include_languages, parse_languages}; -use rkyv::{api::high::to_bytes_in, from_bytes}; +use std::{any::Any, path::PathBuf}; +mod input; +use dashmap::{DashMap, mapref::entry::Entry}; +mod database; +use codegen_sdk_common::{ParseError, traits::CSTNode}; +pub use database::CSTDatabase; +pub use input::Input; mod language; pub use codegen_sdk_common::language::LANGUAGES; pub use language::CSTLanguage; -include_languages!(); -pub fn parse_file( - cache: &Cache, - file_path: &PathBuf, -) -> Result, ParseError> { - parse_languages!(); - Err(ParseError::UnknownLanguage) -} - -#[cfg(test)] -mod tests { - use codegen_sdk_common::traits::HasChildren; - use derive_visitor::{Drive, Visitor}; - - use super::*; - use crate::typescript::ClassDeclaration; - #[test_log::test] - fn test_snazzy_items() { - let content = " - { - \"name\": \"SnazzyItems\" - } - "; - let module = json::JSON::parse(&content).unwrap(); - assert!(module.children().len() > 0); - } - #[derive(Visitor, Default)] - #[visitor(ClassDeclaration(enter))] - struct ClassVisitor { - pub items: Vec, - } - impl ClassVisitor { - fn enter_class_declaration(&mut self, node: &typescript::ClassDeclaration) { - 
self.items.push(node.name.source()); - } - } - #[test_log::test] - fn test_visitor() { - let content = " - class SnazzyItems { - constructor() { - this.items = []; - } - } - "; - let module = typescript::Typescript::parse(&content).unwrap(); - let mut visitor = ClassVisitor::default(); - module.drive(&mut visitor); - assert_eq!(visitor.items, vec!["SnazzyItems"]); - } -} diff --git a/codegen-sdk-macros/Cargo.toml b/codegen-sdk-macros/Cargo.toml index 3a2c473d..e69f288f 100644 --- a/codegen-sdk-macros/Cargo.toml +++ b/codegen-sdk-macros/Cargo.toml @@ -4,6 +4,8 @@ version = "0.1.0" edition = "2024" [dependencies] -codegen-sdk-common = { workspace = true, features = ["all"] } +codegen-sdk-common = { workspace = true} +quote = { workspace = true } +proc-macro2 = { workspace = true } [lib] proc-macro = true diff --git a/codegen-sdk-macros/src/lib.rs b/codegen-sdk-macros/src/lib.rs index ef602243..960946f3 100644 --- a/codegen-sdk-macros/src/lib.rs +++ b/codegen-sdk-macros/src/lib.rs @@ -1,111 +1,89 @@ +#![feature(extend_one)] extern crate proc_macro; -use codegen_sdk_common::language::{LANGUAGES, Language}; +use codegen_sdk_common::language::LANGUAGES; use proc_macro::TokenStream; -fn get_language(language: &str) -> &Language { - for lang in LANGUAGES.iter() { - if lang.name().to_lowercase() == language.to_lowercase() { - return lang; - } - } - panic!("Language not found"); -} -#[proc_macro] -pub fn include_language_ast(_item: TokenStream) -> TokenStream { - let target_language = _item.to_string(); - let language = get_language(&target_language); - - format!( - "#[cfg(feature = \"{name}\")] -pub mod {name} {{ - use codegen_sdk_cst::{name}; - include!(concat!(env!(\"OUT_DIR\"), \"/{name}.rs\")); -}}", - name = language.name() - ) - .parse() - .unwrap() -} +use quote::{format_ident, quote}; -#[proc_macro] -pub fn include_language(_item: TokenStream) -> TokenStream { - let target_language = _item.to_string(); - let language = get_language(&target_language); - let root = 
language.root_node(); +// #[proc_macro] +// pub fn parse_language(_item: TokenStream) -> TokenStream { +// let target_language = _item.to_string(); +// let language = get_language(&target_language); +// format!( +// "#[cfg(feature = \"{name}\")] +// if {name}::{struct_name}::should_parse(file_path)? {{ +// let parsed = {name}::{struct_name}::parse_file(db, file_path, #[cfg(feature = \"serialization\")] cache)?; +// #[cfg(feature = \"serialization\")] {{ +// log::debug!(\"Serializing {name}\"); +// let writer = cache.get_writer(&serialized_path)?; +// let _ = rkyv::api::high::to_bytes_in::<_, rkyv::rancor::Error>(&parsed, writer)?; +// }} +// return Ok(Box::new(parsed)); +// }} +// ", +// name = language.name(), +// struct_name = language.struct_name +// ) +// .parse() +// .unwrap() +// } +// #[proc_macro] +// pub fn parse_languages(_item: TokenStream) -> TokenStream { +// let mut output = String::new(); +// output.push_str("use codegen_sdk_macros::parse_language;"); +// for language in LANGUAGES.iter() { +// output.push_str(&format!("parse_language!({});", language.name())); +// } +// output.parse().unwrap() +// } - format!( - "#[cfg(feature = \"{name}\")] -pub mod {name} {{ - use crate::CSTLanguage; - use codegen_sdk_common::language::Language; - include!(concat!(env!(\"OUT_DIR\"), \"/{name}.rs\")); - pub struct {struct_name}; - impl CSTLanguage for {struct_name} {{ - type Program = {root}; - fn language() -> &'static Language {{ - &codegen_sdk_common::language::{name}::{struct_name} - }} - }} -}}", - name = language.name(), - struct_name = language.struct_name, - root = root - ) - .parse() - .unwrap() -} - -#[proc_macro] -pub fn parse_language(_item: TokenStream) -> TokenStream { - let target_language = _item.to_string(); - let language = get_language(&target_language); - format!( - "#[cfg(feature = \"{name}\")] - if {name}::{struct_name}::should_parse(file_path)? 
{{ - let serialized_path = cache.get_path(file_path); - if serialized_path.exists() {{ - log::debug!(\"Deserializing {name}\"); - let bytes = cache.read_entry(&serialized_path)?; - let parsed = - from_bytes::<<{name}::{struct_name} as CSTLanguage>::Program, rkyv::rancor::Error>(&bytes)?; - return Ok(Box::new(parsed)); - }} - let parsed = {name}::{struct_name}::parse_file(file_path)?; - log::debug!(\"Serializing {name}\"); - let writer = cache.get_writer(&serialized_path)?; - let _ = to_bytes_in::<_, rkyv::rancor::Error>(&parsed, writer)?; - return Ok(Box::new(parsed)); - }} - ", - name = language.name(), - struct_name = language.struct_name - ) - .parse() - .unwrap() -} #[proc_macro] -pub fn parse_languages(_item: TokenStream) -> TokenStream { - let mut output = String::new(); - output.push_str("use codegen_sdk_macros::parse_language;"); +pub fn languages_ast(_item: TokenStream) -> TokenStream { + let mut output = Vec::new(); for language in LANGUAGES.iter() { - output.push_str(&format!("parse_language!({});", language.name())); + if language.name() == "ts_query" { + continue; + } + let name = language.name(); + let package_name = format_ident!("codegen_sdk_{}", name); + let struct_name = format_ident!("{}", language.struct_name); + let file_name = format_ident!("{}File", language.struct_name); + let variant: proc_macro2::TokenStream = quote! { + #[cfg(feature = #name)] + #struct_name(#package_name::ast::#file_name<'db>), + }; + output.push(variant); + } + let enum_output: TokenStream = quote! 
{ + #[derive(Debug, Clone, Eq, PartialEq, Hash, salsa::Update)] + pub enum ParsedFile<'db> { + #(#output)* } - output.parse().unwrap() -} -#[proc_macro] -pub fn include_languages(_item: TokenStream) -> TokenStream { - let mut output = String::new(); - output.push_str("use codegen_sdk_macros::include_language;"); - for language in LANGUAGES.iter() { - output.push_str(&format!("include_language!({});", language.name())); } - output.parse().unwrap() + .into(); + enum_output } + #[proc_macro] -pub fn include_languages_ast(_item: TokenStream) -> TokenStream { - let mut output = String::new(); - output.push_str("use codegen_sdk_macros::include_language_ast;"); +pub fn parse_language(_item: TokenStream) -> TokenStream { + let mut output = proc_macro2::TokenStream::new(); for language in LANGUAGES.iter() { - output.push_str(&format!("include_language_ast!({});", language.name())); + if language.name() == "ts_query" { + continue; + } + let name = language.name(); + let package_name = format_ident!("codegen_sdk_{}", name); + let struct_name = format_ident!("{}", language.struct_name); + let variant: proc_macro2::TokenStream = quote! 
{ + #[cfg(feature = #name)] + if #package_name::cst::#struct_name::should_parse(&file.path(db)).unwrap_or(false) { + let parsed = #package_name::ast::parse(db, file); + return Parsed::new( + db, + Some(ParsedFile::#struct_name(parsed)), + ); + } + }; + output.extend_one(variant); } - output.parse().unwrap() + output.into() } diff --git a/languages/codegen-sdk-go/Cargo.toml b/languages/codegen-sdk-go/Cargo.toml new file mode 100644 index 00000000..2c4f0c17 --- /dev/null +++ b/languages/codegen-sdk-go/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-go" +version = "0.1.0" +edition = "2024" +description = "Go language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["go"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["go"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-go/build.rs b/languages/codegen-sdk-go/build.rs new file mode 100644 index 00000000..88388c58 --- /dev/null +++ b/languages/codegen-sdk-go/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::go::Go; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Go, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Go.name(), e); + 
panic!("Error generating CST for {}: {}", Go.name(), e); + }); + generate_ast(&Go).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Go.name(), e); + panic!("Error generating AST for {}: {}", Go.name(), e); + }); +} diff --git a/languages/codegen-sdk-go/src/lib.rs b/languages/codegen-sdk-go/src/lib.rs new file mode 100644 index 00000000..fc68c94d --- /dev/null +++ b/languages/codegen-sdk-go/src/lib.rs @@ -0,0 +1,9 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +#![allow(non_snake_case)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/go.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/go-ast.rs")); +} diff --git a/languages/codegen-sdk-java/Cargo.toml b/languages/codegen-sdk-java/Cargo.toml new file mode 100644 index 00000000..0d7e131c --- /dev/null +++ b/languages/codegen-sdk-java/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-java" +version = "0.1.0" +edition = "2024" +description = "Java language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["java"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["java"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-java/build.rs b/languages/codegen-sdk-java/build.rs new file mode 100644 index 00000000..d2ed1c32 --- /dev/null +++ b/languages/codegen-sdk-java/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; 
+use codegen_sdk_common::language::java::Java; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Java, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Java.name(), e); + panic!("Error generating CST for {}: {}", Java.name(), e); + }); + generate_ast(&Java).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Java.name(), e); + panic!("Error generating AST for {}: {}", Java.name(), e); + }); +} diff --git a/languages/codegen-sdk-java/src/lib.rs b/languages/codegen-sdk-java/src/lib.rs new file mode 100644 index 00000000..188d6160 --- /dev/null +++ b/languages/codegen-sdk-java/src/lib.rs @@ -0,0 +1,9 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +#![allow(non_snake_case)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/java.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/java-ast.rs")); +} diff --git a/languages/codegen-sdk-javascript/Cargo.toml b/languages/codegen-sdk-javascript/Cargo.toml new file mode 100644 index 00000000..cbed7010 --- /dev/null +++ b/languages/codegen-sdk-javascript/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-javascript" +version = "0.1.0" +edition = "2024" +description = "Javascript language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["typescript"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = 
true, features = ["typescript"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-javascript/build.rs b/languages/codegen-sdk-javascript/build.rs new file mode 100644 index 00000000..1f79f3b4 --- /dev/null +++ b/languages/codegen-sdk-javascript/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::javascript::Javascript; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Javascript, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Javascript.name(), e); + panic!("Error generating CST for {}: {}", Javascript.name(), e); + }); + generate_ast(&Javascript).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Javascript.name(), e); + panic!("Error generating AST for {}: {}", Javascript.name(), e); + }); +} diff --git a/languages/codegen-sdk-javascript/src/lib.rs b/languages/codegen-sdk-javascript/src/lib.rs new file mode 100644 index 00000000..8f0d859e --- /dev/null +++ b/languages/codegen-sdk-javascript/src/lib.rs @@ -0,0 +1,10 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +#![allow(non_snake_case)] + +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/javascript.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/javascript-ast.rs")); +} diff --git a/languages/codegen-sdk-json/Cargo.toml b/languages/codegen-sdk-json/Cargo.toml new file mode 100644 index 00000000..947df056 --- /dev/null +++ b/languages/codegen-sdk-json/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "codegen-sdk-json" +version = "0.1.0" +edition = "2024" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["json"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { 
workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +codegen-sdk-ast = { workspace = true } +log = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["json"] } +env_logger = { workspace = true } +log = { workspace = true } +[dev-dependencies] +test-log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-json/build.rs b/languages/codegen-sdk-json/build.rs new file mode 100644 index 00000000..e64f5198 --- /dev/null +++ b/languages/codegen-sdk-json/build.rs @@ -0,0 +1,17 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::json::JSON; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&JSON, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", JSON.name(), e); + panic!("Error generating CST for {}: {}", JSON.name(), e); + }); + generate_ast(&JSON).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", JSON.name(), e); + panic!("Error generating AST for {}: {}", JSON.name(), e); + }); +} diff --git a/languages/codegen-sdk-json/src/lib.rs b/languages/codegen-sdk-json/src/lib.rs new file mode 100644 index 00000000..23e5a92b --- /dev/null +++ b/languages/codegen-sdk-json/src/lib.rs @@ -0,0 +1,27 @@ +#![allow(unused)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/json.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/json-ast.rs")); +} +#[cfg(test)] +mod tests { + use codegen_sdk_common::traits::HasChildren; + use codegen_sdk_cst::CSTLanguage; + + use super::*; + 
#[test_log::test] + fn test_snazzy_items() { + let content = " + { + \"name\": \"SnazzyItems\" + } + "; + let db = codegen_sdk_cst::CSTDatabase::default(); + let module = crate::cst::JSON::parse(&db, content.to_string()) + .as_ref() + .unwrap(); + assert!(module.children().len() > 0); + } +} diff --git a/languages/codegen-sdk-jsx/Cargo.toml b/languages/codegen-sdk-jsx/Cargo.toml new file mode 100644 index 00000000..64ab9c95 --- /dev/null +++ b/languages/codegen-sdk-jsx/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-jsx" +version = "0.1.0" +edition = "2024" +description = "JSX language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["typescript"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["typescript"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-jsx/build.rs b/languages/codegen-sdk-jsx/build.rs new file mode 100644 index 00000000..7f77a947 --- /dev/null +++ b/languages/codegen-sdk-jsx/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::jsx::JSX; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&JSX, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", JSX.name(), e); + 
panic!("Error generating CST for {}: {}", JSX.name(), e); + }); + generate_ast(&JSX).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", JSX.name(), e); + panic!("Error generating AST for {}: {}", JSX.name(), e); + }); +} diff --git a/languages/codegen-sdk-jsx/src/lib.rs b/languages/codegen-sdk-jsx/src/lib.rs new file mode 100644 index 00000000..ed766cdf --- /dev/null +++ b/languages/codegen-sdk-jsx/src/lib.rs @@ -0,0 +1,10 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +#![allow(non_snake_case)] + +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/jsx.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/jsx-ast.rs")); +} diff --git a/languages/codegen-sdk-markdown/Cargo.toml b/languages/codegen-sdk-markdown/Cargo.toml new file mode 100644 index 00000000..06e9af1e --- /dev/null +++ b/languages/codegen-sdk-markdown/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-markdown" +version = "0.1.0" +edition = "2024" +description = "Markdown language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["markdown"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["markdown"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-markdown/build.rs b/languages/codegen-sdk-markdown/build.rs new file mode 100644 index 00000000..3d8d3e85 --- /dev/null +++ b/languages/codegen-sdk-markdown/build.rs @@ -0,0 +1,18 
@@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::markdown::Markdown; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Markdown, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Markdown.name(), e); + panic!("Error generating CST for {}: {}", Markdown.name(), e); + }); + generate_ast(&Markdown).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Markdown.name(), e); + panic!("Error generating AST for {}: {}", Markdown.name(), e); + }); +} diff --git a/languages/codegen-sdk-markdown/src/lib.rs b/languages/codegen-sdk-markdown/src/lib.rs new file mode 100644 index 00000000..b2f144a0 --- /dev/null +++ b/languages/codegen-sdk-markdown/src/lib.rs @@ -0,0 +1,8 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/markdown.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/markdown-ast.rs")); +} diff --git a/languages/codegen-sdk-python/Cargo.toml b/languages/codegen-sdk-python/Cargo.toml new file mode 100644 index 00000000..573fbcb8 --- /dev/null +++ b/languages/codegen-sdk-python/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "codegen-sdk-python" +version = "0.1.0" +edition = "2024" +description = "Python language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["python"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } + +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { 
workspace = true } +codegen-sdk-common = { workspace = true, features = ["python"] } +env_logger = { workspace = true } +log = { workspace = true } + +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-python/build.rs b/languages/codegen-sdk-python/build.rs new file mode 100644 index 00000000..83534633 --- /dev/null +++ b/languages/codegen-sdk-python/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::python::Python; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Python, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Python.name(), e); + panic!("Error generating CST for {}: {}", Python.name(), e); + }); + generate_ast(&Python).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Python.name(), e); + panic!("Error generating AST for {}: {}", Python.name(), e); + }); +} diff --git a/languages/codegen-sdk-python/src/lib.rs b/languages/codegen-sdk-python/src/lib.rs new file mode 100644 index 00000000..c4896093 --- /dev/null +++ b/languages/codegen-sdk-python/src/lib.rs @@ -0,0 +1,9 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +#![allow(non_snake_case)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/python.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/python-ast.rs")); +} diff --git a/languages/codegen-sdk-ruby/Cargo.toml b/languages/codegen-sdk-ruby/Cargo.toml new file mode 100644 index 00000000..93a214db --- /dev/null +++ b/languages/codegen-sdk-ruby/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-ruby" +version = "0.1.0" +edition = "2024" +description = "Ruby language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["ruby"] } +salsa = { workspace = true } +tree-sitter = { 
workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["ruby"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-ruby/build.rs b/languages/codegen-sdk-ruby/build.rs new file mode 100644 index 00000000..e81a3af7 --- /dev/null +++ b/languages/codegen-sdk-ruby/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::ruby::Ruby; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Ruby, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Ruby.name(), e); + panic!("Error generating CST for {}: {}", Ruby.name(), e); + }); + generate_ast(&Ruby).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Ruby.name(), e); + panic!("Error generating AST for {}: {}", Ruby.name(), e); + }); +} diff --git a/languages/codegen-sdk-ruby/src/lib.rs b/languages/codegen-sdk-ruby/src/lib.rs new file mode 100644 index 00000000..07f60889 --- /dev/null +++ b/languages/codegen-sdk-ruby/src/lib.rs @@ -0,0 +1,9 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +#![allow(non_snake_case)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/ruby.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/ruby-ast.rs")); +} diff --git a/languages/codegen-sdk-rust/Cargo.toml b/languages/codegen-sdk-rust/Cargo.toml new 
file mode 100644 index 00000000..47504ba9 --- /dev/null +++ b/languages/codegen-sdk-rust/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-rust" +version = "0.1.0" +edition = "2024" +description = "Rust language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["rust"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["rust"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-rust/build.rs b/languages/codegen-sdk-rust/build.rs new file mode 100644 index 00000000..5c0549cc --- /dev/null +++ b/languages/codegen-sdk-rust/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::rust::Rust; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Rust, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Rust.name(), e); + panic!("Error generating CST for {}: {}", Rust.name(), e); + }); + generate_ast(&Rust).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Rust.name(), e); + panic!("Error generating AST for {}: {}", Rust.name(), e); + }); +} diff --git a/languages/codegen-sdk-rust/src/lib.rs b/languages/codegen-sdk-rust/src/lib.rs new file mode 100644 index 00000000..b45ffbeb --- 
/dev/null +++ b/languages/codegen-sdk-rust/src/lib.rs @@ -0,0 +1,9 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +#![allow(non_snake_case)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/rust.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/rust-ast.rs")); +} diff --git a/languages/codegen-sdk-toml/Cargo.toml b/languages/codegen-sdk-toml/Cargo.toml new file mode 100644 index 00000000..88de5b1c --- /dev/null +++ b/languages/codegen-sdk-toml/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-toml" +version = "0.1.0" +edition = "2024" +description = "TOML language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["toml"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["toml"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-toml/build.rs b/languages/codegen-sdk-toml/build.rs new file mode 100644 index 00000000..4ed8dec5 --- /dev/null +++ b/languages/codegen-sdk-toml/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::toml::TOML; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&TOML, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", TOML.name(), e); + 
panic!("Error generating CST for {}: {}", TOML.name(), e); + }); + generate_ast(&TOML).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", TOML.name(), e); + panic!("Error generating AST for {}: {}", TOML.name(), e); + }); +} diff --git a/languages/codegen-sdk-toml/src/lib.rs b/languages/codegen-sdk-toml/src/lib.rs new file mode 100644 index 00000000..adfe35e0 --- /dev/null +++ b/languages/codegen-sdk-toml/src/lib.rs @@ -0,0 +1,8 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/toml.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/toml-ast.rs")); +} diff --git a/languages/codegen-sdk-ts_query/Cargo.toml b/languages/codegen-sdk-ts_query/Cargo.toml new file mode 100644 index 00000000..65c8d4c2 --- /dev/null +++ b/languages/codegen-sdk-ts_query/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "codegen-sdk-ts_query" +version = "0.1.0" +edition = "2024" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["ts_query"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["ts_query"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-ts_query/build.rs b/languages/codegen-sdk-ts_query/build.rs new file mode 100644 index 00000000..ca39e14f --- /dev/null +++ b/languages/codegen-sdk-ts_query/build.rs @@ -0,0 +1,12 @@ +use codegen_sdk_common::language::ts_query::Query; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; +fn main() { + let config = Config { + serialize: cfg!(feature = 
"serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Query, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Query.name(), e); + panic!("Error generating CST for {}: {}", Query.name(), e); + }); +} diff --git a/languages/codegen-sdk-ts_query/src/lib.rs b/languages/codegen-sdk-ts_query/src/lib.rs new file mode 100644 index 00000000..8615218f --- /dev/null +++ b/languages/codegen-sdk-ts_query/src/lib.rs @@ -0,0 +1,4 @@ +#![allow(unused)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/ts_query.rs")); +} diff --git a/languages/codegen-sdk-tsx/Cargo.toml b/languages/codegen-sdk-tsx/Cargo.toml new file mode 100644 index 00000000..014be1c0 --- /dev/null +++ b/languages/codegen-sdk-tsx/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-tsx" +version = "0.1.0" +edition = "2024" +description = "TSX language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["typescript"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["typescript"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-tsx/build.rs b/languages/codegen-sdk-tsx/build.rs new file mode 100644 index 00000000..f69e8760 --- /dev/null +++ b/languages/codegen-sdk-tsx/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::tsx::TSX; +use codegen_sdk_cst_generator::{Config, 
generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&TSX, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", TSX.name(), e); + panic!("Error generating CST for {}: {}", TSX.name(), e); + }); + generate_ast(&TSX).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", TSX.name(), e); + panic!("Error generating AST for {}: {}", TSX.name(), e); + }); +} diff --git a/languages/codegen-sdk-tsx/src/lib.rs b/languages/codegen-sdk-tsx/src/lib.rs new file mode 100644 index 00000000..c0e26cf5 --- /dev/null +++ b/languages/codegen-sdk-tsx/src/lib.rs @@ -0,0 +1,10 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +#![allow(non_snake_case)] + +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/tsx.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/tsx-ast.rs")); +} diff --git a/languages/codegen-sdk-typescript/Cargo.toml b/languages/codegen-sdk-typescript/Cargo.toml new file mode 100644 index 00000000..bcbaface --- /dev/null +++ b/languages/codegen-sdk-typescript/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-typescript" +version = "0.1.0" +edition = "2024" +description = "Typescript language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["typescript"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true } +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["typescript"] } +env_logger = { workspace = true } +log = { workspace = true } 
+[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-typescript/build.rs b/languages/codegen-sdk-typescript/build.rs new file mode 100644 index 00000000..73fa2246 --- /dev/null +++ b/languages/codegen-sdk-typescript/build.rs @@ -0,0 +1,17 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::typescript::Typescript; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Typescript, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Typescript.name(), e); + panic!("Error generating CST for {}: {}", Typescript.name(), e); + }); + generate_ast(&Typescript).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Typescript.name(), e); + panic!("Error generating AST for {}: {}", Typescript.name(), e); + }); +} diff --git a/languages/codegen-sdk-typescript/src/lib.rs b/languages/codegen-sdk-typescript/src/lib.rs new file mode 100644 index 00000000..6609f525 --- /dev/null +++ b/languages/codegen-sdk-typescript/src/lib.rs @@ -0,0 +1,9 @@ +#![recursion_limit = "2048"] +#![allow(non_snake_case)] +#![allow(unused)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/typescript.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/typescript-ast.rs")); +} diff --git a/languages/codegen-sdk-yaml/Cargo.toml b/languages/codegen-sdk-yaml/Cargo.toml new file mode 100644 index 00000000..6defd948 --- /dev/null +++ b/languages/codegen-sdk-yaml/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "codegen-sdk-yaml" +version = "0.1.0" +edition = "2024" +description = "YAML language support for codegen-sdk" + +[dependencies] +codegen-sdk-common = { workspace = true, features = ["yaml"] } +salsa = { workspace = true } +tree-sitter = { workspace = true } +derive_more = { workspace = true } +ambassador = { workspace = true 
} +derive_generic_visitor = { workspace = true } +subenum = {workspace = true} +bytes = { workspace = true } +codegen-sdk-cst = { workspace = true } +log = { workspace = true } +codegen-sdk-ast = { workspace = true } +[build-dependencies] +codegen-sdk-cst-generator = { workspace = true } +codegen-sdk-ast-generator = { workspace = true } +codegen-sdk-common = { workspace = true, features = ["yaml"] } +env_logger = { workspace = true } +log = { workspace = true } +[features] +serialization = ["codegen-sdk-common/serialization"] diff --git a/languages/codegen-sdk-yaml/build.rs b/languages/codegen-sdk-yaml/build.rs new file mode 100644 index 00000000..f2cedae5 --- /dev/null +++ b/languages/codegen-sdk-yaml/build.rs @@ -0,0 +1,18 @@ +use codegen_sdk_ast_generator::generate_ast; +use codegen_sdk_common::language::yaml::Yaml; +use codegen_sdk_cst_generator::{Config, generate_cst_to_file}; + +fn main() { + let config = Config { + serialize: cfg!(feature = "serialization"), + }; + env_logger::init(); + generate_cst_to_file(&Yaml, config.clone()).unwrap_or_else(|e| { + log::error!("Error generating CST for {}: {}", Yaml.name(), e); + panic!("Error generating CST for {}: {}", Yaml.name(), e); + }); + generate_ast(&Yaml).unwrap_or_else(|e| { + log::error!("Error generating AST for {}: {}", Yaml.name(), e); + panic!("Error generating AST for {}: {}", Yaml.name(), e); + }); +} diff --git a/languages/codegen-sdk-yaml/src/lib.rs b/languages/codegen-sdk-yaml/src/lib.rs new file mode 100644 index 00000000..b49481d2 --- /dev/null +++ b/languages/codegen-sdk-yaml/src/lib.rs @@ -0,0 +1,8 @@ +#![recursion_limit = "2048"] +#![allow(unused)] +pub mod cst { + include!(concat!(env!("OUT_DIR"), "/yaml.rs")); +} +pub mod ast { + include!(concat!(env!("OUT_DIR"), "/yaml-ast.rs")); +} diff --git a/src/cache.rs b/src/cache.rs new file mode 100644 index 00000000..fb0fdfa4 --- /dev/null +++ b/src/cache.rs @@ -0,0 +1,26 @@ +use std::{ops::Div, path, time::Instant}; + +use clap::Parser; +use 
codegen_sdk_analyzer::{CodegenDatabase, Db, Parsed}; +use codegen_sdk_ast::*; +use codegen_sdk_common::serialize::Cache; +use glob::glob; +use rayon::prelude::*; +use sysinfo::System; +fn report_cached_count(cached: usize, files_to_parse: &Vec) { + log::info!( + "{} files cached. {}% of total", + cached, + (cached * 100).div(files_to_parse.len()) + ); +} +fn get_cached_count(cache: &Cache, files_to_parse: &Vec) -> usize { + let mut cached = 0; + for file in files_to_parse.iter() { + let path = cache.get_path(file); + if path.exists() { + cached += 1; + } + } + cached +} diff --git a/src/discovery.rs b/src/discovery.rs new file mode 100644 index 00000000..58b2623f --- /dev/null +++ b/src/discovery.rs @@ -0,0 +1,35 @@ +use codegen_sdk_analyzer::{CodegenDatabase, Db}; +use codegen_sdk_ast::*; +#[cfg(feature = "serialization")] +use codegen_sdk_common::serialize::Cache; +use glob::glob; +#[salsa::input] +pub struct FilesToParse { + pub files: Vec, +} +pub fn log_languages() { + for language in LANGUAGES.iter() { + log::info!( + "Supported language: {} with extensions: {:?}", + language.name(), + language.file_extensions + ); + } +} + +pub fn collect_files(db: &CodegenDatabase, dir: String) -> FilesToParse { + let mut files = Vec::new(); + for language in LANGUAGES.iter() { + for extension in language.file_extensions.iter() { + files.extend(glob(&format!("{dir}**/*.{}", extension)).unwrap()); + } + } + + let files = files + .into_iter() + .filter_map(|file| file.ok()) + .filter(|file| !file.is_dir()) + .map(|file| db.input(file).unwrap()) + .collect(); + FilesToParse::new(db, files) +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 00000000..26844d2e --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,6 @@ +#![recursion_limit = "2048"] +#[cfg(feature = "serialization")] +mod cache; +pub mod discovery; +pub mod parser; +pub mod system; diff --git a/src/main.rs b/src/main.rs index a77e0afc..71cd9b2f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,97 +1,79 @@ -use 
std::{ops::Div, path, time::Instant}; +#![recursion_limit = "2048"] +use std::{path::PathBuf, time::Instant}; +use anyhow::Context; use clap::Parser; -use codegen_sdk_ast::*; -use codegen_sdk_common::{serialize::Cache, traits::CSTNode}; -use glob::glob; -use rayon::prelude::*; -use sysinfo::System; +use codegen_sdk_analyzer::{CodegenDatabase, Db, ParsedFile, parse_file}; +use codegen_sdk_ast::Input; +#[cfg(feature = "serialization")] +use codegen_sdk_common::serialize::Cache; +use codegen_sdk_core::{discovery::FilesToParse, parser::parse_files, system::get_memory}; +use salsa::Setter; #[derive(Debug, Parser)] struct Args { input: String, } -fn get_memory() -> u64 { - let s = System::new_all(); - let current = s.process(sysinfo::get_current_pid().unwrap()).unwrap(); - current.memory() -} -fn collect_files(dir: String) -> Vec { - let mut files = Vec::new(); - for language in LANGUAGES.iter() { - for extension in language.file_extensions.iter() { - files.extend(glob(&format!("{dir}**/*.{}", extension)).unwrap()); - } - } - - files.into_iter().filter_map(|file| file.ok()).collect() -} -fn parse_file( - cache: &Cache, - file: &path::PathBuf, - tx: &crossbeam::channel::Sender, -) -> Option> { - if file.is_dir() { - return None; - } - let result = codegen_sdk_ast::parse_file(cache, file); - - return match result { - Ok(program) => Some(program), - Err(e) => { - log::error!("Error parsing file {}: {}", file.display(), e); - tx.send(e.to_string()).unwrap(); - None +#[salsa::tracked] +fn get_total_definitions( + db: &dyn Db, + files_to_parse: FilesToParse, +) -> Vec<(usize, usize, usize, usize, usize)> { + salsa::par_map(db, files_to_parse.files(db), |db, file| { + let parsed = parse_file(db, file); + if let Some(parsed) = parsed.file(db) { + if let ParsedFile::Typescript(file) = parsed { + let definitions = file.definitions(db); + return ( + definitions.classes.len(), + definitions.functions.len(), + definitions.interfaces.len(), + definitions.methods.len(), + 
definitions.modules.len(), + ); + } } - }; + (0, 0, 0, 0, 0) + }) } -fn log_languages() { - for language in LANGUAGES.iter() { - log::info!( - "Supported language: {} with extensions: {:?}", - language.name(), - language.file_extensions - ); - } -} -fn parse_files(dir: String) -> (Vec>, Vec) { - rayon::ThreadPoolBuilder::new() - .stack_size(1024 * 1024 * 1024 * 10) - .build_global() - .unwrap(); - let (tx, rx) = crossbeam::channel::unbounded(); - let mut errors = Vec::new(); - log_languages(); - let cache = Cache::new().unwrap(); - let files_to_parse = collect_files(dir); - log::info!("Parsing {} files", files_to_parse.len()); - let mut cached = 0; - for file in files_to_parse.iter() { - let path = cache.get_path(file); - if path.exists() { - cached += 1; - } - } - let files: Vec> = files_to_parse - .par_iter() - .filter_map(|file| parse_file(&cache, file, &tx)) - .collect(); - drop(tx); - for e in rx.iter() { - errors.push(e); +#[cfg(feature = "typescript")] +fn print_definitions(db: &CodegenDatabase, files_to_parse: &FilesToParse) { + let mut total_classes = 0; + let mut total_functions = 0; + let mut total_interfaces = 0; + let mut total_methods = 0; + let mut total_modules = 0; + let new_files = FilesToParse::new(db, files_to_parse.files(db).clone()); + let definitions = get_total_definitions(db, new_files); + for (classes, functions, interfaces, methods, modules) in definitions { + total_classes += classes; + total_functions += functions; + total_interfaces += interfaces; + total_methods += methods; + total_modules += modules; } log::info!( - "{} files cached. 
{}% of total", - cached, - (cached * 100).div(files_to_parse.len()) + "{} classes, {} functions, {} interfaces, {} methods, {} modules", + total_classes, + total_functions, + total_interfaces, + total_methods, + total_modules ); - (files, errors) } -fn main() { +fn main() -> anyhow::Result<()> { env_logger::init(); let args = Args::parse(); let dir = args.input; let start = Instant::now(); - let (files, errors) = parse_files(dir); + let (tx, rx) = crossbeam_channel::unbounded(); + let mut db = CodegenDatabase::new(tx); + db.watch_dir(PathBuf::from(&dir)).unwrap(); + let (files_to_parse, errors) = parse_files( + &db, + #[cfg(feature = "serialization")] + &cache, + dir, + ); let num_errors = errors.len(); drop(errors); let end = Instant::now(); @@ -99,10 +81,51 @@ fn main() { let memory = get_memory(); log::info!( "{} files parsed in {:?}.{} seconds with {} errors. Using {} MB of memory", - files.len(), + files_to_parse.files(&db).len(), duration.as_secs(), duration.subsec_millis(), num_errors, memory / 1024 / 1024 ); + loop { + // Compile the code starting at the provided input, this will read other + // needed files using the on-demand mechanism. + print_definitions(&db, &files_to_parse); + // let diagnostics = compile::accumulated::(&db, initial); + // if diagnostics.is_empty() { + // println!("Sum is: {}", sum); + // } else { + // for diagnostic in diagnostics { + // println!("{}", diagnostic.0); + // } + // } + + // Wait for file change events, the output can't change unless the + // inputs change. + for event in rx.recv()?.unwrap() { + match event.path.canonicalize() { + Ok(path) => { + log::info!("File changed: {}", path.display()); + let file = match db.files.get(&path) { + Some(file) => *file, + None => continue, + }; + // `path` has changed, so read it and update the contents to match. + // This creates a new revision and causes the incremental algorithm + // to kick in, just like any other update to a salsa input. 
+ let contents = std::fs::read_to_string(path) + .with_context(|| format!("Failed to read file {}", event.path.display()))?; + let input = Input::new(&db, contents); + file.set_contents(&mut db).to(input); + } + Err(e) => { + log::error!( + "Failed to canonicalize path {} for file {}", + e, + event.path.display() + ); + } + } + } + } } diff --git a/src/parser.rs b/src/parser.rs new file mode 100644 index 00000000..a10af99c --- /dev/null +++ b/src/parser.rs @@ -0,0 +1,80 @@ +use codegen_sdk_analyzer::{CodegenDatabase, Db}; +#[cfg(feature = "serialization")] +use codegen_sdk_common::serialize::Cache; +use indicatif::{ProgressBar, ProgressStyle}; + +use crate::discovery::{FilesToParse, collect_files, log_languages}; +fn parse_file<'db>( + db: &'db dyn Db, + #[cfg(feature = "serialization")] cache: &Cache, + file: codegen_sdk_ast::input::File, +) { + if file.path(db).is_dir() { + log::warn!("Skipping directory: {}", file.path(db).display()); + return; + } + codegen_sdk_analyzer::parse_file(db, file); +} +#[salsa::tracked] +fn parse_files_par(db: &dyn Db, files: FilesToParse) { + let multi = db.multi_progress(); + let style = ProgressStyle::with_template( + "[{elapsed_precise}] {wide_bar} {msg} [{per_sec}] [estimated time remaining: {eta}]", + ) + .unwrap(); + let pg = multi.add( + ProgressBar::new(files.files(db).len() as u64) + .with_style(style) + .with_message("Parsing Files"), + ); + let inputs = files + .files(db) + .into_iter() + .map(|file| (&pg, file)) + .collect::<Vec<_>>(); + let _: Vec<()> = salsa::par_map(db, inputs, |db, input| { + let (pg, file) = input; + parse_file( + db, + #[cfg(feature = "serialization")] + &cache, + file, + ); + pg.inc(1); + () + }); + pg.finish(); + multi.remove(&pg); +} +pub fn parse_files<'db>( + db: &'db CodegenDatabase, + #[cfg(feature = "serialization")] cache: &'db Cache, + dir: String, +) -> (FilesToParse, Vec<String>) { + rayon::ThreadPoolBuilder::new() + .stack_size(1024 * 1024 * 1024 * 10) + .build_global() + .unwrap(); + let (tx, rx) =
crossbeam::channel::unbounded(); + let mut errors = Vec::new(); + log_languages(); + let files_to_parse = collect_files(db, dir); + #[cfg(feature = "serialization")] + let cache = Cache::new().unwrap(); + #[cfg(feature = "serialization")] + let cached = get_cached_count(&cache, &files_to_parse); + log::info!("Parsing {} files", files_to_parse.files(db).len()); + parse_files_par( + db, + #[cfg(feature = "serialization")] + &cache, + files_to_parse, + ); + drop(tx); + #[cfg(feature = "serialization")] + report_cached_count(cached, &files_to_parse.files(db)); + for e in rx.iter() { + errors.push(e); + } + (files_to_parse, errors) +} diff --git a/src/system.rs b/src/system.rs new file mode 100644 index 00000000..bde5abd9 --- /dev/null +++ b/src/system.rs @@ -0,0 +1,7 @@ +use sysinfo::System; + +pub fn get_memory() -> u64 { + let s = System::new_all(); + let current = s.process(sysinfo::get_current_pid().unwrap()).unwrap(); + current.memory() +}