diff --git a/Cargo.lock b/Cargo.lock
index cdb8fe20..09021f2a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -26,12 +26,6 @@ dependencies = [
  "memchr",
 ]
 
-[[package]]
-name = "aliasable"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
-
 [[package]]
 name = "anstream"
 version = "0.6.18"
@@ -274,7 +268,6 @@ dependencies = [
  "bytes",
  "convert_case",
  "lazy_static",
- "ouroboros",
  "phf",
  "rkyv",
  "serde",
@@ -322,7 +315,6 @@ dependencies = [
  "derive_more",
  "env_logger",
  "log",
- "ouroboros",
  "rayon",
  "rkyv",
  "tempfile",
@@ -338,7 +330,10 @@ dependencies = [
  "convert_case",
  "log",
  "prettyplease",
+ "proc-macro2",
+ "quote",
  "syn 2.0.98",
+ "tempfile",
  "tree-sitter",
 ]
 
@@ -582,12 +577,6 @@ dependencies = [
  "unicode-segmentation",
 ]
 
-[[package]]
-name = "heck"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
-
 [[package]]
 name = "heck"
 version = "0.5.0"
@@ -749,30 +738,6 @@ version = "11.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9"
 
-[[package]]
-name = "ouroboros"
-version = "0.18.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e0f050db9c44b97a94723127e6be766ac5c340c48f2c4bb3ffa11713744be59"
-dependencies = [
- "aliasable",
- "ouroboros_macro",
- "static_assertions",
-]
-
-[[package]]
-name = "ouroboros_macro"
-version = "0.18.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c7028bdd3d43083f6d8d4d5187680d0d3560d54df4cc9d752005268b41e64d0"
-dependencies = [
- "heck 0.4.1",
- "proc-macro2",
- "proc-macro2-diagnostics",
- "quote",
- "syn 2.0.98",
-]
-
 [[package]]
 name = "parking_lot"
 version = "0.11.2"
@@ -865,19 +830,6 @@ dependencies = [
  "unicode-ident",
 ]
 
-[[package]]
-name = "proc-macro2-diagnostics"
-version = "0.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 2.0.98",
- "version_check",
- "yansi",
-]
-
 [[package]]
 name = "ptr_meta"
 version = "0.3.0"
@@ -1162,12 +1114,6 @@ version = "1.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
 
-[[package]]
-name = "static_assertions"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
-
 [[package]]
 name = "streaming-iterator"
 version = "0.1.9"
@@ -1558,12 +1504,6 @@ version = "2.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "213b7324336b53d2414b2db8537e56544d981803139155afa84f76eeebb7a546"
 
-[[package]]
-name = "yansi"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
-
 [[package]]
 name = "zstd"
 version = "0.13.2"
diff --git a/Cargo.toml b/Cargo.toml
index db9338ef..797438c1 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "codegen-sdk-core"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 clap = { version = "4.5.28", features = ["derive"] }
@@ -28,7 +28,6 @@ members = [
 rayon = "1.10.0"
 env_logger = "0.11.6"
 log = "0.4.25"
-ouroboros = "0.18.5"
 tree-sitter = "0.25.1"
 tree-sitter-python = "0.23.6"
 tree-sitter-typescript = "0.23.2"
diff --git a/codegen-sdk-analyzer/Cargo.toml b/codegen-sdk-analyzer/Cargo.toml
index c8690d8d..58a34755 100644
--- a/codegen-sdk-analyzer/Cargo.toml
+++ b/codegen-sdk-analyzer/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "codegen-sdk-analyzer"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 salsa = "0.16.1"
diff --git a/codegen-sdk-ast/Cargo.toml b/codegen-sdk-ast/Cargo.toml
index b852f176..ca1413fe 100644
--- a/codegen-sdk-ast/Cargo.toml
+++ b/codegen-sdk-ast/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "codegen-sdk-ast"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 codegen-sdk-cst = { path = "../codegen-sdk-cst" }
diff --git a/codegen-sdk-common/Cargo.toml b/codegen-sdk-common/Cargo.toml
index 5a7d5c4d..65a8e324 100644
--- a/codegen-sdk-common/Cargo.toml
+++ b/codegen-sdk-common/Cargo.toml
@@ -1,12 +1,11 @@
 [package]
 name = "codegen-sdk-common"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 bytes = { workspace = true }
 tree-sitter = { workspace = true }
-ouroboros = { workspace = true }
 tree-sitter-python = { workspace = true, optional = true }
 tree-sitter-typescript = { workspace = true, optional = true }
 tree-sitter-javascript = { workspace = true, optional = true }
diff --git a/codegen-sdk-cst-generator/Cargo.toml b/codegen-sdk-cst-generator/Cargo.toml
index 98e82dd1..107fef11 100644
--- a/codegen-sdk-cst-generator/Cargo.toml
+++ b/codegen-sdk-cst-generator/Cargo.toml
@@ -1,16 +1,18 @@
 [package]
 name = "codegen-sdk-cst-generator"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 convert_case = { workspace = true }
 prettyplease = "0.2.29"
-syn = "2.0.98"
+syn = { version = "2.0.98", features = ["proc-macro"] }
 tree-sitter = { workspace = true }
 log = { workspace = true }
 codegen-sdk-common = { path = "../codegen-sdk-common" }
 anyhow = { workspace = true }
-
+quote = "1.0.38"
+proc-macro2 = "1.0.93"
+tempfile = "3.8.1"
 [dev-dependencies]
 codegen-sdk-common = { path = "../codegen-sdk-common" , features = ["python"] }
diff --git a/codegen-sdk-cst-generator/src/generator.rs b/codegen-sdk-cst-generator/src/generator.rs
index bbf4fff3..8253126e 100644
--- a/codegen-sdk-cst-generator/src/generator.rs
+++ b/codegen-sdk-cst-generator/src/generator.rs
@@ -8,16 +8,23 @@
 mod enum_generator;
 mod format;
 mod state;
 mod struct_generator;
-const IMPORTS: &str = "
-use std::sync::Arc;
-use tree_sitter;
-use derive_more::Debug;
-use codegen_sdk_common::*;
-use std::backtrace::Backtrace;
-use bytes::Bytes;
-use rkyv::{Archive, Deserialize, Serialize, Portable};
-";
+use std::io::Write;
+use proc_macro2::TokenStream;
+use quote::quote;
+fn get_imports() -> TokenStream {
+    quote! {
+
+        use std::sync::Arc;
+        use tree_sitter;
+        use derive_more::Debug;
+        use codegen_sdk_common::*;
+        use std::backtrace::Backtrace;
+        use bytes::Bytes;
+        use rkyv::{Archive, Deserialize, Serialize, Portable};
+
+    }
+}
 pub(crate) fn generate_cst(node_types: &Vec<Node>) -> anyhow::Result<String> {
     let mut state = State::default();
     let mut nodes = HashSet::new();
@@ -47,15 +54,21 @@ pub(crate) fn generate_cst(node_types: &Vec<Node>) -> anyhow::Result<String> {
             generate_struct(node, &mut state, &name);
         }
     }
-    let mut result = IMPORTS.to_string();
-    result.push_str(&state.enums);
-    result.push_str(&state.structs);
-    let formatted = format::format_cst(&result);
+    let mut result = get_imports();
+    result.extend_one(state.enums);
+    result.extend_one(state.structs);
+    let formatted = format::format_cst(&result.to_string());
     match formatted {
         Ok(formatted) => return Ok(formatted),
         Err(e) => {
-            log::error!("Failed to format CST: {}", e);
-            return Ok(result.to_string());
+            let mut out_file = tempfile::NamedTempFile::with_suffix(".rs")?;
+            log::error!(
+                "Failed to format CST, writing to temp file at {}",
+                out_file.path().display()
+            );
+            out_file.write_all(result.to_string().as_bytes())?;
+            out_file.keep()?;
+            return Err(e);
         }
     }
 }
diff --git a/codegen-sdk-cst-generator/src/generator/enum_generator.rs b/codegen-sdk-cst-generator/src/generator/enum_generator.rs
index 30e354d9..34b3984f 100644
--- a/codegen-sdk-cst-generator/src/generator/enum_generator.rs
+++ b/codegen-sdk-cst-generator/src/generator/enum_generator.rs
@@ -2,32 +2,43 @@ use codegen_sdk_common::{
     naming::{normalize_string, normalize_type_name},
     parser::TypeDefinition,
 };
+use proc_macro2::TokenStream;
+use quote::{format_ident, quote};
 
 use crate::generator::state::State;
 fn get_cases(
     variants: &Vec<TypeDefinition>,
-    cases: &mut String,
     state: &State,
     override_variant_name: Option<&str>,
     existing_cases: &mut Vec<String>,
-) {
+) -> Vec<(String, TokenStream)> {
+    let mut cases = Vec::new();
     for t in variants {
         let normalized_variant_name = normalize_type_name(&t.type_name);
         if normalized_variant_name.is_empty() {
             continue;
         }
         let variant_name = override_variant_name.unwrap_or_else(|| &normalized_variant_name);
-        let prefix = format!("{}::{}", "Self", variant_name);
         if let Some(variants) = state.variants.get(&normalized_variant_name) {
-            get_cases(variants, cases, state, Some(variant_name), existing_cases);
+            cases.extend(get_cases(
+                variants,
+                state,
+                Some(variant_name),
+                existing_cases,
+            ));
         } else if !existing_cases.contains(&t.type_name) {
            existing_cases.push(t.type_name.clone());
-            cases.push_str(&format!(
-                "\"{}\" => Ok({}({variant_name}::from_node(node, buffer)?)),",
-                t.type_name, prefix,
+            let variant_name = format_ident!("{}", variant_name);
+            cases.push((
+                t.type_name.clone(),
+                quote! { Self::#variant_name (#variant_name::from_node(node, buffer)?)},
             ));
+            // cases.insert(t.type_name.clone(), quote!{
+            //     #t.type_name => Ok(#(#prefix)::from_node(node, buffer)?),
+            // }.to_string());
         }
     }
+    return cases;
 }
 pub fn generate_enum(
     variants: &Vec<TypeDefinition>,
     state: &mut State,
     enum_name: &str,
     anonymous_nodes: bool,
 ) {
-    state.enums.push_str(&format!(
-        "
-    #[derive(Debug, Clone, Archive, Portable, Deserialize, Serialize)]
-    #[repr(C, u8)]
-    pub enum {enum_name} {{\n",
-        enum_name = enum_name
-    ));
+    let mut variant_tokens = Vec::new();
     for t in variants {
         let variant_name = normalize_type_name(&t.type_name);
         if variant_name.is_empty() {
             continue;
         }
-        state
-            .enums
-            .push_str(&format!("    {}({variant_name}),\n", variant_name));
+        let variant_name = format_ident!("{}", variant_name);
+        variant_tokens.push(quote! {
+            #variant_name(#variant_name)
+        });
     }
     if anonymous_nodes {
-        state.enums.push_str("    Anonymous,\n");
+        variant_tokens.push(quote! {
+            Anonymous,
+        });
     }
-    state.enums.push_str("}\n");
-    let mut cases = String::new();
+    let enum_name = format_ident!("{}", enum_name);
+    state.enums.extend_one(quote! {
+        #[derive(Debug, Clone, Archive, Portable, Deserialize, Serialize)]
+        #[repr(C, u8)]
+        pub enum #enum_name {
+            #(#variant_tokens),*
+        }
+    });
     let mut existing_cases = Vec::new();
-    get_cases(variants, &mut cases, state, None, &mut existing_cases);
+    let mut cases = get_cases(variants, state, None, &mut existing_cases);
     if anonymous_nodes {
         for (name, _variant_name) in state.anonymous_nodes.iter() {
             if name.is_empty() {
@@ -67,27 +81,26 @@
                 continue;
             }
             let normalized_name = normalize_string(name);
-            cases.push_str(&format!(
-                "\"{}\" => Ok(Self::Anonymous),\n",
-                normalized_name
-            ));
+            cases.push((normalized_name, quote! {Self::Anonymous}));
         }
     }
-    state.enums.push_str(&format!(
-        "
-    impl FromNode for {enum_name} {{
-        fn from_node(node: tree_sitter::Node, buffer: &Arc<Bytes>) -> Result<Self, ParseError> {{
-            match node.kind() {{
-                {cases}
-                _ => Err(ParseError::UnexpectedNode {{
+    let mut keys = Vec::new();
+    let mut values = Vec::new();
+    for (key, value) in cases {
+        keys.push(key);
+        values.push(value);
+    }
+    state.enums.extend_one(quote! {
+        impl FromNode for #enum_name {
+            fn from_node(node: tree_sitter::Node, buffer: &Arc<Bytes>) -> Result<Self, ParseError> {
+                match node.kind() {
+                    #(#keys => Ok(#values)),*,
+                    _ => Err(ParseError::UnexpectedNode {
                     node_type: node.kind().to_string(),
                     backtrace: Backtrace::capture(),
-                }}),
-            }}
-        }}
-    }}
-    ",
-        enum_name = enum_name,
-        cases = cases
-    ));
+                    }),
+                }
+            }
+        }
+    });
 }
diff --git a/codegen-sdk-cst-generator/src/generator/state.rs b/codegen-sdk-cst-generator/src/generator/state.rs
index bb2c71b5..95383fb1 100644
--- a/codegen-sdk-cst-generator/src/generator/state.rs
+++ b/codegen-sdk-cst-generator/src/generator/state.rs
@@ -1,11 +1,11 @@
 use std::collections::HashMap;
 
 use codegen_sdk_common::parser::TypeDefinition;
-
+use proc_macro2::TokenStream;
 #[derive(Default, Debug)]
 pub struct State {
-    pub enums: String,
-    pub structs: String,
+    pub enums: TokenStream,
+    pub structs: TokenStream,
     pub variants: HashMap<String, Vec<TypeDefinition>>,
     pub anonymous_nodes: HashMap<String, String>,
 }
diff --git a/codegen-sdk-cst-generator/src/generator/struct_generator.rs b/codegen-sdk-cst-generator/src/generator/struct_generator.rs
index b4f79306..4efa9fb7 100644
--- a/codegen-sdk-cst-generator/src/generator/struct_generator.rs
+++ b/codegen-sdk-cst-generator/src/generator/struct_generator.rs
@@ -1,80 +1,12 @@
 use codegen_sdk_common::{
     naming::{normalize_field_name, normalize_type_name},
-    parser::{Children, Fields, Node, TypeDefinition},
+    parser::{Children, FieldDefinition, Fields, Node, TypeDefinition},
 };
+use proc_macro2::TokenStream;
+use quote::{format_ident, quote};
 
 use super::enum_generator::generate_enum;
 use crate::generator::state::State;
-const HEADER_TEMPLATE: &str = "
-#[derive(Debug, Clone, Deserialize, Archive, Serialize)]
-#[rkyv(serialize_bounds(
-    __S: rkyv::ser::Writer + rkyv::ser::Allocator,
-    __S::Error: rkyv::rancor::Source,
-))]
-#[rkyv(deserialize_bounds(__D::Error: rkyv::rancor::Source))]
-#[rkyv(bytecheck(
-    bounds(
-        __C: rkyv::validation::ArchiveContext,
-        __C::Error: rkyv::rancor::Source,
-    )
-))]
-pub struct {name} {
-    start_byte: usize,
-    end_byte: usize,
-    #[debug(\"[{},{}]\", start_position.row, start_position.column)]
-    start_position: Point,
-    #[debug(\"[{},{}]\", end_position.row, end_position.column)]
-    end_position: Point,
-    #[debug(ignore)]
-    buffer: Arc<Bytes>,
-    #[debug(ignore)]
-    kind_id: u16,
-";
-const FOOTER_TEMPLATE: &str = "
-}
-";
-
-const CONSTRUCTOR_TEMPLATE: &str = "
-impl CSTNode for {{name}} {
-    fn start_byte(&self) -> usize {
-        self.start_byte
-    }
-    fn end_byte(&self) -> usize {
-        self.end_byte
-    }
-    fn start_position(&self) -> Point {
-        self.start_position
-    }
-    fn end_position(&self) -> Point {
-        self.end_position
-    }
-    fn buffer(&self) -> &Bytes {
-        &self.buffer
-    }
-    fn kind_id(&self) -> u16 {
-        self.kind_id
-    }
-}
-impl HasChildren for {{name}} {
-    type Child = {{children}};
-    fn children(&self) -> &Vec<Self::Child> {
-        self.children.as_ref()
-    }
-}
-impl FromNode for {{name}} {
-    fn from_node(node: tree_sitter::Node, buffer: &Arc<Bytes>) -> Result<Self, ParseError> {
-        Ok(Self {
-            start_byte: node.start_byte(),
-            end_byte: node.end_byte(),
-            start_position: node.start_position().into(),
-            end_position: node.end_position().into(),
-            buffer: buffer.clone(),
-            kind_id: node.kind_id(),
-            {{fields}}
-        })
-    }
-}
-";
 fn convert_type_definition(
     type_name: &Vec<TypeDefinition>,
     state: &mut State,
@@ -101,132 +33,181 @@
 fn generate_multiple_field(
     field_name: &str,
     converted_type_name: &str,
-    state: &mut State,
-    constructor_fields: &mut Vec<String>,
     original_name: &str,
-) {
-    state.structs.push_str(&format!(
-        "    pub {field_name}: Vec<{}>,\n",
-        converted_type_name
-    ));
-    constructor_fields.push(format!(
-        "        {field_name}: get_multiple_children_by_field_name(&node, \"{name}\", buffer)?",
-        field_name = field_name,
-        name = original_name
-    ));
+) -> (TokenStream, TokenStream) {
+    let field_name = format_ident!("{}", field_name);
+    let converted_type_name = format_ident!("{}", converted_type_name);
+    let struct_field = quote! {
+        pub #field_name: Vec<#converted_type_name>
+    };
+    let constructor_field = quote! {
+        #field_name: get_multiple_children_by_field_name(&node, #original_name, buffer)?
+    };
+    (struct_field, constructor_field)
 }
 fn generate_required_field(
     field_name: &str,
     converted_type_name: &str,
-    state: &mut State,
-    constructor_fields: &mut Vec<String>,
     original_name: &str,
-) {
-    state.structs.push_str("#[rkyv(omit_bounds)]");
-    state.structs.push_str(&format!(
-        "    pub {field_name}: Box<{type_name}>,\n",
-        field_name = field_name,
-        type_name = converted_type_name
-    ));
-    constructor_fields.push(format!(
-        "        {field_name}: Box::new(get_child_by_field_name(&node, \"{name}\", buffer)?)",
-        field_name = field_name,
-        name = original_name
-    ));
+) -> (TokenStream, TokenStream) {
+    let field_name = format_ident!("{}", field_name);
+    let converted_type_name = format_ident!("{}", converted_type_name);
+    let struct_field = quote! {
+        #[rkyv(omit_bounds)]
+        pub #field_name: Box<#converted_type_name>
+    };
+    let constructor_field = quote! {
+        #field_name: Box::new(get_child_by_field_name(&node, #original_name, buffer)?)
+    };
+    (struct_field, constructor_field)
 }
 fn generate_optional_field(
     field_name: &str,
     converted_type_name: &str,
-    state: &mut State,
-    constructor_fields: &mut Vec<String>,
     original_name: &str,
-) {
-    state.structs.push_str("#[rkyv(omit_bounds)]");
-    state.structs.push_str(&format!(
-        "    pub {field_name}: Box<Option<{type_name}>>,\n",
-        field_name = field_name,
-        type_name = converted_type_name
-    ));
-    constructor_fields.push(format!(
-        "        {field_name}: Box::new(get_optional_child_by_field_name(&node, \"{name}\", buffer)?)",
-        field_name = field_name,
-        name = original_name
-    ));
+) -> (TokenStream, TokenStream) {
+    let field_name = format_ident!("{}", field_name);
+    let converted_type_name = format_ident!("{}", converted_type_name);
+    let struct_field = quote! {
+        #[rkyv(omit_bounds)]
+        pub #field_name: Box<Option<#converted_type_name>>
+    };
+    let constructor_field = quote! {
+        #field_name: Box::new(get_optional_child_by_field_name(&node, #original_name, buffer)?)
+    };
+    (struct_field, constructor_field)
+}
+fn generate_field(
+    field: &FieldDefinition,
+    state: &mut State,
+    node: &Node,
+    name: &str,
+) -> (TokenStream, TokenStream) {
+    let field_name = normalize_field_name(name);
+    let converted_type_name = convert_type_definition(&field.types, state, &node.type_name, name);
+    if field.multiple {
+        return generate_multiple_field(&field_name, &converted_type_name, name);
+    } else if field.required {
+        return generate_required_field(&field_name, &converted_type_name, name);
+    } else {
+        return generate_optional_field(&field_name, &converted_type_name, name);
+    }
 }
 fn generate_fields(
     fields: &Fields,
     state: &mut State,
     node: &Node,
-    constructor_fields: &mut Vec<String>,
-) {
+) -> (Vec<TokenStream>, Vec<TokenStream>) {
+    let mut struct_fields = Vec::new();
+    let mut constructor_fields = Vec::new();
     for (name, field) in &fields.fields {
-        let field_name = normalize_field_name(name);
-        let converted_type_name =
-            convert_type_definition(&field.types, state, &node.type_name, name);
-        if field.multiple {
-            generate_multiple_field(
-                &field_name,
-                &converted_type_name,
-                state,
-                constructor_fields,
-                name,
-            );
-        } else if field.required {
-            generate_required_field(
-                &field_name,
-                &converted_type_name,
-                state,
-                constructor_fields,
-                name,
-            );
-        } else {
-            generate_optional_field(
-                &field_name,
-                &converted_type_name,
-                state,
-                constructor_fields,
-                name,
-            );
-        }
+        let (struct_field, constructor_field) = generate_field(field, state, node, name);
+        struct_fields.push(struct_field);
+        constructor_fields.push(constructor_field);
     }
+    (struct_fields, constructor_fields)
 }
 fn generate_children(
     children: &Children,
     state: &mut State,
     node_name: &str,
-    constructor_fields: &mut Vec<String>,
-) -> String {
+) -> (String, TokenStream) {
     let converted_type_name =
         convert_type_definition(&children.types, state, node_name, "children");
-    constructor_fields
-        .push("    children: named_children_without_field_names(node, buffer)?".to_string());
-
-    converted_type_name
+    let constructor_field = quote! {
+        children: named_children_without_field_names(node, buffer)?
+    };
+    (converted_type_name, constructor_field)
 }
 pub fn generate_struct(node: &Node, state: &mut State, name: &str) {
-    state
-        .structs
-        .push_str(&HEADER_TEMPLATE.replace("{name}", name));
     let mut constructor_fields = Vec::new();
+    let mut struct_fields = Vec::new();
     if let Some(fields) = &node.fields {
-        generate_fields(fields, state, node, &mut constructor_fields);
+        (struct_fields, constructor_fields) = generate_fields(fields, state, node);
     }
     let mut children_type_name = "Self".to_string();
     if let Some(children) = &node.children {
-        state.structs.push_str("#[rkyv(omit_bounds)]");
-        children_type_name =
-            generate_children(children, state, &node.type_name, &mut constructor_fields);
+        let constructor_field;
+        (children_type_name, constructor_field) =
+            generate_children(children, state, &node.type_name);
+        constructor_fields.push(constructor_field);
     } else {
-        state.structs.push_str("#[rkyv(omit_bounds)]");
-        constructor_fields.push("    children: vec![]".to_string());
+        constructor_fields.push(quote! {
+            children: vec![]
+        });
+    }
+    let name = format_ident!("{}", name);
+    let children_type_name = format_ident!("{}", children_type_name);
+    let definition = quote! {
+        #[derive(Debug, Clone, Deserialize, Archive, Serialize)]
+        #[rkyv(serialize_bounds(
+            __S: rkyv::ser::Writer + rkyv::ser::Allocator,
+            __S::Error: rkyv::rancor::Source,
+        ))]
+        #[rkyv(deserialize_bounds(__D::Error: rkyv::rancor::Source))]
+        #[rkyv(bytecheck(
+            bounds(
+                __C: rkyv::validation::ArchiveContext,
+                __C::Error: rkyv::rancor::Source,
+            )
+        ))]
+        pub struct #name {
+            start_byte: usize,
+            end_byte: usize,
+            #[debug("[{},{}]", start_position.row, start_position.column)]
+            start_position: Point,
+            #[debug("[{},{}]", end_position.row, end_position.column)]
+            end_position: Point,
+            #[debug(ignore)]
+            buffer: Arc<Bytes>,
+            #[debug(ignore)]
+            kind_id: u16,
+            #[rkyv(omit_bounds)]
+            pub children: Vec<#children_type_name>,
+            #(#struct_fields),*
         }
-    state
-        .structs
-        .push_str(&format!("    pub children: Vec<{}>,\n", children_type_name));
-    state.structs.push_str(FOOTER_TEMPLATE);
-    let constructor = &CONSTRUCTOR_TEMPLATE
-        .replace("{{fields}}", &constructor_fields.join(",\n        "))
-        .replace("{{name}}", name)
-        .replace("{{children}}", &children_type_name);
-    state.structs.push_str(&constructor);
+    };
+    state.structs.extend_one(definition);
+    let implementation = quote! {
+        impl CSTNode for #name {
+            fn start_byte(&self) -> usize {
+                self.start_byte
+            }
+            fn end_byte(&self) -> usize {
+                self.end_byte
+            }
+            fn start_position(&self) -> Point {
+                self.start_position
+            }
+            fn end_position(&self) -> Point {
+                self.end_position
+            }
+            fn buffer(&self) -> &Bytes {
+                &self.buffer
+            }
+            fn kind_id(&self) -> u16 {
+                self.kind_id
+            }
+        }
+        impl HasChildren for #name {
+            type Child = #children_type_name;
+            fn children(&self) -> &Vec<Self::Child> {
+                self.children.as_ref()
+            }
+        }
+        impl FromNode for #name {
+            fn from_node(node: tree_sitter::Node, buffer: &Arc<Bytes>) -> Result<Self, ParseError> {
+                Ok(Self {
+                    start_byte: node.start_byte(),
+                    end_byte: node.end_byte(),
+                    start_position: node.start_position().into(),
+                    end_position: node.end_position().into(),
+                    buffer: buffer.clone(),
+                    kind_id: node.kind_id(),
+                    #(#constructor_fields),*
+                })
+            }
+        }
+    };
+    state.structs.extend_one(implementation);
 }
diff --git a/codegen-sdk-cst-generator/src/lib.rs b/codegen-sdk-cst-generator/src/lib.rs
index 14f938ee..914366d8 100644
--- a/codegen-sdk-cst-generator/src/lib.rs
+++ b/codegen-sdk-cst-generator/src/lib.rs
@@ -1,3 +1,4 @@
+#![feature(extend_one)]
 mod generator;
 use codegen_sdk_common::language::Language;
 pub fn generate_cst(language: &Language) -> anyhow::Result<()> {
diff --git a/codegen-sdk-cst/Cargo.toml b/codegen-sdk-cst/Cargo.toml
index fe844ddc..794d69e1 100644
--- a/codegen-sdk-cst/Cargo.toml
+++ b/codegen-sdk-cst/Cargo.toml
@@ -1,11 +1,10 @@
 [package]
 name = "codegen-sdk-cst"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 tree-sitter = { workspace = true }
-ouroboros = { workspace = true }
 bytes = { workspace = true }
 codegen-sdk-common = { path = "../codegen-sdk-common", features = ["ts_query"] }
 codegen-sdk-macros = { path = "../codegen-sdk-macros" }
diff --git a/codegen-sdk-cst/build.rs b/codegen-sdk-cst/build.rs
index fd140ff1..6e21cae5 100644
--- a/codegen-sdk-cst/build.rs
+++ b/codegen-sdk-cst/build.rs
@@ -2,6 +2,7 @@ use codegen_sdk_common::language::LANGUAGES;
 use codegen_sdk_cst_generator::generate_cst;
 use rayon::prelude::*;
 fn main() {
+    env_logger::init();
     println!("cargo:rerun-if-changed=build.rs");
     LANGUAGES.par_iter().for_each(|language| {
         generate_cst(language).unwrap();
diff --git a/codegen-sdk-macros/Cargo.toml b/codegen-sdk-macros/Cargo.toml
index 340258d6..70ae7cfa 100644
--- a/codegen-sdk-macros/Cargo.toml
+++ b/codegen-sdk-macros/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "codegen-sdk-macros"
 version = "0.1.0"
-edition = "2021"
+edition = "2024"
 
 [dependencies]
 codegen-sdk-common = { path = "../codegen-sdk-common", features = ["all"] }
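
Note on the pattern this diff introduces: the generator no longer concatenates source strings (IMPORTS, HEADER_TEMPLATE, CONSTRUCTOR_TEMPLATE) but builds proc_macro2::TokenStream values with quote! and format_ident!, only rendering them to text at the end, where format_cst pretty-prints the result. The sketch below is a minimal, self-contained illustration of that flow, not code from this repository: the helper name emit_struct and the example identifiers are invented, stable Extend::extend stands in for the nightly extend_one used in the PR, and it assumes a scratch crate with proc-macro2, quote, syn (with its "full" feature), prettyplease, and anyhow as dependencies.

```rust
use proc_macro2::TokenStream;
use quote::{format_ident, quote};

// Hypothetical helper mirroring the generator's new flow: identifiers go
// through format_ident! and are interpolated into a quote! template.
fn emit_struct(name: &str, field: &str) -> TokenStream {
    let name = format_ident!("{}", name);
    let field = format_ident!("{}", field);
    quote! {
        pub struct #name {
            pub #field: usize,
        }
    }
}

fn main() -> anyhow::Result<()> {
    // Compose several generated items into one stream, as generate_cst does
    // with state.enums and state.structs.
    let mut out = TokenStream::new();
    out.extend(emit_struct("Module", "start_byte"));
    out.extend(emit_struct("Function", "end_byte"));
    // prettyplease formats a parsed syn::File; parsing a whole file needs syn's "full" feature.
    let file: syn::File = syn::parse2(out)?;
    println!("{}", prettyplease::unparse(&file));
    Ok(())
}
```

Keeping everything as token streams until the final unparse step is what lets the enum and struct generators simply extend state.enums and state.structs instead of escaping and concatenating template strings.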