Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
47 commits
Select commit Hold shift + click to select a range
85017e7
feat: add `ArcRef`
freshtonic Apr 14, 2025
0f89817
refactor: remove unused code from `ScopeTracker`
freshtonic Apr 14, 2025
92e6cfb
refactor: update `TypeRegistry` to use `AsNodeKey` trait
freshtonic Apr 14, 2025
7d86bc9
REVISIT Fix: inference of `Expr::Value`
freshtonic Apr 14, 2025
54f8274
feat: track all `Expr::Value` nodes
freshtonic Apr 14, 2025
a2e0867
SQUASH into refactor: update `TypeRegistry` to use `AsNodeKey` trait
freshtonic Apr 14, 2025
61c345c
WIP
freshtonic Apr 14, 2025
19d1dab
WIP
freshtonic Apr 15, 2025
3470148
WIP
freshtonic Apr 15, 2025
586aac4
WIP
freshtonic Apr 15, 2025
40b02fa
WIP
freshtonic Apr 15, 2025
3e6bc8c
WI{
freshtonic Apr 15, 2025
f4ca1c0
WIP
freshtonic Apr 15, 2025
2e9940a
WIP-brokemn
freshtonic Apr 16, 2025
20b03a0
WIP
freshtonic Apr 16, 2025
676a3f7
WIP
freshtonic Apr 16, 2025
a534104
6 failing
freshtonic Apr 16, 2025
ac750e9
5
freshtonic Apr 16, 2025
e54065a
Actually record the unified node types
freshtonic Apr 16, 2025
1bd3920
WIP
freshtonic Apr 16, 2025
c7c6d63
WIP
freshtonic Apr 16, 2025
c3bb195
WIP
freshtonic Apr 16, 2025
8337d9e
WIP
freshtonic Apr 17, 2025
4738efc
WIP
freshtonic Apr 17, 2025
44783ee
Enhance tracing
freshtonic Apr 17, 2025
5995631
Disable tracing in tests m=by default but provide new way of enabling it
freshtonic Apr 17, 2025
4281071
Fix: broken tests
freshtonic Apr 17, 2025
b496a5f
Enhance tracing of unification
freshtonic Apr 17, 2025
4b67cdf
cleanup
freshtonic Apr 17, 2025
41d0bea
savepoint
freshtonic Apr 17, 2025
144f913
WIP
freshtonic Apr 17, 2025
127878c
WIP
freshtonic Apr 18, 2025
44b5860
WIP
freshtonic Apr 18, 2025
a088260
WIP
freshtonic Apr 18, 2025
795023c
WI{
freshtonic Apr 18, 2025
d05ae53
WIP
freshtonic Apr 19, 2025
a7ae0e3
WIP
freshtonic Apr 19, 2025
b495abf
WIP
freshtonic Apr 19, 2025
cab3476
WIP
freshtonic Apr 19, 2025
f04d4a8
WIP
freshtonic Apr 20, 2025
34326bb
WIP
freshtonic Apr 20, 2025
507ecb9
Resolve unresolved value nodes
freshtonic Apr 21, 2025
d2e54f2
WIP
freshtonic Apr 21, 2025
325f669
IT WORKS AGAIN
freshtonic Apr 21, 2025
25bbfca
fmt
freshtonic Apr 21, 2025
30fa79b
WIP
freshtonic Apr 21, 2025
adaf6d4
chore: remove TypeRegistry from ScopeTracker
freshtonic Apr 21, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 23 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@ debug = true

[workspace.dependencies]
sqlparser = { version = "^0.52", features = ["bigdecimal", "serde"] }
# sqltk = { git = "https://github.com/cipherstash/sqltk/", rev = "e16406f" }
sqltk = { path = "../sqltk/packages/sqltk" }
thiserror = "2.0.9"
tokio = { version = "1.42.0", features = ["full"] }
tracing = "0.1"
Expand Down
1 change: 1 addition & 0 deletions packages/cipherstash-proxy/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ serde = "1.0"
serde_json = "1.0"
socket2 = "0.5.7"
sqlparser = { workspace = true }
sqltk = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tokio-postgres = { version = "0.7", features = [
Expand Down
23 changes: 11 additions & 12 deletions packages/cipherstash-proxy/src/postgresql/frontend.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,13 +26,14 @@ use crate::prometheus::{
use crate::Encrypted;
use bytes::BytesMut;
use cipherstash_client::encryption::Plaintext;
use eql_mapper::{self, EqlMapperError, EqlValue, NodeKey, TableColumn, TypedStatement};
use eql_mapper::{self, EqlMapperError, EqlValue, TableColumn, TypedStatement};
use metrics::{counter, histogram};
use pg_escape::quote_literal;
use serde::Serialize;
use sqlparser::ast::{self, Expr, Value};
use sqlparser::ast::{self, Value};
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::parser::Parser;
use sqltk::AsNodeKey;
use std::collections::HashMap;
use std::time::Instant;
use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
Expand Down Expand Up @@ -285,7 +286,7 @@ where

match self.to_encryptable_statement(&typed_statement, vec![])? {
Some(statement) => {
if statement.has_literals() || typed_statement.has_nodes_to_wrap() {
if statement.has_literals() {
let encrypted_literals = self
.encrypt_literals(&typed_statement, &statement.literal_columns)
.await?;
Expand Down Expand Up @@ -421,16 +422,15 @@ where
.literals
.iter()
.zip(encrypted_expressions.into_iter())
.filter_map(|((_, original_node), en)| en.map(|en| (NodeKey::new(*original_node), en)))
.filter_map(|((_, original_node), en)| en.map(|en| (original_node.as_node_key(), en)))
.collect::<HashMap<_, _>>();

debug!(target: MAPPER,
client_id = self.context.client_id,
nodes_to_wrap = typed_statement.nodes_to_wrap.len(),
literals = encrypted_nodes.len(),
);

if !typed_statement.has_nodes_to_wrap() && encrypted_nodes.is_empty() {
if encrypted_nodes.is_empty() {
return Ok(None);
}

Expand Down Expand Up @@ -500,7 +500,7 @@ where

match self.to_encryptable_statement(&typed_statement, param_types)? {
Some(statement) => {
if statement.has_literals() || typed_statement.has_nodes_to_wrap() {
if statement.has_literals() {
let encrypted_literals = self
.encrypt_literals(&typed_statement, &statement.literal_columns)
.await?;
Expand Down Expand Up @@ -620,7 +620,6 @@ where
if (param_columns.is_empty() || no_encrypted_param_columns)
&& (projection_columns.is_empty() || no_encrypted_projection_columns)
&& literal_columns.is_empty()
&& !typed_statement.has_nodes_to_wrap()
{
return Ok(None);
}
Expand Down Expand Up @@ -781,7 +780,7 @@ where
typed_statement: &eql_mapper::TypedStatement<'_>,
) -> Result<Vec<Option<Column>>, Error> {
let mut projection_columns = vec![];
if let Some(eql_mapper::Projection::WithColumns(columns)) = &typed_statement.projection {
if let eql_mapper::Projection::WithColumns(columns) = &typed_statement.projection {
for col in columns {
let eql_mapper::ProjectionColumn { ty, .. } = col;
let configured_column = match ty {
Expand Down Expand Up @@ -819,7 +818,7 @@ where

for param in typed_statement.params.iter() {
let configured_column = match param {
eql_mapper::Value::Eql(EqlValue(TableColumn { table, column })) => {
(_, eql_mapper::Value::Eql(EqlValue(TableColumn { table, column }))) => {
let identifier = Identifier::from((table, column));

debug!(
Expand Down Expand Up @@ -967,9 +966,9 @@ fn literals_to_plaintext(
Ok(plaintexts)
}

fn to_json_literal_expr<T>(literal: &T) -> Result<Expr, Error>
fn to_json_literal_expr<T>(literal: &T) -> Result<Value, Error>
where
T: ?Sized + Serialize,
{
Ok(serde_json::to_string(literal).map(|json| Expr::Value(Value::SingleQuotedString(json)))?)
Ok(serde_json::to_string(literal).map(Value::SingleQuotedString)?)
}
12 changes: 12 additions & 0 deletions packages/eql-mapper-macros/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
[package]
name = "eql-mapper-macros"
version.workspace = true
edition.workspace = true

[lib]
proc-macro = true

[dependencies]
syn = { version = "2.0", features = ["full"] }
quote = "1.0"
proc-macro2 = "1.0"
115 changes: 115 additions & 0 deletions packages/eql-mapper-macros/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
use proc_macro::TokenStream;
use quote::{quote, ToTokens};
use syn::{
parse::Parse, parse_macro_input, parse_quote, Attribute, FnArg, Ident, ImplItem, ImplItemFn,
ItemImpl, Pat, PatType, Signature, Type, TypePath, TypeReference,
};

/// This macro generates consistently defined `#[tracing::instrument]` attributes for `InferType::infer_enter` &
/// `InferType::infer_exit` implementations on `TypeInferencer`.
///
/// This attribute MUST be defined on the trait `impl` itself (not the trait method impls).
#[proc_macro_attribute]
pub fn trace_infer(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let mut input = parse_macro_input!(item as ItemImpl);

    for item in &mut input.items {
        if let ImplItem::Fn(ImplItemFn {
            attrs,
            sig:
                Signature {
                    ident: method,
                    inputs,
                    ..
                },
            ..
        }) = item
        {
            // Extract the name and type of the AST-node argument: the second
            // parameter (after `&mut self`), but only when it is bound to a
            // simple identifier pattern we can splice into `skip(...)`.
            let node_ident_and_type: Option<(&Ident, &Type)> =
                if let Some(FnArg::Typed(PatType {
                    ty: node_ty, pat, ..
                })) = inputs.get(1)
                {
                    if let Pat::Ident(pat_ident) = &**pat {
                        Some((&pat_ident.ident, node_ty))
                    } else {
                        None
                    }
                } else {
                    None
                };

            let vec_ident: Ident = parse_quote!(Vec);

            match node_ident_and_type {
                Some((node_ident, node_ty)) => {
                    // Choose the Display formatter (`FmtAstVec` for `&Vec<_>`
                    // nodes, `FmtAst` otherwise) and build an abbreviated type
                    // name (last path segment + generic args) for the `ast_ty`
                    // tracing field.
                    let (formatter, node_ty_abbrev) = match node_ty {
                        Type::Reference(TypeReference { elem, .. }) => match &**elem {
                            Type::Path(TypePath { path, .. }) => {
                                let last_segment = path.segments.last().unwrap();
                                let last_segment_ident = &last_segment.ident;
                                let last_segment_arguments = if last_segment.arguments.is_empty() {
                                    None
                                } else {
                                    let args = &last_segment.arguments;
                                    Some(quote!(<#args>))
                                };
                                match last_segment_ident {
                                    ident if vec_ident == *ident => {
                                        (quote!(crate::FmtAstVec), quote!(#last_segment_ident #last_segment_arguments))
                                    }
                                    _ => (quote!(crate::FmtAst), quote!(#last_segment_ident #last_segment_arguments))
                                }
                            },
                            _ => unreachable!("Infer::infer_enter/infer_exit has sig: infer_..(&mut self, delete: &'ast N) -> Result<(), TypeError>")
                        },
                        _ => unreachable!("Infer::infer_enter/infer_exit has sig: infer_..(&mut self, delete: &'ast N) -> Result<(), TypeError>")
                    };

                    // Render the abbreviated type without spaces, e.g. `Vec<Expr>`.
                    let node_ty_abbrev = node_ty_abbrev
                        .to_token_stream()
                        .to_string()
                        .replace(" ", "");

                    // Tracing target, e.g. `eql-mapper::INFER_ENTER`.
                    let target = format!("eql-mapper::{}", method.to_string().to_uppercase());

                    let attr: TracingInstrumentAttr = syn::parse2(quote! {
                        #[tracing::instrument(
                            target = #target,
                            level = "trace",
                            skip(self, #node_ident),
                            fields(
                                ast_ty = #node_ty_abbrev,
                                ast = %#formatter(#node_ident),
                            ),
                            ret(Debug)
                        )]
                    })
                    .unwrap();
                    attrs.push(attr.attr);
                }
                None => {
                    // Surface a compile error at the macro call site rather
                    // than panicking inside the proc macro. (`quote!` already
                    // yields a token stream, so convert it directly.)
                    return quote!(compile_error!(
                        "could not determine name of node argument in Infer impl"
                    ))
                    .into();
                }
            }
        }
    }

    input.to_token_stream().into()
}

/// Wrapper that lets a generated `#[tracing::instrument(...)]` attribute be
/// parsed from a token stream via `syn::parse2` (see its `Parse` impl).
struct TracingInstrumentAttr {
    // The parsed outer attribute, ready to be pushed onto a method's `attrs`.
    attr: Attribute,
}

impl Parse for TracingInstrumentAttr {
    /// Parses the outer attribute(s) from `input`, keeping the first one.
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        let parsed = Attribute::parse_outer(input)?;
        let attr = parsed.first().unwrap().clone();
        Ok(Self { attr })
    }
}
6 changes: 5 additions & 1 deletion packages/eql-mapper/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,14 @@ authors = [
]

[dependencies]
eql-mapper-macros = { path = "../eql-mapper-macros" }
derive_more = { version = "^1.0", features = ["display", "constructor"] }
impl-trait-for-tuples = "0.2.3"
itertools = "^0.13"
sqlparser = { workspace = true }
sqltk = { git = "https://github.com/cipherstash/sqltk/", rev = "214f9b90e4f07d4414292813ffd6e45dec075fbb" }
# sqltk = { git = "https://github.com/cipherstash/sqltk/", rev = "214f9b90e4f07d4414292813ffd6e45dec075fbb" }
# sqltk = { git = "https://github.com/cipherstash/sqltk/", rev = "cdf2e7c" }
sqltk = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
Expand Down
Loading
Loading