Skip to content

Commit 148e4cf

Browse files
authored
fix(cubesql): Support new QuickSight meta queries
1 parent df53c51 commit 148e4cf

15 files changed

+676
-19
lines changed

rust/cubesql/cubesql/src/compile/engine/context_postgresql.rs

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ use super::information_schema::postgres::{
1515
InfoSchemaRoleColumnGrantsProvider as PostgresInfoSchemaRoleColumnGrantsProvider,
1616
InfoSchemaRoleTableGrantsProvider as PostgresInfoSchemaRoleTableGrantsProvider,
1717
InfoSchemaSqlImplementationInfoProvider as PostgresInfoSchemaSqlImplementationInfoProvider,
18+
InfoSchemaSqlSizingProvider as PostgresInfoSchemaSqlSizingProvider,
1819
InfoSchemaTestingBlockingProvider, InfoSchemaTestingDatasetProvider, PgCatalogAmProvider,
1920
PgCatalogAttrdefProvider, PgCatalogAttributeProvider, PgCatalogClassProvider,
2021
PgCatalogConstraintProvider, PgCatalogDatabaseProvider, PgCatalogDependProvider,
@@ -37,10 +38,10 @@ use crate::{
3738
};
3839

3940
use super::information_schema::redshift::{
40-
RedshiftLateBindingViewUnpackedTableProvider, RedshiftStlDdltextProvider,
41-
RedshiftStlQueryProvider, RedshiftStlQuerytextProvider,
42-
RedshiftSvvExternalSchemasTableProvider, RedshiftSvvTableInfoProvider,
43-
RedshiftSvvTablesTableProvider,
41+
RedshiftLateBindingViewUnpackedTableProvider, RedshiftPgExternalSchemaProvider,
42+
RedshiftStlDdltextProvider, RedshiftStlQueryProvider, RedshiftStlQuerytextProvider,
43+
RedshiftStvSlicesProvider, RedshiftSvvExternalSchemasTableProvider,
44+
RedshiftSvvTableInfoProvider, RedshiftSvvTablesTableProvider,
4445
};
4546

4647
impl DatabaseProtocol {
@@ -75,6 +76,8 @@ impl DatabaseProtocol {
7576
any.downcast_ref::<PostgresInfoSchemaSqlImplementationInfoProvider>()
7677
{
7778
"information_schema.sql_implementation_info".to_string()
79+
} else if let Some(_) = any.downcast_ref::<PostgresInfoSchemaSqlSizingProvider>() {
80+
"information_schema.sql_sizing".to_string()
7881
} else if let Some(_) = any.downcast_ref::<PgCatalogTableProvider>() {
7982
"pg_catalog.pg_tables".to_string()
8083
} else if let Some(_) = any.downcast_ref::<PgCatalogTypeProvider>() {
@@ -133,12 +136,16 @@ impl DatabaseProtocol {
133136
"pg_catalog.pg_views".to_string()
134137
} else if let Some(_) = any.downcast_ref::<PgCatalogStatUserTablesProvider>() {
135138
"pg_catalog.pg_stat_user_tables".to_string()
139+
} else if let Some(_) = any.downcast_ref::<RedshiftPgExternalSchemaProvider>() {
140+
"pg_catalog.pg_external_schema".to_string()
136141
} else if let Some(_) = any.downcast_ref::<RedshiftSvvTablesTableProvider>() {
137142
"public.svv_tables".to_string()
138143
} else if let Some(_) = any.downcast_ref::<RedshiftSvvExternalSchemasTableProvider>() {
139144
"public.svv_external_schemas".to_string()
140145
} else if let Some(_) = any.downcast_ref::<RedshiftSvvTableInfoProvider>() {
141146
"public.svv_table_info".to_string()
147+
} else if let Some(_) = any.downcast_ref::<RedshiftStvSlicesProvider>() {
148+
"public.stv_slices".to_string()
142149
} else if let Some(_) = any.downcast_ref::<RedshiftStlDdltextProvider>() {
143150
"public.stl_ddltext".to_string()
144151
} else if let Some(_) = any.downcast_ref::<RedshiftStlQueryProvider>() {
@@ -235,6 +242,7 @@ impl DatabaseProtocol {
235242
&context.meta.tables,
236243
)))
237244
}
245+
"stv_slices" => return Some(Arc::new(RedshiftStvSlicesProvider::new())),
238246
"stl_ddltext" => return Some(Arc::new(RedshiftStlDdltextProvider::new())),
239247
"stl_query" => return Some(Arc::new(RedshiftStlQueryProvider::new())),
240248
"stl_querytext" => return Some(Arc::new(RedshiftStlQuerytextProvider::new())),
@@ -299,6 +307,7 @@ impl DatabaseProtocol {
299307
PostgresInfoSchemaSqlImplementationInfoProvider::new(),
300308
))
301309
}
310+
"sql_sizing" => return Some(Arc::new(PostgresInfoSchemaSqlSizingProvider::new())),
302311
#[cfg(debug_assertions)]
303312
"testing_dataset" => {
304313
return Some(Arc::new(InfoSchemaTestingDatasetProvider::new(5, 1000)))
@@ -392,6 +401,9 @@ impl DatabaseProtocol {
392401
&context.meta.tables,
393402
)))
394403
}
404+
"pg_external_schema" => {
405+
return Some(Arc::new(RedshiftPgExternalSchemaProvider::new()))
406+
}
395407
_ => return None,
396408
},
397409
_ => return None,

rust/cubesql/cubesql/src/compile/engine/information_schema/postgres/mod.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ pub mod key_column_usage;
88
pub mod referential_constraints;
99
pub mod schemata;
1010
pub mod sql_implementation_info;
11+
pub mod sql_sizing;
1112
pub mod table_constraints;
1213
pub mod tables;
1314
pub mod views;
@@ -79,5 +80,6 @@ pub use pg_views::*;
7980
pub use role_column_grants::*;
8081
pub use role_table_grants::*;
8182
pub use sql_implementation_info::*;
83+
pub use sql_sizing::*;
8284
pub use testing_blocking::*;
8385
pub use testing_dataset::*;
Lines changed: 159 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,159 @@
1+
use std::{any::Any, sync::Arc};
2+
3+
use async_trait::async_trait;
4+
use datafusion::{
5+
arrow::{
6+
array::{Array, ArrayRef, StringBuilder, UInt32Builder},
7+
datatypes::{DataType, Field, Schema, SchemaRef},
8+
record_batch::RecordBatch,
9+
},
10+
datasource::{datasource::TableProviderFilterPushDown, TableProvider, TableType},
11+
error::DataFusionError,
12+
logical_plan::Expr,
13+
physical_plan::{memory::MemoryExec, ExecutionPlan},
14+
};
15+
16+
/// Column-wise builder for the rows of the emulated
/// `information_schema.sql_sizing` table (one Arrow builder per column).
struct InfoSchemaSqlSizingBuilder {
    // `sizing_id` column: numeric identifier of the sizing item (UInt32).
    sizing_id: UInt32Builder,
    // `sizing_name` column: human-readable name of the sizing item.
    sizing_name: StringBuilder,
    // `supported_value` column: the limit value; nullable (NULL = not stated).
    supported_value: UInt32Builder,
    // `comments` column: optional clarifying note; nullable.
    comments: StringBuilder,
}
22+
23+
impl InfoSchemaSqlSizingBuilder {
24+
fn new(capacity: usize) -> Self {
25+
Self {
26+
sizing_id: UInt32Builder::new(capacity),
27+
sizing_name: StringBuilder::new(capacity),
28+
supported_value: UInt32Builder::new(capacity),
29+
comments: StringBuilder::new(capacity),
30+
}
31+
}
32+
33+
fn add_info(
34+
&mut self,
35+
sizing_id: u32,
36+
sizing_name: impl AsRef<str>,
37+
supported_value: Option<u32>,
38+
comments: Option<&str>,
39+
) {
40+
self.sizing_id.append_value(sizing_id).unwrap();
41+
self.sizing_name.append_value(sizing_name).unwrap();
42+
self.supported_value.append_option(supported_value).unwrap();
43+
self.comments.append_option(comments).unwrap();
44+
}
45+
46+
fn finish(mut self) -> Vec<Arc<dyn Array>> {
47+
let columns: Vec<Arc<dyn Array>> = vec![
48+
Arc::new(self.sizing_id.finish()),
49+
Arc::new(self.sizing_name.finish()),
50+
Arc::new(self.supported_value.finish()),
51+
Arc::new(self.comments.finish()),
52+
];
53+
54+
columns
55+
}
56+
}
57+
58+
/// Virtual table provider backing `information_schema.sql_sizing`:
/// a small, static table of SQL sizing limits.
pub struct InfoSchemaSqlSizingProvider {
    // Pre-built column arrays, constructed once in `new()` and shared
    // (cheaply cloned via `Arc`) for every scan.
    data: Arc<Vec<ArrayRef>>,
}
61+
62+
impl InfoSchemaSqlSizingProvider {
63+
pub fn new() -> Self {
64+
let mut builder = InfoSchemaSqlSizingBuilder::new(11);
65+
66+
builder.add_info(97, "MAXIMUM COLUMNS IN GROUP BY", Some(0), None);
67+
builder.add_info(99, "MAXIMUM COLUMNS IN ORDER BY", Some(0), None);
68+
builder.add_info(100, "MAXIMUM COLUMNS IN SELECT", Some(1664), None);
69+
builder.add_info(101, "MAXIMUM COLUMNS IN TABLE", Some(1600), None);
70+
builder.add_info(
71+
34,
72+
"MAXIMUM CATALOG NAME LENGTH",
73+
Some(63),
74+
Some("Might be less, depending on character set."),
75+
);
76+
builder.add_info(
77+
30,
78+
"MAXIMUM COLUMN NAME LENGTH",
79+
Some(63),
80+
Some("Might be less, depending on character set."),
81+
);
82+
builder.add_info(
83+
31,
84+
"MAXIMUM CURSOR NAME LENGTH",
85+
Some(63),
86+
Some("Might be less, depending on character set."),
87+
);
88+
builder.add_info(
89+
10005,
90+
"MAXIMUM IDENTIFIER LENGTH",
91+
Some(63),
92+
Some("Might be less, depending on character set."),
93+
);
94+
builder.add_info(
95+
32,
96+
"MAXIMUM SCHEMA NAME LENGTH",
97+
Some(63),
98+
Some("Might be less, depending on character set."),
99+
);
100+
builder.add_info(
101+
35,
102+
"MAXIMUM TABLE NAME LENGTH",
103+
Some(63),
104+
Some("Might be less, depending on character set."),
105+
);
106+
builder.add_info(
107+
107,
108+
"MAXIMUM USER NAME LENGTH",
109+
Some(63),
110+
Some("Might be less, depending on character set."),
111+
);
112+
113+
Self {
114+
data: Arc::new(builder.finish()),
115+
}
116+
}
117+
}
118+
119+
#[async_trait]
120+
impl TableProvider for InfoSchemaSqlSizingProvider {
121+
fn as_any(&self) -> &dyn Any {
122+
self
123+
}
124+
125+
fn table_type(&self) -> TableType {
126+
TableType::View
127+
}
128+
129+
fn schema(&self) -> SchemaRef {
130+
Arc::new(Schema::new(vec![
131+
Field::new("sizing_id", DataType::UInt32, false),
132+
Field::new("sizing_name", DataType::Utf8, false),
133+
Field::new("supported_value", DataType::UInt32, true),
134+
Field::new("comments", DataType::Utf8, true),
135+
]))
136+
}
137+
138+
async fn scan(
139+
&self,
140+
projection: &Option<Vec<usize>>,
141+
_filters: &[Expr],
142+
_limit: Option<usize>,
143+
) -> Result<Arc<dyn ExecutionPlan>, DataFusionError> {
144+
let batch = RecordBatch::try_new(self.schema(), self.data.to_vec())?;
145+
146+
Ok(Arc::new(MemoryExec::try_new(
147+
&[vec![batch]],
148+
self.schema(),
149+
projection.clone(),
150+
)?))
151+
}
152+
153+
fn supports_filter_pushdown(
154+
&self,
155+
_filter: &Expr,
156+
) -> Result<TableProviderFilterPushDown, DataFusionError> {
157+
Ok(TableProviderFilterPushDown::Unsupported)
158+
}
159+
}
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,19 @@
11
pub mod late_binding_view_unpacked;
2+
pub mod pg_external_schema;
23
pub mod stl_ddltext;
34
pub mod stl_query;
45
pub mod stl_querytext;
6+
pub mod stv_slices;
57
pub mod svv_external_schemas;
68
pub mod svv_table_info;
79
pub mod svv_tables;
810

911
pub use late_binding_view_unpacked::*;
12+
pub use pg_external_schema::*;
1013
pub use stl_ddltext::*;
1114
pub use stl_query::*;
1215
pub use stl_querytext::*;
16+
pub use stv_slices::*;
1317
pub use svv_external_schemas::*;
1418
pub use svv_table_info::*;
1519
pub use svv_tables::*;
Lines changed: 99 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,99 @@
1+
use std::{any::Any, sync::Arc};
2+
3+
use async_trait::async_trait;
4+
use datafusion::{
5+
arrow::{
6+
array::{Array, ArrayRef, Int32Builder, StringBuilder, UInt32Builder},
7+
datatypes::{DataType, Field, Schema, SchemaRef},
8+
record_batch::RecordBatch,
9+
},
10+
datasource::{datasource::TableProviderFilterPushDown, TableProvider, TableType},
11+
error::DataFusionError,
12+
logical_plan::Expr,
13+
physical_plan::{memory::MemoryExec, ExecutionPlan},
14+
};
15+
16+
/// Column-wise builder for the rows of the emulated Redshift
/// `pg_catalog.pg_external_schema` table (one Arrow builder per column).
struct RedshiftPgExternalSchemaBuilder {
    // `esoid` column: external schema OID (UInt32).
    esoid: UInt32Builder,
    // `eskind` column: kind discriminator (Int32).
    eskind: Int32Builder,
    // `esdbname` column: external database name.
    esdbname: StringBuilder,
    // `esoptions` column: options string.
    esoptions: StringBuilder,
}
22+
23+
impl RedshiftPgExternalSchemaBuilder {
24+
fn new(capacity: usize) -> Self {
25+
Self {
26+
esoid: UInt32Builder::new(capacity),
27+
eskind: Int32Builder::new(capacity),
28+
esdbname: StringBuilder::new(capacity),
29+
esoptions: StringBuilder::new(capacity),
30+
}
31+
}
32+
33+
fn finish(mut self) -> Vec<Arc<dyn Array>> {
34+
let columns: Vec<Arc<dyn Array>> = vec![
35+
Arc::new(self.esoid.finish()),
36+
Arc::new(self.eskind.finish()),
37+
Arc::new(self.esdbname.finish()),
38+
Arc::new(self.esoptions.finish()),
39+
];
40+
41+
columns
42+
}
43+
}
44+
45+
/// Virtual table provider backing the Redshift-compatibility table
/// `pg_catalog.pg_external_schema`. The emulated table is always empty.
pub struct RedshiftPgExternalSchemaProvider {
    // Pre-built (empty) column arrays, constructed once in `new()` and
    // shared (cheaply cloned via `Arc`) for every scan.
    data: Arc<Vec<ArrayRef>>,
}
48+
49+
impl RedshiftPgExternalSchemaProvider {
50+
pub fn new() -> Self {
51+
let builder = RedshiftPgExternalSchemaBuilder::new(0);
52+
53+
Self {
54+
data: Arc::new(builder.finish()),
55+
}
56+
}
57+
}
58+
59+
#[async_trait]
60+
impl TableProvider for RedshiftPgExternalSchemaProvider {
61+
fn as_any(&self) -> &dyn Any {
62+
self
63+
}
64+
65+
fn table_type(&self) -> TableType {
66+
TableType::View
67+
}
68+
69+
fn schema(&self) -> SchemaRef {
70+
Arc::new(Schema::new(vec![
71+
Field::new("esoid", DataType::UInt32, false),
72+
Field::new("eskind", DataType::Int32, false),
73+
Field::new("esdbname", DataType::Utf8, false),
74+
Field::new("esoptions", DataType::Utf8, false),
75+
]))
76+
}
77+
78+
async fn scan(
79+
&self,
80+
projection: &Option<Vec<usize>>,
81+
_filters: &[Expr],
82+
_limit: Option<usize>,
83+
) -> Result<Arc<dyn ExecutionPlan>, DataFusionError> {
84+
let batch = RecordBatch::try_new(self.schema(), self.data.to_vec())?;
85+
86+
Ok(Arc::new(MemoryExec::try_new(
87+
&[vec![batch]],
88+
self.schema(),
89+
projection.clone(),
90+
)?))
91+
}
92+
93+
fn supports_filter_pushdown(
94+
&self,
95+
_filter: &Expr,
96+
) -> Result<TableProviderFilterPushDown, DataFusionError> {
97+
Ok(TableProviderFilterPushDown::Unsupported)
98+
}
99+
}

0 commit comments

Comments
 (0)