
Commit 733c49c

Merge branch 'master' into feat-suboptimal-which-do-not-use-all-dimensions

2 parents: 62cbd30 + ddbd743

29 files changed: +915 -52 lines

.github/workflows/master.yml (3 additions & 0 deletions)

@@ -11,6 +11,9 @@ on:
       - 'package.json'
       - 'rollup.config.js'
       - 'yarn.lock'
+      - 'rust/cubesqlplanner/**'
+      - 'rust/cubenativeutils/**'
+      - 'rust/cubesql/**'
     branches:
       - master
 jobs:

.github/workflows/rust-cubesql.yml (6 additions & 3 deletions)

@@ -40,13 +40,16 @@ jobs:
           key: cubesql-${{ runner.OS }}-x86_64-unknown-linux-gnu
           shared-key: cubesql-${{ runner.OS }}-x86_64-unknown-linux-gnu
       - name: Lint CubeSQL
-        run: cd rust/cubesql/cubesql && cargo fmt --all -- --check
+        run: cd rust/cubesql && cargo fmt --all -- --check
       - name: Lint Native
         run: cd packages/cubejs-backend-native && cargo fmt --all -- --check
+      # TODO replace with clippy once cubesql is ready
+      - name: Check CubeSQL
+        run: cd rust/cubesql && cargo check --locked --workspace --all-targets --keep-going
       - name: Clippy Native
-        run: cd packages/cubejs-backend-native && cargo clippy -- -D warnings
+        run: cd packages/cubejs-backend-native && cargo clippy --locked --workspace --all-targets --keep-going -- -D warnings
       - name: Clippy Native (with Python)
-        run: cd packages/cubejs-backend-native && cargo clippy --features python -- -D warnings
+        run: cd packages/cubejs-backend-native && cargo clippy --locked --workspace --all-targets --keep-going --features python -- -D warnings
       # CubeSQL is not ready for Clippy
       #- name: Clippy CubeSQL
       #  run: cd rust/cubesql && cargo clippy -- -D warnings

docs/pages/product/apis-integrations.mdx (7 additions & 0 deletions)

@@ -38,6 +38,13 @@ queries][ref-graphql-syntax].
 
 For AI use cases, consider using the [AI API][ref-ai-api].
 
+<ReferenceBox>
+
+See this [GitHub issue](https://github.com/cube-js/cube/issues/1744#issuecomment-2291680777)
+for an unofficial, community-maintained [client library for Python](https://github.com/mharrisb1/cube-http-client).
+
+</ReferenceBox>
+
 ## Management APIs
 
 In case you'd like Cube to work with data orchestration tools and let them push

docs/pages/reference/data-model/context-variables.mdx (0 additions & 7 deletions)

@@ -323,13 +323,6 @@ in your Cube queries, incorrect SQL might be generated.
 
 </WarningBox>
 
-<ReferenceBox>
-
-Currently, `FILTER_GROUP` is not supported in YAML-based data models.
-Please [track this issue](https://github.com/cube-js/cube/issues/8508).
-
-</ReferenceBox>
-
 `FILTER_GROUP` has to be a top-level expression in `WHERE` and it has the
 following syntax:
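
This removal tracks the FILTER_GROUP support for YAML-based models added elsewhere in this merge (see the base-query.test.ts changes below). The docs page's actual syntax block is not shown in this diff; as a hedged illustration only, a YAML model using FILTER_GROUP, adapted from the tests added in this merge, looks roughly like this:

# Sketch adapted from this merge's test fixture, not from the docs page:
cubes:
  - name: Order
    sql: >
      select * from order where {FILTER_GROUP(
        FILTER_PARAMS.Order.dim0.filter('dim0'),
        FILTER_PARAMS.Order.dim1.filter('dim1')
      )}
    measures:
      - name: count
        type: count
    dimensions:
      - name: dim0
        sql: dim0
        type: string
      - name: dim1
        sql: dim1
        type: string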

packages/cubejs-backend-native/src/utils.rs (1 addition & 1 deletion)

@@ -27,7 +27,7 @@ pub fn bind_method<'a>(
 
 pub fn batch_to_rows(batch: RecordBatch) -> Result<(Value, Vec<Value>), CubeError> {
     let schema = batch.schema();
-    let data_frame = dataframe::batch_to_dataframe(&schema, &vec![batch])?;
+    let data_frame = dataframe::batches_to_dataframe(&schema, vec![batch])?;
 
     let columns = serde_json::to_value(data_frame.get_columns())?;
     let rows = data_frame

packages/cubejs-schema-compiler/src/parser/PythonParser.ts (6 additions & 1 deletion)

@@ -18,6 +18,7 @@ import {
   VarargslistContext,
   LambdefContext,
   Single_string_template_atomContext,
+  ArglistContext,
 } from './Python3Parser';
 import { UserError } from '../compiler/UserError';
 import { Python3ParserVisitor } from './Python3ParserVisitor';
@@ -178,7 +179,9 @@
     const name = node.NAME();
     const argsList = node.arglist();
     if (argsList) {
-      return { call: children };
+      // trailer with arglist have a single child: arguments _list_
+      const args = children[0];
+      return { call: args };
     } else if (name) {
       return { identifier: t.identifier(name.text) };
     } else {
@@ -195,6 +198,8 @@
       return { args: children };
     } else if (node instanceof LambdefContext) {
       return t.arrowFunctionExpression(children[0].args, children[1]);
+    } else if (node instanceof ArglistContext) {
+      return children;
     } else {
       return singleNodeReturn();
     }
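
The comment in the hunk states the fix: a trailer node with an arglist has a single child, the argument list itself, so the old `return { call: children }` passed a one-element array holding the argument-list result as the call's arguments; the new code unwraps it, and the new `ArglistContext` case hands back the individual arguments. This presumably matters for the `{...}` expressions embedded in YAML data models, which this parser handles; a two-argument call such as the one below, mirroring the test fixture added in this merge, exercises the fixed path:

# Expression as embedded in a YAML model's sql (see the tests below);
# member names are from this merge's test fixture:
{FILTER_GROUP(
  FILTER_PARAMS.Order.dim0.filter('dim0'),
  FILTER_PARAMS.Order.dim1.filter('dim1')
)}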

packages/cubejs-schema-compiler/test/unit/base-query.test.ts (126 additions & 0 deletions)

@@ -858,6 +858,132 @@ describe('SQL Generation', () => {
       expect(cubeSQL).toMatch(/\(\s*\(.*type\s*=\s*\$\d\$.*OR.*type\s*=\s*\$\d\$.*\)\s*AND\s*\(.*type\s*=\s*\$\d\$.*OR.*type\s*=\s*\$\d\$.*\)\s*\)/);
     });
   });
+
+  describe('FILTER_GROUP', () => {
+    /** @type {Compilers} */
+    const compilers = prepareYamlCompiler(
+      createSchemaYaml({
+        cubes: [
+          {
+            name: 'Order',
+            sql: `select * from order where {FILTER_GROUP(
+              FILTER_PARAMS.Order.dim0.filter('dim0'),
+              FILTER_PARAMS.Order.dim1.filter('dim1')
+            )}`,
+            measures: [{
+              name: 'count',
+              type: 'count',
+            }],
+            dimensions: [
+              {
+                name: 'dim0',
+                sql: 'dim0',
+                type: 'string'
+              },
+              {
+                name: 'dim1',
+                sql: 'dim1',
+                type: 'string'
+              }
+            ]
+          },
+        ]
+      })
+    );
+
+    it('inserts "or" filter', async () => {
+      await compilers.compiler.compile();
+      const query = new BaseQuery(compilers, {
+        measures: ['Order.count'],
+        filters: [
+          {
+            or: [
+              {
+                member: 'Order.dim0',
+                operator: 'equals',
+                values: ['val0'],
+              },
+              {
+                member: 'Order.dim1',
+                operator: 'equals',
+                values: ['val1'],
+              },
+            ]
+          }
+        ],
+      });
+      const cubeSQL = query.cubeSql('Order');
+      expect(cubeSQL).toContain('where (((dim0 = $0$) OR (dim1 = $1$)))');
+    });
+
+    it('inserts "and" filter', async () => {
+      await compilers.compiler.compile();
+      const query = new BaseQuery(compilers, {
+        measures: ['Order.count'],
+        filters: [
+          {
+            and: [
+              {
+                member: 'Order.dim0',
+                operator: 'equals',
+                values: ['val0'],
+              },
+              {
+                member: 'Order.dim1',
+                operator: 'equals',
+                values: ['val1'],
+              },
+            ]
+          }
+        ],
+      });
+      const cubeSQL = query.cubeSql('Order');
+      expect(cubeSQL).toContain('where (((dim0 = $0$) AND (dim1 = $1$)))');
+    });
+
+    it('inserts "or + and" filter', async () => {
+      await compilers.compiler.compile();
+      const query = new BaseQuery(compilers, {
+        measures: ['Order.count'],
+        filters: [
+          {
+            or: [
+              {
+                and: [
+                  {
+                    member: 'Order.dim0',
+                    operator: 'equals',
+                    values: ['val0'],
+                  },
+                  {
+                    member: 'Order.dim1',
+                    operator: 'equals',
+                    values: ['val1'],
+                  }
+                ]
+              },
+              {
+                and: [
+                  {
+                    member: 'Order.dim0',
+                    operator: 'equals',
+                    values: ['another_val0'],
+                  },
+                  {
+                    member: 'Order.dim1',
+                    operator: 'equals',
+                    values: ['another_val1'],
+                  }
+                ]
+              }
+            ]
+          }
+        ]
+      });
+      const cubeSQL = query.cubeSql('Order');
+      expect(cubeSQL).toContain('where ((((dim0 = $0$) AND (dim1 = $1$)) OR ((dim0 = $2$) AND (dim1 = $3$))))');
+    });
+  });
 });
 
 describe('Class unit tests', () => {

rust/cubesql/cubesql/benches/large_model.rs (2 additions & 2 deletions)

@@ -20,8 +20,8 @@ use uuid::Uuid;
 macro_rules! bench_large_model {
     ($DIMS:expr, $NAME:expr, $QUERY_FN:expr, $CRITERION:expr) => {{
         let context = Arc::new(
-            futures::executor::block_on(create_test_cube_context(
-                create_test_postgresql_cube_context($DIMS),
+            futures::executor::block_on(create_test_postgresql_cube_context(
+                get_large_model_test_tenant_ctx($DIMS),
             ))
             .unwrap(),
         );
