Skip to content

Commit 8fd9d39

Browse files
authored
Fix bug when editing cube node with materialization (#1362)
* Fix bug where editing a cube node with a materialization configured can error out based on there being no lookback_window
* Fix and add new tests
* Lint
1 parent ded38bf commit 8fd9d39

File tree

3 files changed

+75
-2
lines changed

3 files changed

+75
-2
lines changed

datajunction-server/datajunction_server/internal/materializations.py

Lines changed: 6 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -173,9 +173,14 @@ async def build_cube_materialization_config(
173173
if col.semantic_type == SemanticType.DIMENSION
174174
],
175175
measures=metrics_expressions,
176-
spark=upsert_input.config.spark,
176+
spark=upsert_input.config.spark.__root__
177+
if hasattr(upsert_input, "config") and upsert_input.config.spark
178+
else {},
177179
upstream_tables=measures_query.upstream_tables,
178180
columns=measures_query.columns,
181+
lookback_window=upsert_input.lookback_window
182+
if hasattr(upsert_input, "lookback_window")
183+
else "",
179184
)
180185
return generic_config
181186
except (KeyError, ValidationError, AttributeError) as exc: # pragma: no cover

datajunction-server/datajunction_server/internal/nodes.py

Lines changed: 7 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -874,11 +874,17 @@ async def update_cube_node(
874874
]
875875
if major_changes and active_materializations:
876876
for old in active_materializations:
877+
# Once we've migrated all materializations to the new format, we should only
878+
# be using UpsertCubeMaterialization for cube nodes
879+
job_type = MaterializationJobTypeEnum.find_match(old.job)
880+
materialization_upsert_class = UpsertMaterialization
881+
if job_type == MaterializationJobTypeEnum.DRUID_CUBE:
882+
materialization_upsert_class = UpsertCubeMaterialization
877883
new_cube_revision.materializations.append(
878884
await create_new_materialization(
879885
session,
880886
new_cube_revision,
881-
UpsertMaterialization(
887+
materialization_upsert_class(
882888
**MaterializationConfigOutput.from_orm(old).dict(
883889
exclude={"job"},
884890
),

datajunction-server/tests/api/cubes_test.py

Lines changed: 62 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1609,6 +1609,68 @@ async def test_updating_cube(
16091609
]
16101610

16111611

1612+
@pytest.mark.asyncio
1613+
async def test_updating_cube_with_existing_cube_materialization(
1614+
client_with_repairs_cube: AsyncClient,
1615+
module__query_service_client: QueryServiceClient,
1616+
):
1617+
"""
1618+
Verify updating a cube with an existing new-style cube materialization
1619+
"""
1620+
cube_name = "default.repairs_cube__default_incremental_11"
1621+
await make_a_test_cube(
1622+
client_with_repairs_cube,
1623+
cube_name,
1624+
)
1625+
response = await client_with_repairs_cube.post(
1626+
f"/nodes/{cube_name}/columns/default.hard_hat.hire_date/partition",
1627+
json={
1628+
"type_": "temporal",
1629+
"granularity": "day",
1630+
"format": "yyyyMMdd",
1631+
},
1632+
)
1633+
assert response.status_code in (200, 201)
1634+
response = await client_with_repairs_cube.post(
1635+
f"/nodes/{cube_name}/materialization/",
1636+
json={
1637+
"job": "druid_cube",
1638+
"strategy": "incremental_time",
1639+
"schedule": "@daily",
1640+
"lookback_window": "1 DAY",
1641+
},
1642+
)
1643+
# Update the cube, but keep the temporal partition column. This should succeed
1644+
response = await client_with_repairs_cube.patch(
1645+
f"/nodes/{cube_name}",
1646+
json={
1647+
"metrics": ["default.discounted_orders_rate"],
1648+
"dimensions": ["default.hard_hat.city", "default.hard_hat.hire_date"],
1649+
},
1650+
)
1651+
result = response.json()
1652+
assert result["version"] == "v2.0"
1653+
1654+
# Check that the configured materialization was updated
1655+
response = await client_with_repairs_cube.get(f"/cubes/{cube_name}/")
1656+
data = response.json()
1657+
assert [
1658+
col["semantic_entity"]
1659+
for col in data["materializations"][0]["config"]["columns"]
1660+
] == [
1661+
"default.hard_hat.city",
1662+
"default.hard_hat.hire_date",
1663+
"default.discounted_orders_rate.default_DOT_discounted_orders_rate",
1664+
]
1665+
assert data["materializations"][0]["job"] == "DruidMetricsCubeMaterializationJob"
1666+
assert (
1667+
data["materializations"][0]["name"]
1668+
== "druid_metrics_cube__incremental_time__default.hard_hat.hire_date"
1669+
)
1670+
assert data["materializations"][0]["strategy"] == "incremental_time"
1671+
assert data["materializations"][0]["schedule"] == "@daily"
1672+
1673+
16121674
@pytest.mark.asyncio
16131675
async def test_updating_cube_with_existing_materialization(
16141676
client_with_repairs_cube: AsyncClient,

0 commit comments

Comments (0)