Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
63c9a70
build: update pysdk
doctrino Mar 28, 2026
83ad9cd
docs
doctrino Mar 28, 2026
bf17aab
refactor: update import
doctrino Mar 28, 2026
284f300
regen
doctrino Mar 28, 2026
dfeb361
refactor: update templates
doctrino Mar 28, 2026
d917298
regen
doctrino Mar 28, 2026
87188de
refactor: update template
doctrino Mar 28, 2026
18d02e9
regen
doctrino Mar 28, 2026
650ea27
regen
doctrino Mar 28, 2026
c03ffc9
refactor: fix search arguments
doctrino Mar 28, 2026
e57f223
refactor: fix
doctrino Mar 28, 2026
812f2c5
regen
doctrino Mar 28, 2026
aab43e4
refactor: fix mock generator
doctrino Mar 28, 2026
de26e2f
refactor: fix mock generator
doctrino Mar 28, 2026
330c995
regen: fix
doctrino Mar 28, 2026
d1f477a
tests: update
doctrino Mar 28, 2026
419d672
tests: update
doctrino Mar 28, 2026
351119b
style: update test
doctrino Mar 28, 2026
a841ec8
tests: updated unit test
doctrino Mar 28, 2026
a4e05b6
tests: fix mocking
doctrino Mar 28, 2026
f3abbfb
tests: updated all unit tests
doctrino Mar 28, 2026
b37e759
refactor: update graphql
doctrino Mar 28, 2026
295ca36
regen
doctrino Mar 28, 2026
59cf4cd
fix: error handling
doctrino Mar 28, 2026
02c4e80
refactor: update
doctrino Mar 28, 2026
71aa87a
regen
doctrino Mar 28, 2026
daa09cd
refactor: better fix
doctrino Mar 28, 2026
0d75739
regen
doctrino Mar 28, 2026
770edf0
regen
doctrino Mar 28, 2026
e5960ea
refactor: handle required values
doctrino Mar 28, 2026
80953b8
tests: added required
doctrino Mar 28, 2026
2786ceb
fix: handle created and lastUpdated time
doctrino Mar 28, 2026
78dbd9b
regen
doctrino Mar 28, 2026
fb0b087
Merge branch 'main' into pysdk-to-v8
doctrino Mar 30, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions cognite/pygen/_core/templates/api_core.py.jinja
Original file line number Diff line number Diff line change
Expand Up @@ -572,8 +572,10 @@ class GraphQLQueryResponse:
elif "__typename" in data:
try:
item = self._data_class_by_type[data["__typename"]].model_validate(data)
except KeyError:
raise ValueError(f"Could not find class for type {data['__typename']}") from None
except KeyError as key_error:
if key_error.args[0] == data["__typename"]:
raise ValueError(f"Could not find class for type {data['__typename']}") from None
raise key_error
Comment on lines +576 to +578
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Improved error message; there can be KeyErrors inside the model_validate call as well

else:
self._output.append(item)
else:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
from __future__ import annotations

import datetime
from collections.abc import Callable
from typing import (
Annotated,
Optional,
Any,
no_type_check,
TypeVar,
Set,
)

from cognite.client import data_modeling as dm
Expand All @@ -24,6 +27,23 @@ from pydantic import BaseModel, BeforeValidator, model_validator, field_validato
from pydantic.alias_generators import to_camel
from pydantic.functional_serializers import PlainSerializer

T_CogniteResource = TypeVar("T_CogniteResource", bound=CogniteTimeSeries | CogniteSequence | CogniteFileMetadata)

_MISSING_VALUE = -999

def _create_load_method(resource_cls: type[T_CogniteResource], required_fields: Set[str]) -> Callable[[Any], Any]:
def _load_if_dict(value: Any) -> Any:
if not isinstance(value, dict):
return value
if missing_values := set(required_fields) - set(value.keys()):
raise ValueError(f"Missing required fields: {', '.join(missing_values)}")
for key in ["createdTime", "lastUpdatedTime"]:
# GraphQL does not support returning these properties, while the read classes requires them.
value[key] = _MISSING_VALUE
return resource_cls.load(value)

return _load_if_dict


TimeSeries = Annotated[
CogniteTimeSeries,
Expand All @@ -32,7 +52,7 @@ TimeSeries = Annotated[
return_type=dict,
when_used="unless-none",
),
BeforeValidator(lambda v: CogniteTimeSeries.load(v) if isinstance(v, dict) else v),
BeforeValidator(_create_load_method(CogniteTimeSeries, {"id", "isStep", "isString"})),
]


Expand All @@ -43,7 +63,7 @@ SequenceRead = Annotated[
return_type=dict,
when_used="unless-none",
),
BeforeValidator(lambda v: CogniteSequence.load(v) if isinstance(v, dict) else v),
BeforeValidator(_create_load_method(CogniteSequence, {"id", "columns"})),
]


Expand All @@ -54,7 +74,7 @@ FileMetadata = Annotated[
return_type=dict,
when_used="unless-none",
),
BeforeValidator(lambda v: CogniteFileMetadata.load(v) if isinstance(v, dict) else v),
BeforeValidator(_create_load_method(CogniteFileMetadata, {"id", "uploaded", "name"})),
]


Expand Down Expand Up @@ -123,6 +143,15 @@ class TimeSeriesGraphQL(GraphQLExternal):
datetime.datetime.fromisoformat(item["timestamp"].replace("Z", "+00:00"))
)
data["datapoints"] = datapoints["items"]
if missing := [name for name in ["id", "isString", "isStep"] if data.get(name) is None]:
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The PySDK does not give nice error messages on missing values. Just a KeyError on the first failure.

raise ValueError(
f"Cannot create datapoints, missing required fields: {', '.join(missing)}. "
"You need to include these in your query."
)
if "type" not in data:
# Type is not supported in the timeseries you retrieve through GraphQL, but it is required
# for the Datapoints object. Luckily it can be inferred from the isString field, so we set it here.
data["type"] = "string" if data["isString"] else "numeric"
data["data"] = Datapoints.load(data)
if isinstance(data, dict) and "getLatestDataPoint" in data:
latest = data.pop("getLatestDataPoint")
Expand All @@ -146,6 +175,7 @@ class TimeSeriesGraphQL(GraphQLExternal):
description=self.description,
)

@no_type_check
def as_read(self) -> CogniteTimeSeries:
Comment on lines +178 to 179
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We do not do type check here as this is handled by the pydantic validation. If any error is raised, then this is expected behavior. See docs: https://cognite-pygen.readthedocs-hosted.com/en/latest/usage/querying.html#data-classes

return CogniteTimeSeries(
id=self.id,
Expand All @@ -160,6 +190,8 @@ class TimeSeriesGraphQL(GraphQLExternal):
is_step=self.is_step,
description=self.description,
security_categories=self.security_categories,
created_time=self.created_time,
last_updated_time=self.last_updated_time,
)


Expand Down Expand Up @@ -237,7 +269,7 @@ class SequenceColumnGraphQL(GraphQLExternal):
@field_validator("value_type", mode="before")
def title_value_type(cls, value: Any) -> Any:
if isinstance(value, str):
return value.title()
return value.upper()
return value

@no_type_check
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,6 @@ class DataPointsAPI:
uniform_index=uniform_index,
include_aggregate_name=include_aggregate_name,
include_granularity_name=include_granularity_name,
column_names="instance_id",
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This no longer exists, but is the prioritized field when doing retrieves.

)

def __getattr__(self, item: str) -> Any:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ class NodeQueryCore(QueryCore[T_DomainModelList, T_DomainListEnd]):
step.connection_property = item._connection_property
step.expression.from_ = from_
step.expression.filter = item._assemble_filter()
step.expression.sort = item._create_sort()
step.expression.sort = item._create_sort() or []
builder.append(step)
elif isinstance(item, NodeQueryCore) and isinstance(item._expression, dm.query.EdgeResultSetExpression):
# Edge without properties
Expand Down Expand Up @@ -264,7 +264,7 @@ class NodeQueryCore(QueryCore[T_DomainModelList, T_DomainListEnd]):
)
step.expression.from_ = from_
step.expression.filter = item._assemble_filter()
step.expression.sort = item._create_sort()
step.expression.sort = item._create_sort() or []
builder.append(step)
else:
raise TypeError(f"Unsupported query step type: {type(item._expression)}")
Expand Down
2 changes: 1 addition & 1 deletion cognite/pygen/_query/executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ def _fetch_reverse_direct_relation_of_lists(
is_selected = is_items if step.raw_filter is None else dm.filters.And(is_items, step.raw_filter)

chunk_result = client.data_modeling.instances.search(
view_id, properties=None, filter=is_selected, limit=api_limit
view=view_id, query=None, properties=None, filter=is_selected, limit=api_limit
)
for node in chunk_result:
node_id = node.as_id()
Expand Down
4 changes: 2 additions & 2 deletions cognite/pygen/_query/step.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

from cognite.client import CogniteClient
from cognite.client.data_classes import data_modeling as dm
from cognite.client.data_classes._base import CogniteObject
from cognite.client.data_classes._base import CogniteResource
from cognite.client.data_classes.data_modeling.instances import Instance
from cognite.client.data_classes.data_modeling.views import ReverseDirectRelation, ViewProperty

Expand All @@ -20,7 +20,7 @@


@dataclass(frozen=True)
class ViewPropertyId(CogniteObject):
class ViewPropertyId(CogniteResource):
view: dm.ViewId
property: str

Expand Down
4 changes: 2 additions & 2 deletions cognite/pygen/demo/_solar_apm.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from typing import Any, cast

from cognite.client import CogniteClient
from cognite.client.data_classes import DataSet
from cognite.client.data_classes import DataSet, DataSetWrite
from cognite.client.data_classes.data_modeling import (
DataModel,
MappedProperty,
Expand Down Expand Up @@ -146,7 +146,7 @@ def _data_set_id(self, client: CogniteClient) -> int | None:
new_dataset = cast(
DataSet,
client.data_sets.create(
DataSet(
DataSetWrite(
external_id=self._data_set_external_id,
name=self._data_set_external_id,
description="This data set was created by pygen for demo purposes.",
Expand Down
4 changes: 2 additions & 2 deletions cognite/pygen/utils/cdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@

from cognite.client import CogniteClient
from cognite.client import data_modeling as dm
from cognite.client._api.files import FilesAPI
from cognite.client._sync_api.files import SyncFilesAPI
from cognite.client.data_classes import FileMetadata, FileMetadataList, TimeSeries, TimeSeriesList
from cognite.client.data_classes._base import CogniteResource, T_CogniteResource, T_CogniteResourceList
from cognite.client.data_classes.data_modeling import (
Expand Down Expand Up @@ -104,7 +104,7 @@ def delete(


class _FileAPIAdapter(_CogniteCoreResourceAPI[FileMetadataList]):
def __init__(self, files_api: FilesAPI):
def __init__(self, files_api: SyncFilesAPI):
self._files_api = files_api

def retrieve_multiple(
Expand Down
16 changes: 13 additions & 3 deletions cognite/pygen/utils/mock_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ def __init__(

@property
def _views(self) -> dm.ViewList:
return dm.ViewList(self._view_by_id.values())
return dm.ViewList(list(self._view_by_id.values()))

def __str__(self):
args = [
Expand Down Expand Up @@ -390,6 +390,7 @@ def _only_null_values(count: int) -> list[None]:
external.timeseries.extend(
[
TimeSeries(
id=-1,
external_id=ts,
name=ts,
data_set_id=self._data_set_id,
Expand All @@ -398,6 +399,8 @@ def _only_null_values(count: int) -> list[None]:
metadata={
"source": f"Pygen{type(self).__name__}",
},
created_time=0,
last_updated_time=1,
)
for timeseries_set in values
for ts in (
Expand All @@ -412,6 +415,7 @@ def _only_null_values(count: int) -> list[None]:
external.file.extend(
[
FileMetadata(
id=-1,
external_id=file,
name=file,
source=self._instance_space,
Expand All @@ -420,6 +424,9 @@ def _only_null_values(count: int) -> list[None]:
metadata={
"source": f"Pygen{type(self).__name__}",
},
created_time=0,
last_updated_time=1,
uploaded=False,
)
for file_set in values
for file in (cast(list[str], file_set) if isinstance(file_set, list) else [cast(str, file_set)])
Expand All @@ -430,13 +437,14 @@ def _only_null_values(count: int) -> list[None]:
external.sequence.extend(
[
Sequence(
id=-1,
external_id=seq,
name=seq,
data_set_id=self._data_set_id,
columns=[
SequenceColumn(
external_id="value",
value_type=cast(Literal["Double"], "DOUBLE"),
value_type="DOUBLE",
Comment on lines -439 to +447
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bug fixed in the SDK :)

metadata={
"source": f"Pygen{type(self).__name__}",
},
Expand All @@ -445,6 +453,8 @@ def _only_null_values(count: int) -> list[None]:
metadata={
"source": f"Pygen{type(self).__name__}",
},
created_time=0,
last_updated_time=1,
)
for seq_set in values
for seq in (cast(list[str], seq_set) if isinstance(seq_set, list) else [cast(str, seq_set)])
Expand Down Expand Up @@ -830,7 +840,7 @@ def deploy(
if nodes or edges:
# There is an 'edge' if there is an outward and inward edge on two views, we can get duplicated edges.
# We should remove the duplicates.
edges = dm.EdgeApplyList({edge.as_id(): edge for edge in edges}.values())
edges = dm.EdgeApplyList(list({edge.as_id(): edge for edge in edges}.values()))

created = client.data_modeling.instances.apply(
nodes,
Expand Down
6 changes: 4 additions & 2 deletions examples/cognite_core/_api/_core.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion examples/cognite_core/_api_client.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading
Loading