diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3b01fa262..f45ee611f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -161,8 +161,7 @@ jobs: - name: Install Python dependencies run: | python -m pip install --upgrade pip - pip install . - pip install -r docs/add-requirements.txt + pip install .[docs] - name: Check documentation for errors run: | SPHINXOPTS="-a -E -n -W --keep-going" make -C docs html @@ -255,13 +254,13 @@ jobs: pip install .[dev] - name: Check typing with MyPy run: | - mypy ./aas_compliance_tool test + mypy aas_compliance_tool test - name: Check code style with PyCodestyle run: | - pycodestyle --count --max-line-length 120 ./aas_compliance_tool test + pycodestyle --count --max-line-length 120 aas_compliance_tool test - compliance-tool-readme-codeblocks: - # This job runs the same static code analysis (mypy and pycodestyle) on the codeblocks in our docstrings. + compliance-tool-package: + # This job checks if we can build our compliance_tool package runs-on: ubuntu-latest defaults: @@ -273,42 +272,43 @@ jobs: uses: actions/setup-python@v5 with: python-version: ${{ env.X_PYTHON_MIN_VERSION }} - - name: Install Python dependencies - # install the local sdk in editable mode so it does not get overwritten + - name: Install dependencies run: | python -m pip install --upgrade pip - pip install -e ../sdk[dev] - pip install .[dev] - - name: Check typing with MyPy - run: | - mypy <(codeblocks python README.md) - - name: Check code style with PyCodestyle - run: | - codeblocks --wrap python README.md | pycodestyle --count --max-line-length 120 - - - name: Run readme codeblocks with Python + pip install build + - name: Create source and wheel dist run: | - codeblocks python README.md | python + python -m build - compliance-tool-package: - # This job checks if we can build our compliance_tool package + #server-test: + # TODO: This job runs the unittests on the python versions specified down at the matrix + # and 
aas-test-engines on the server + + + server-static-analysis: + # This job runs static code analysis, namely pycodestyle and mypy runs-on: ubuntu-latest defaults: run: - working-directory: ./compliance_tool + working-directory: ./server/app steps: - uses: actions/checkout@v4 - name: Set up Python ${{ env.X_PYTHON_MIN_VERSION }} uses: actions/setup-python@v5 with: python-version: ${{ env.X_PYTHON_MIN_VERSION }} - - name: Install dependencies + - name: Install Python dependencies run: | python -m pip install --upgrade pip - pip install build - - name: Create source and wheel dist + pip install ../../sdk + pip install .[dev] + - name: Check typing with MyPy run: | - python -m build + mypy . + - name: Check code style with PyCodestyle + run: | + pycodestyle --count --max-line-length 120 . server-package: # This job checks if we can build our server package diff --git a/.gitignore b/.gitignore index 18b522c3a..28514e8d9 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,7 @@ sdk/test/adapter/schemas # Ignore dynamically generated version file sdk/basyx/version.py compliance_tool/aas_compliance_tool/version.py +server/app/version.py # ignore the content of the server storage server/storage/ diff --git a/sdk/.readthedocs.yaml b/sdk/.readthedocs.yaml index e64e5daaf..1085208ac 100644 --- a/sdk/.readthedocs.yaml +++ b/sdk/.readthedocs.yaml @@ -15,4 +15,5 @@ python: install: - method: pip path: . 
- - requirements: docs/add-requirements.txt + extra_requirements: + - docs diff --git a/sdk/README.md b/sdk/README.md index f63f7afcb..5dfd2e9a8 100644 --- a/sdk/README.md +++ b/sdk/README.md @@ -42,8 +42,7 @@ The BaSyx Python SDK requires the following Python packages to be installed for * `lxml` (BSD 3-clause License, using `libxml2` under MIT License) * `python-dateutil` (BSD 3-clause License) * `pyecma376-2` (Apache License v2.0) -* `urllib3` (MIT License) -* `Werkzeug` (BSD 3-clause License) + Development/testing/documentation/example dependencies: * `mypy` (MIT License) diff --git a/sdk/basyx/aas/adapter/_generic.py b/sdk/basyx/aas/adapter/_generic.py index 79c98fc8c..65d14d8d3 100644 --- a/sdk/basyx/aas/adapter/_generic.py +++ b/sdk/basyx/aas/adapter/_generic.py @@ -19,6 +19,13 @@ PathOrBinaryIO = Union[Path, BinaryIO] PathOrIO = Union[Path, IO] # IO is TextIO or BinaryIO +# JSON top-level keys and their corresponding model classes +JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES = ( + ('assetAdministrationShells', model.AssetAdministrationShell), + ('submodels', model.Submodel), + ('conceptDescriptions', model.ConceptDescription), +) + # XML Namespace definition XML_NS_MAP = {"aas": "https://admin-shell.io/aas/3/0"} XML_NS_AAS = "{" + XML_NS_MAP["aas"] + "}" diff --git a/sdk/basyx/aas/adapter/json/json_deserialization.py b/sdk/basyx/aas/adapter/json/json_deserialization.py index 78e3713f5..cd7ce9fb0 100644 --- a/sdk/basyx/aas/adapter/json/json_deserialization.py +++ b/sdk/basyx/aas/adapter/json/json_deserialization.py @@ -34,12 +34,13 @@ import json import logging import pprint -from typing import Dict, Callable, ContextManager, TypeVar, Type, List, IO, Optional, Set, get_args +from typing import (Dict, Callable, ContextManager, TypeVar, Type, + List, IO, Optional, Set, get_args, Tuple, Iterable, Any) from basyx.aas import model from .._generic import MODELLING_KIND_INVERSE, ASSET_KIND_INVERSE, KEY_TYPES_INVERSE, ENTITY_TYPES_INVERSE, \ IEC61360_DATA_TYPES_INVERSE, 
IEC61360_LEVEL_TYPES_INVERSE, KEY_TYPES_CLASSES_INVERSE, REFERENCE_TYPES_INVERSE, \ - DIRECTION_INVERSE, STATE_OF_EVENT_INVERSE, QUALIFIER_KIND_INVERSE, PathOrIO, Path + DIRECTION_INVERSE, STATE_OF_EVENT_INVERSE, QUALIFIER_KIND_INVERSE, PathOrIO, Path, JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES logger = logging.getLogger(__name__) @@ -154,19 +155,20 @@ def __init__(self, *args, **kwargs): json.JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs) @classmethod - def object_hook(cls, dct: Dict[str, object]) -> object: - # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON - # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. - if 'modelType' not in dct: - return dct + def _get_aas_class_parsers(cls) -> Dict[str, Callable[[Dict[str, object]], object]]: + """ + Returns the dictionary of AAS class parsers. + + The following dict specifies a constructor method for all AAS classes that may be identified using the + ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON + representation of an object and tries to construct a Python object from it. Embedded objects that have a + modelType themselves are expected to be converted to the correct PythonType already. Additionally, each + function takes a bool parameter ``failsafe``, which indicates whether to log errors and skip defective objects + instead of raising an Exception. - # The following dict specifies a constructor method for all AAS classes that may be identified using the - # ``modelType`` attribute in their JSON representation. Each of those constructor functions takes the JSON - # representation of an object and tries to construct a Python object from it. Embedded objects that have a - # modelType themselves are expected to be converted to the correct PythonType already. 
Additionally, each - # function takes a bool parameter ``failsafe``, which indicates weather to log errors and skip defective objects - # instead of raising an Exception. - AAS_CLASS_PARSERS: Dict[str, Callable[[Dict[str, object]], object]] = { + :return: The dictionary of AAS class parsers + """ + aas_class_parsers: Dict[str, Callable[[Dict[str, object]], object]] = { 'AssetAdministrationShell': cls._construct_asset_administration_shell, 'AssetInformation': cls._construct_asset_information, 'SpecificAssetId': cls._construct_specific_asset_id, @@ -189,6 +191,16 @@ def object_hook(cls, dct: Dict[str, object]) -> object: 'ReferenceElement': cls._construct_reference_element, 'DataSpecificationIec61360': cls._construct_data_specification_iec61360, } + return aas_class_parsers + + @classmethod + def object_hook(cls, dct: Dict[str, object]) -> object: + # Check if JSON object seems to be a deserializable AAS object (i.e. it has a modelType). Otherwise, the JSON + # object is returned as is, so it's possible to mix AAS objects with other data within a JSON structure. + if 'modelType' not in dct: + return dct + + AAS_CLASS_PARSERS = cls._get_aas_class_parsers() # Get modelType and constructor function if not isinstance(dct['modelType'], str): @@ -799,7 +811,9 @@ def _select_decoder(failsafe: bool, stripped: bool, decoder: Optional[Type[AASFr def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathOrIO, replace_existing: bool = False, ignore_existing: bool = False, failsafe: bool = True, stripped: bool = False, - decoder: Optional[Type[AASFromJsonDecoder]] = None) -> Set[model.Identifier]: + decoder: Optional[Type[AASFromJsonDecoder]] = None, + keys_to_types: Iterable[Tuple[str, Any]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) \ + -> Set[model.Identifier]: """ Read an Asset Administration Shell JSON file according to 'Details of the Asset Administration Shell', chapter 5.5 into a given object store. 
@@ -817,6 +831,7 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO See https://git.rwth-aachen.de/acplt/pyi40aas/-/issues/91 This parameter is ignored if a decoder class is specified. :param decoder: The decoder class used to decode the JSON objects + :param keys_to_types: An iterable of (JSON key, expected type) pairs. This is used to check the type of the objects :raises KeyError: **Non-failsafe**: Encountered a duplicate identifier :raises KeyError: Encountered an identifier that already exists in the given ``object_store`` with both ``replace_existing`` and ``ignore_existing`` set to ``False`` @@ -843,45 +858,43 @@ def read_aas_json_file_into(object_store: model.AbstractObjectStore, file: PathO with cm as fp: data = json.load(fp, cls=decoder_) - for name, expected_type in (('assetAdministrationShells', model.AssetAdministrationShell), - ('submodels', model.Submodel), - ('conceptDescriptions', model.ConceptDescription)): + for name, expected_type in keys_to_types: try: lst = _get_ts(data, name, list) except (KeyError, TypeError): continue for item in lst: - error_message = "Expected a {} in list '{}', but found {}".format( - expected_type.__name__, name, repr(item)) + error_msg = f"Expected a {expected_type.__name__} in list '{name}', but found {repr(item)}." if isinstance(item, model.Identifiable): if not isinstance(item, expected_type): - if decoder_.failsafe: - logger.warning("{} was in wrong list '{}'; nevertheless, we'll use it".format(item, name)) - else: - raise TypeError(error_message) + if not decoder_.failsafe: + raise TypeError(f"{item} was in the wrong list '{name}'") + logger.warning(f"{item} was in the wrong list '{name}'; nevertheless, we'll use it") + if item.id in ret: - error_message = f"{item} has a duplicate identifier already parsed in the document!" + error_msg = f"{item} has a duplicate identifier already parsed in the document!" 
if not decoder_.failsafe: - raise KeyError(error_message) - logger.error(error_message + " skipping it...") + raise KeyError(error_msg) + logger.error(f"{error_msg} Skipping it...") continue + existing_element = object_store.get(item.id) if existing_element is not None: if not replace_existing: - error_message = f"object with identifier {item.id} already exists " \ - f"in the object store: {existing_element}!" + error_msg = f"Object with id '{item.id}' already exists in store: {existing_element}!" if not ignore_existing: - raise KeyError(error_message + f" failed to insert {item}!") - logger.info(error_message + f" skipping insertion of {item}...") + raise KeyError(f"{error_msg} Failed to insert {item}!") + logger.info(f"{error_msg} Skipping {item}...") continue object_store.discard(existing_element) + object_store.add(item) ret.add(item.id) elif decoder_.failsafe: - logger.error(error_message) + logger.error(f"{error_msg} Skipping it...") else: - raise TypeError(error_message) + raise TypeError(error_msg) return ret diff --git a/sdk/basyx/aas/adapter/json/json_serialization.py b/sdk/basyx/aas/adapter/json/json_serialization.py index f7d6626eb..024226d97 100644 --- a/sdk/basyx/aas/adapter/json/json_serialization.py +++ b/sdk/basyx/aas/adapter/json/json_serialization.py @@ -30,11 +30,12 @@ import contextlib import inspect import io -from typing import ContextManager, List, Dict, Optional, TextIO, Type, Callable, get_args +from typing import ContextManager, List, Dict, Optional, TextIO, Type, Callable, get_args, Iterable, Tuple import json from basyx.aas import model from .. 
import _generic +from .._generic import JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES class AASToJsonEncoder(json.JSONEncoder): @@ -57,6 +58,40 @@ class AASToJsonEncoder(json.JSONEncoder): """ stripped = False + @classmethod + def _get_aas_class_serializers(cls) -> Dict[Type, Callable]: + mapping: Dict[Type, Callable] = { + model.AdministrativeInformation: cls._administrative_information_to_json, + model.AnnotatedRelationshipElement: cls._annotated_relationship_element_to_json, + model.AssetAdministrationShell: cls._asset_administration_shell_to_json, + model.AssetInformation: cls._asset_information_to_json, + model.BasicEventElement: cls._basic_event_element_to_json, + model.Blob: cls._blob_to_json, + model.Capability: cls._capability_to_json, + model.ConceptDescription: cls._concept_description_to_json, + model.DataSpecificationIEC61360: cls._data_specification_iec61360_to_json, + model.Entity: cls._entity_to_json, + model.Extension: cls._extension_to_json, + model.File: cls._file_to_json, + model.Key: cls._key_to_json, + model.LangStringSet: cls._lang_string_set_to_json, + model.MultiLanguageProperty: cls._multi_language_property_to_json, + model.Operation: cls._operation_to_json, + model.Property: cls._property_to_json, + model.Qualifier: cls._qualifier_to_json, + model.Range: cls._range_to_json, + model.Reference: cls._reference_to_json, + model.ReferenceElement: cls._reference_element_to_json, + model.RelationshipElement: cls._relationship_element_to_json, + model.Resource: cls._resource_to_json, + model.SpecificAssetId: cls._specific_asset_id_to_json, + model.Submodel: cls._submodel_to_json, + model.SubmodelElementCollection: cls._submodel_element_collection_to_json, + model.SubmodelElementList: cls._submodel_element_list_to_json, + model.ValueReferencePair: cls._value_reference_pair_to_json, + } + return mapping + def default(self, obj: object) -> object: """ The overwritten ``default`` method for :class:`json.JSONEncoder` @@ -64,36 +99,7 @@ def default(self, obj: 
object) -> object: :param obj: The object to serialize to json :return: The serialized object """ - mapping: Dict[Type, Callable] = { - model.AdministrativeInformation: self._administrative_information_to_json, - model.AnnotatedRelationshipElement: self._annotated_relationship_element_to_json, - model.AssetAdministrationShell: self._asset_administration_shell_to_json, - model.AssetInformation: self._asset_information_to_json, - model.BasicEventElement: self._basic_event_element_to_json, - model.Blob: self._blob_to_json, - model.Capability: self._capability_to_json, - model.ConceptDescription: self._concept_description_to_json, - model.DataSpecificationIEC61360: self._data_specification_iec61360_to_json, - model.Entity: self._entity_to_json, - model.Extension: self._extension_to_json, - model.File: self._file_to_json, - model.Key: self._key_to_json, - model.LangStringSet: self._lang_string_set_to_json, - model.MultiLanguageProperty: self._multi_language_property_to_json, - model.Operation: self._operation_to_json, - model.Property: self._property_to_json, - model.Qualifier: self._qualifier_to_json, - model.Range: self._range_to_json, - model.Reference: self._reference_to_json, - model.ReferenceElement: self._reference_element_to_json, - model.RelationshipElement: self._relationship_element_to_json, - model.Resource: self._resource_to_json, - model.SpecificAssetId: self._specific_asset_id_to_json, - model.Submodel: self._submodel_to_json, - model.SubmodelElementCollection: self._submodel_element_collection_to_json, - model.SubmodelElementList: self._submodel_element_list_to_json, - model.ValueReferencePair: self._value_reference_pair_to_json, - } + mapping = self._get_aas_class_serializers() for typ in mapping: if isinstance(obj, typ): mapping_method = mapping[typ] @@ -693,26 +699,34 @@ def _select_encoder(stripped: bool, encoder: Optional[Type[AASToJsonEncoder]] = return AASToJsonEncoder if not stripped else StrippedAASToJsonEncoder -def _create_dict(data: 
model.AbstractObjectStore) -> dict: - # separate different kind of objects - asset_administration_shells: List[model.AssetAdministrationShell] = [] - submodels: List[model.Submodel] = [] - concept_descriptions: List[model.ConceptDescription] = [] +def _create_dict(data: model.AbstractObjectStore, + keys_to_types: Iterable[Tuple[str, Type]] = JSON_AAS_TOP_LEVEL_KEYS_TO_TYPES) \ + -> Dict[str, List[model.Identifiable]]: + """ + Categorizes objects from an AbstractObjectStore into a dictionary based on their types. + + This function iterates over the objects in the provided AbstractObjectStore and groups them into lists + based on their types, as defined in the `keys_to_types` mapping. The resulting dictionary contains + keys corresponding to the names in `keys_to_types` and values as lists of objects of the respective types. + + :param data: An AbstractObjectStore containing objects to be categorized. + :param keys_to_types: An iterable of tuples where each tuple contains: + - A string key representing the category name. + - A type to match objects against. + :return: A dictionary where keys are category names and values are lists of objects of the corresponding types. 
+ """ + objects: Dict[str, List[model.Identifiable]] = {} + for obj in data: - if isinstance(obj, model.AssetAdministrationShell): - asset_administration_shells.append(obj) - elif isinstance(obj, model.Submodel): - submodels.append(obj) - elif isinstance(obj, model.ConceptDescription): - concept_descriptions.append(obj) - dict_: Dict[str, List] = {} - if asset_administration_shells: - dict_['assetAdministrationShells'] = asset_administration_shells - if submodels: - dict_['submodels'] = submodels - if concept_descriptions: - dict_['conceptDescriptions'] = concept_descriptions - return dict_ + # Iterate through the mapping of category names to expected types + for name, expected_type in keys_to_types: + # Check if the object matches the expected type + if isinstance(obj, expected_type): + # Add the object to the appropriate category in the dictionary + objects.setdefault(name, []) + objects[name].append(obj) + break # Exit the inner loop once a match is found + return objects def object_store_to_json(data: model.AbstractObjectStore, stripped: bool = False, diff --git a/sdk/basyx/aas/model/base.py b/sdk/basyx/aas/model/base.py index a93e3cb59..3b175d389 100644 --- a/sdk/basyx/aas/model/base.py +++ b/sdk/basyx/aas/model/base.py @@ -802,7 +802,7 @@ def find_source(self) -> Tuple[Optional["Referable"], Optional[List[str]]]: # t def update_from(self, other: "Referable", update_source: bool = False): """ - Internal function to updates the object's attributes from another object of a similar type. + Internal function to update the object's attributes from a different version of the exact same object. This function should not be used directly. It is typically used by backend implementations (database adapters, protocol clients, etc.) to update the object's data, after ``update()`` has been called. @@ -811,15 +811,31 @@ def update_from(self, other: "Referable", update_source: bool = False): :param update_source: Update the source attribute with the other's source attribute. 
This is not propagated recursively """ - for name, var in vars(other).items(): - # do not update the parent, namespace_element_sets or source (depending on update_source parameter) - if name in ("parent", "namespace_element_sets") or name == "source" and not update_source: + for name in dir(other): + # Skip private and protected attributes + if name.startswith('_'): continue - if isinstance(var, NamespaceSet): + + # Do not update 'parent', 'namespace_element_sets', or 'source' (depending on update_source parameter) + if name in ("parent", "namespace_element_sets") or (name == "source" and not update_source): + continue + + # Skip methods + attr = getattr(other, name) + if callable(attr): + continue + + if isinstance(attr, NamespaceSet): # update the elements of the NameSpaceSet - vars(self)[name].update_nss_from(var) + getattr(self, name).update_nss_from(attr) else: - vars(self)[name] = var # that variable is not a NameSpaceSet, so it isn't Referable + # Check if this is a property and if it has no setter + prop = getattr(type(self), name, None) + if isinstance(prop, property) and prop.fset is None: + if getattr(self, name) != attr: + raise ValueError(f"property {name} is immutable but has changed between versions of the object") + else: + setattr(self, name, attr) def commit(self) -> None: """ diff --git a/sdk/docs/add-requirements.txt b/sdk/docs/add-requirements.txt deleted file mode 100644 index 6ac2c1473..000000000 --- a/sdk/docs/add-requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ -# Additional requirements for building the docs -sphinx~=8.2 -sphinx-rtd-theme~=3.0 -sphinx-argparse~=0.5.0 diff --git a/sdk/pyproject.toml b/sdk/pyproject.toml index baaf6ff05..4d5c25203 100644 --- a/sdk/pyproject.toml +++ b/sdk/pyproject.toml @@ -36,11 +36,9 @@ classifiers = [ ] requires-python = ">=3.9" dependencies = [ - "lxml>=4.2,<5", + "lxml>=5.3", "python-dateutil>=2.8,<3", - "pyecma376-2>=1.0.1", - "urllib3>=1.26,<3", - "Werkzeug>=3.0.3,<4", + "pyecma376-2>=1.0.1" ] 
[project.optional-dependencies] @@ -55,6 +53,11 @@ dev = [ "types-python-dateutil", "lxml-stubs~=0.5.1", ] +docs= [ + "sphinx~=8.2", + "sphinx-rtd-theme~=3.0", + "sphinx-argparse~=0.5.0" +] [project.urls] "Homepage" = "https://github.com/eclipse-basyx/basyx-python-sdk" diff --git a/sdk/test/adapter/json/test_json_deserialization.py b/sdk/test/adapter/json/test_json_deserialization.py index 9272bdf98..0dba6dbdb 100644 --- a/sdk/test/adapter/json/test_json_deserialization.py +++ b/sdk/test/adapter/json/test_json_deserialization.py @@ -37,7 +37,8 @@ def test_file_format_wrong_list(self) -> None: } ] }""" - with self.assertRaisesRegex(TypeError, r"submodels.*AssetAdministrationShell"): + with self.assertRaisesRegex(TypeError, r"AssetAdministrationShell.* was " + r"in the wrong list 'submodels'"): read_aas_json_file(io.StringIO(data), failsafe=False) with self.assertLogs(logging.getLogger(), level=logging.WARNING) as cm: read_aas_json_file(io.StringIO(data), failsafe=True) @@ -196,7 +197,7 @@ def get_clean_store() -> model.DictObjectStore: with self.assertLogs(logging.getLogger(), level=logging.INFO) as log_ctx: identifiers = read_aas_json_file_into(object_store, string_io, replace_existing=False, ignore_existing=True) self.assertEqual(len(identifiers), 0) - self.assertIn("already exists in the object store", log_ctx.output[0]) # type: ignore + self.assertIn("already exists in store", log_ctx.output[0]) # type: ignore submodel = object_store.pop() self.assertIsInstance(submodel, model.Submodel) self.assertEqual(submodel.id_short, "test123") @@ -204,7 +205,7 @@ def get_clean_store() -> model.DictObjectStore: string_io.seek(0) object_store = get_clean_store() - with self.assertRaisesRegex(KeyError, r"already exists in the object store"): + with self.assertRaisesRegex(KeyError, r"already exists in store"): identifiers = read_aas_json_file_into(object_store, string_io, replace_existing=False, ignore_existing=False) self.assertEqual(len(identifiers), 0) diff --git 
a/server/Dockerfile b/server/Dockerfile index 4df672c41..059b2e8ab 100644 --- a/server/Dockerfile +++ b/server/Dockerfile @@ -44,5 +44,6 @@ COPY ./sdk /sdk COPY ./server/app /app WORKDIR /app RUN pip install ../sdk +RUN pip install . CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.ini"] diff --git a/server/app/interfaces/__init__.py b/server/app/interfaces/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py new file mode 100644 index 000000000..65233a893 --- /dev/null +++ b/server/app/interfaces/base.py @@ -0,0 +1,442 @@ +# Copyright (c) 2025 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. +# +# SPDX-License-Identifier: MIT +import abc +import datetime +import enum +import io +import itertools +import json +from typing import Iterable, Type, Iterator, Tuple, Optional, List, Union, Dict, Callable, TypeVar, Any + +import werkzeug.exceptions +import werkzeug.routing +import werkzeug.utils +from lxml import etree +from werkzeug import Response, Request +from werkzeug.exceptions import NotFound, BadRequest +from werkzeug.routing import MapAdapter + +from basyx.aas import model +from basyx.aas.adapter._generic import XML_NS_MAP +from basyx.aas.adapter.json import StrictStrippedAASFromJsonDecoder, StrictAASFromJsonDecoder, AASToJsonEncoder +from basyx.aas.adapter.xml import xml_serialization, XMLConstructables, read_aas_xml_element +from basyx.aas.model import AbstractObjectStore +from util.converters import base64url_decode + + +T = TypeVar("T") + + +@enum.unique +class MessageType(enum.Enum): + UNDEFINED = enum.auto() + INFO = enum.auto() + WARNING = enum.auto() + ERROR = enum.auto() + EXCEPTION = enum.auto() + + def __str__(self): + return self.name.capitalize() + + +class Message: + def __init__(self, code: str, text: str, 
message_type: MessageType = MessageType.UNDEFINED, + timestamp: Optional[datetime.datetime] = None): + self.code: str = code + self.text: str = text + self.message_type: MessageType = message_type + self.timestamp: datetime.datetime = timestamp if timestamp is not None \ + else datetime.datetime.now(datetime.timezone.utc) + + +class Result: + def __init__(self, success: bool, messages: Optional[List[Message]] = None): + if messages is None: + messages = [] + self.success: bool = success + self.messages: List[Message] = messages + + +ResponseData = Union[Result, object, List[object]] + + +class APIResponse(abc.ABC, Response): + @abc.abstractmethod + def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, + stripped: bool = False, *args, **kwargs): + super().__init__(*args, **kwargs) + if obj is None: + self.status_code = 204 + else: + self.data = self.serialize(obj, cursor, stripped) + + @abc.abstractmethod + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + pass + + +class JsonResponse(APIResponse): + def __init__(self, *args, content_type="application/json", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + if cursor is None: + data = obj + else: + data = { + "paging_metadata": {"cursor": str(cursor)}, + "result": obj + } + return json.dumps( + data, + cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, + separators=(",", ":") + ) + + +class XmlResponse(APIResponse): + def __init__(self, *args, content_type="application/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: + root_elem = etree.Element("response", nsmap=XML_NS_MAP) + if cursor is not None: + root_elem.set("cursor", str(cursor)) + if isinstance(obj, Result): + result_elem = 
self.result_to_xml(obj, **XML_NS_MAP) + for child in result_elem: + root_elem.append(child) + elif isinstance(obj, list): + for item in obj: + item_elem = xml_serialization.object_to_xml_element(item) + root_elem.append(item_elem) + else: + obj_elem = xml_serialization.object_to_xml_element(obj) + for child in obj_elem: + root_elem.append(child) + etree.cleanup_namespaces(root_elem) + xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") + return xml_str # type: ignore[return-value] + + @classmethod + def result_to_xml(cls, result: Result, **kwargs) -> etree._Element: + result_elem = etree.Element("result", **kwargs) + success_elem = etree.Element("success") + success_elem.text = xml_serialization.boolean_to_xml(result.success) + messages_elem = etree.Element("messages") + for message in result.messages: + messages_elem.append(cls.message_to_xml(message)) + + result_elem.append(success_elem) + result_elem.append(messages_elem) + return result_elem + + @classmethod + def message_to_xml(cls, message: Message) -> etree._Element: + message_elem = etree.Element("message") + message_type_elem = etree.Element("messageType") + message_type_elem.text = str(message.message_type) + text_elem = etree.Element("text") + text_elem.text = message.text + code_elem = etree.Element("code") + code_elem.text = message.code + timestamp_elem = etree.Element("timestamp") + timestamp_elem.text = message.timestamp.isoformat() + + message_elem.append(message_type_elem) + message_elem.append(text_elem) + message_elem.append(code_elem) + message_elem.append(timestamp_elem) + return message_elem + + +class XmlResponseAlt(XmlResponse): + def __init__(self, *args, content_type="text/xml", **kwargs): + super().__init__(*args, **kwargs, content_type=content_type) + + +class ResultToJsonEncoder(AASToJsonEncoder): + @classmethod + def _result_to_json(cls, result: Result) -> Dict[str, object]: + return { + "success": result.success, + "messages": result.messages + } + + 
@classmethod + def _message_to_json(cls, message: Message) -> Dict[str, object]: + return { + "messageType": message.message_type, + "text": message.text, + "code": message.code, + "timestamp": message.timestamp.isoformat() + } + + def default(self, obj: object) -> object: + if isinstance(obj, Result): + return self._result_to_json(obj) + if isinstance(obj, Message): + return self._message_to_json(obj) + if isinstance(obj, MessageType): + return str(obj) + return super().default(obj) + + +class StrippedResultToJsonEncoder(ResultToJsonEncoder): + stripped = True + + +class BaseWSGIApp: + url_map: werkzeug.routing.Map + + # TODO: the parameters can be typed via builtin wsgiref with Python 3.11+ + def __call__(self, environ, start_response) -> Iterable[bytes]: + response: Response = self.handle_request(Request(environ)) + return response(environ, start_response) + + @classmethod + def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: + limit_str = request.args.get('limit', default="10") + cursor_str = request.args.get('cursor', default="1") + try: + limit, cursor = int(limit_str), int(cursor_str) - 1 # cursor is 1-indexed + if limit < 0 or cursor < 0: + raise ValueError + except ValueError: + raise BadRequest("Limit can not be negative, cursor must be positive!") + start_index = cursor + end_index = cursor + limit + paginated_slice = itertools.islice(iterator, start_index, end_index) + return paginated_slice, end_index + + def handle_request(self, request: Request): + map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) + try: + response_t = self.get_response_type(request) + except werkzeug.exceptions.NotAcceptable as e: + return e + + try: + endpoint, values = map_adapter.match() + return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) + + # any raised error that leaves this function will cause a 500 internal server error + # so catch raised http exceptions and return them + except 
werkzeug.exceptions.HTTPException as e: + return self.http_exception_to_response(e, response_t) + + @staticmethod + def get_response_type(request: Request) -> Type[APIResponse]: + response_types: Dict[str, Type[APIResponse]] = { + "application/json": JsonResponse, + "application/xml": XmlResponse, + "text/xml": XmlResponseAlt + } + if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): + return JsonResponse + mime_type = request.accept_mimetypes.best_match(response_types) + if mime_type is None: + raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " + + ", ".join(response_types.keys())) + return response_types[mime_type] + + @staticmethod + def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ + -> APIResponse: + headers = exception.get_headers() + location = exception.get_response().location + if location is not None: + headers.append(("Location", location)) + if exception.code and exception.code >= 400: + message = Message(type(exception).__name__, + exception.description if exception.description is not None else "", + MessageType.ERROR) + result = Result(False, [message]) + else: + result = Result(False) + return response_type(result, status=exception.code, headers=headers) + + +class ObjectStoreWSGIApp(BaseWSGIApp): + object_store: AbstractObjectStore + + def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: + for obj in self.object_store: + if isinstance(obj, type_): + obj.update() + yield obj + + def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: + identifiable = self.object_store.get(identifier) + if not isinstance(identifiable, type_): + raise NotFound(f"No {type_.__name__} with {identifier} found!") + identifiable.update() + return identifiable + + +class HTTPApiDecoder: + # these are the types we can construct (well, only the 
ones we need) + type_constructables_map = { + model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, + model.AssetInformation: XMLConstructables.ASSET_INFORMATION, + model.ModelReference: XMLConstructables.MODEL_REFERENCE, + model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, + model.Qualifier: XMLConstructables.QUALIFIER, + model.Submodel: XMLConstructables.SUBMODEL, + model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, + model.Reference: XMLConstructables.REFERENCE, + } + + @classmethod + def check_type_support(cls, type_: type): + if type_ not in cls.type_constructables_map: + raise TypeError(f"Parsing {type_} is not supported!") + + @classmethod + def assert_type(cls, obj: object, type_: Type[T]) -> T: + if not isinstance(obj, type_): + raise BadRequest(f"Object {obj!r} is not of type {type_.__name__}!") + return obj + + @classmethod + def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + cls.check_type_support(expect_type) + decoder: Type[StrictAASFromJsonDecoder] = StrictStrippedAASFromJsonDecoder if stripped \ + else StrictAASFromJsonDecoder + try: + parsed = json.loads(data, cls=decoder) + if isinstance(parsed, list) and expect_single: + raise BadRequest(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") + if not isinstance(parsed, list) and not expect_single: + raise BadRequest(f"Expected List[{expect_type.__name__}], got {parsed!r}!") + parsed = [parsed] if not isinstance(parsed, list) else parsed + + # TODO: the following is ugly, but necessary because references aren't self-identified objects + # in the json schema + # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines + # that automatically + mapping = { + model.ModelReference: decoder._construct_model_reference, + model.AssetInformation: decoder._construct_asset_information, + model.SpecificAssetId: 
decoder._construct_specific_asset_id, + model.Reference: decoder._construct_reference, + model.Qualifier: decoder._construct_qualifier, + } + + constructor: Optional[Callable[..., T]] = mapping.get(expect_type) # type: ignore[assignment] + args = [] + if expect_type is model.ModelReference: + args.append(model.Submodel) + + if constructor is not None: + # construct elements that aren't self-identified + return [constructor(obj, *args) for obj in parsed] + + except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: + raise BadRequest(str(e)) from e + + return [cls.assert_type(obj, expect_type) for obj in parsed] + + @classmethod + def base64url_json_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: + data = base64url_decode(data) + return cls.json_list(data, expect_type, stripped, expect_single) + + @classmethod + def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: + return cls.json_list(data, expect_type, stripped, True)[0] + + @classmethod + def base64url_json(cls, data: str, expect_type: Type[T], stripped: bool) -> T: + data = base64url_decode(data) + return cls.json_list(data, expect_type, stripped, True)[0] + + @classmethod + def xml(cls, data: bytes, expect_type: Type[T], stripped: bool) -> T: + cls.check_type_support(expect_type) + try: + xml_data = io.BytesIO(data) + rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], + stripped=stripped, failsafe=False) + except (KeyError, ValueError) as e: + # xml deserialization creates an error chain. 
since we only return one error, return the root cause + f: BaseException = e + while f.__cause__ is not None: + f = f.__cause__ + raise BadRequest(str(f)) from e + except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: + raise BadRequest(str(e)) from e + return cls.assert_type(rv, expect_type) + + @classmethod + def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: + """ + TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent + running out of memory. but it doesn't state how to check the content length + also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json + schema + In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app, + which should limit the maximum content length. + """ + valid_content_types = ("application/json", "application/xml", "text/xml") + + if request.mimetype not in valid_content_types: + raise werkzeug.exceptions.UnsupportedMediaType( + f"Invalid content-type: {request.mimetype}! Supported types: " + + ", ".join(valid_content_types)) + + if request.mimetype == "application/json": + return cls.json(request.get_data(), expect_type, stripped) + return cls.xml(request.get_data(), expect_type, stripped) + + @classmethod + def request_body_list(cls, request: Request, expect_type: Type[T], stripped: bool) -> List[T]: + """ + Deserializes the request body to an instance (or list of instances) + of the expected type. + """ + # TODO: Refactor this method and request_body to avoid code duplication + valid_content_types = ("application/json", "application/xml", "text/xml") + + if request.mimetype not in valid_content_types: + raise werkzeug.exceptions.UnsupportedMediaType( + f"Invalid content-type: {request.mimetype}! 
Supported types: " + ", ".join(valid_content_types) + ) + + if request.mimetype == "application/json": + raw_data = request.get_data() + try: + parsed = json.loads(raw_data) + except Exception as e: + raise werkzeug.exceptions.BadRequest(f"Invalid JSON: {e}") + # Prüfe, ob parsed ein Array ist: + if isinstance(parsed, list): + # Für jedes Element wird die Konvertierung angewandt. + return [cls._convert_single_json_item(item, expect_type, stripped) for item in parsed] # type: ignore + else: + return [cls._convert_single_json_item(parsed, expect_type, stripped)] + else: + return [cls.xml(request.get_data(), expect_type, stripped)] + + @classmethod + def _convert_single_json_item(cls, data: Any, expect_type: Type[T], stripped: bool) -> T: + """ + Converts a single JSON-Object (as a Python-Dict) to an object of type expect_type. + Here the dictionary is first serialized back to a JSON-string and returned as bytes. + """ + json_bytes = json.dumps(data).encode("utf-8") + return cls.json(json_bytes, expect_type, stripped) + + +def is_stripped_request(request: Request) -> bool: + level = request.args.get("level") + if level not in {"deep", "core", None}: + raise BadRequest(f"Level {level} is not a valid level!") + extent = request.args.get("extent") + if extent is not None: + raise werkzeug.exceptions.NotImplemented(f"The parameter extent is not yet implemented for this server!") + return level == "core" diff --git a/sdk/basyx/aas/adapter/http.py b/server/app/interfaces/repository.py similarity index 70% rename from sdk/basyx/aas/adapter/http.py rename to server/app/interfaces/repository.py index 12bd533f3..c55d9a7eb 100644 --- a/sdk/basyx/aas/adapter/http.py +++ b/server/app/interfaces/repository.py @@ -34,403 +34,25 @@ - `GET /submodels/{submodelIdentifier}/submodel-elements/{idShortPath}/operation-results/{handleId}/$value` """ -import abc -import base64 -import binascii -import datetime -import enum import io import json -import itertools -import urllib +from typing 
import Type, Iterator, List, Dict, Union, Callable, Tuple, Optional -from lxml import etree import werkzeug.exceptions import werkzeug.routing -import werkzeug.urls import werkzeug.utils -from werkzeug.exceptions import BadRequest, Conflict, NotFound -from werkzeug.routing import MapAdapter, Rule, Submount -from werkzeug.wrappers import Request, Response +from werkzeug import Response, Request from werkzeug.datastructures import FileStorage +from werkzeug.exceptions import NotFound, BadRequest, Conflict +from werkzeug.routing import Submount, Rule, MapAdapter from basyx.aas import model -from ._generic import XML_NS_MAP -from .xml import XMLConstructables, read_aas_xml_element, xml_serialization, object_to_xml_element -from .json import AASToJsonEncoder, StrictAASFromJsonDecoder, StrictStrippedAASFromJsonDecoder -from . import aasx +from basyx.aas.adapter import aasx +from util.converters import IdentifierToBase64URLConverter, IdShortPathConverter, base64url_decode +from .base import ObjectStoreWSGIApp, APIResponse, is_stripped_request, HTTPApiDecoder, T -from typing import Callable, Dict, Iterable, Iterator, List, Optional, Type, TypeVar, Union, Tuple - -@enum.unique -class MessageType(enum.Enum): - UNDEFINED = enum.auto() - INFO = enum.auto() - WARNING = enum.auto() - ERROR = enum.auto() - EXCEPTION = enum.auto() - - def __str__(self): - return self.name.capitalize() - - -class Message: - def __init__(self, code: str, text: str, message_type: MessageType = MessageType.UNDEFINED, - timestamp: Optional[datetime.datetime] = None): - self.code: str = code - self.text: str = text - self.message_type: MessageType = message_type - self.timestamp: datetime.datetime = timestamp if timestamp is not None \ - else datetime.datetime.now(datetime.timezone.utc) - - -class Result: - def __init__(self, success: bool, messages: Optional[List[Message]] = None): - if messages is None: - messages = [] - self.success: bool = success - self.messages: List[Message] = messages - - -class 
ResultToJsonEncoder(AASToJsonEncoder): - @classmethod - def _result_to_json(cls, result: Result) -> Dict[str, object]: - return { - "success": result.success, - "messages": result.messages - } - - @classmethod - def _message_to_json(cls, message: Message) -> Dict[str, object]: - return { - "messageType": message.message_type, - "text": message.text, - "code": message.code, - "timestamp": message.timestamp.isoformat() - } - - def default(self, obj: object) -> object: - if isinstance(obj, Result): - return self._result_to_json(obj) - if isinstance(obj, Message): - return self._message_to_json(obj) - if isinstance(obj, MessageType): - return str(obj) - return super().default(obj) - - -class StrippedResultToJsonEncoder(ResultToJsonEncoder): - stripped = True - - -ResponseData = Union[Result, object, List[object]] - - -class APIResponse(abc.ABC, Response): - @abc.abstractmethod - def __init__(self, obj: Optional[ResponseData] = None, cursor: Optional[int] = None, - stripped: bool = False, *args, **kwargs): - super().__init__(*args, **kwargs) - if obj is None: - self.status_code = 204 - else: - self.data = self.serialize(obj, cursor, stripped) - - @abc.abstractmethod - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - pass - - -class JsonResponse(APIResponse): - def __init__(self, *args, content_type="application/json", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, cursor: Optional[int], stripped: bool) -> str: - if cursor is None: - data = obj - else: - data = { - "paging_metadata": {"cursor": str(cursor)}, - "result": obj - } - return json.dumps( - data, - cls=StrippedResultToJsonEncoder if stripped else ResultToJsonEncoder, - separators=(",", ":") - ) - - -class XmlResponse(APIResponse): - def __init__(self, *args, content_type="application/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - def serialize(self, obj: ResponseData, 
cursor: Optional[int], stripped: bool) -> str: - root_elem = etree.Element("response", nsmap=XML_NS_MAP) - if cursor is not None: - root_elem.set("cursor", str(cursor)) - if isinstance(obj, Result): - result_elem = result_to_xml(obj, **XML_NS_MAP) - for child in result_elem: - root_elem.append(child) - elif isinstance(obj, list): - for item in obj: - item_elem = object_to_xml_element(item) - root_elem.append(item_elem) - else: - obj_elem = object_to_xml_element(obj) - for child in obj_elem: - root_elem.append(child) - etree.cleanup_namespaces(root_elem) - xml_str = etree.tostring(root_elem, xml_declaration=True, encoding="utf-8") - return xml_str # type: ignore[return-value] - - -class XmlResponseAlt(XmlResponse): - def __init__(self, *args, content_type="text/xml", **kwargs): - super().__init__(*args, **kwargs, content_type=content_type) - - -def result_to_xml(result: Result, **kwargs) -> etree._Element: - result_elem = etree.Element("result", **kwargs) - success_elem = etree.Element("success") - success_elem.text = xml_serialization.boolean_to_xml(result.success) - messages_elem = etree.Element("messages") - for message in result.messages: - messages_elem.append(message_to_xml(message)) - - result_elem.append(success_elem) - result_elem.append(messages_elem) - return result_elem - - -def message_to_xml(message: Message) -> etree._Element: - message_elem = etree.Element("message") - message_type_elem = etree.Element("messageType") - message_type_elem.text = str(message.message_type) - text_elem = etree.Element("text") - text_elem.text = message.text - code_elem = etree.Element("code") - code_elem.text = message.code - timestamp_elem = etree.Element("timestamp") - timestamp_elem.text = message.timestamp.isoformat() - - message_elem.append(message_type_elem) - message_elem.append(text_elem) - message_elem.append(code_elem) - message_elem.append(timestamp_elem) - return message_elem - - -def get_response_type(request: Request) -> Type[APIResponse]: - response_types: 
Dict[str, Type[APIResponse]] = { - "application/json": JsonResponse, - "application/xml": XmlResponse, - "text/xml": XmlResponseAlt - } - if len(request.accept_mimetypes) == 0 or request.accept_mimetypes.best in (None, "*/*"): - return JsonResponse - mime_type = request.accept_mimetypes.best_match(response_types) - if mime_type is None: - raise werkzeug.exceptions.NotAcceptable("This server supports the following content types: " - + ", ".join(response_types.keys())) - return response_types[mime_type] - - -def http_exception_to_response(exception: werkzeug.exceptions.HTTPException, response_type: Type[APIResponse]) \ - -> APIResponse: - headers = exception.get_headers() - location = exception.get_response().location - if location is not None: - headers.append(("Location", location)) - if exception.code and exception.code >= 400: - message = Message(type(exception).__name__, exception.description if exception.description is not None else "", - MessageType.ERROR) - result = Result(False, [message]) - else: - result = Result(False) - return response_type(result, status=exception.code, headers=headers) - - -def is_stripped_request(request: Request) -> bool: - level = request.args.get("level") - if level not in {"deep", "core", None}: - raise BadRequest(f"Level {level} is not a valid level!") - extent = request.args.get("extent") - if extent is not None: - raise werkzeug.exceptions.NotImplemented(f"The parameter extent is not yet implemented for this server!") - return level == "core" - - -T = TypeVar("T") - -BASE64URL_ENCODING = "utf-8" - - -def base64url_decode(data: str) -> str: - try: - # If the requester omits the base64 padding, an exception will be raised. - # However, Python doesn't complain about too much padding, - # thus we simply always append two padding characters (==). 
- # See also: https://stackoverflow.com/a/49459036/4780052 - decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) - except binascii.Error: - raise BadRequest(f"Encoded data {data} is invalid base64url!") - except UnicodeDecodeError: - raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") - return decoded - - -def base64url_encode(data: str) -> str: - encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") - return encoded - - -class HTTPApiDecoder: - # these are the types we can construct (well, only the ones we need) - type_constructables_map = { - model.AssetAdministrationShell: XMLConstructables.ASSET_ADMINISTRATION_SHELL, - model.AssetInformation: XMLConstructables.ASSET_INFORMATION, - model.ModelReference: XMLConstructables.MODEL_REFERENCE, - model.SpecificAssetId: XMLConstructables.SPECIFIC_ASSET_ID, - model.Qualifier: XMLConstructables.QUALIFIER, - model.Submodel: XMLConstructables.SUBMODEL, - model.SubmodelElement: XMLConstructables.SUBMODEL_ELEMENT, - model.Reference: XMLConstructables.REFERENCE - } - - @classmethod - def check_type_supportance(cls, type_: type): - if type_ not in cls.type_constructables_map: - raise TypeError(f"Parsing {type_} is not supported!") - - @classmethod - def assert_type(cls, obj: object, type_: Type[T]) -> T: - if not isinstance(obj, type_): - raise BadRequest(f"Object {obj!r} is not of type {type_.__name__}!") - return obj - - @classmethod - def json_list(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - cls.check_type_supportance(expect_type) - decoder: Type[StrictAASFromJsonDecoder] = StrictStrippedAASFromJsonDecoder if stripped \ - else StrictAASFromJsonDecoder - try: - parsed = json.loads(data, cls=decoder) - if not isinstance(parsed, list): - if not expect_single: - raise BadRequest(f"Expected List[{expect_type.__name__}], got {parsed!r}!") - parsed = [parsed] - elif expect_single: - 
raise BadRequest(f"Expected a single object of type {expect_type.__name__}, got {parsed!r}!") - # TODO: the following is ugly, but necessary because references aren't self-identified objects - # in the json schema - # TODO: json deserialization will always create an ModelReference[Submodel], xml deserialization determines - # that automatically - constructor: Optional[Callable[..., T]] = None - args = [] - if expect_type is model.ModelReference: - constructor = decoder._construct_model_reference # type: ignore[assignment] - args.append(model.Submodel) - elif expect_type is model.AssetInformation: - constructor = decoder._construct_asset_information # type: ignore[assignment] - elif expect_type is model.SpecificAssetId: - constructor = decoder._construct_specific_asset_id # type: ignore[assignment] - elif expect_type is model.Reference: - constructor = decoder._construct_reference # type: ignore[assignment] - elif expect_type is model.Qualifier: - constructor = decoder._construct_qualifier # type: ignore[assignment] - - if constructor is not None: - # construct elements that aren't self-identified - return [constructor(obj, *args) for obj in parsed] - - except (KeyError, ValueError, TypeError, json.JSONDecodeError, model.AASConstraintViolation) as e: - raise BadRequest(str(e)) from e - - return [cls.assert_type(obj, expect_type) for obj in parsed] - - @classmethod - def base64urljson_list(cls, data: str, expect_type: Type[T], stripped: bool, expect_single: bool) -> List[T]: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, expect_single) - - @classmethod - def json(cls, data: Union[str, bytes], expect_type: Type[T], stripped: bool) -> T: - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def base64urljson(cls, data: str, expect_type: Type[T], stripped: bool) -> T: - data = base64url_decode(data) - return cls.json_list(data, expect_type, stripped, True)[0] - - @classmethod - def xml(cls, data: bytes, 
expect_type: Type[T], stripped: bool) -> T: - cls.check_type_supportance(expect_type) - try: - xml_data = io.BytesIO(data) - rv = read_aas_xml_element(xml_data, cls.type_constructables_map[expect_type], - stripped=stripped, failsafe=False) - except (KeyError, ValueError) as e: - # xml deserialization creates an error chain. since we only return one error, return the root cause - f: BaseException = e - while f.__cause__ is not None: - f = f.__cause__ - raise BadRequest(str(f)) from e - except (etree.XMLSyntaxError, model.AASConstraintViolation) as e: - raise BadRequest(str(e)) from e - return cls.assert_type(rv, expect_type) - - @classmethod - def request_body(cls, request: Request, expect_type: Type[T], stripped: bool) -> T: - """ - TODO: werkzeug documentation recommends checking the content length before retrieving the body to prevent - running out of memory. but it doesn't state how to check the content length - also: what would be a reasonable maximum content length? the request body isn't limited by the xml/json - schema - In the meeting (25.11.2020) we discussed, this may refer to a reverse proxy in front of this WSGI app, - which should limit the maximum content length. - """ - valid_content_types = ("application/json", "application/xml", "text/xml") - - if request.mimetype not in valid_content_types: - raise werkzeug.exceptions.UnsupportedMediaType( - f"Invalid content-type: {request.mimetype}! 
Supported types: " - + ", ".join(valid_content_types)) - - if request.mimetype == "application/json": - return cls.json(request.get_data(), expect_type, stripped) - return cls.xml(request.get_data(), expect_type, stripped) - - -class Base64URLConverter(werkzeug.routing.UnicodeConverter): - - def to_url(self, value: model.Identifier) -> str: - return super().to_url(base64url_encode(value)) - - def to_python(self, value: str) -> model.Identifier: - value = super().to_python(value) - decoded = base64url_decode(super().to_python(value)) - return decoded - - -class IdShortPathConverter(werkzeug.routing.UnicodeConverter): - id_short_sep = "." - - def to_url(self, value: List[str]) -> str: - return super().to_url(self.id_short_sep.join(value)) - - def to_python(self, value: str) -> List[str]: - id_shorts = super().to_python(value).split(self.id_short_sep) - for id_short in id_shorts: - try: - model.Referable.validate_id_short(id_short) - except (ValueError, model.AASConstraintViolation): - raise BadRequest(f"{id_short} is not a valid id_short!") - return id_shorts - - -class WSGIApp: +class WSGIApp(ObjectStoreWSGIApp): def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.AbstractSupplementaryFileContainer, base_path: str = "/api/v3.0"): self.object_store: model.AbstractObjectStore = object_store @@ -488,8 +110,7 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs Rule("/submodel-elements", methods=["POST"], endpoint=self.post_submodel_submodel_elements_id_short_path), Submount("/submodel-elements", [ - Rule("/$metadata", methods=["GET"], - endpoint=self.get_submodel_submodel_elements_metadata), + Rule("/$metadata", methods=["GET"], endpoint=self.get_submodel_submodel_elements_metadata), Rule("/$reference", methods=["GET"], endpoint=self.get_submodel_submodel_elements_reference), Rule("/$value", methods=["GET"], endpoint=self.not_implemented), @@ -525,10 +146,8 @@ def __init__(self, object_store: 
model.AbstractObjectStore, file_store: aasx.Abs Rule("/operation-status/", methods=["GET"], endpoint=self.not_implemented), Submount("/operation-results", [ - Rule("/", methods=["GET"], - endpoint=self.not_implemented), - Rule("//$value", methods=["GET"], - endpoint=self.not_implemented) + Rule("/", methods=["GET"], endpoint=self.not_implemented), + Rule("//$value", methods=["GET"], endpoint=self.not_implemented) ]), Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), @@ -544,10 +163,8 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs ]) ]) ]), - Rule("/qualifiers", methods=["GET"], - endpoint=self.get_submodel_submodel_element_qualifiers), - Rule("/qualifiers", methods=["POST"], - endpoint=self.post_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), + Rule("/qualifiers", methods=["POST"], endpoint=self.post_submodel_submodel_element_qualifiers), Submount("/qualifiers", [ Rule("/", methods=["GET"], endpoint=self.get_submodel_submodel_element_qualifiers), @@ -567,28 +184,10 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs ]), ]) ], converters={ - "base64url": Base64URLConverter, + "base64url": IdentifierToBase64URLConverter, "id_short_path": IdShortPathConverter }, strict_slashes=False) - # TODO: the parameters can be typed via builtin wsgiref with Python 3.11+ - def __call__(self, environ, start_response) -> Iterable[bytes]: - response: Response = self.handle_request(Request(environ)) - return response(environ, start_response) - - def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT: - identifiable = self.object_store.get(identifier) - if not isinstance(identifiable, type_): - raise NotFound(f"No {type_.__name__} with {identifier} found!") - identifiable.update() - return identifiable - - def _get_all_obj_of_type(self, 
type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]: - for obj in self.object_store: - if isinstance(obj, type_): - obj.update() - yield obj - def _resolve_reference(self, reference: model.ModelReference[model.base._RT]) -> model.base._RT: try: return reference.resolve(self.object_store) @@ -651,21 +250,6 @@ def _get_submodel_reference(cls, aas: model.AssetAdministrationShell, submodel_i return ref raise NotFound(f"The AAS {aas!r} doesn't have a submodel reference to {submodel_id!r}!") - @classmethod - def _get_slice(cls, request: Request, iterator: Iterable[T]) -> Tuple[Iterator[T], int]: - limit_str = request.args.get('limit', default="10") - cursor_str = request.args.get('cursor', default="1") - try: - limit, cursor = int(limit_str), int(cursor_str) - 1 # cursor is 1-indexed - if limit < 0 or cursor < 0: - raise ValueError - except ValueError: - raise BadRequest("Limit can not be negative, cursor must be positive!") - start_index = cursor - end_index = cursor + limit - paginated_slice = itertools.islice(iterator, start_index, end_index) - return paginated_slice, end_index - def _get_shells(self, request: Request) -> Tuple[Iterator[model.AssetAdministrationShell], int]: aas: Iterator[model.AssetAdministrationShell] = self._get_all_obj_of_type(model.AssetAdministrationShell) @@ -713,7 +297,7 @@ def _get_submodels(self, request: Request) -> Tuple[Iterator[model.Submodel], in submodels = filter(lambda sm: sm.id_short == id_short, submodels) semantic_id = request.args.get("semanticId") if semantic_id is not None: - spec_semantic_id = HTTPApiDecoder.base64urljson( + spec_semantic_id = HTTPApiDecoder.base64url_json( semantic_id, model.Reference, False) # type: ignore[type-abstract] submodels = filter(lambda sm: sm.semantic_id == spec_semantic_id, submodels) paginated_submodels, end_index = self._get_slice(request, submodels) @@ -737,22 +321,6 @@ def _get_submodel_submodel_elements_id_short_path(self, url_args: Dict) -> model def 
_get_concept_description(self, url_args): return self._get_obj_ts(url_args["concept_id"], model.ConceptDescription) - def handle_request(self, request: Request): - map_adapter: MapAdapter = self.url_map.bind_to_environ(request.environ) - try: - response_t = get_response_type(request) - except werkzeug.exceptions.NotAcceptable as e: - return e - - try: - endpoint, values = map_adapter.match() - return endpoint(request, values, response_t=response_t, map_adapter=map_adapter) - - # any raised error that leaves this function will cause a 500 internal server error - # so catch raised http exceptions and return them - except werkzeug.exceptions.HTTPException as e: - return http_exception_to_response(e, response_t) - # ------ all not implemented ROUTES ------- def not_implemented(self, request: Request, url_args: Dict, **_kwargs) -> Response: raise werkzeug.exceptions.NotImplemented("This route is not implemented!") diff --git a/server/app/main.py b/server/app/main.py index c502bfbe0..49ba615a0 100644 --- a/server/app/main.py +++ b/server/app/main.py @@ -6,7 +6,7 @@ from basyx.aas.adapter import aasx from basyx.aas.backend.local_file import LocalFileObjectStore -from basyx.aas.adapter.http import WSGIApp +from interfaces.repository import WSGIApp storage_path = os.getenv("STORAGE_PATH", "/storage") storage_type = os.getenv("STORAGE_TYPE", "LOCAL_FILE_READ_ONLY") diff --git a/server/app/py.typed b/server/app/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/pyproject.toml b/server/app/pyproject.toml new file mode 100644 index 000000000..abaae73d6 --- /dev/null +++ b/server/app/pyproject.toml @@ -0,0 +1,64 @@ +[build-system] +requires = [ + "setuptools>=45", + "wheel", + "setuptools_scm[toml]>=6.2" +] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] +# Configure setuptools_scm for version management: +# - Automatically infers the version number from the most recent git tag +# - Generates a version.py file in the package 
directory +# - Allows for automatic versioning between releases (e.g., 1.0.1.dev4+g12345) +# If you want to use the version anywhere in the code, use +# ``` +# from app.version import version +# print(f"Project version: {version}") +# ``` +root = "../.." # Defines the path to the root of the repository +version_file = "version.py" + +[project] +name = "basyx-python-server" +dynamic = ["version"] +description = "The Eclipse BaSyx Python Server, an implementation of the BaSyx AAS Server" #FIXME +authors = [ + { name = "The Eclipse BaSyx Authors", email = "admins@iat.rwth-aachen.de" } +] +readme = "README.md" +license = { file = "LICENSE" } +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Development Status :: 5 - Production/Stable" +] +requires-python = ">=3.9" +dependencies = [ + "urllib3>=1.26,<3", + "Werkzeug>=3.0.3,<4", +] + +[project.optional-dependencies] +dev = [ + "mypy", + "pycodestyle", + "codeblocks", + "schemathesis~=3.7", + "jsonschema~=4.7", + "hypothesis~=6.13", + "lxml-stubs~=0.5.1", +] + +[project.urls] +"Homepage" = "https://github.com/eclipse-basyx/basyx-python-sdk" + +[tool.setuptools] +packages = { find = { exclude = ["test*"] } } + +[tool.setuptools.package-data] +app = ["py.typed"] + +[tool.mypy] +exclude = "build/" diff --git a/server/app/util/__init__.py b/server/app/util/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/app/util/converters.py b/server/app/util/converters.py new file mode 100644 index 000000000..0db897cb6 --- /dev/null +++ b/server/app/util/converters.py @@ -0,0 +1,83 @@ +# Copyright (c) 2025 the Eclipse BaSyx Authors +# +# This program and the accompanying materials are made available under the terms of the MIT License, available in +# the LICENSE file of this project. 
+# +# SPDX-License-Identifier: MIT +""" +This module contains helper classes for converting various types between our Python SDK types +and the HTTP-API formats, such as: +- Base64URLConverter +- IdShortPathConverter +""" + +import base64 +import binascii + +import werkzeug.routing +import werkzeug.utils +from werkzeug.exceptions import BadRequest + +from basyx.aas import model + +from typing import List + +BASE64URL_ENCODING = "utf-8" + + +def base64url_decode(data: str) -> str: + try: + # If the requester omits the base64 padding, an exception will be raised. + # However, Python doesn't complain about too much padding, + # thus we simply always append two padding characters (==). + # See also: https://stackoverflow.com/a/49459036/4780052 + decoded = base64.urlsafe_b64decode(data + "==").decode(BASE64URL_ENCODING) + except binascii.Error: + raise BadRequest(f"Encoded data {data} is invalid base64url!") + except UnicodeDecodeError: + raise BadRequest(f"Encoded base64url value is not a valid {BASE64URL_ENCODING} string!") + return decoded + + +def base64url_encode(data: str) -> str: + encoded = base64.urlsafe_b64encode(data.encode(BASE64URL_ENCODING)).decode("ascii") + return encoded + + +class IdentifierToBase64URLConverter(werkzeug.routing.UnicodeConverter): + """ + A custom URL converter for Werkzeug routing that encodes and decodes + Identifiers using Base64 URL-safe encoding. + """ + def to_url(self, value: model.Identifier) -> str: + return super().to_url(base64url_encode(value)) + + def to_python(self, value: str) -> model.Identifier: + value = super().to_python(value) + decoded = base64url_decode(value) + return decoded + + +class IdShortPathConverter(werkzeug.routing.UnicodeConverter): + """ + A custom Werkzeug URL converter for handling id_short_sep-separated idShort paths. 
+ + This converter joins a list of idShort strings into an id_short_sep-separated path for URLs + (e.g., ["submodel", "element"] -> "submodel.element") and parses incoming URL paths + back into a list, validating each idShort. + + :cvar id_short_sep: Separator used to join and split idShort segments. + """ + id_short_sep = "." + + def to_url(self, value: List[str]) -> str: + return super().to_url(self.id_short_sep.join(value)) + + def to_python(self, value: str) -> List[str]: + id_shorts = super().to_python(value).split(self.id_short_sep) + for id_short in id_shorts: + try: + model.Referable.validate_id_short(id_short) + except (ValueError, model.AASConstraintViolation): + raise BadRequest(f"{id_short} is not a valid id_short!") + return id_shorts diff --git a/server/test/__init__.py b/server/test/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/test/interfaces/__init__.py b/server/test/interfaces/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/sdk/test/adapter/test_http.py b/server/test/interfaces/test_repository.py similarity index 98% rename from sdk/test/adapter/test_http.py rename to server/test/interfaces/test_repository.py index 09dadf865..5177dfacb 100644 --- a/sdk/test/adapter/test_http.py +++ b/server/test/interfaces/test_repository.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 the Eclipse BaSyx Authors +# Copyright (c) 2025 the Eclipse BaSyx Authors # # This program and the accompanying materials are made available under the terms of the MIT License, available in # the LICENSE file of this project. @@ -34,7 +34,7 @@ from basyx.aas import model from basyx.aas.adapter.aasx import DictSupplementaryFileContainer -from basyx.aas.adapter.http import WSGIApp +from server.app.interfaces.repository import WSGIApp from basyx.aas.examples.data.example_aas import create_full_example from typing import Set