diff --git a/sdk/README.md b/sdk/README.md index 5dfd2e9a8..74d63b8f3 100644 --- a/sdk/README.md +++ b/sdk/README.md @@ -127,7 +127,7 @@ For further examples and tutorials, check out the `basyx.aas.examples`-package. * [`tutorial_storage`](./basyx/aas/examples/tutorial_storage.py): Manage a larger number of Asset Administration Shells in an ObjectStore and resolve references * [`tutorial_serialization_deserialization`](./basyx/aas/examples/tutorial_serialization_deserialization.py): Use the JSON and XML serialization/deserialization for single objects or full standard-compliant files * [`tutorial_aasx`](./basyx/aas/examples/tutorial_aasx.py): Export Asset Administration Shells with related objects and auxiliary files to AASX package files -* [`tutorial_backend_couchdb`](./basyx/aas/examples/tutorial_backend_couchdb.py): Use the *Backends* interface (`update()/commit()` methods) to manage and retrieve AAS objects in a CouchDB document database +* [`tutorial_backend_couchdb`](./basyx/aas/examples/tutorial_backend_couchdb.py): Use the *CouchDBObjectStore* to manage and retrieve AAS objects in a CouchDB document database ### Documentation diff --git a/sdk/basyx/aas/backend/__init__.py b/sdk/basyx/aas/backend/__init__.py index f58060995..c1da90b1e 100644 --- a/sdk/basyx/aas/backend/__init__.py +++ b/sdk/basyx/aas/backend/__init__.py @@ -1,7 +1,3 @@ """ -This module implements a standardized way of integrating data from existing systems into AAS objects. To achieve this, -the abstract :class:`~basyx.aas.backend.backends.Backend` class implements the class methods -:meth:`~basyx.aas.backend.backends.Backend.update_object` and :meth:`~basyx.aas.backend.backends.Backend.commit_object`, -which every implementation of a backend needs to overwrite. For a tutorial on how to implement a backend, see -:ref:`this tutorial ` +This module implements a standardized way of persisting AAS objects using various backends. """ diff --git a/sdk/basyx/aas/backend/backends.py b/sdk/basyx/aas/backend/backends.py deleted file mode 100644 index 31be12628..000000000 --- a/sdk/basyx/aas/backend/backends.py +++ /dev/null @@ -1,175 +0,0 @@ -# Copyright (c) 2025 the Eclipse BaSyx Authors -# -# This program and the accompanying materials are made available under the terms of the MIT License, available in -# the LICENSE file of this project. -# -# SPDX-License-Identifier: MIT -""" -This module provides a registry and abstract base class for Backends. A :class:`~.Backend` is a class that allows to -synchronize Referable AAS objects or their included data with external data sources such as a remote API or a local -source for real time data. Each backend provides access to one kind of data source. - -The data source of an individual object is specified as a URI in its ``source`` attribute. The schema part of that URI -defines the type of data source and, in consequence, the backend class to use for synchronizing this object. - -Custom backends for additional types of data sources can be implemented by subclassing :class:`Backend` and -implementing the :meth:`~.Backend.commit_object` and :meth:`~.Backend.update_object` class methods. These are used -internally by the objects' :meth:`~basyx.aas.model.base.Referable.update` and -:meth:`~basyx.aas.model.base.Referable.commit` methods when the backend is applicable for the relevant source URI. -Then, the Backend class needs to be registered to handle update/commit requests for a specific URI schema, using -:meth:`~basyx.aas.backend.backends.register_backend`. 
-""" -import abc -import re -from typing import List, Dict, Type, TYPE_CHECKING - -if TYPE_CHECKING: - from ..model import Referable - - -class Backend(metaclass=abc.ABCMeta): - """ - Abstract base class for all Backend classes. - - Each Backend class is typically capable of synchronizing (updating/committing) objects with a type of external data - source, identified by one or more source URI schemas. Custom backends for custom source URI schemas should inherit - from this class and be registered via :meth:`~basyx.aas.backend.backends.register_backend`. to be used by Referable - object's :meth:`~basyx.aas.model.base.Referable.update` and :meth:`~basyx.aas.model.base.Referable.commit` methods - when required. - """ - - @classmethod - @abc.abstractmethod - def commit_object(cls, - committed_object: "Referable", - store_object: "Referable", - relative_path: List[str]) -> None: - """ - Function (class method) to be called when an object shall be committed (local changes pushed to the external - data source) via this backend implementation. - - It is automatically called by the :meth:`~basyx.aas.model.base.Referable.commit` implementation, when the source - URI of the object or the source URI one of its ancestors in the AAS object containment hierarchy include a - URI schema for which this - backend has been registered. Both of the objects are passed to this function: the one which shall be committed - (``committed_object``) and its ancestor with the relevant source URI (``store_object``). They may be the same, - the committed object has a source with the relevant schema itself. Additionally, the ``relative_path`` from the - ``store_object`` down to the ``committed_object`` is provided. - - The backend MUST ensure to commit all local changes of at least the ``committed_object`` and all objects - contained within it (if any) to the data source. It MAY additionally commit changes to other objects (i.e. the - ``store_object`` and any additional contained object). - - For this purpose a concrete implementation of this method would typically use the ``source`` attribute of the - ``store_object`` to identify the data source. If the data source supports fine-grained access to contained - objects, the ``relative_path`` may become handy to compose the committed object's address within the data - source's interface. - - :param committed_object: The object which shall be synced to the external data source - :param store_object: The object which originates from the relevant data source (i.e. has the relevant source - attribute). It may be the ``committed_object`` or one of its ancestors in the AAS object hierarchy. - :param relative_path: List of idShort strings to resolve the ``committed_object`` starting at the - ``store_object``, such that `obj = store_object; for i in relative_path: obj = obj.get_referable(i)` - resolves to the ``committed_object``. In case that ``store_object is committed_object``, it is an empty - list. - :raises BackendNotAvailableException: when the external data source cannot be reached - """ - pass - - @classmethod - @abc.abstractmethod - def update_object(cls, - updated_object: "Referable", - store_object: "Referable", - relative_path: List[str]) -> None: - """ - Function (class method) to be called when an object shall be updated (local object updated with changes from the - external data source) via this backend implementation. 
- - It is automatically called by the :meth:`~basyx.aas.model.base.Referable.update` implementation, - when the source URI of the object or the source URI one of its ancestors in the AAS object containment hierarchy - include a URI schema for which this backend has been registered. Both of the objects are passed - to this function: the one which shall be update (``updated_object``) and its ancestor with - the relevant source URI (``store_object``). They may be the same, the updated object has a source with - the relevant schema itself. Additionally, the ``relative_path`` from the ``store_object`` down to - the ``updated_object`` is provided. - - The backend MUST ensure to update at least the ``updated_object`` and all objects contained within it (if any) - with any changes from the data source. It MAY additionally update other objects (i.e. the ``store_object`` and - any additional contained object). - - For this purpose a concrete implementation of this method would typically use the ``source`` attribute of the - ``store_object`` to identify the data source. If the data source supports fine-grained access to contained - objects, the ``relative_path`` may become handy to compose the updated object's address within the data source's - interface. - - :param updated_object: The object which shall be synced from the external data source - :param store_object: The object which originates from the relevant data source (i.e. has the relevant source - attribute). It may be the ``committed_object`` or one of its ancestors in the AAS object hierarchy. - :param relative_path: List of idShort strings to resolve the ``updated_object`` starting at the - ``store_object``, such that `obj = store_object; for i in relative_path: obj = obj.get_referable(i)` - resolves to the ``updated_object``. In case that ``store_object is updated_object``, it is an empty list. - :raises BackendNotAvailableException: when the external data source cannot be reached - """ - pass - - -# Global registry for backends by URI scheme -# TODO allow multiple backends per scheme with priority -_backends_map: Dict[str, Type[Backend]] = {} - - -def register_backend(scheme: str, backend_class: Type[Backend]) -> None: - """ - Register a Backend implementation to handle update/commit operations for a specific type of external data sources, - identified by a source URI schema. - - This method may be called multiple times for a single Backend class, to register that class as a backend - implementation for different source URI schemas (e.g. use the same backend for 'http://' and 'https://' sources). - - :param scheme: The URI schema of source URIs to be handled with Backend class, without trailing colon and slashes. - E.g. 'http', 'https', 'couchdb', etc. - :param backend_class: The Backend implementation class. Should inherit from :class:`Backend`. - """ - # TODO handle multiple backends per scheme - _backends_map[scheme] = backend_class - - -RE_URI_SCHEME = re.compile(r"^([a-zA-Z][a-zA-Z+\-\.]*):") - - -def get_backend(url: str) -> Type[Backend]: - """ - Internal function to retrieve the Backend implementation for the external data source identified by the given - ``url`` via the url's schema. 
- - :param url: External data source URI to find an appropriate Backend implementation for - :return: A Backend class, capable of updating/committing from/to the external data source - :raises UnknownBackendException: When no backend is available for that url - """ - # TODO handle multiple backends per scheme - scheme_match = RE_URI_SCHEME.match(url) - if not scheme_match: - raise ValueError("{} is not a valid URL with URI scheme.".format(url)) - scheme = scheme_match[1] - try: - return _backends_map[scheme] - except KeyError as e: - raise UnknownBackendException("Could not find Backend for source '{}'".format(url)) from e - - -# ################################################################################################# -# Custom Exception classes for reporting errors during interaction with Backends -class BackendError(Exception): - """Base class of all exceptions raised by the backends module""" - pass - - -class UnknownBackendException(BackendError): - """Raised, if the backend is not found in the registry""" - pass - - -class BackendNotAvailableException(BackendError): - """Raised, if the backend does exist in the registry, but is not available for some reason""" - pass diff --git a/sdk/basyx/aas/backend/couchdb.py b/sdk/basyx/aas/backend/couchdb.py index 4b6f43611..6f2b3a0fc 100644 --- a/sdk/basyx/aas/backend/couchdb.py +++ b/sdk/basyx/aas/backend/couchdb.py @@ -8,8 +8,7 @@ This module adds the functionality of storing and retrieving :class:`~basyx.aas.model.base.Identifiable` objects in a CouchDB. -The :class:`~.CouchDBBackend` takes care of updating and committing objects from and to the CouchDB, while the -:class:`~CouchDBObjectStore` handles adding, deleting and otherwise managing the AAS objects in a specific CouchDB. +The :class:`~CouchDBObjectStore` handles adding, deleting and otherwise managing the AAS objects in a specific CouchDB. """ import threading import weakref @@ -21,7 +20,6 @@ import json import urllib3 # type: ignore -from . import backends from ..adapter.json import json_serialization, json_deserialization from basyx.aas import model @@ -30,146 +28,6 @@ _http_pool_manager = urllib3.PoolManager() -class CouchDBBackend(backends.Backend): - """ - This Backend stores each Identifiable object as a single JSON document in the configured CouchDB database. Each - document's id is build from the object's identifier. The document's contents comprise a single property ``data``, - containing the JSON serialization of the BaSyx Python SDK object. The :ref:`adapter.json ` - package is used for serialization and deserialization of objects. 
- """ - @classmethod - def update_object(cls, - updated_object: model.Referable, - store_object: model.Referable, - relative_path: List[str]) -> None: - - if not isinstance(store_object, model.Identifiable): - raise CouchDBSourceError("The given store_object is not Identifiable, therefore cannot be found " - "in the CouchDB") - url = CouchDBBackend._parse_source(store_object.source) - - try: - data = CouchDBBackend.do_request(url) - except CouchDBServerError as e: - if e.code == 404: - raise KeyError("No Identifiable found in CouchDB at {}".format(url)) from e - raise - - updated_store_object = data['data'] - set_couchdb_revision(url, data["_rev"]) - store_object.update_from(updated_store_object) - - @classmethod - def commit_object(cls, - committed_object: model.Referable, - store_object: model.Referable, - relative_path: List[str]) -> None: - if not isinstance(store_object, model.Identifiable): - raise CouchDBSourceError("The given store_object is not Identifiable, therefore cannot be found " - "in the CouchDB") - url = CouchDBBackend._parse_source(store_object.source) - # We need to get the revision of the object, if it already exists, otherwise we cannot write to the Couchdb - if get_couchdb_revision(url) is None: - raise CouchDBConflictError("No revision found for the given object. Try calling `update` on it.") - - data = json.dumps({'data': store_object, "_rev": get_couchdb_revision(url)}, - cls=json_serialization.AASToJsonEncoder) - - try: - response = CouchDBBackend.do_request( - url, method='PUT', additional_headers={'Content-type': 'application/json'}, body=data.encode('utf-8')) - set_couchdb_revision(url, response["rev"]) - except CouchDBServerError as e: - if e.code == 409: - raise CouchDBConflictError("Could not commit changes to id {} due to a concurrent modification in the " - "database.".format(store_object.id)) from e - elif e.code == 404: - raise KeyError("Object with id {} was not found in the CouchDB at {}" - .format(store_object.id, url)) from e - raise - - @classmethod - def _parse_source(cls, source: str) -> str: - """ - Parses the source parameter of a model.Referable object - - :param source: Source string of the model.Referable object - :return: URL to the document - :raises CouchDBBackendSourceError, if the source has the wrong format - """ - if source.startswith("couchdbs://"): - url = source.replace("couchdbs://", "https://", 1) - elif source.startswith("couchdb://"): - url = source.replace("couchdb://", "http://", 1) - else: - raise CouchDBSourceError("Source has wrong format. " - "Expected to start with {couchdb://, couchdbs://}, got {" + source + "}") - return url - - @classmethod - def do_request(cls, url: str, method: str = "GET", additional_headers: Optional[Dict[str, str]] = None, - body: Optional[bytes] = None) -> MutableMapping[str, Any]: - """ - Perform an HTTP(S) request to the CouchDBServer, parse the result and handle errors - - :param url: The HTTP or HTTPS URL to request - :param method: The HTTP method for the request - :param additional_headers: Additional headers to insert into the request. The default headers include - 'connection: keep-alive', 'accept-encoding: ...', 'authorization: basic ...', 'Accept: ...'. 
- :param body: Request body for POST, PUT, and PATCH requests - :return: The parsed JSON data if the request ``method`` is other than 'HEAD' or the response headers for 'HEAD' - requests - """ - url_parts = urllib.parse.urlparse(url) - host = url_parts.scheme + url_parts.netloc - auth = _credentials_store.get(host) - headers = urllib3.make_headers(keep_alive=True, accept_encoding=True, - basic_auth="{}:{}".format(*auth) if auth else None) - headers['Accept'] = 'application/json' - headers.update(additional_headers if additional_headers is not None else {}) - try: - response = _http_pool_manager.request(method, url, headers=headers, body=body) - except (urllib3.exceptions.TimeoutError, urllib3.exceptions.SSLError, urllib3.exceptions.ProtocolError) as e: - raise CouchDBConnectionError("Error while connecting to the CouchDB server: {}".format(e)) from e - except urllib3.exceptions.HTTPError as e: - raise CouchDBResponseError("Error while connecting to the CouchDB server: {}".format(e)) from e - - if not (200 <= response.status < 300): - logger.debug("Request %s %s finished with HTTP status code %s.", - method, url, response.status) - if response.headers.get('Content-type', None) != 'application/json': - raise CouchDBResponseError("Unexpected Content-type header {} of response from CouchDB server" - .format(response.headers.get('Content-type', None))) - - if method == 'HEAD': - raise CouchDBServerError(response.status, "", "", "HTTP {}".format(response.status)) - - try: - data = json.loads(response.data.decode('utf-8')) - except json.JSONDecodeError: - raise CouchDBResponseError("Could not parse error message of HTTP {}" - .format(response.status)) - raise CouchDBServerError(response.status, data['error'], data['reason'], - "HTTP {}: {} (reason: {})".format(response.status, data['error'], data['reason'])) - - # Check response & parse data - logger.debug("Request %s %s finished successfully.", method, url) - if method == 'HEAD': - return response.headers - - if response.headers.get('Content-type') != 'application/json': - raise CouchDBResponseError("Unexpected Content-type header") - try: - data = json.loads(response.data.decode('utf-8'), cls=json_deserialization.AASFromJsonDecoder) - except json.JSONDecodeError as e: - raise CouchDBResponseError("Could not parse CouchDB server response as JSON data.") from e - return data - - -backends.register_backend("couchdb", CouchDBBackend) -backends.register_backend("couchdbs", CouchDBBackend) - - # Global registry for credentials for CouchDB Servers _credentials_store: Dict[str, Tuple[str, str]] = {} # Note: The HTTPPasswordMgr is not thread safe during writing, should be thread safe for reading only. @@ -182,8 +40,7 @@ def register_credentials(url: str, username: str, password: str): .. Warning:: Do not use this function, while other threads may be accessing the credentials via the - :class:`~.CouchDBObjectStore` or update or commit functions of :class:`~.basyx.aas.model.base.Referable` - objects! + :class:`~.CouchDBObjectStore`! 
:param url: Toplevel URL :param username: Username to that CouchDB instance @@ -268,7 +125,7 @@ def check_database(self, create=False): """ try: - CouchDBBackend.do_request("{}/{}".format(self.url, self.database_name), 'HEAD') + self._do_request("{}/{}".format(self.url, self.database_name), 'HEAD') except CouchDBServerError as e: # If an HTTPError is raised, re-raise it, unless it is a 404 error and we are requested to create the # database @@ -280,7 +137,7 @@ def check_database(self, create=False): # Create database logger.info("Creating CouchDB database %s/%s ...", self.url, self.database_name) - CouchDBBackend.do_request("{}/{}".format(self.url, self.database_name), 'PUT') + self._do_request("{}/{}".format(self.url, self.database_name), 'PUT') def get_identifiable_by_couchdb_id(self, couchdb_id: str) -> model.Identifiable: """ @@ -293,7 +150,7 @@ def get_identifiable_by_couchdb_id(self, couchdb_id: str) -> model.Identifiable: # Create and issue HTTP request (raises HTTPError on status != 200) try: - data = CouchDBBackend.do_request( + data = self._do_request( "{}/{}/{}".format(self.url, self.database_name, urllib.parse.quote(couchdb_id, safe=''))) except CouchDBServerError as e: if e.code == 404: @@ -305,7 +162,6 @@ def get_identifiable_by_couchdb_id(self, couchdb_id: str) -> model.Identifiable: if not isinstance(obj, model.Identifiable): raise CouchDBResponseError("The CouchDB document with id {} does not contain an identifiable AAS object." .format(couchdb_id)) - self.generate_source(obj) # Generate the source parameter of this object set_couchdb_revision("{}/{}/{}".format(self.url, self.database_name, urllib.parse.quote(couchdb_id, safe='')), data["_rev"]) @@ -314,12 +170,8 @@ def get_identifiable_by_couchdb_id(self, couchdb_id: str) -> model.Identifiable: with self._object_cache_lock: if obj.id in self._object_cache: old_obj = self._object_cache[obj.id] - # If the source does not match the correct source for this CouchDB backend, the object seems to belong - # to another backend now, so we return a fresh copy - if old_obj.source == obj.source: - old_obj.update_from(obj) - return old_obj - + old_obj.update_from(obj) + return old_obj self._object_cache[obj.id] = obj return obj @@ -351,7 +203,7 @@ def add(self, x: model.Identifiable) -> None: # Create and issue HTTP request (raises HTTPError on status != 200) try: - response = CouchDBBackend.do_request( + response = self._do_request( "{}/{}/{}".format(self.url, self.database_name, self._transform_id(x.id)), 'PUT', {'Content-type': 'application/json'}, @@ -364,7 +216,6 @@ def add(self, x: model.Identifiable) -> None: raise with self._object_cache_lock: self._object_cache[x.id] = x - self.generate_source(x) # Set the source of the object def discard(self, x: model.Identifiable, safe_delete=False) -> None: """ @@ -394,7 +245,7 @@ def discard(self, x: model.Identifiable, safe_delete=False) -> None: # ETag response header try: logger.debug("fetching the current object revision for deletion ...") - headers = CouchDBBackend.do_request( + headers = self._do_request( "{}/{}/{}".format(self.url, self.database_name, self._transform_id(x.id)), 'HEAD') rev = headers['ETag'][1:-1] except CouchDBServerError as e: @@ -403,7 +254,7 @@ def discard(self, x: model.Identifiable, safe_delete=False) -> None: from e raise try: - CouchDBBackend.do_request( + self._do_request( "{}/{}/{}?rev={}".format(self.url, self.database_name, self._transform_id(x.id), rev), 'DELETE') except CouchDBServerError as e: @@ -419,7 +270,65 @@ def discard(self, x: 
model.Identifiable, safe_delete=False) -> None: self._transform_id(x.id))) with self._object_cache_lock: del self._object_cache[x.id] - x.source = "" + + @classmethod + def _do_request(cls, url: str, method: str = "GET", additional_headers: Dict[str, str] = {}, + body: Optional[bytes] = None) -> MutableMapping[str, Any]: + """ + Perform an HTTP(S) request to the CouchDBServer, parse the result and handle errors + + :param url: The HTTP or HTTPS URL to request + :param method: The HTTP method for the request + :param additional_headers: Additional headers to insert into the request. The default headers include + 'connection: keep-alive', 'accept-encoding: ...', 'authorization: basic ...', 'Accept: ...'. + :param body: Request body for POST, PUT, and PATCH requests + :return: The parsed JSON data if the request ``method`` is other than 'HEAD' or the response headers for 'HEAD' + requests + """ + url_parts = urllib.parse.urlparse(url) + host = url_parts.scheme + url_parts.netloc + auth = _credentials_store.get(host) + headers = urllib3.make_headers(keep_alive=True, accept_encoding=True, + basic_auth="{}:{}".format(*auth) if auth else None) + headers['Accept'] = 'application/json' + headers.update(additional_headers) + try: + response = _http_pool_manager.request(method, url, headers=headers, body=body) + except (urllib3.exceptions.TimeoutError, urllib3.exceptions.SSLError, urllib3.exceptions.ProtocolError) as e: + raise CouchDBConnectionError("Error while connecting to the CouchDB server: {}".format(e)) from e + except urllib3.exceptions.HTTPError as e: + raise CouchDBResponseError("Error while connecting to the CouchDB server: {}".format(e)) from e + + if not (200 <= response.status < 300): + logger.debug("Request %s %s finished with HTTP status code %s.", + method, url, response.status) + if response.headers.get('Content-type', None) != 'application/json': + raise CouchDBResponseError("Unexpected Content-type header {} of response from CouchDB server" + .format(response.headers.get('Content-type', None))) + + if method == 'HEAD': + raise CouchDBServerError(response.status, "", "", "HTTP {}".format(response.status)) + + try: + data = json.loads(response.data.decode('utf-8')) + except json.JSONDecodeError: + raise CouchDBResponseError("Could not parse error message of HTTP {}" + .format(response.status)) + raise CouchDBServerError(response.status, data['error'], data['reason'], + "HTTP {}: {} (reason: {})".format(response.status, data['error'], data['reason'])) + + # Check response & parse data + logger.debug("Request %s %s finished successfully.", method, url) + if method == 'HEAD': + return response.headers + + if response.headers.get('Content-type') != 'application/json': + raise CouchDBResponseError("Unexpected Content-type header") + try: + data = json.loads(response.data.decode('utf-8'), cls=json_deserialization.AASFromJsonDecoder) + except json.JSONDecodeError as e: + raise CouchDBResponseError("Could not parse CouchDB server response as JSON data.") from e + return data def __contains__(self, x: object) -> bool: """ @@ -441,7 +350,7 @@ def __contains__(self, x: object) -> bool: logger.debug("Checking existence of object with id %s in database ...", repr(x)) try: - CouchDBBackend.do_request( + self._do_request( "{}/{}/{}".format(self.url, self.database_name, self._transform_id(identifier)), 'HEAD') except CouchDBServerError as e: if e.code == 404: @@ -458,7 +367,7 @@ def __len__(self) -> int: (see ``_do_request()`` for details) """ logger.debug("Fetching number of documents from 
database ...") - data = CouchDBBackend.do_request("{}/{}".format(self.url, self.database_name)) + data = self._do_request("{}/{}".format(self.url, self.database_name)) return data['doc_count'] def __iter__(self) -> Iterator[model.Identifiable]: @@ -483,7 +392,7 @@ def __next__(self): # Fetch a list of all ids and construct Iterator object logger.debug("Creating iterator over objects in database ...") - data = CouchDBBackend.do_request("{}/{}/_all_docs".format(self.url, self.database_name)) + data = self._do_request("{}/{}/_all_docs".format(self.url, self.database_name)) return CouchDBIdentifiableIterator(self, (row['id'] for row in data['rows'])) @staticmethod @@ -497,17 +406,6 @@ def _transform_id(identifier: model.Identifier, url_quote=True) -> str: identifier = urllib.parse.quote(identifier, safe='') return identifier - def generate_source(self, identifiable: model.Identifiable): - """ - Generates the source string for an :class:`~basyx.aas.model.base.Identifiable` object that is backed - by the Couchdb - - :param identifiable: Identifiable object - """ - source: str = self.url.replace("https://", "couchdbs://").replace("http://", "couchdb://") - source += "/" + self.database_name + "/" + self._transform_id(identifiable.id) - identifiable.source = source - # ################################################################################################# # Custom Exception classes for reporting errors during interaction with the CouchDB server @@ -516,11 +414,6 @@ class CouchDBError(Exception): pass -class CouchDBSourceError(CouchDBError): - """Exception raised when the source has the wrong format""" - pass - - class CouchDBConnectionError(CouchDBError): """Exception raised when the CouchDB server could not be reached""" pass diff --git a/sdk/basyx/aas/backend/local_file.py b/sdk/basyx/aas/backend/local_file.py index ec0757375..39ff91415 100644 --- a/sdk/basyx/aas/backend/local_file.py +++ b/sdk/basyx/aas/backend/local_file.py @@ -8,8 +8,8 @@ This module adds the functionality of storing and retrieving :class:`~basyx.aas.model.base.Identifiable` objects in local files. -The :class:`~.LocalFileBackend` takes care of updating and committing objects from and to the files, while the -:class:`~LocalFileObjectStore` handles adding, deleting and otherwise managing the AAS objects in a specific Directory. +The :class:`~LocalFileObjectStore` handles adding, deleting and otherwise managing +the AAS objects in a specific Directory. """ from typing import List, Iterator, Iterable, Union import logging @@ -19,7 +19,6 @@ import threading import weakref -from . import backends from ..adapter.json import json_serialization, json_deserialization from basyx.aas import model @@ -27,45 +26,6 @@ logger = logging.getLogger(__name__) -class LocalFileBackend(backends.Backend): - """ - This Backend stores each Identifiable object as a single JSON document as a local file in a directory. - Each document's id is build from the object's identifier using a SHA256 sum of its identifiable; the document's - contents comprise a single property ``data``, containing the JSON serialization of the BaSyx Python SDK object. The - :ref:`adapter.json ` package is used for serialization and deserialization of objects. 
- """ - - @classmethod - def update_object(cls, - updated_object: model.Referable, - store_object: model.Referable, - relative_path: List[str]) -> None: - - if not isinstance(store_object, model.Identifiable): - raise FileBackendSourceError("The given store_object is not Identifiable, therefore cannot be found " - "in the FileBackend") - file_name: str = store_object.source.replace("file://localhost/", "") - with open(file_name, "r") as file: - data = json.load(file, cls=json_deserialization.AASFromJsonDecoder) - updated_store_object = data["data"] - store_object.update_from(updated_store_object) - - @classmethod - def commit_object(cls, - committed_object: model.Referable, - store_object: model.Referable, - relative_path: List[str]) -> None: - if not isinstance(store_object, model.Identifiable): - raise FileBackendSourceError("The given store_object is not Identifiable, therefore cannot be found " - "in the FileBackend") - file_name: str = store_object.source.replace("file://localhost/", "") - with open(file_name, "w") as file: - json.dump({'data': store_object}, file, cls=json_serialization.AASToJsonEncoder, indent=4) - - -backends.register_backend("file", LocalFileBackend) - - class LocalFileObjectStore(model.AbstractObjectStore): """ An ObjectStore implementation for :class:`~basyx.aas.model.base.Identifiable` BaSyx Python SDK objects backed @@ -112,7 +72,6 @@ def get_identifiable_by_hash(self, hash_: str) -> model.Identifiable: with open("{}/{}.json".format(self.directory_path, hash_), "r") as file: data = json.load(file, cls=json_deserialization.AASFromJsonDecoder) obj = data["data"] - self.generate_source(obj) except FileNotFoundError as e: raise KeyError("No Identifiable with hash {} found in local file database".format(hash_)) from e # If we still have a local replication of that object (since it is referenced from anywhere else), update that @@ -120,11 +79,8 @@ def get_identifiable_by_hash(self, hash_: str) -> model.Identifiable: with self._object_cache_lock: if obj.id in self._object_cache: old_obj = self._object_cache[obj.id] - # If the source does not match the correct source for this CouchDB backend, the object seems to belong - # to another backend now, so we return a fresh copy - if old_obj.source == obj.source: - old_obj.update_from(obj) - return old_obj + old_obj.update_from(obj) + return old_obj self._object_cache[obj.id] = obj return obj @@ -152,7 +108,6 @@ def add(self, x: model.Identifiable) -> None: json.dump({"data": x}, file, cls=json_serialization.AASToJsonEncoder, indent=4) with self._object_cache_lock: self._object_cache[x.id] = x - self.generate_source(x) # Set the source of the object def discard(self, x: model.Identifiable) -> None: """ @@ -168,7 +123,6 @@ def discard(self, x: model.Identifiable) -> None: raise KeyError("No AAS object with id {} exists in local file database".format(x.id)) from e with self._object_cache_lock: del self._object_cache[x.id] - x.source = "" def __contains__(self, x: object) -> bool: """ @@ -214,23 +168,3 @@ def _transform_id(identifier: model.Identifier) -> str: Helper method to represent an ASS Identifier as a string to be used as Local file document id """ return hashlib.sha256(identifier.encode("utf-8")).hexdigest() - - def generate_source(self, identifiable: model.Identifiable) -> str: - """ - Generates the source string for an :class:`~basyx.aas.model.base.Identifiable` object that is backed by the File - - :param identifiable: Identifiable object - """ - source: str = "file://localhost/{}/{}.json".format( - 
self.directory_path, - self._transform_id(identifiable.id) - ) - identifiable.source = source - return source - - -class FileBackendSourceError(Exception): - """ - Raised, if the given object's source is not resolvable as a local file - """ - pass diff --git a/sdk/basyx/aas/examples/tutorial_backend_couchdb.py b/sdk/basyx/aas/examples/tutorial_backend_couchdb.py index 5b476a80a..dc86bac85 100755 --- a/sdk/basyx/aas/examples/tutorial_backend_couchdb.py +++ b/sdk/basyx/aas/examples/tutorial_backend_couchdb.py @@ -4,9 +4,6 @@ """ Tutorial for storing Asset Administration Shells, Submodels and Assets in a CouchDB database server, using the CouchDBObjectStore and CouchDB Backend. - -This tutorial also shows the usage of the commit()/update() mechanism for synchronizing objects with an external data -source. """ from configparser import ConfigParser @@ -33,7 +30,6 @@ # Step-by-Step Guide: # step 1: connecting to a CouchDB server # step 2: storing objects in CouchDBObjectStore -# step 3: updating objects from the CouchDB and committing changes ########################################## @@ -58,8 +54,7 @@ # Provide the login credentials to the CouchDB backend. -# These credentials are used whenever communication with this CouchDB server is required either via the -# CouchDBObjectStore or via the update()/commit() backend. +# These credentials are used whenever communication with this CouchDB server is required via the CouchDBObjectStore. basyx.aas.backend.couchdb.register_credentials(couchdb_url, couchdb_user, couchdb_password) # Now, we create a CouchDBObjectStore as an interface for managing the objects in the CouchDB server. @@ -75,37 +70,11 @@ example_submodel2 = basyx.aas.examples.data.example_aas.create_example_bill_of_material_submodel() # The CouchDBObjectStore behaves just like other ObjectStore implementations (see `tutorial_storage.py`). The objects -# are transferred to the CouchDB immediately. Additionally, the `source` attribute is set automatically, so update() and -# commit() will work automatically (see below). +# are transferred to the CouchDB immediately. object_store.add(example_submodel1) object_store.add(example_submodel2) - -#################################################################### -# Step 3: Updating Objects from the CouchDB and Committing Changes # -#################################################################### - -# Since the CouchDBObjectStore has set the `source` attribute of our Submodel objects, we can now use update() and -# commit() to synchronize changes to these objects with the database. The `source` indicates (via its URI scheme) that -# the CouchDB backend is used for the synchronization and references the correct CouchDB server url and database. For -# this to work, we must make sure to `import aas.backend.couchdb` at least once in this Python application, so the -# CouchDB backend is loaded. - -# Fetch recent updates from the server -example_submodel1.update() - -# Make some changes to a Property within the submodel -prop = example_submodel1.get_referable('ManufacturerName') -assert isinstance(prop, basyx.aas.model.Property) - -prop.value = "RWTH Aachen" - -# Commit (upload) these changes to the CouchDB server -# We can simply call commit() on the Property object. It will check the `source` attribute of the object itself as well -# as the source attribute of all ancestors in the object hierarchy (including the Submodel) and commit the changes to -# all of these external data sources. 
-prop.commit() - +# For more information on how to use `ObjectStore`s in general, please refer to `tutorial_storage.py`. ############ # Clean up # diff --git a/sdk/basyx/aas/examples/tutorial_serialization_deserialization.py b/sdk/basyx/aas/examples/tutorial_serialization_deserialization.py index 6c99409a7..ec281818b 100755 --- a/sdk/basyx/aas/examples/tutorial_serialization_deserialization.py +++ b/sdk/basyx/aas/examples/tutorial_serialization_deserialization.py @@ -55,11 +55,6 @@ ############################################## # Step 2: Serializing Single Objects to JSON # ############################################## - -# Before serializing the data, we should make sure, it's up-to-date. This is irrelevant for the static AAS objects in -# this tutorial, but may be important when dealing with dynamic data. -aashell.update() - # `AASToJsonEncoder` from the `aas.adapter.json` module is a custom JSONEncoder class for serializing # Asset Administration Shell data into the official JSON format according to # 'Details of the Asset Administration Shell', chapter 5.5, using Python's built-in JSON library. When provided to the @@ -102,17 +97,13 @@ obj_store.add(submodel) obj_store.add(aashell) -# step 4.2: Again, make sure that the data is up-to-date -submodel.update() -aashell.update() - -# step 4.3: writing the contents of the ObjectStore to a JSON file +# step 4.2: writing the contents of the ObjectStore to a JSON file basyx.aas.adapter.json.write_aas_json_file('data.json', obj_store) # We can pass the additional keyword argument `indent=4` to `write_aas_json_file()` to format the JSON file in a more # human-readable (but much more space-consuming) manner. -# step 4.4: writing the contents of the ObjectStore to an XML file +# step 4.3: writing the contents of the ObjectStore to an XML file basyx.aas.adapter.xml.write_aas_xml_file('data.xml', obj_store) diff --git a/sdk/basyx/aas/examples/tutorial_storage.py b/sdk/basyx/aas/examples/tutorial_storage.py index 82f5bc1e5..fe978b11b 100755 --- a/sdk/basyx/aas/examples/tutorial_storage.py +++ b/sdk/basyx/aas/examples/tutorial_storage.py @@ -68,8 +68,7 @@ # persistent memory (i.e. on hard disk). In this case, you may choose the `CouchDBObjectStore` from # `aas.backends.couchdb` to use a CouchDB database server as persistent storage. Both ObjectStore implementations # provide the same interface. In addition, the CouchDBObjectStores allows synchronizing the local object with the -# database via a Backend and the update()/commit() mechanism. See the `tutorial_backend_couchdb.py` for more -# information. +# database via a Backend. See the `tutorial_backend_couchdb.py` for more information. obj_store: model.DictObjectStore[model.Identifiable] = model.DictObjectStore() # step 2.2: add submodel and asset administration shell to store diff --git a/sdk/basyx/aas/model/base.py b/sdk/basyx/aas/model/base.py index 56182c721..60b6d43fb 100644 --- a/sdk/basyx/aas/model/base.py +++ b/sdk/basyx/aas/model/base.py @@ -18,7 +18,6 @@ import re from . import datatypes, _string_constraints -from ..backend import backends if TYPE_CHECKING: from . import provider @@ -602,10 +601,6 @@ class Referable(HasExtension, metaclass=abc.ABCMeta): :ivar description: Description or comments on the element. :ivar parent: Reference (in form of a :class:`~.UniqueIdShortNamespace`) to the next referable parent element of the element. - - :ivar source: Source of the object, a URI, that defines where this object's data originates from. 
- This is used to specify where the Referable should be updated from and committed to. - Default is an empty string, making it use the source of its ancestor, if possible. """ @abc.abstractmethod def __init__(self): @@ -617,7 +612,6 @@ def __init__(self): # We use a Python reference to the parent Namespace instead of a Reference Object, as specified. This allows # simpler and faster navigation/checks and it has no effect in the serialized data formats anyway. self.parent: Optional[UniqueIdShortNamespace] = None - self.source: str = "" def __repr__(self) -> str: reversed_path = [] @@ -733,91 +727,22 @@ def _set_id_short(self, id_short: Optional[NameType]): # Redundant to the line above. However, this way, we make sure that we really update the _id_short self._id_short = id_short - def update(self, - max_age: float = 0, - recursive: bool = True, - _indirect_source: bool = True) -> None: - """ - Update the local Referable object from any underlying external data source, using an appropriate backend - - If there is no source given, it will find its next ancestor with a source and update from this source. - If there is no source in any ancestor, this function will do nothing - - :param max_age: Maximum age of the local data in seconds. This method may return early, if the previous update - of the object has been performed less than ``max_age`` seconds ago. - :param recursive: Also call update on all children of this object. Default is True - :param _indirect_source: Internal parameter to avoid duplicate updating. - :raises backends.BackendError: If no appropriate backend or the data source is not available - """ - # TODO consider max_age - if not _indirect_source: - # Update was already called on an ancestor of this Referable. Only update it, if it has its own source - if self.source != "": - backends.get_backend(self.source).update_object(updated_object=self, - store_object=self, - relative_path=[]) - - else: - # Try to find a valid source for this Referable - if self.source != "": - backends.get_backend(self.source).update_object(updated_object=self, - store_object=self, - relative_path=[]) - else: - store_object, relative_path = self.find_source() - if store_object and relative_path is not None: - backends.get_backend(store_object.source).update_object(updated_object=self, - store_object=store_object, - relative_path=list(relative_path)) - - if recursive: - # update all the children who have their own source - if isinstance(self, UniqueIdShortNamespace): - for namespace_set in self.namespace_element_sets: - if "id_short" not in namespace_set.get_attribute_name_list(): - continue - for referable in namespace_set: - referable.update(max_age, recursive=True, _indirect_source=False) - - def find_source(self) -> Tuple[Optional["Referable"], Optional[List[str]]]: # type: ignore - """ - Finds the closest source in these objects ancestors. 
If there is no source, returns None - - :return: Tuple with the closest ancestor with a defined source and the relative path of id_shorts to that - ancestor - """ - referable: Referable = self - relative_path: List[NameType] = [self.id_short] - while referable is not None: - if referable.source != "": - relative_path.reverse() - return referable, relative_path - if referable.parent: - assert isinstance(referable.parent, Referable) - referable = referable.parent - relative_path.append(referable.id_short) - continue - break - return None, None - - def update_from(self, other: "Referable", update_source: bool = False): + def update_from(self, other: "Referable"): """ Internal function to update the object's attributes from a different version of the exact same object. This function should not be used directly. It is typically used by backend implementations (database adapters, - protocol clients, etc.) to update the object's data, after ``update()`` has been called. + protocol clients, etc.) to update the object's data, after ``update_nss_from()`` has been called. :param other: The object to update from - :param update_source: Update the source attribute with the other's source attribute. This is not propagated - recursively """ for name in dir(other): # Skip private and protected attributes if name.startswith('_'): continue - # Do not update 'parent', 'namespace_element_sets', or 'source' (depending on update_source parameter) - if name in ("parent", "namespace_element_sets") or (name == "source" and not update_source): + # Do not update 'parent', 'namespace_element_sets' + if name in ("parent", "namespace_element_sets"): continue # Skip methods @@ -837,43 +762,6 @@ def update_from(self, other: "Referable", update_source: bool = False): else: setattr(self, name, attr) - def commit(self) -> None: - """ - Transfer local changes on this object to all underlying external data sources. - - This function commits the current state of this object to its own and each external data source of its - ancestors. If there is no source, this function will do nothing. 
-        """
-        current_ancestor = self.parent
-        relative_path: List[NameType] = [self.id_short]
-        # Commit to all ancestors with sources
-        while current_ancestor:
-            assert isinstance(current_ancestor, Referable)
-            if current_ancestor.source != "":
-                backends.get_backend(current_ancestor.source).commit_object(committed_object=self,
-                                                                            store_object=current_ancestor,
-                                                                            relative_path=list(relative_path))
-            relative_path.insert(0, current_ancestor.id_short)
-            current_ancestor = current_ancestor.parent
-        # Commit to own source and check if there are children with sources to commit to
-        self._direct_source_commit()
-
-    def _direct_source_commit(self):
-        """
-        Commits children of an ancestor recursively, if they have a specific source given
-        """
-        if self.source != "":
-            backends.get_backend(self.source).commit_object(committed_object=self,
-                                                            store_object=self,
-                                                            relative_path=[])
-
-        if isinstance(self, UniqueIdShortNamespace):
-            for namespace_set in self.namespace_element_sets:
-                if "id_short" not in namespace_set.get_attribute_name_list():
-                    continue
-                for referable in namespace_set:
-                    referable._direct_source_commit()
-
     id_short = property(_get_id_short, _set_id_short)
@@ -2080,15 +1968,15 @@ def update_nss_from(self, other: "NamespaceSet"):
                 if isinstance(other_object, Referable):
                     backend, case_sensitive = self._backend["id_short"]
                     referable = backend[other_object.id_short if case_sensitive else other_object.id_short.upper()]
-                    referable.update_from(other_object, update_source=True)  # type: ignore
+                    referable.update_from(other_object)  # type: ignore
                 elif isinstance(other_object, Qualifier):
                     backend, case_sensitive = self._backend["type"]
                     qualifier = backend[other_object.type if case_sensitive else other_object.type.upper()]
-                    # qualifier.update_from(other_object, update_source=True)  # TODO: What should happen here?
+                    # qualifier.update_from(other_object)  # TODO: What should happen here?
                 elif isinstance(other_object, Extension):
                     backend, case_sensitive = self._backend["name"]
                     extension = backend[other_object.name if case_sensitive else other_object.name.upper()]
-                    # extension.update_from(other_object, update_source=True)  # TODO: What should happen here?
+                    # extension.update_from(other_object)  # TODO: What should happen here?
                 else:
                     raise TypeError("Type not implemented")
             except KeyError:
diff --git a/sdk/basyx/aas/model/provider.py b/sdk/basyx/aas/model/provider.py
index ac50d33da..b7acea8b8 100644
--- a/sdk/basyx/aas/model/provider.py
+++ b/sdk/basyx/aas/model/provider.py
@@ -65,8 +65,9 @@ class AbstractObjectStore(AbstractObjectProvider, MutableSet[_IT], Generic[_IT],
 
     ObjectStores are special ObjectProvides that – in addition to retrieving objects by
     :class:`~basyx.aas.model.base.Identifier` – allow to add and delete objects (i.e. behave like a Python set).
-    This includes local object stores (like :class:`~.DictObjectStore`) and database
-    :class:`Backends <basyx.aas.backend.backends.Backend>`.
+    This includes local object stores (like :class:`~.DictObjectStore`) and specific object stores
+    (like :class:`~basyx.aas.backend.couchdb.CouchDBObjectStore` and
+    :class:`~basyx.aas.backend.local_file.LocalFileObjectStore`).
 
     The AbstractObjectStore inherits from the :class:`~collections.abc.MutableSet` abstract collections class and
     therefore implements all the functions of this class.
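# A minimal sketch of the ObjectStore-only workflow that the CouchDB tutorial above describes:
# credentials are registered once, then Identifiable objects are added to and retrieved from a
# CouchDBObjectStore directly, with no update()/commit() calls involved. The server URL, database
# name and login data are placeholders, and the CouchDBObjectStore(url, database) constructor
# signature is an assumption, as it is not shown in this diff.
from basyx.aas import model
from basyx.aas.backend import couchdb

couchdb.register_credentials("http://localhost:5984", "admin", "admin")
object_store = couchdb.CouchDBObjectStore("http://localhost:5984", "basyx_test_db")

# Objects are persisted as soon as they are added to the store
submodel = model.Submodel("https://acplt.org/Test_Submodel")
object_store.add(submodel)

# Retrieval and deletion go through the same ObjectStore interface
retrieved = object_store.get_identifiable("https://acplt.org/Test_Submodel")
object_store.discard(retrieved)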
diff --git a/sdk/docs/source/backend/backends.rst b/sdk/docs/source/backend/backends.rst deleted file mode 100644 index d8c603849..000000000 --- a/sdk/docs/source/backend/backends.rst +++ /dev/null @@ -1,4 +0,0 @@ -backends - Base class and functionality for Backends -==================================================== - -.. automodule:: basyx.aas.backend.backends diff --git a/sdk/docs/source/backend/index.rst b/sdk/docs/source/backend/index.rst index 232867818..97554b3f5 100644 --- a/sdk/docs/source/backend/index.rst +++ b/sdk/docs/source/backend/index.rst @@ -7,6 +7,5 @@ basyx.aas.backend - Storing and Retrieving of AAS-objects in Backends :maxdepth: 2 :caption: Contents: - backends couchdb local_file diff --git a/sdk/test/backend/test_backends.py b/sdk/test/backend/test_backends.py index e0beee8f8..e69de29bb 100644 --- a/sdk/test/backend/test_backends.py +++ b/sdk/test/backend/test_backends.py @@ -1,36 +0,0 @@ -# Copyright (c) 2025 the Eclipse BaSyx Authors -# -# This program and the accompanying materials are made available under the terms of the MIT License, available in -# the LICENSE file of this project. -# -# SPDX-License-Identifier: MIT - -from typing import List -import unittest - -from basyx.aas.backend import backends -from basyx.aas.model import Referable - - -class ExampleBackend(backends.Backend): - @classmethod - def commit_object(cls, committed_object: Referable, store_object: Referable, relative_path: List[str]) -> None: - raise NotImplementedError("This is a mock") - - @classmethod - def update_object(cls, updated_object: Referable, store_object: Referable, relative_path: List[str]) -> None: - raise NotImplementedError("This is a mock") - - -class BackendsTest(unittest.TestCase): - def test_backend_store(self): - backends.register_backend("mockScheme", ExampleBackend) - self.assertIs(backends.get_backend("mockScheme:x-test:test_backend"), ExampleBackend) - - backends.register_backend("", ExampleBackend) - with self.assertRaises(ValueError) as cm: - backends.get_backend("") - self.assertEqual(" is not a valid URL with URI scheme.", str(cm.exception)) - - with self.assertRaises(backends.UnknownBackendException): - backends.get_backend("some-unkown-scheme://example.com") diff --git a/sdk/test/backend/test_couchdb.py b/sdk/test/backend/test_couchdb.py index 89fe992de..36e5ef039 100644 --- a/sdk/test/backend/test_couchdb.py +++ b/sdk/test/backend/test_couchdb.py @@ -18,32 +18,6 @@ TEST_CONFIG["couchdb"]["database"] + "/" -class CouchDBBackendOfflineMethodsTest(unittest.TestCase): - def test_parse_source(self): - couchdb.register_credentials(url="couchdb.plt.rwth-aachen.de:5984", - username="test_user", - password="test_password") - - url = couchdb.CouchDBBackend._parse_source( - "couchdbs://couchdb.plt.rwth-aachen.de:5984/path_to_db/path_to_doc" - ) - expected_url = "https://couchdb.plt.rwth-aachen.de:5984/path_to_db/path_to_doc" - self.assertEqual(expected_url, url) - - url = couchdb.CouchDBBackend._parse_source( - "couchdb://couchdb.plt.rwth-aachen.de:5984/path_to_db/path_to_doc" - ) - expected_url = "http://couchdb.plt.rwth-aachen.de:5984/path_to_db/path_to_doc" - self.assertEqual(expected_url, url) - - with self.assertRaises(couchdb.CouchDBSourceError) as cm: - couchdb.CouchDBBackend._parse_source("wrong_scheme:plt.rwth-aachen.couchdb:5984/path_to_db/path_to_doc") - self.assertEqual("Source has wrong format. 
" - "Expected to start with {couchdb://, couchdbs://}, got " - "{wrong_scheme:plt.rwth-aachen.couchdb:5984/path_to_db/path_to_doc}", - str(cm.exception)) - - @unittest.skipUnless(COUCHDB_OKAY, "No CouchDB is reachable at {}/{}: {}".format(TEST_CONFIG['couchdb']['url'], TEST_CONFIG['couchdb']['database'], COUCHDB_ERROR)) @@ -62,7 +36,8 @@ def tearDown(self) -> None: def test_object_store_add(self): test_object = create_example_submodel() self.object_store.add(test_object) - self.assertEqual(test_object.source, source_core+"https%3A%2F%2Facplt.org%2FTest_Submodel") + # Note that this test is only checking that there are no errors during adding. + # The actual logic is tested together with retrieval in `test_retrieval`. def test_retrieval(self): test_object = create_example_submodel() @@ -77,11 +52,6 @@ def test_retrieval(self): test_object_retrieved_again = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') self.assertIs(test_object_retrieved, test_object_retrieved_again) - # However, a changed source should invalidate the cached object, so we should get a new copy - test_object_retrieved.source = "couchdb://example.com/example/https%3A%2F%2Facplt.org%2FTest_Submodel" - test_object_retrieved_third = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') - self.assertIsNot(test_object_retrieved, test_object_retrieved_third) - def test_example_submodel_storing(self) -> None: example_submodel = create_example_submodel() @@ -138,46 +108,3 @@ def test_key_errors(self) -> None: self.object_store.discard(retrieved_submodel) self.assertEqual("'No AAS object with id https://acplt.org/Test_Submodel exists in " "CouchDB database'", str(cm.exception)) - - def test_conflict_errors(self): - # Preperation: add object and retrieve it from the database - example_submodel = create_example_submodel() - self.object_store.add(example_submodel) - retrieved_submodel = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') - - # Simulate a concurrent modification (Commit submodel, while preventing that the couchdb revision store is - # updated) - with unittest.mock.patch("basyx.aas.backend.couchdb.set_couchdb_revision"): - retrieved_submodel.commit() - - # Committing changes to the retrieved object should now raise a conflict error - retrieved_submodel.id_short = "myOtherNewIdShort" - with self.assertRaises(couchdb.CouchDBConflictError) as cm: - retrieved_submodel.commit() - self.assertEqual("Could not commit changes to id https://acplt.org/Test_Submodel due to a " - "concurrent modification in the database.", str(cm.exception)) - - # Deleting the submodel with safe_delete should also raise a conflict error. 
Deletion without safe_delete should - # work - with self.assertRaises(couchdb.CouchDBConflictError) as cm: - self.object_store.discard(retrieved_submodel, True) - self.assertEqual("Object with id https://acplt.org/Test_Submodel has been modified in the " - "database since the version requested to be deleted.", str(cm.exception)) - self.object_store.discard(retrieved_submodel, False) - self.assertEqual(0, len(self.object_store)) - - # Committing after deletion should not raise a conflict error due to removal of the source attribute - retrieved_submodel.commit() - - def test_editing(self): - test_object = create_example_submodel() - self.object_store.add(test_object) - - # Test if commit uploads changes - test_object.id_short = "SomeNewIdShort" - test_object.commit() - - # Test if update restores changes - test_object.id_short = "AnotherIdShort" - test_object.update() - self.assertEqual("SomeNewIdShort", test_object.id_short) diff --git a/sdk/test/backend/test_local_file.py b/sdk/test/backend/test_local_file.py index 22aaa3155..7d96d8713 100644 --- a/sdk/test/backend/test_local_file.py +++ b/sdk/test/backend/test_local_file.py @@ -31,10 +31,8 @@ def tearDown(self) -> None: def test_object_store_add(self): test_object = create_example_submodel() self.object_store.add(test_object) - self.assertEqual( - test_object.source, - source_core+"fd787262b2743360f7ad03a3b4e9187e4c088aa37303448c9c43fe4c973dac53.json" - ) + # Note that this test is only checking that there are no errors during adding. + # The actual logic is tested together with retrieval in `test_retrieval`. def test_retrieval(self): test_object = create_example_submodel() @@ -49,11 +47,6 @@ def test_retrieval(self): test_object_retrieved_again = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') self.assertIs(test_object_retrieved, test_object_retrieved_again) - # However, a changed source should invalidate the cached object, so we should get a new copy - test_object_retrieved.source = "couchdb://example.com/example/IRI-https%3A%2F%2Facplt.org%2FTest_Submodel" - test_object_retrieved_third = self.object_store.get_identifiable('https://acplt.org/Test_Submodel') - self.assertIsNot(test_object_retrieved, test_object_retrieved_third) - def test_example_submodel_storing(self) -> None: example_submodel = create_example_submodel() @@ -111,16 +104,3 @@ def test_key_errors(self) -> None: self.object_store.discard(retrieved_submodel) self.assertEqual("'No AAS object with id https://acplt.org/Test_Submodel exists in " "local file database'", str(cm.exception)) - - def test_editing(self): - test_object = create_example_submodel() - self.object_store.add(test_object) - - # Test if commit uploads changes - test_object.id_short = "SomeNewIdShort" - test_object.commit() - - # Test if update restores changes - test_object.id_short = "AnotherIdShort" - test_object.update() - self.assertEqual("SomeNewIdShort", test_object.id_short) diff --git a/sdk/test/model/test_base.py b/sdk/test/model/test_base.py index 1e0432a58..836980025 100644 --- a/sdk/test/model/test_base.py +++ b/sdk/test/model/test_base.py @@ -11,7 +11,6 @@ from collections import OrderedDict from basyx.aas import model -from basyx.aas.backend import backends from basyx.aas.model import Identifier, Identifiable from basyx.aas.examples.data import example_aas @@ -57,23 +56,6 @@ def __init__(self): super().__init__() -class MockBackend(backends.Backend): - @classmethod - def update_object(cls, - updated_object: "Referable", # type: ignore - store_object: "Referable", # type: 
-                      relative_path: List[str]) -> None: ...
-
-    @classmethod
-    def commit_object(cls,
-                      committed_object: "Referable",  # type: ignore
-                      store_object: "Referable",  # type: ignore
-                      relative_path: List[str]) -> None: ...
-
-    update_object = mock.Mock()
-    commit_object = mock.Mock()
-
-
 class ExampleIdentifiable(model.Identifiable):
     def __init__(self):
         super().__init__()
@@ -84,7 +66,6 @@ def generate_example_referable_tree() -> model.Referable:
     Generates an example referable tree, built like this:

     example_grandparent -> example_parent -> example_referable -> example_child -> example_grandchild
-    example_grandparent and example_grandchild both have an nonempty source, pointing to the mock-backend

     :return: example_referable
     """
@@ -111,9 +92,6 @@ def generate_example_referable_with_namespace(id_short: model.NameType,
     example_parent = generate_example_referable_with_namespace("exampleParent", example_referable)
     example_grandparent = generate_example_referable_with_namespace("exampleGrandparent", example_parent)

-    example_grandchild.source = "mockScheme:exampleGrandchild"
-    example_grandparent.source = "mockScheme:exampleGrandparent"
-
     return example_referable


@@ -157,98 +135,6 @@ def __init__(self, value: model.Referable):
         self.assertEqual('Referable must have an identifiable as root object and only parents that are referable',
                          str(cm.exception))

-    def test_update(self):
-        backends.register_backend("mockScheme", MockBackend)
-        example_referable = generate_example_referable_tree()
-        example_grandparent = example_referable.parent.parent
-        example_grandchild = example_referable.get_referable("exampleChild").get_referable("exampleGrandchild")
-
-        # Test update with parameter "recursive=False"
-        example_referable.update(recursive=False)
-        MockBackend.update_object.assert_called_once_with(
-            updated_object=example_referable,
-            store_object=example_grandparent,
-            relative_path=["exampleGrandparent", "exampleParent", "exampleReferable"]
-        )
-        MockBackend.update_object.reset_mock()
-
-        # Test update with parameter "recursive=True"
-        example_referable.update()
-        self.assertEqual(MockBackend.update_object.call_count, 2)
-        MockBackend.update_object.assert_has_calls([
-            mock.call(updated_object=example_referable,
-                      store_object=example_grandparent,
-                      relative_path=["exampleGrandparent", "exampleParent", "exampleReferable"]),
-            mock.call(updated_object=example_grandchild,
-                      store_object=example_grandchild,
-                      relative_path=[])
-        ])
-        MockBackend.update_object.reset_mock()
-
-        # Test update with source != "" in example_referable
-        example_referable.source = "mockScheme:exampleReferable"
-        example_referable.update(recursive=False)
-        MockBackend.update_object.assert_called_once_with(
-            updated_object=example_referable,
-            store_object=example_referable,
-            relative_path=[]
-        )
-        MockBackend.update_object.reset_mock()
-
-        # Test update with no source available
-        example_grandparent.source = ""
-        example_referable.source = ""
-        example_referable.update(recursive=False)
-        MockBackend.update_object.assert_not_called()
-
-    def test_commit(self):
-        backends.register_backend("mockScheme", MockBackend)
-        example_referable = generate_example_referable_tree()
-        example_grandparent = example_referable.parent.parent
-        example_grandchild = example_referable.get_referable("exampleChild").get_referable("exampleGrandchild")
-
-        # Test commit starting from example_referable
-        example_referable.commit()
-        self.assertEqual(MockBackend.commit_object.call_count, 2)
-        MockBackend.commit_object.assert_has_calls([
-            mock.call(committed_object=example_referable,
-                      store_object=example_grandparent,
-                      relative_path=["exampleParent", "exampleReferable"]),
-            mock.call(committed_object=example_grandchild,
-                      store_object=example_grandchild,
-                      relative_path=[])
-        ])
-        MockBackend.commit_object.reset_mock()
-
-        # Test commit starting from example_grandchild
-        example_grandchild.commit()
-        self.assertEqual(MockBackend.commit_object.call_count, 2)
-        MockBackend.commit_object.assert_has_calls([
-            mock.call(committed_object=example_grandchild,
-                      store_object=example_grandparent,
-                      relative_path=["exampleParent", "exampleReferable", "exampleChild", "exampleGrandchild"]),
-            mock.call(committed_object=example_grandchild,
-                      store_object=example_grandchild,
-                      relative_path=[])
-        ])
-        MockBackend.commit_object.reset_mock()
-
-        # Test commit starting from example_grandchild after adding a source to example_referable
-        example_referable.source = "mockScheme:exampleReferable"
-        example_grandchild.commit()
-        self.assertEqual(MockBackend.commit_object.call_count, 3)
-        MockBackend.commit_object.assert_has_calls([
-            mock.call(committed_object=example_grandchild,
-                      store_object=example_referable,
-                      relative_path=["exampleChild", "exampleGrandchild"]),
-            mock.call(committed_object=example_grandchild,
-                      store_object=example_grandparent,
-                      relative_path=["exampleParent", "exampleReferable", "exampleChild", "exampleGrandchild"]),
-            mock.call(committed_object=example_grandchild,
-                      store_object=example_grandchild,
-                      relative_path=[])
-        ])
-
     def test_update_from(self):
         example_submodel = example_aas.create_example_submodel()
         example_relel = example_submodel.get_referable('ExampleRelationshipElement')
@@ -270,20 +156,8 @@ def test_update_from(self):
         self.assertIs(example_submodel.namespace_element_sets[0], example_submodel.submodel_element)
         self.assertIs(example_relel.parent, example_submodel)

-        # Test source update
-        example_relel.source = "scheme:OldRelElSource"
-        other_submodel.source = "scheme:NewSource"
-        other_relel.source = "scheme:NewRelElSource"
-
-        example_submodel.update_from(other_submodel)
-        # Sources of the object itself should not be updated by default
-        self.assertEqual("", example_submodel.source)
-        # Sources of embedded objects should always be updated
-        self.assertEqual("scheme:NewRelElSource", example_relel.source)
-
     def test_update_commit_qualifier_extension_semantic_id(self):
         submodel = model.Submodel("https://acplt.org/Test_Submodel")
-        submodel.update()
         qualifier = model.Qualifier("test", model.datatypes.String)
         extension = model.Extension("test")
         collection = model.SubmodelElementCollection("test")
@@ -293,7 +167,6 @@ def test_update_commit_qualifier_extension_semantic_id(self):
         submodel.add_qualifier(qualifier)
         submodel.add_extension(extension)
         submodel.add_referable(collection)
-        submodel.commit()

         self.assertEqual(next(iter(submodel.qualifier)), qualifier)
         self.assertEqual(next(iter(submodel.extension)), extension)
@@ -320,7 +193,6 @@ def test_update_commit_qualifier_extension_semantic_id(self):
             next(iter(submodel.submodel_element))
         with self.assertRaises(StopIteration):
             next(iter(collection.value))
-        submodel.commit()


 class ExampleNamespaceReferable(model.UniqueIdShortNamespace, model.UniqueSemanticIdNamespace, model.Identifiable):
diff --git a/server/app/interfaces/base.py b/server/app/interfaces/base.py
index 65233a893..686bb92cb 100644
--- a/server/app/interfaces/base.py
+++ b/server/app/interfaces/base.py
@@ -267,14 +267,12 @@ class ObjectStoreWSGIApp(BaseWSGIApp):
     def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]:
         for obj in self.object_store:
             if isinstance(obj, type_):
-                obj.update()
                 yield obj

     def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT:
         identifiable = self.object_store.get(identifier)
         if not isinstance(identifiable, type_):
             raise NotFound(f"No {type_.__name__} with {identifier} found!")
-        identifiable.update()
         return identifiable
diff --git a/server/app/interfaces/repository.py b/server/app/interfaces/repository.py
index c55d9a7eb..03a3974c8 100644
--- a/server/app/interfaces/repository.py
+++ b/server/app/interfaces/repository.py
@@ -36,7 +36,7 @@
 import io
 import json

-from typing import Type, Iterator, List, Dict, Union, Callable, Tuple, Optional
+from typing import Type, Iterator, List, Dict, Union, Callable, Tuple, Optional, Iterable

 import werkzeug.exceptions
 import werkzeug.routing
@@ -188,6 +188,22 @@ def __init__(self, object_store: model.AbstractObjectStore, file_store: aasx.Abs
             "id_short_path": IdShortPathConverter
         }, strict_slashes=False)

+    # TODO: the parameters can be typed via builtin wsgiref with Python 3.11+
+    def __call__(self, environ, start_response) -> Iterable[bytes]:
+        response: Response = self.handle_request(Request(environ))
+        return response(environ, start_response)
+
+    def _get_obj_ts(self, identifier: model.Identifier, type_: Type[model.provider._IT]) -> model.provider._IT:
+        identifiable = self.object_store.get(identifier)
+        if not isinstance(identifiable, type_):
+            raise NotFound(f"No {type_.__name__} with {identifier} found!")
+        return identifiable
+
+    def _get_all_obj_of_type(self, type_: Type[model.provider._IT]) -> Iterator[model.provider._IT]:
+        for obj in self.object_store:
+            if isinstance(obj, type_):
+                yield obj
+
     def _resolve_reference(self, reference: model.ModelReference[model.base._RT]) -> model.base._RT:
         try:
             return reference.resolve(self.object_store)
@@ -337,7 +353,6 @@ def post_aas(self, request: Request, url_args: Dict, response_t: Type[APIRespons
             self.object_store.add(aas)
         except KeyError as e:
             raise Conflict(f"AssetAdministrationShell with Identifier {aas.id} already exists!") from e
-        aas.commit()
         created_resource_url = map_adapter.build(self.get_aas, {
             "aas_id": aas.id
         }, force_external=True)
@@ -364,7 +379,6 @@ def put_aas(self, request: Request, url_args: Dict, response_t: Type[APIResponse
         aas = self._get_shell(url_args)
         aas.update_from(HTTPApiDecoder.request_body(request, model.AssetAdministrationShell,
                                                     is_stripped_request(request)))
-        aas.commit()
         return response_t()

     def delete_aas(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response:
@@ -381,7 +395,6 @@ def put_aas_asset_information(self, request: Request, url_args: Dict, response_t
                                   **_kwargs) -> Response:
         aas = self._get_shell(url_args)
         aas.asset_information = HTTPApiDecoder.request_body(request, model.AssetInformation, False)
-        aas.commit()
         return response_t()

     def get_aas_submodel_refs(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
@@ -398,14 +411,12 @@ def post_aas_submodel_refs(self, request: Request, url_args: Dict, response_t: T
         if sm_ref in aas.submodel:
             raise Conflict(f"{sm_ref!r} already exists!")
         aas.submodel.add(sm_ref)
-        aas.commit()
         return response_t(sm_ref, status=201)

     def delete_aas_submodel_refs_specific(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
                                           **_kwargs) -> Response:
         aas = self._get_shell(url_args)
         aas.submodel.remove(self._get_submodel_reference(aas, url_args["submodel_id"]))
-        aas.commit()
         return response_t()

     def put_aas_submodel_refs_submodel(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
@@ -418,11 +429,9 @@ def put_aas_submodel_refs_submodel(self, request: Request, url_args: Dict, respo
         id_changed: bool = submodel.id != new_submodel.id
         # TODO: https://github.com/eclipse-basyx/basyx-python-sdk/issues/216
         submodel.update_from(new_submodel)
-        submodel.commit()
         if id_changed:
             aas.submodel.remove(sm_ref)
             aas.submodel.add(model.ModelReference.from_referable(submodel))
-            aas.commit()
         return response_t()

     def delete_aas_submodel_refs_submodel(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
@@ -432,7 +441,6 @@ def delete_aas_submodel_refs_submodel(self, request: Request, url_args: Dict, re
         submodel = self._resolve_reference(sm_ref)
         self.object_store.remove(submodel)
         aas.submodel.remove(sm_ref)
-        aas.commit()
         return response_t()

     def aas_submodel_refs_redirect(self, request: Request, url_args: Dict, map_adapter: MapAdapter, response_t=None,
@@ -461,7 +469,6 @@ def post_submodel(self, request: Request, url_args: Dict, response_t: Type[APIRe
             self.object_store.add(submodel)
         except KeyError as e:
             raise Conflict(f"Submodel with Identifier {submodel.id} already exists!") from e
-        submodel.commit()
         created_resource_url = map_adapter.build(self.get_submodel, {
             "submodel_id": submodel.id
         }, force_external=True)
@@ -503,7 +510,6 @@ def get_submodels_reference(self, request: Request, url_args: Dict, response_t:
     def put_submodel(self, request: Request, url_args: Dict, response_t: Type[APIResponse], **_kwargs) -> Response:
         submodel = self._get_submodel(url_args)
         submodel.update_from(HTTPApiDecoder.request_body(request, model.Submodel, is_stripped_request(request)))
-        submodel.commit()
         return response_t()

     def get_submodel_submodel_elements(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
@@ -578,7 +584,6 @@ def put_submodel_submodel_elements_id_short_path(self, request: Request, url_arg
                                                            model.SubmodelElement,  # type: ignore[type-abstract]
                                                            is_stripped_request(request))
         submodel_element.update_from(new_submodel_element)
-        submodel_element.commit()
         return response_t()

     def delete_submodel_submodel_elements_id_short_path(self, request: Request, url_args: Dict,
@@ -637,7 +642,6 @@ def put_submodel_submodel_element_attachment(self, request: Request, url_args: D
                              f"while {submodel_element!r} has content_type {submodel_element.content_type!r}!")
         submodel_element.value = self.file_store.add_file(filename, file_storage.stream,
                                                           submodel_element.content_type)
-        submodel_element.commit()
         return response_t()

     def delete_submodel_submodel_element_attachment(self, request: Request, url_args: Dict,
@@ -660,7 +664,6 @@ def delete_submodel_submodel_element_attachment(self, request: Request, url_args
             pass

         submodel_element.value = None
-        submodel_element.commit()
         return response_t()

     def get_submodel_submodel_element_qualifiers(self, request: Request, url_args: Dict, response_t: Type[APIResponse],
@@ -678,7 +681,6 @@ def post_submodel_submodel_element_qualifiers(self, request: Request, url_args:
         if sm_or_se.qualifier.contains_id("type", qualifier.type):
             raise Conflict(f"Qualifier with type {qualifier.type} already exists!")
         sm_or_se.qualifier.add(qualifier)
-        sm_or_se.commit()
         created_resource_url = map_adapter.build(self.get_submodel_submodel_element_qualifiers, {
             "submodel_id": url_args["submodel_id"],
             "id_shorts": url_args.get("id_shorts") or None,
@@ -697,7 +699,6 @@ def put_submodel_submodel_element_qualifiers(self, request: Request, url_args: D
             raise Conflict(f"A qualifier of type {new_qualifier.type!r} already exists for {sm_or_se!r}")
         sm_or_se.remove_qualifier_by_type(qualifier.type)
         sm_or_se.qualifier.add(new_qualifier)
-        sm_or_se.commit()
         if qualifier_type_changed:
             created_resource_url = map_adapter.build(self.get_submodel_submodel_element_qualifiers, {
                 "submodel_id": url_args["submodel_id"],
@@ -713,7 +714,6 @@ def delete_submodel_submodel_element_qualifiers(self, request: Request, url_args
         sm_or_se = self._get_submodel_or_nested_submodel_element(url_args)
         qualifier_type = url_args["qualifier_type"]
         self._qualifiable_qualifier_op(sm_or_se, sm_or_se.remove_qualifier_by_type, qualifier_type)
-        sm_or_se.commit()
         return response_t()

     # --------- CONCEPT DESCRIPTION ROUTES ---------
@@ -731,7 +731,6 @@ def post_concept_description(self, request: Request, url_args: Dict, response_t:
             self.object_store.add(concept_description)
         except KeyError as e:
             raise Conflict(f"ConceptDescription with Identifier {concept_description.id} already exists!") from e
-        concept_description.commit()
         created_resource_url = map_adapter.build(self.get_concept_description, {
             "concept_id": concept_description.id
         }, force_external=True)
@@ -747,7 +746,6 @@ def put_concept_description(self, request: Request, url_args: Dict, response_t:
         concept_description = self._get_concept_description(url_args)
         concept_description.update_from(HTTPApiDecoder.request_body(request, model.ConceptDescription,
                                                                      is_stripped_request(request)))
-        concept_description.commit()
         return response_t()

     def delete_concept_description(self, request: Request, url_args: Dict, response_t: Type[APIResponse],