diff --git a/README.md b/README.md
index 2049348..8add7c5 100644
--- a/README.md
+++ b/README.md
@@ -17,6 +17,7 @@ Command-line and Python client for downloading and deploying datasets on DBpedia
- [CLI Usage](#cli-usage)
- [Download](#cli-download)
- [Deploy](#cli-deploy)
+ - [Delete](#cli-delete)
- [Module Usage](#module-usage)
- [Deploy](#module-deploy)
@@ -66,8 +67,8 @@ Commands to download the [DBpedia Knowledge Graphs](#dbpedia-knowledge-graphs) g
To download BUSL 1.1 licensed datasets, you need to register and get an access token.
-1. If you do not have a DBpedia Account yet (Forum/Databus), please register at https://account.dbpedia.org
-2. Log in at https://account.dbpedia.org and create your token.
+1. If you do not have a DBpedia Account yet (Forum/Databus), please register at [https://account.dbpedia.org](https://account.dbpedia.org)
+2. Log in at [https://account.dbpedia.org](https://account.dbpedia.org) and create your token.
3. Save the token to a file, e.g. `vault-token.dat`.
### DBpedia Knowledge Graphs
@@ -181,7 +182,7 @@ Options:
--databus TEXT Databus URL (if not given, inferred from databusuri,
e.g. https://databus.dbpedia.org/sparql)
--vault-token TEXT Path to Vault refresh token file
- --databus-key TEXT Databus API key to donwload from protected databus
+ --databus-key TEXT Databus API key to download from protected databus
--authurl TEXT Keycloak token endpoint URL [default:
https://auth.dbpedia.org/realms/dbpedia/protocol/openid-
connect/token]
@@ -190,7 +191,7 @@ Options:
--help Show this message and exit.
```
-### Examples of using the download command
+#### Examples of using the download command
**Download File**: download of a single file
```bash
@@ -396,6 +397,82 @@ docker run --rm -v $(pwd):/data dbpedia/databus-python-client deploy \
./data_folder
```
+
+### Delete
+
+With the delete command you can delete collections, groups, artifacts, and versions from the Databus. Deleting files is not supported via API.
+
+**Note**: Deleting datasets will recursively delete all data associated with the dataset below the specified level. Please use this command with caution. As a security measure, the delete command will prompt you for confirmation before proceeding with any deletion.
+
+```bash
+# Python
+databusclient delete [OPTIONS] DATABUSURIS...
+# Docker
+docker run --rm -v $(pwd):/data dbpedia/databus-python-client delete [OPTIONS] DATABUSURIS...
+```
+
+**Help and further information on delete command:**
+```bash
+# Python
+databusclient delete --help
+# Docker
+docker run --rm -v $(pwd):/data dbpedia/databus-python-client delete --help
+
+# Output:
+Usage: databusclient delete [OPTIONS] DATABUSURIS...
+
+ Delete a dataset from the databus.
+
+ Delete a group, artifact, or version identified by the given databus URI.
+ Will recursively delete all data associated with the dataset.
+
+Options:
+ --databus-key TEXT Databus API key to access protected databus [required]
+ --dry-run Perform a dry run without actual deletion
+ --force Force deletion without confirmation prompt
+ --help Show this message and exit.
+```
+
+To authenticate the delete request, you need to provide an API key with `--databus-key YOUR_API_KEY`.
+
+If you want to perform a dry run without actual deletion, use the `--dry-run` option. This will show you what would be deleted without making any changes.
+
+As a security measure, the delete command will prompt you for confirmation before proceeding with the deletion. If you want to skip this prompt, you can use the `--force` option.
+
+#### Examples of using the delete command
+
+**Delete Version**: delete a specific version
+```bash
+# Python
+databusclient delete https://databus.dbpedia.org/dbpedia/mappings/mappingbased-literals/2022.12.01 --databus-key YOUR_API_KEY
+# Docker
+docker run --rm -v $(pwd):/data dbpedia/databus-python-client delete https://databus.dbpedia.org/dbpedia/mappings/mappingbased-literals/2022.12.01 --databus-key YOUR_API_KEY
+```
+
+**Delete Artifact**: delete an artifact and all its versions
+```bash
+# Python
+databusclient delete https://databus.dbpedia.org/dbpedia/mappings/mappingbased-literals --databus-key YOUR_API_KEY
+# Docker
+docker run --rm -v $(pwd):/data dbpedia/databus-python-client delete https://databus.dbpedia.org/dbpedia/mappings/mappingbased-literals --databus-key YOUR_API_KEY
+```
+
+**Delete Group**: delete a group and all its artifacts and versions
+```bash
+# Python
+databusclient delete https://databus.dbpedia.org/dbpedia/mappings --databus-key YOUR_API_KEY
+# Docker
+docker run --rm -v $(pwd):/data dbpedia/databus-python-client delete https://databus.dbpedia.org/dbpedia/mappings --databus-key YOUR_API_KEY
+```
+
+**Delete Collection**: delete a collection
+```bash
+# Python
+databusclient delete https://databus.dbpedia.org/dbpedia/collections/dbpedia-snapshot-2022-12 --databus-key YOUR_API_KEY
+# Docker
+docker run --rm -v $(pwd):/data dbpedia/databus-python-client delete https://databus.dbpedia.org/dbpedia/collections/dbpedia-snapshot-2022-12 --databus-key YOUR_API_KEY
+```
+
## Module Usage
diff --git a/databusclient/api/delete.py b/databusclient/api/delete.py
new file mode 100644
index 0000000..a3d7625
--- /dev/null
+++ b/databusclient/api/delete.py
@@ -0,0 +1,190 @@
+import json
+import requests
+from typing import List
+
+from databusclient.api.utils import get_databus_id_parts_from_uri, get_json_ld_from_databus
+
+def _confirm_delete(databusURI: str) -> str:
+ """
+ Confirm deletion of a Databus resource with the user.
+
+ Parameters:
+ - databusURI: The full databus URI of the resource to delete
+
+ Returns:
+ - "confirm" if the user confirms deletion
+ - "skip" if the user chooses to skip deletion
+ - "cancel" if the user chooses to cancel the entire deletion process
+ """
+ print(f"Are you sure you want to delete: {databusURI}?")
+ print("\nThis action is irreversible and will permanently remove the resource and all its data.")
+ while True:
+ choice = input("Type 'yes'/'y' to confirm, 'skip'/'s' to skip this resource, or 'cancel'/'c' to abort: ").strip().lower()
+ if choice in ("yes", "y"):
+ return "confirm"
+ elif choice in ("skip", "s"):
+ return "skip"
+ elif choice in ("cancel", "c"):
+ return "cancel"
+ else:
+ print("Invalid input. Please type 'yes'/'y', 'skip'/'s', or 'cancel'/'c'.")
+
+
+def _delete_resource(databusURI: str, databus_key: str, dry_run: bool = False, force: bool = False):
+ """
+ Delete a single Databus resource (version, artifact, group).
+
+ Equivalent to:
+    curl -X DELETE "<databusURI>" -H "accept: */*" -H "X-API-KEY: <databus_key>"
+
+ Parameters:
+ - databusURI: The full databus URI of the resource to delete
+ - databus_key: Databus API key to authenticate the deletion request
+ - dry_run: If True, do not perform the deletion but only print what would be deleted
+ - force: If True, skip confirmation prompt and proceed with deletion
+ """
+
+ # Confirm the deletion request, skip the request or cancel deletion process
+ if not (dry_run or force):
+ action = _confirm_delete(databusURI)
+ if action == "skip":
+ print(f"Skipping: {databusURI}\n")
+ return
+ if action == "cancel":
+ raise KeyboardInterrupt("Deletion cancelled by user.")
+
+ if databus_key is None:
+ raise ValueError("Databus API key must be provided for deletion")
+
+ headers = {
+ "accept": "*/*",
+ "X-API-KEY": databus_key
+ }
+
+ if dry_run:
+ print(f"[DRY RUN] Would delete: {databusURI}")
+ return
+
+ response = requests.delete(databusURI, headers=headers, timeout=30)
+
+ if response.status_code in (200, 204):
+ print(f"Successfully deleted: {databusURI}")
+ else:
+ raise Exception(f"Failed to delete {databusURI}: {response.status_code} - {response.text}")
+
+
+def _delete_list(databusURIs: List[str], databus_key: str, dry_run: bool = False, force: bool = False):
+ """
+ Delete a list of Databus resources.
+
+ Parameters:
+ - databusURIs: List of full databus URIs of the resources to delete
+ - databus_key: Databus API key to authenticate the deletion requests
+ """
+ for databusURI in databusURIs:
+ _delete_resource(databusURI, databus_key, dry_run=dry_run, force=force)
+
+
+def _delete_artifact(databusURI: str, databus_key: str, dry_run: bool = False, force: bool = False):
+ """
+ Delete an artifact and all its versions.
+
+ This function first retrieves all versions of the artifact and then deletes them one by one.
+ Finally, it deletes the artifact itself.
+
+ Parameters:
+ - databusURI: The full databus URI of the artifact to delete
+ - databus_key: Databus API key to authenticate the deletion requests
+ - dry_run: If True, do not perform the deletion but only print what would be deleted
+ """
+ artifact_body = get_json_ld_from_databus(databusURI, databus_key)
+
+ json_dict = json.loads(artifact_body)
+ versions = json_dict.get("databus:hasVersion")
+
+ # Single version case {}
+ if isinstance(versions, dict):
+ versions = [versions]
+ # Multiple versions case [{}, {}]
+
+ # If versions is None or empty skip
+ if versions is None:
+ print(f"No versions found for artifact: {databusURI}")
+ else:
+ version_uris = [v["@id"] for v in versions if "@id" in v]
+ if not version_uris:
+ print(f"No version URIs found in artifact JSON-LD for: {databusURI}")
+ else:
+ # Delete all versions
+ _delete_list(version_uris, databus_key, dry_run=dry_run, force=force)
+
+ # Finally, delete the artifact itself
+ _delete_resource(databusURI, databus_key, dry_run=dry_run, force=force)
+
+def _delete_group(databusURI: str, databus_key: str, dry_run: bool = False, force: bool = False):
+ """
+ Delete a group and all its artifacts and versions.
+
+ This function first retrieves all artifacts of the group, then deletes each artifact (which in turn deletes its versions).
+ Finally, it deletes the group itself.
+
+ Parameters:
+ - databusURI: The full databus URI of the group to delete
+ - databus_key: Databus API key to authenticate the deletion requests
+ - dry_run: If True, do not perform the deletion but only print what would be deleted
+ """
+ group_body = get_json_ld_from_databus(databusURI, databus_key)
+
+ json_dict = json.loads(group_body)
+ artifacts = json_dict.get("databus:hasArtifact", [])
+
+ artifact_uris = []
+ for item in artifacts:
+ uri = item.get("@id")
+ if not uri:
+ continue
+ _, _, _, _, version, _ = get_databus_id_parts_from_uri(uri)
+ if version is None:
+ artifact_uris.append(uri)
+
+ # Delete all artifacts (which deletes their versions)
+ for artifact_uri in artifact_uris:
+ _delete_artifact(artifact_uri, databus_key, dry_run=dry_run, force=force)
+
+ # Finally, delete the group itself
+ _delete_resource(databusURI, databus_key, dry_run=dry_run, force=force)
+
+def delete(databusURIs: List[str], databus_key: str, dry_run: bool, force: bool):
+ """
+ Delete a dataset from the databus.
+
+ Delete a group, artifact, or version identified by the given databus URI.
+ Will recursively delete all data associated with the dataset.
+
+ Parameters:
+ - databusURIs: List of full databus URIs of the resources to delete
+ - databus_key: Databus API key to authenticate the deletion requests
+ - dry_run: If True, will only print what would be deleted without performing actual deletions
+ - force: If True, skip confirmation prompt and proceed with deletion
+ """
+
+ for databusURI in databusURIs:
+ _host, _account, group, artifact, version, file = get_databus_id_parts_from_uri(databusURI)
+
+ if group == "collections" and artifact is not None:
+ print(f"Deleting collection: {databusURI}")
+ _delete_resource(databusURI, databus_key, dry_run=dry_run, force=force)
+ elif file is not None:
+ print(f"Deleting file is not supported via API: {databusURI}")
+ continue # skip file deletions
+ elif version is not None:
+ print(f"Deleting version: {databusURI}")
+ _delete_resource(databusURI, databus_key, dry_run=dry_run, force=force)
+ elif artifact is not None:
+ print(f"Deleting artifact and all its versions: {databusURI}")
+ _delete_artifact(databusURI, databus_key, dry_run=dry_run, force=force)
+ elif group is not None and group != "collections":
+ print(f"Deleting group and all its artifacts and versions: {databusURI}")
+ _delete_group(databusURI, databus_key, dry_run=dry_run, force=force)
+ else:
+ print(f"Deleting {databusURI} is not supported.")
diff --git a/databusclient/api/utils.py b/databusclient/api/utils.py
new file mode 100644
index 0000000..1ffe421
--- /dev/null
+++ b/databusclient/api/utils.py
@@ -0,0 +1,37 @@
+import requests
+from typing import Tuple, Optional
+
+def get_databus_id_parts_from_uri(uri: str) -> Tuple[Optional[str], Optional[str], Optional[str], Optional[str], Optional[str], Optional[str]]:
+ """
+ Extract databus ID parts from a given databus URI.
+
+ Parameters:
+ - uri: The full databus URI
+
+ Returns:
+ A tuple containing (host, accountId, groupId, artifactId, versionId, fileId).
+ Each element is a string or None if not present.
+ """
+ uri = uri.removeprefix("https://").removeprefix("http://")
+ parts = uri.strip("/").split("/")
+ parts += [None] * (6 - len(parts)) # pad with None if less than 6 parts
+ return tuple(parts[:6]) # return only the first 6 parts
+
+def get_json_ld_from_databus(uri: str, databus_key: str | None = None) -> str:
+ """
+ Retrieve JSON-LD representation of a databus resource.
+
+ Parameters:
+ - uri: The full databus URI
+ - databus_key: Optional Databus API key for authentication on protected resources
+
+ Returns:
+ JSON-LD string representation of the databus resource.
+ """
+ headers = {"Accept": "application/ld+json"}
+ if databus_key is not None:
+ headers["X-API-KEY"] = databus_key
+ response = requests.get(uri, headers=headers, timeout=30)
+ response.raise_for_status()
+
+ return response.text
diff --git a/databusclient/cli.py b/databusclient/cli.py
index 3209008..c983544 100644
--- a/databusclient/cli.py
+++ b/databusclient/cli.py
@@ -7,6 +7,7 @@
from databusclient import client
from databusclient.rclone_wrapper import upload
+from databusclient.api.delete import delete as api_delete
@click.group()
def app():
@@ -95,7 +96,7 @@ def deploy(version_id, title, abstract, description, license_url, apikey,
@click.option("--localdir", help="Local databus folder (if not given, databus folder structure is created in current working directory)")
@click.option("--databus", help="Databus URL (if not given, inferred from databusuri, e.g. https://databus.dbpedia.org/sparql)")
@click.option("--vault-token", help="Path to Vault refresh token file")
-@click.option("--databus-key", help="Databus API key to donwload from protected databus")
+@click.option("--databus-key", help="Databus API key to download from protected databus")
@click.option("--authurl", default="https://auth.dbpedia.org/realms/dbpedia/protocol/openid-connect/token", show_default=True, help="Keycloak token endpoint URL")
@click.option("--clientid", default="vault-token-exchange", show_default=True, help="Client ID for token exchange")
def download(databusuris: List[str], localdir, databus, vault_token, databus_key, authurl, clientid):
@@ -112,6 +113,26 @@ def download(databusuris: List[str], localdir, databus, vault_token, databus_key
client_id=clientid,
)
+@app.command()
+@click.argument("databusuris", nargs=-1, required=True)
+@click.option("--databus-key", help="Databus API key to access protected databus", required=True)
+@click.option("--dry-run", is_flag=True, help="Perform a dry run without actual deletion")
+@click.option("--force", is_flag=True, help="Force deletion without confirmation prompt")
+def delete(databusuris: List[str], databus_key: str, dry_run: bool, force: bool):
+ """
+ Delete a dataset from the databus.
+
+ Delete a group, artifact, or version identified by the given databus URI.
+ Will recursively delete all data associated with the dataset.
+ """
+
+ api_delete(
+ databusURIs=databusuris,
+ databus_key=databus_key,
+ dry_run=dry_run,
+ force=force,
+ )
+
if __name__ == "__main__":
app()
diff --git a/databusclient/client.py b/databusclient/client.py
index 8138a84..994e731 100644
--- a/databusclient/client.py
+++ b/databusclient/client.py
@@ -8,6 +8,8 @@
from hashlib import sha256
import os
+from databusclient.api.utils import get_databus_id_parts_from_uri, get_json_ld_from_databus
+
__debug = False
@@ -520,7 +522,7 @@ def __download_file__(url, filename, vault_token_file=None, databus_key=None, au
print("Redirects url: ", url)
# --- 2. Try direct GET ---
- response = requests.get(url, stream=True, allow_redirects=True)
+ response = requests.get(url, stream=True, allow_redirects=True, timeout=30)
www = response.headers.get('WWW-Authenticate', '') # get WWW-Authenticate header if present to check for Bearer auth
# Vault token required if 401 Unauthorized with Bearer challenge
@@ -534,7 +536,7 @@ def __download_file__(url, filename, vault_token_file=None, databus_key=None, au
headers = {"Authorization": f"Bearer {vault_token}"}
# --- 4. Retry with token ---
- response = requests.get(url, headers=headers, stream=True)
+ response = requests.get(url, headers=headers, stream=True, timeout=30)
# Databus API key required if only 401 Unauthorized
elif response.status_code == 401:
@@ -543,7 +545,7 @@ def __download_file__(url, filename, vault_token_file=None, databus_key=None, au
raise ValueError("Databus API key not given for protected download")
headers = {"X-API-KEY": databus_key}
- response = requests.get(url, headers=headers, stream=True)
+ response = requests.get(url, headers=headers, stream=True, timeout=30)
try:
response.raise_for_status() # Raise if still failing
@@ -566,8 +568,7 @@ def __download_file__(url, filename, vault_token_file=None, databus_key=None, au
# TODO: could be a problem of github raw / openflaas
if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
- # raise IOError("Downloaded size does not match Content-Length header")
- print(f"Warning: Downloaded size does not match Content-Length header:\nExpected {total_size_in_bytes}, got {progress_bar.n}")
+ raise IOError("Downloaded size does not match Content-Length header")
def __get_vault_access__(download_url: str,
@@ -620,13 +621,14 @@ def __get_vault_access__(download_url: str,
return vault_token
-def __query_sparql__(endpoint_url, query) -> dict:
+def __query_sparql__(endpoint_url, query, databus_key=None) -> dict:
"""
Query a SPARQL endpoint and return results in JSON format.
Parameters:
- endpoint_url: the URL of the SPARQL endpoint
- query: the SPARQL query string
+ - databus_key: Optional API key for authentication
Returns:
- Dictionary containing the query results
@@ -635,12 +637,14 @@ def __query_sparql__(endpoint_url, query) -> dict:
sparql.method = 'POST'
sparql.setQuery(query)
sparql.setReturnFormat(JSON)
+ if databus_key is not None:
+ sparql.setCustomHttpHeaders({"X-API-KEY": databus_key})
results = sparql.query().convert()
return results
-def __handle_databus_file_query__(endpoint_url, query) -> List[str]:
- result_dict = __query_sparql__(endpoint_url, query)
+def __handle_databus_file_query__(endpoint_url, query, databus_key=None) -> List[str]:
+ result_dict = __query_sparql__(endpoint_url, query, databus_key=databus_key)
for binding in result_dict['results']['bindings']:
if len(binding.keys()) > 1:
print("Error multiple bindings in query response")
@@ -704,7 +708,7 @@ def __get_databus_artifacts_of_group__(json_str: str) -> List[str]:
uri = item.get("@id")
if not uri:
continue
- _, _, _, _, version, _ = __get_databus_id_parts__(uri)
+ _, _, _, _, version, _ = get_databus_id_parts_from_uri(uri)
if version is None:
result.append(uri)
return result
@@ -714,19 +718,12 @@ def wsha256(raw: str):
return sha256(raw.encode('utf-8')).hexdigest()
-def __handle_databus_collection__(uri: str, databus_key: str = None) -> str:
+def __handle_databus_collection__(uri: str, databus_key: str | None = None) -> str:
headers = {"Accept": "text/sparql"}
if databus_key is not None:
headers["X-API-KEY"] = databus_key
- return requests.get(uri, headers=headers).text
-
-
-def __get_json_ld_from_databus__(uri: str, databus_key: str = None) -> str:
- headers = {"Accept": "application/ld+json"}
- if databus_key is not None:
- headers["X-API-KEY"] = databus_key
- return requests.get(uri, headers=headers).text
+ return requests.get(uri, headers=headers, timeout=30).text
def __download_list__(urls: List[str],
@@ -738,7 +735,7 @@ def __download_list__(urls: List[str],
fileLocalDir = localDir
for url in urls:
if localDir is None:
- host, account, group, artifact, version, file = __get_databus_id_parts__(url)
+ _host, account, group, artifact, version, file = get_databus_id_parts_from_uri(url)
fileLocalDir = os.path.join(os.getcwd(), account, group, artifact, version if version is not None else "latest")
print(f"Local directory not given, using {fileLocalDir}")
@@ -749,13 +746,6 @@ def __download_list__(urls: List[str],
print("\n")
-def __get_databus_id_parts__(uri: str) -> Tuple[Optional[str], Optional[str], Optional[str], Optional[str], Optional[str], Optional[str]]:
- uri = uri.removeprefix("https://").removeprefix("http://")
- parts = uri.strip("/").split("/")
- parts += [None] * (6 - len(parts)) # pad with None if less than 6 parts
- return tuple(parts[:6]) # return only the first 6 parts
-
-
def download(
localDir: str,
endpoint: str,
@@ -772,13 +762,14 @@ def download(
endpoint: the databus endpoint URL
databusURIs: identifiers to access databus registered datasets
token: Path to Vault refresh token file
+ databus_key: Databus API key for protected downloads
auth_url: Keycloak token endpoint URL
client_id: Client ID for token exchange
"""
# TODO: make pretty
for databusURI in databusURIs:
- host, account, group, artifact, version, file = __get_databus_id_parts__(databusURI)
+ host, account, group, artifact, version, file = get_databus_id_parts_from_uri(databusURI)
# dataID or databus collection
if databusURI.startswith("http://") or databusURI.startswith("https://"):
@@ -788,8 +779,8 @@ def download(
print(f"SPARQL endpoint {endpoint}")
# databus collection
- if "/collections/" in databusURI: # TODO "in" is not safe! there could be an artifact named collections, need to check for the correct part position in the URI
- query = __handle_databus_collection__(databusURI)
+ if group == "collections":
+ query = __handle_databus_collection__(databusURI, databus_key=databus_key)
res = __handle_databus_file_query__(endpoint, query)
__download_list__(res, localDir, vault_token_file=token, databus_key=databus_key, auth_url=auth_url, client_id=client_id)
# databus file
@@ -797,28 +788,28 @@ def download(
__download_list__([databusURI], localDir, vault_token_file=token, databus_key=databus_key, auth_url=auth_url, client_id=client_id)
# databus artifact version
elif version is not None:
- json_str = __get_json_ld_from_databus__(databusURI)
+ json_str = get_json_ld_from_databus(databusURI, databus_key=databus_key)
res = __handle_databus_artifact_version__(json_str)
__download_list__(res, localDir, vault_token_file=token, databus_key=databus_key, auth_url=auth_url, client_id=client_id)
# databus artifact
elif artifact is not None:
- json_str = __get_json_ld_from_databus__(databusURI)
+ json_str = get_json_ld_from_databus(databusURI, databus_key=databus_key)
latest = __get_databus_latest_version_of_artifact__(json_str)
print(f"No version given, using latest version: {latest}")
- json_str = __get_json_ld_from_databus__(latest)
+ json_str = get_json_ld_from_databus(latest, databus_key=databus_key)
res = __handle_databus_artifact_version__(json_str)
__download_list__(res, localDir, vault_token_file=token, databus_key=databus_key, auth_url=auth_url, client_id=client_id)
# databus group
elif group is not None:
- json_str = __get_json_ld_from_databus__(databusURI)
+ json_str = get_json_ld_from_databus(databusURI, databus_key=databus_key)
artifacts = __get_databus_artifacts_of_group__(json_str)
for artifact_uri in artifacts:
print(f"Processing artifact {artifact_uri}")
- json_str = __get_json_ld_from_databus__(artifact_uri)
+ json_str = get_json_ld_from_databus(artifact_uri, databus_key=databus_key)
latest = __get_databus_latest_version_of_artifact__(json_str)
print(f"No version given, using latest version: {latest}")
- json_str = __get_json_ld_from_databus__(latest)
+ json_str = get_json_ld_from_databus(latest, databus_key=databus_key)
res = __handle_databus_artifact_version__(json_str)
__download_list__(res, localDir, vault_token_file=token, databus_key=databus_key, auth_url=auth_url, client_id=client_id)
@@ -835,5 +826,5 @@ def download(
print("QUERY {}", databusURI.replace("\n", " "))
if endpoint is None: # endpoint is required for queries (--databus)
raise ValueError("No endpoint given for query")
- res = __handle_databus_file_query__(endpoint, databusURI)
+ res = __handle_databus_file_query__(endpoint, databusURI, databus_key=databus_key)
__download_list__(res, localDir, vault_token_file=token, databus_key=databus_key, auth_url=auth_url, client_id=client_id)