diff --git a/applications/backoffice/src/apiclient/workspaces/models/RepositoryType.ts b/applications/backoffice/src/apiclient/workspaces/models/RepositoryType.ts index 9e8e537dd..cbb19b0d3 100644 --- a/applications/backoffice/src/apiclient/workspaces/models/RepositoryType.ts +++ b/applications/backoffice/src/apiclient/workspaces/models/RepositoryType.ts @@ -23,7 +23,9 @@ export enum RepositoryType { Dandi = 'dandi', Figshare = 'figshare', - Github = 'github' + Github = 'github', + Biomodels = 'biomodels', + EBRAINS = "ebrains" } export function RepositoryTypeFromJSON(json: any): RepositoryType { diff --git a/applications/osb-portal/src/apiclient/workspaces/models/EBRAINSRepositoryResource.ts b/applications/osb-portal/src/apiclient/workspaces/models/EBRAINSRepositoryResource.ts new file mode 100644 index 000000000..954892255 --- /dev/null +++ b/applications/osb-portal/src/apiclient/workspaces/models/EBRAINSRepositoryResource.ts @@ -0,0 +1,96 @@ +/* tslint:disable */ +/* eslint-disable */ +/** + * OSB Workspaces manager API + * Opensource Brain Platform - Reference Workspaces manager API + * + * The version of the OpenAPI document: 0.2.0 + * + * + * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + * https://openapi-generator.tech + * Do not edit the class manually. 
+ */ + +import { exists, mapValues } from '../runtime'; +import { + RepositoryResourceBase, + RepositoryResourceBaseFromJSON, + RepositoryResourceBaseFromJSONTyped, + RepositoryResourceBaseToJSON, +} from './'; + +/** + * figshare repository resource + * @export + * @interface EBRAINSRepositoryResource + */ +export interface EBRAINSRepositoryResource { + /** + * file name + * @type {string} + * @memberof EBRAINSRepositoryResource + */ + name?: string; + /** + * Download URL of the Resource + * @type {string} + * @memberof EBRAINSRepositoryResource + */ + path?: string; + /** + * OSB Repository id + * @type {number} + * @memberof EBRAINSRepositoryResource + */ + osbrepositoryId?: number; + /** + * File size in bytes of the RepositoryResource + * @type {number} + * @memberof EBRAINSRepositoryResource + */ + size?: number; + /** + * Date/time the ReposityResource is last modified + * @type {Date} + * @memberof EBRAINSRepositoryResource + */ + timestampModified?: Date; +} + +export function EBRAINSRepositoryResourceFromJSON(json: any): EBRAINSRepositoryResource { + return EBRAINSRepositoryResourceFromJSONTyped(json, false); +} + +export function EBRAINSRepositoryResourceFromJSONTyped(json: any, ignoreDiscriminator: boolean): EBRAINSRepositoryResource { + if ((json === undefined) || (json === null)) { + return json; + } + return { + + 'name': !exists(json, 'name') ? undefined : json['name'], + 'path': !exists(json, 'path') ? undefined : json['path'], + 'osbrepositoryId': !exists(json, 'osbrepository_id') ? undefined : json['osbrepository_id'], + 'size': !exists(json, 'size') ? undefined : json['size'], + 'timestampModified': !exists(json, 'timestamp_modified') ? 
undefined : (new Date(json['timestamp_modified'])), + }; +} + +export function EBRAINSRepositoryResourceToJSON(value?: EBRAINSRepositoryResource | null): any { + if (value === undefined) { + return undefined; + } + if (value === null) { + return null; + } + return { + + 'name': value.name, + 'path': value.path, + 'osbrepository_id': value.osbrepositoryId, + 'size': value.size, + 'timestamp_modified': value.timestampModified === undefined ? undefined : (value.timestampModified.toISOString()), + }; +} + + diff --git a/applications/osb-portal/src/apiclient/workspaces/models/RepositoryType.ts b/applications/osb-portal/src/apiclient/workspaces/models/RepositoryType.ts index f46285a84..1fa7320b6 100644 --- a/applications/osb-portal/src/apiclient/workspaces/models/RepositoryType.ts +++ b/applications/osb-portal/src/apiclient/workspaces/models/RepositoryType.ts @@ -17,6 +17,8 @@ * * dandi - DANDI repository * * figshare - FigShare repository * * github - Github repository + * * biomodels - Biomodels repository + * * ebrains - EBRAINS repository * @export * @enum {string} */ @@ -24,7 +26,8 @@ export enum RepositoryType { Dandi = 'dandi', Figshare = 'figshare', Github = 'github', - Biomodels = 'biomodels' + Biomodels = 'biomodels', + EBRAINS = 'ebrains' } export function RepositoryTypeFromJSON(json: any): RepositoryType { diff --git a/applications/osb-portal/src/apiclient/workspaces/models/index.ts b/applications/osb-portal/src/apiclient/workspaces/models/index.ts index d0d39625f..7152a64a0 100644 --- a/applications/osb-portal/src/apiclient/workspaces/models/index.ts +++ b/applications/osb-portal/src/apiclient/workspaces/models/index.ts @@ -1,4 +1,5 @@ export * from './BiomodelsRepositoryResource'; +export * from './EBRAINSRepositoryResource'; export * from './DandiRepositoryResource'; export * from './DownloadResource'; export * from './FigshareRepositoryResource'; diff --git a/applications/osb-portal/src/pages/RepositoryPage.tsx 
b/applications/osb-portal/src/pages/RepositoryPage.tsx index e540a16f0..6bddfe039 100644 --- a/applications/osb-portal/src/pages/RepositoryPage.tsx +++ b/applications/osb-portal/src/pages/RepositoryPage.tsx @@ -264,6 +264,10 @@ export const RepositoryPage = (props: any) => { "_blank" ); break; + // For figshare, there does not seem to be a version specific URL + case "ebrains": + window.open(`${repository.uri}`, "_blank"); + break; default: window.open(`#`, "_blank"); } diff --git a/applications/workspaces/api/openapi.yaml b/applications/workspaces/api/openapi.yaml index 8b2037ce0..9a55b12a4 100644 --- a/applications/workspaces/api/openapi.yaml +++ b/applications/workspaces/api/openapi.yaml @@ -994,6 +994,7 @@ components: - figshare - github - biomodels + - ebrains type: string RepositoryContentType: description: | @@ -1293,6 +1294,16 @@ components: sha: description: The GIT sha of the resource type: string + BiomodelsRepositoryResource: + description: biomodels repository resource + allOf: + - + $ref: '#/components/schemas/RepositoryResourceBase' + EBRAINSRepositoryResource: + description: EBRAINS repository resource + allOf: + - + $ref: '#/components/schemas/RepositoryResourceBase' DownloadResource: description: Download Resource (files/folders) allOf: @@ -1506,11 +1517,6 @@ components: $ref: '#/components/schemas/Tag' - x-secondary: osbrepository_tag - BiomodelsRepositoryResource: - description: figshare repository resource - allOf: - - - $ref: '#/components/schemas/RepositoryResourceBase' securitySchemes: bearerAuth: scheme: bearer diff --git a/applications/workspaces/server/requirements.txt b/applications/workspaces/server/requirements.txt index 1c23952c9..67f0cd543 100644 --- a/applications/workspaces/server/requirements.txt +++ b/applications/workspaces/server/requirements.txt @@ -15,6 +15,7 @@ cryptography==43.0.3 debugpy==1.8.9 deprecation==2.1.0 durationpy==0.9 +fairgraph==0.12.2 Flask==2.2.5 Flask-Cors==5.0.0 Flask-SQLAlchemy==3.0.2 @@ -75,4 +76,4 @@ 
types-toml==0.10.8.20240310 typing_extensions==4.12.2 urllib3==2.0.7 websocket-client==1.8.0 -Werkzeug==2.2.3 \ No newline at end of file +Werkzeug==2.2.3 diff --git a/applications/workspaces/server/workspaces/models/__init__.py b/applications/workspaces/server/workspaces/models/__init__.py index 9a1bd7f5d..2d595d8d4 100644 --- a/applications/workspaces/server/workspaces/models/__init__.py +++ b/applications/workspaces/server/workspaces/models/__init__.py @@ -46,3 +46,4 @@ from workspaces.models.workspace_resource_entity_all_of import WorkspaceResourceEntityAllOf from workspaces.models.repository_info import RepositoryInfo from workspaces.models.biomodels_repository_resource import BiomodelsRepositoryResource +from workspaces.models.ebrains_repository_resource import EBRAINSRepositoryResource diff --git a/applications/workspaces/server/workspaces/models/ebrains_repository_resource.py b/applications/workspaces/server/workspaces/models/ebrains_repository_resource.py new file mode 100644 index 000000000..5bf917183 --- /dev/null +++ b/applications/workspaces/server/workspaces/models/ebrains_repository_resource.py @@ -0,0 +1,236 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from workspaces.models.base_model_ import Model +from workspaces.models.repository_resource_base import RepositoryResourceBase +from workspaces import util + +from workspaces.models.repository_resource_base import RepositoryResourceBase # noqa: E501 + +class EBRAINSRepositoryResource(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, name=None, path=None, osbrepository_id=None, size=None, timestamp_modified=None, ref=None, sha=None): # noqa: E501 + """EBRAINSRepositoryResource - a model defined in OpenAPI + + :param name: The name of this EBRAINSRepositoryResource. 
# noqa: E501 + :type name: str + :param path: The path of this EBRAINSRepositoryResource. # noqa: E501 + :type path: str + :param osbrepository_id: The osbrepository_id of this EBRAINSRepositoryResource. # noqa: E501 + :type osbrepository_id: int + :param size: The size of this EBRAINSRepositoryResource. # noqa: E501 + :type size: int + :param timestamp_modified: The timestamp_modified of this EBRAINSRepositoryResource. # noqa: E501 + :type timestamp_modified: datetime + :param ref: The ref of this EBRAINSRepositoryResource. # noqa: E501 + :type ref: str + :param sha: The sha of this EBRAINSRepositoryResource. # noqa: E501 + :type sha: str + """ + self.openapi_types = { + 'name': str, + 'path': str, + 'osbrepository_id': int, + 'size': int, + 'timestamp_modified': datetime, + 'ref': str, + 'sha': str + } + + self.attribute_map = { + 'name': 'name', + 'path': 'path', + 'osbrepository_id': 'osbrepository_id', + 'size': 'size', + 'timestamp_modified': 'timestamp_modified', + 'ref': 'ref', + 'sha': 'sha' + } + + self._name = name + self._path = path + self._osbrepository_id = osbrepository_id + self._size = size + self._timestamp_modified = timestamp_modified + self._ref = ref + self._sha = sha + + @classmethod + def from_dict(cls, dikt) -> 'EBRAINSRepositoryResource': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The EBRAINSRepositoryResource of this EBRAINSRepositoryResource. # noqa: E501 + :rtype: EBRAINSRepositoryResource + """ + return util.deserialize_model(dikt, cls) + + @property + def name(self): + """Gets the name of this EBRAINSRepositoryResource. + + file name # noqa: E501 + + :return: The name of this EBRAINSRepositoryResource. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this EBRAINSRepositoryResource. + + file name # noqa: E501 + + :param name: The name of this EBRAINSRepositoryResource. 
+ :type name: str + """ + + self._name = name + + @property + def path(self): + """Gets the path of this EBRAINSRepositoryResource. + + Download URL of the Resource # noqa: E501 + + :return: The path of this EBRAINSRepositoryResource. + :rtype: str + """ + return self._path + + @path.setter + def path(self, path): + """Sets the path of this EBRAINSRepositoryResource. + + Download URL of the Resource # noqa: E501 + + :param path: The path of this EBRAINSRepositoryResource. + :type path: str + """ + + self._path = path + + @property + def osbrepository_id(self): + """Gets the osbrepository_id of this EBRAINSRepositoryResource. + + OSB Repository id # noqa: E501 + + :return: The osbrepository_id of this EBRAINSRepositoryResource. + :rtype: int + """ + return self._osbrepository_id + + @osbrepository_id.setter + def osbrepository_id(self, osbrepository_id): + """Sets the osbrepository_id of this EBRAINSRepositoryResource. + + OSB Repository id # noqa: E501 + + :param osbrepository_id: The osbrepository_id of this EBRAINSRepositoryResource. + :type osbrepository_id: int + """ + + self._osbrepository_id = osbrepository_id + + @property + def size(self): + """Gets the size of this EBRAINSRepositoryResource. + + File size in bytes of the RepositoryResource # noqa: E501 + + :return: The size of this EBRAINSRepositoryResource. + :rtype: int + """ + return self._size + + @size.setter + def size(self, size): + """Sets the size of this EBRAINSRepositoryResource. + + File size in bytes of the RepositoryResource # noqa: E501 + + :param size: The size of this EBRAINSRepositoryResource. + :type size: int + """ + + self._size = size + + @property + def timestamp_modified(self): + """Gets the timestamp_modified of this EBRAINSRepositoryResource. + + Date/time the ReposityResource is last modified # noqa: E501 + + :return: The timestamp_modified of this EBRAINSRepositoryResource. 
+ :rtype: datetime + """ + return self._timestamp_modified + + @timestamp_modified.setter + def timestamp_modified(self, timestamp_modified): + """Sets the timestamp_modified of this EBRAINSRepositoryResource. + + Date/time the ReposityResource is last modified # noqa: E501 + + :param timestamp_modified: The timestamp_modified of this EBRAINSRepositoryResource. + :type timestamp_modified: datetime + """ + + self._timestamp_modified = timestamp_modified + + @property + def ref(self): + """Gets the ref of this EBRAINSRepositoryResource. + + The GIT ref # noqa: E501 + + :return: The ref of this EBRAINSRepositoryResource. + :rtype: str + """ + return self._ref + + @ref.setter + def ref(self, ref): + """Sets the ref of this EBRAINSRepositoryResource. + + The GIT ref # noqa: E501 + + :param ref: The ref of this EBRAINSRepositoryResource. + :type ref: str + """ + + self._ref = ref + + @property + def sha(self): + """Gets the sha of this EBRAINSRepositoryResource. + + The GIT sha of the resource # noqa: E501 + + :return: The sha of this EBRAINSRepositoryResource. + :rtype: str + """ + return self._sha + + @sha.setter + def sha(self, sha): + """Sets the sha of this EBRAINSRepositoryResource. + + The GIT sha of the resource # noqa: E501 + + :param sha: The sha of this EBRAINSRepositoryResource. 
+ :type sha: str + """ + + self._sha = sha diff --git a/applications/workspaces/server/workspaces/openapi/openapi.yaml b/applications/workspaces/server/workspaces/openapi/openapi.yaml index 8b2037ce0..9a55b12a4 100644 --- a/applications/workspaces/server/workspaces/openapi/openapi.yaml +++ b/applications/workspaces/server/workspaces/openapi/openapi.yaml @@ -994,6 +994,7 @@ components: - figshare - github - biomodels + - ebrains type: string RepositoryContentType: description: | @@ -1293,6 +1294,16 @@ components: sha: description: The GIT sha of the resource type: string + BiomodelsRepositoryResource: + description: biomodels repository resource + allOf: + - + $ref: '#/components/schemas/RepositoryResourceBase' + EBRAINSRepositoryResource: + description: EBRAINS repository resource + allOf: + - + $ref: '#/components/schemas/RepositoryResourceBase' DownloadResource: description: Download Resource (files/folders) allOf: @@ -1506,11 +1517,6 @@ components: $ref: '#/components/schemas/Tag' - x-secondary: osbrepository_tag - BiomodelsRepositoryResource: - description: figshare repository resource - allOf: - - - $ref: '#/components/schemas/RepositoryResourceBase' securitySchemes: bearerAuth: scheme: bearer diff --git a/applications/workspaces/server/workspaces/service/osbrepository/__init__.py b/applications/workspaces/server/workspaces/service/osbrepository/__init__.py index 57ee30952..db4c29089 100644 --- a/applications/workspaces/server/workspaces/service/osbrepository/__init__.py +++ b/applications/workspaces/server/workspaces/service/osbrepository/__init__.py @@ -7,7 +7,7 @@ -from workspaces.service.osbrepository.adapters import DandiAdapter, FigShareAdapter, GitHubAdapter, BiomodelsAdapter +from workspaces.service.osbrepository.adapters import DandiAdapter, FigShareAdapter, GitHubAdapter, BiomodelsAdapter, EBRAINSAdapter def get_repository_adapter(osbrepository: OSBRepository=None, repository_type=None, uri=None, *args, **kwargs): @@ -22,6 +22,8 @@ def 
get_repository_adapter(osbrepository: OSBRepository=None, repository_type=No return FigShareAdapter(*args, osbrepository=osbrepository, uri=uri, **kwargs) elif repository_type == "biomodels": return BiomodelsAdapter(*args, osbrepository=osbrepository, uri=uri, **kwargs) + elif repository_type == "ebrains": + return EBRAINSAdapter(*args, osbrepository=osbrepository, uri=uri, **kwargs) return None diff --git a/applications/workspaces/server/workspaces/service/osbrepository/adapters/__init__.py b/applications/workspaces/server/workspaces/service/osbrepository/adapters/__init__.py index 332728c21..330efb5e9 100644 --- a/applications/workspaces/server/workspaces/service/osbrepository/adapters/__init__.py +++ b/applications/workspaces/server/workspaces/service/osbrepository/adapters/__init__.py @@ -2,3 +2,4 @@ from workspaces.service.osbrepository.adapters.figshareadapter import FigShareAdapter from workspaces.service.osbrepository.adapters.githubadapter import GitHubAdapter from workspaces.service.osbrepository.adapters.biomodelsadapter import BiomodelsAdapter +from workspaces.service.osbrepository.adapters.ebrainsadapter import EBRAINSAdapter diff --git a/applications/workspaces/server/workspaces/service/osbrepository/adapters/ebrainsadapter.py b/applications/workspaces/server/workspaces/service/osbrepository/adapters/ebrainsadapter.py new file mode 100644 index 000000000..fa23e3eff --- /dev/null +++ b/applications/workspaces/server/workspaces/service/osbrepository/adapters/ebrainsadapter.py @@ -0,0 +1,472 @@ +import re +import sys +from typing import List, Optional +import requests +from functools import cache, cached_property + + +from fairgraph import KGClient, KGProxy, KGQuery +from fairgraph.errors import ResolutionFailure +from fairgraph.openminds.core import FileRepository, Model, ModelVersion +from cloudharness import log as logger +from cloudharness.utils.secrets import get_secret +from workspaces.models import RepositoryResourceNode, RepositoryInfo +from 
workspaces.models.resource_origin import ResourceOrigin +from workspaces.models.ebrains_repository_resource import EBRAINSRepositoryResource + +from .utils import add_to_tree +from .githubadapter import GitHubAdapter + + +class EBRAINSException(Exception): + pass + + +class EBRAINSAdapter: + """ + Adapter for EBRAINS + + https://search.kg.ebrains.eu/ + """ + + def __init__(self, osbrepository, uri=None): + self.osbrepository = osbrepository + self.uri = uri if uri else osbrepository.uri + self.api_url = "https://search.kg.ebrains.eu" + + # TODO: get permanent application auth token from EBRAINS + try: + kg_client = get_secret("ebrains-user") + except: + kg_client = None + try: + kg_secret = get_secret("ebrains-secret") + except: + kg_secret = None + if kg_client and kg_secret: + self.kg_client = KGClient(client_id=kg_client, client_secret=kg_secret, host="core.kg.ebrains.eu") + else: + token = "" + self.kg_client = KGClient(token=token, host="core.kg.ebrains.eu") + + if not self.kg_client: + raise EBRAINSException("Could not initialise EBRAINS KG client") + + try: + self.model_id = re.search( + f"{self.api_url}/instances/([\\w-]+)", + self.uri.strip("/")).group(1) + + except AttributeError: + raise EBRAINSException(f"{uri} is not a valid EBRAINS URL") + + @cache + def get_model(self, uri: Optional[str] = None) -> Model: + """Get model object using FairGraph + + This returns the main model, not the ModelVersion. If a ModelVersion URL is + passed, it finds the URL of the Model and returns that. 
+ """ + logger.debug(f"Getting: {self.model_id}") + # if it's a Model + try: + model: Model = Model.from_id(id=self.model_id, client=self.kg_client) + # if it's a ModelVersion + except TypeError: + model_version: ModelVersion = ModelVersion.from_id(id=self.model_id, client=self.kg_client) + model_query: KGQuery = model_version.is_version_of + model = model_query.resolve(self.kg_client) + + if not model: + raise EBRAINSException("Could not fetch EBRAINS model") + + return model + + def get_base_uri(self): + return self.uri + + @cache + def get_info(self) -> RepositoryInfo: + """Get repository metadata from model object""" + model = self.get_model() + return RepositoryInfo(name=model.name, contexts=self.get_contexts(), tags=self.get_tags("foobar"), summary=self.get_description()) + + def _get_keywords(self) -> list[str]: + """Get keywords from model + + :returns: list of keywords + + """ + model = self.get_model() + keywords: list[str] = [] + if model.study_targets: + if isinstance(model.study_targets, KGProxy): + keyws = model.study_targets.resolve(self.kg_client) + else: + keyws = model.study_targets + + if isinstance(keyws, list): + for k in keyws: + if isinstance(k, KGProxy): + try: + keywords.append(k.resolve(self.kg_client).name) + except ResolutionFailure: + pass + else: + keywords.append(k.name) + else: + if isinstance(keyws, KGProxy): + keywords.append(keyws.resolve().name) + else: + keywords.append(keyws.name) + + if model.abstraction_level: + if isinstance(model.abstraction_level, KGProxy): + abs_l = model.abstraction_level.resolve(self.kg_client) + else: + abs_l = model.abstraction_level + keywords.append(abs_l.name) + + return keywords + + @cache + def get_contexts(self) -> list[str]: + model = self.get_model() + if isinstance(model.versions, list): + versions = model.versions + else: + versions = [model.versions] + + contexts = [] + + for v in versions: + v_r: Optional[ModelVersion] = None + if isinstance(v, KGProxy): + try: + v_r = 
v.resolve(self.kg_client) + except ResolutionFailure: + logger.error(f"ERROR: Could not resolve {v.id}") + continue + else: + v_r = v + + if v_r: + contexts.append(v_r.version_identifier) + + return contexts + + def _get_file_storage_url(self, context: str) -> Optional[str]: + """Get the URL of the file storage for the provided context + + :param context: TODO + :returns: TODO + + """ + model = self.get_model() + if isinstance(model.versions, list): + versions = model.versions + else: + versions = [model.versions] + + for v in versions: + v_r: Optional[ModelVersion] = None + if isinstance(v, KGProxy): + try: + v_r = v.resolve(self.kg_client) + except ResolutionFailure: + logger.error(f"ERROR: Could not resolve {v.id}") + continue + else: + v_r = v + if v_r: + if context == v_r.version_identifier: + repository: FileRepository = v_r.repository + try: + repository_r = repository.resolve(self.kg_client) + return repository_r.name + except ResolutionFailure: + logger.error(f"Could not resolve {repository.id}") + + return None + + + def _get_ebrains_data_proxy_file_list(self, url: str) -> dict[str, str]: + """Get the list of files from an ebrains data proxy URL. + + The complete url will be of this form: + + .. code-block:: + + https://data-proxy.ebrains.eu/api/v1/buckets/m-0ffae3c2-443c-44fd-919f-70a4b01506a4?prefix=CA1_pyr_mpg150211_A_idA_080220241322/ + + The API documentation is here: + https://data-proxy.ebrains.eu/api/docs + + This URL returns a JSON response with all the objects listed. + So we can get the file list from there. To get the download URL, we need + this end point for each object in the list: + + .. 
 code-block:: + + /v1/buckets/{bucket_name}/{object_name} + + :param url: url of repository + :returns: dict of files and their download URLs + + """ + file_list: dict[str, str] = {} + top_level_url: str = url.split("?prefix=")[0] + + r = requests.get(url) + if r.status_code == 200: + logger.debug("data-proxy: response is") + logger.debug(r) + + json_r = r.json() + object_list = json_r["objects"] + for anobject in object_list: + object_url = top_level_url + "/" + anobject["name"] + file_list[anobject["name"]] = object_url + + else: + logger.error(f"Something went wrong: {r.status_code}") + + if len(file_list.items()) == 0: + logger.warn("No files found for this: check kg.ebrains.eu to verify") + + return file_list + + + def _get_cscs_file_list(self, url: str) -> dict[str, str]: + """Get the list of files from a CSCS repository URL. + + The complete url will be of this form: + + .. code-block: + + https://object.cscs.ch/v1/AUTH_c0a333ecf7c045809321ce9d9ecdfdea/hippocampus_optimization/rat/CA1/v4.0.5/optimizations_Python3/CA1_pyr_cACpyr_mpg141208_B_idA_20190328144006/CA1_pyr_cACpyr_mpg141208_B_idA_20190328144006.zip?use_cell=cell_seed3_0.hoc&bluenaas=true + + To get the file list, we only need the top level: + + .. code-block:: + + https://object.cscs.ch/v1/AUTH_c0a333ecf7c045809321ce9d9ecdfdea/hippocampus_optimization + + We then need to limit the file list to the bits we want, because the top + level container contains all the files and all the versions: + + .. code-block:: + + rat/CA1/v4.0.5/optimizations_Python3/CA1_pyr_cACpyr_mpg141208_B_idA_20190328144006/CA1_pyr_cACpyr_mpg141208_B_idA_20190328144006 + + Note that even if the url is wrong (eg, in the shown example, the file list + does not include a folder called `optimizations_Python3` at all), the cscs + server still returns a zip. However, manually checking search.kg.ebrains.eu + shows that the corresponding entry does not have a file list. It simply + says "no files available". 
+ + Also note that the url may include a `prefix=` parameter which specifies + the file directory structure. + + Most of these directories also include a zipped version. For the moment, we + include this in the file list. + + :param url: url of repository + :returns: dict of files and their download URLs + + """ + file_list: dict[str, str] = {} + file_list_url: str = "" + file_list_string: str = "" + + special_suffixes = [ + "py", + "hoc", + "xz", + "zip", + "pkl", + "json", + "pdf", + "mod", + "txt", + "png", + "zip", + "ipynb", + ] + # if prefixed by data.kg, get rid of it so that we have only the cscs URL + new_url = url.replace("https://data.kg.ebrains.eu/zip?container=", "") + + logger.debug(f"Getting file list for {new_url}") + # default + url_portions: list[str] = new_url.split("/") + file_list_url = "/".join(url_portions[:6]) + file_list_string = "/".join(url_portions[6:]) + + # special cases + if ".zip?" in new_url: + logger.debug(f"Cscs url format with zip: {new_url}") + url_portions: list[str] = new_url.split(".zip")[0].split("/") + file_list_url = "/".join(url_portions[:6]) + file_list_string = "/".join(url_portions[6:]) + elif "?prefix=" in new_url: + logger.debug(f"Cscs url format with prefix: {new_url}") + file_list_url = new_url.split("?prefix=")[0] + file_list_string = new_url.split("?prefix=")[1] + else: + # handle single files: + # it is possible that they provide zip URLs but all the files are also + # individually available. there's no way for us to know that the whole file list + # is also available, though, so we simply provide the zip URL too. 
 logger.debug(f"Other cscs url format: {new_url}") + for suf in special_suffixes: + if new_url.endswith(suf): + file_list = {file_list_string: new_url} + return file_list + + # handle file lists + r = requests.get(file_list_url) + if r.status_code == 200: + for line in r.text.split(): + if ( + line.startswith(file_list_string) + and not line.endswith("/") + and line != file_list_string + ): + file_list[line] = file_list_url + "/" + line + else: + logger.error(f"Something went wrong: {r.status_code}") + + if len(file_list) == 0: + logger.warn("No files found for this: check kg.ebrains.eu to verify") + return file_list + + + def get_resources(self, context): + logger.debug(f"Getting resources: {context}") + + download_url = self._get_file_storage_url(context) + if "github" in download_url.lower(): + logger.debug("GITHUB resource") + # these may have things like "tree" in them, we only want github.com/user/repository + github_url_parts = download_url.split("/") + github_url = "/".join(github_url_parts[:5]) + gh_adapter = GitHubAdapter(self.osbrepository, github_url) + return gh_adapter.get_resources(context) + + elif "modeldb" in download_url.lower(): + logger.debug("Modeldb resource") + model_id = "" + # urls with model id after the last slash + # https://modeldb.science/249408?tab=7 + if "modeldb.science" in download_url or "modeldb.yale.edu" in download_url: + model_id = download_url.split("/")[-1].split('?')[0] + # legacy urls with model id as a parameter + # https://senselab.med.yale.edu/ModelDB/showmodel.cshtml?model=249408#tabs-1 + else: + model_id = download_url.split("?model=")[-1].split('#')[0] + + modeldb_url = f"https://github.com/ModelDBRepository/{model_id}" + gh_adapter = GitHubAdapter(self.osbrepository, modeldb_url) + # versions on EBRAINS do not match the versions on ModelDB/GitHub + # so we use the first context + contexts = gh_adapter.get_contexts() + return gh_adapter.get_resources(contexts[0]) + + elif "cscs.ch" in download_url: + 
 logger.debug("CSCS resource") + files = self._get_cscs_file_list(download_url) + elif "data-proxy.ebrains.eu" in download_url: + logger.debug("Data-proxy resource") + files = self._get_ebrains_data_proxy_file_list(download_url) + else: + files = ["TODO: handle other special cases"] + + logger.debug(f"Files are: {files}") + + tree = RepositoryResourceNode( + resource=EBRAINSRepositoryResource( + name="/", + path="/", + osbrepository_id=self.osbrepository.id, + ref=context, + ), + children=[], + ) + + for afile, url in files.items(): + add_to_tree( + tree=tree, + tree_path=afile.split("/"), + path=url, + osbrepository_id=self.osbrepository.id, + ) + + return tree + + def get_description(self, context: str = "foobar"): + logger.debug(f"Getting description: {context}") + try: + result = self.get_model() + return result.description + except Exception as e: + logger.debug( + "unable to get the description from EBRAINS, %s", str(e)) + return "" + + def get_tags(self, context): + # all versions have same tags for EBRAINS, so we pass any rubbish to the argument + logger.debug(f"Getting tags: {context}") + return self._get_keywords() + + def create_copy_task(self, workspace_id, origins: List[ResourceOrigin]): + import workspaces.service.workflow as workflow + + # no file tree in EBRAINS from the looks of it + folder = self.osbrepository.name + + download_url = self._get_file_storage_url(self.osbrepository.default_context) + if "github" in download_url: + # these may have things like "tree" in them, we only want github.com/user/repository + github_url_parts = download_url.split("/") + github_url = "/".join(github_url_parts[:5]) + gh_adapter = GitHubAdapter(self.osbrepository, uri=github_url) + return gh_adapter.create_copy_task(workspace_id, origins) + + elif "modeldb" in download_url.lower(): + 
model_id = download_url.split("/")[-1].split('?')[0] + # legacy urls with model id as a parameter + # https://senselab.med.yale.edu/ModelDB/showmodel.cshtml?model=249408#tabs-1 + else: + model_id = download_url.split("?model=")[-1].split('#')[0] + + modeldb_url = f"https://github.com/ModelDBRepository/{model_id}" + gh_adapter = GitHubAdapter(self.osbrepository, modeldb_url) + return gh_adapter.create_copy_task(workspace_id, origins) + + # if nothing is selected, origins has one entry with path "/" + # we get the file list and download individual files + if len(origins) == 1 and origins[0].path == "/": + files: dict[str, str] = {} + if "cscs.ch" in download_url: + files = self._get_cscs_file_list(download_url) + elif "data-proxy.ebrains.eu" in download_url: + files = self._get_ebrains_data_proxy_file_list(download_url) + + paths = "\\".join(list(files.values())) + else: + paths = "\\".join(o.path for o in origins) + + # username / password are not currently used + return workflow.create_copy_task( + image_name="workspaces-ebrains-copy", + workspace_id=workspace_id, + folder=folder, + url=f"{self.model_id}.{self.osbrepository.default_context}", + paths=paths, + username="", + password="", + ) diff --git a/applications/workspaces/tasks/ebrains-copy/Dockerfile b/applications/workspaces/tasks/ebrains-copy/Dockerfile new file mode 100644 index 000000000..f59d0fb53 --- /dev/null +++ b/applications/workspaces/tasks/ebrains-copy/Dockerfile @@ -0,0 +1,15 @@ +ARG CLOUDHARNESS_BASE +FROM $CLOUDHARNESS_BASE + +# much faster than curl/wget +# https://pkgs.alpinelinux.org/packages?name=aria2&branch=edge +RUN apk add aria2 unzip + + +ADD . 
/ + +ENV shared_directory / +ENV workspace_id 1 + +RUN chmod +x ./run.sh +CMD ./run.sh diff --git a/applications/workspaces/tasks/ebrains-copy/README.md b/applications/workspaces/tasks/ebrains-copy/README.md new file mode 100644 index 000000000..feec14f05 --- /dev/null +++ b/applications/workspaces/tasks/ebrains-copy/README.md @@ -0,0 +1,16 @@ +# EBRAINS copy task + + +How to test + +``` +shared_directory=/tmp folder=osbv2/develop url=BIOMD0000000998.9 ./run.sh +``` + +The above should download the model archive and extract it inside /tmp/osbv2/develop + + +``` +shared_directory=/tmp folder=osbv2/develop paths='https://example.org/a.txt\https://example.org/b.txt' ./run.sh +``` +This should download the listed files (separated by '\') into /tmp/osbv2/develop diff --git a/applications/workspaces/tasks/ebrains-copy/docker-compose.yaml b/applications/workspaces/tasks/ebrains-copy/docker-compose.yaml new file mode 100644 index 000000000..e099cf379 --- /dev/null +++ b/applications/workspaces/tasks/ebrains-copy/docker-compose.yaml @@ -0,0 +1,6 @@ +version: "3.7" +services: + copy: + image: osb/workspaces-ebrains-copy:latest + environment: + - url=https://www.ebi.ac.uk/ebrains/MODEL2311220001s diff --git a/applications/workspaces/tasks/ebrains-copy/run.sh b/applications/workspaces/tasks/ebrains-copy/run.sh new file mode 100644 index 000000000..9f2de1914 --- /dev/null +++ b/applications/workspaces/tasks/ebrains-copy/run.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +set -e + +# remove the pvc from the path (if it has one) + # and append the folder +export download_path=`echo $shared_directory | cut -d ":" -f 2`/"${folder}" + +timestamp="$(date +"%Y%m%d%H%M%S-biomodels")" + +mkdir -p "${download_path}" +cd "${download_path}" + +# check if paths has a value, otherwise download the archive and unzip it +# note: we don't use the archive system because the archive is generated on the +# fly and can make us wait for an unspecified amount of time, which tools can't +# work with +# -> left here 
for completeness +if [ -z "$paths" ]; then + echo Biomodels downloading archive of "${url}" to "${download_path}" + # use ..="true" and ..="false" here, otherwise aria2c gets confused + aria2c --retry-wait=2 --max-tries=5 --timeout=300 --max-concurrent-downloads=5 --max-connection-per-server=5 --allow-overwrite="true" --auto-file-renaming="false" --out="$timestamp.omex" "https://www.ebi.ac.uk/biomodels/model/download/${url}" + unzip -o "$timestamp.omex" && rm -vf "$timestamp.omex" +else + touch filelist + # Split paths on the '\' separator (the adapter joins file URLs with '\') and download each path + IFS='\' + for path in $paths; do + echo Biomodels copy "${path}" to "${download_path}" + echo "${path}" >> filelist + done + echo Biomodels downloading files + aria2c --retry-wait=2 --max-tries=5 --input-file=filelist --max-concurrent-downloads=5 --max-connection-per-server=5 --allow-overwrite "true" --auto-file-renaming "false" + rm filelist -f +fi + +# fix permissions +chown -R 1000:100 "${download_path}" diff --git a/libraries/client/workspaces/workspaces_cli/model/ebrains_repository_resource.py b/libraries/client/workspaces/workspaces_cli/model/ebrains_repository_resource.py new file mode 100644 index 000000000..93bbd398f --- /dev/null +++ b/libraries/client/workspaces/workspaces_cli/model/ebrains_repository_resource.py @@ -0,0 +1,327 @@ +""" + Workspaces manager API + + Opensource Brain Platform - Reference Workspaces manager API # noqa: E501 + + The version of the OpenAPI document: 0.2.0 + Generated by: https://openapi-generator.tech +""" + + +import re # noqa: F401 +import sys # noqa: F401 + +from workspaces_cli.model_utils import ( # noqa: F401 + ApiTypeError, + ModelComposed, + ModelNormal, + ModelSimple, + cached_property, + change_keys_js_to_python, + convert_js_args_to_python_args, + date, + datetime, + file_type, + none_type, + validate_get_composed_info, +) +from ..model_utils import OpenApiModel +from workspaces_cli.exceptions import ApiAttributeError + + +def lazy_import(): + from 
workspaces_cli.model.repository_resource_base import RepositoryResourceBase + globals()['RepositoryResourceBase'] = RepositoryResourceBase + + +class EBRAINSRepositoryResource(ModelComposed): + """NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + + Do not edit the class manually. + + Attributes: + allowed_values (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + with a capitalized key describing the allowed value and an allowed + value. These dicts store the allowed enum values. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + discriminator_value_class_map (dict): A dict to go from the discriminator + variable value to the discriminator class name. + validations (dict): The key is the tuple path to the attribute + and the for var_name this is (var_name,). The value is a dict + that stores validations for max_length, min_length, max_items, + min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, + inclusive_minimum, and regex. + additional_properties_type (tuple): A tuple of classes accepted + as additional properties values. + """ + + allowed_values = { + } + + validations = { + } + + @cached_property + def additional_properties_type(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + """ + lazy_import() + return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 + + _nullable = False + + @cached_property + def openapi_types(): + """ + This must be a method because a model may have properties that are + of type self, this must run after the class is loaded + + Returns + openapi_types (dict): The key is attribute name + and the value is attribute type. 
+ """ + lazy_import() + return { + 'name': (str,), # noqa: E501 + 'path': (str,), # noqa: E501 + 'osbrepository_id': (int,), # noqa: E501 + 'size': (int,), # noqa: E501 + 'timestamp_modified': (datetime,), # noqa: E501 + } + + @cached_property + def discriminator(): + return None + + + attribute_map = { + 'name': 'name', # noqa: E501 + 'path': 'path', # noqa: E501 + 'osbrepository_id': 'osbrepository_id', # noqa: E501 + 'size': 'size', # noqa: E501 + 'timestamp_modified': 'timestamp_modified', # noqa: E501 + } + + read_only_vars = { + } + + @classmethod + @convert_js_args_to_python_args + def _from_openapi_data(cls, *args, **kwargs): # noqa: E501 + """EBRAINSRepositoryResource - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. + Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. 
+ Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): file name. [optional] # noqa: E501 + path (str): Download URL of the Resource. [optional] # noqa: E501 + osbrepository_id (int): OSB Repository id. [optional] # noqa: E501 + size (int): File size in bytes of the RepositoryResource. [optional] # noqa: E501 + timestamp_modified (datetime): Date/time the ReposityResource is last modified. [optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + self = super(OpenApiModel, cls).__new__(cls) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + constant_args = { + '_check_type': _check_type, + '_path_to_item': _path_to_item, + '_spec_property_naming': _spec_property_naming, + '_configuration': _configuration, + '_visited_composed_classes': self._visited_composed_classes, + } + composed_info = validate_get_composed_info( + constant_args, kwargs, self) + self._composed_instances = composed_info[0] + self._var_name_to_model_instances = composed_info[1] + self._additional_properties_model_instances = composed_info[2] + discarded_args = composed_info[3] + + for var_name, var_value in kwargs.items(): + if var_name in discarded_args and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self._additional_properties_model_instances: + # discard variable. + continue + setattr(self, var_name, var_value) + + return self + + required_properties = set([ + '_data_store', + '_check_type', + '_spec_property_naming', + '_path_to_item', + '_configuration', + '_visited_composed_classes', + '_composed_instances', + '_var_name_to_model_instances', + '_additional_properties_model_instances', + ]) + + @convert_js_args_to_python_args + def __init__(self, *args, **kwargs): # noqa: E501 + """EBRAINSRepositoryResource - a model defined in OpenAPI + + Keyword Args: + _check_type (bool): if True, values for parameters in openapi_types + will be type checked and a TypeError will be + raised if the wrong type is input. 
+ Defaults to True + _path_to_item (tuple/list): This is a list of keys or values to + drill down to the model in received_data + when deserializing a response + _spec_property_naming (bool): True if the variable names in the input data + are serialized names, as specified in the OpenAPI document. + False if the variable names in the input data + are pythonic names, e.g. snake case (default) + _configuration (Configuration): the instance to use when + deserializing a file_type parameter. + If passed, type conversion is attempted + If omitted no type conversion is done. + _visited_composed_classes (tuple): This stores a tuple of + classes that we have traveled through so that + if we see that class again we will not use its + discriminator again. + When traveling through a discriminator, the + composed schema that is + is traveled through is added to this set. + For example if Animal has a discriminator + petType and we pass in "Dog", and the class Dog + allOf includes Animal, we move through Animal + once using the discriminator, and pick Dog. + Then in Dog, we will make an instance of the + Animal class but this time we won't travel + through its discriminator because we passed in + _visited_composed_classes = (Animal,) + name (str): file name. [optional] # noqa: E501 + path (str): Download URL of the Resource. [optional] # noqa: E501 + osbrepository_id (int): OSB Repository id. [optional] # noqa: E501 + size (int): File size in bytes of the RepositoryResource. [optional] # noqa: E501 + timestamp_modified (datetime): Date/time the ReposityResource is last modified. 
[optional] # noqa: E501 + """ + + _check_type = kwargs.pop('_check_type', True) + _spec_property_naming = kwargs.pop('_spec_property_naming', False) + _path_to_item = kwargs.pop('_path_to_item', ()) + _configuration = kwargs.pop('_configuration', None) + _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) + + if args: + raise ApiTypeError( + "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( + args, + self.__class__.__name__, + ), + path_to_item=_path_to_item, + valid_classes=(self.__class__,), + ) + + self._data_store = {} + self._check_type = _check_type + self._spec_property_naming = _spec_property_naming + self._path_to_item = _path_to_item + self._configuration = _configuration + self._visited_composed_classes = _visited_composed_classes + (self.__class__,) + + constant_args = { + '_check_type': _check_type, + '_path_to_item': _path_to_item, + '_spec_property_naming': _spec_property_naming, + '_configuration': _configuration, + '_visited_composed_classes': self._visited_composed_classes, + } + composed_info = validate_get_composed_info( + constant_args, kwargs, self) + self._composed_instances = composed_info[0] + self._var_name_to_model_instances = composed_info[1] + self._additional_properties_model_instances = composed_info[2] + discarded_args = composed_info[3] + + for var_name, var_value in kwargs.items(): + if var_name in discarded_args and \ + self._configuration is not None and \ + self._configuration.discard_unknown_keys and \ + self._additional_properties_model_instances: + # discard variable. + continue + setattr(self, var_name, var_value) + if var_name in self.read_only_vars: + raise ApiAttributeError(f"`{var_name}` is a read-only attribute. 
Use `from_openapi_data` to instantiate " + f"class with read only attributes.") + + @cached_property + def _composed_schemas(): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. If we kept this at the class + # level we would get an error beause the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading + lazy_import() + return { + 'anyOf': [ + ], + 'allOf': [ + RepositoryResourceBase, + ], + 'oneOf': [ + ], + } diff --git a/local-scripts/osbv2-local.sh b/local-scripts/osbv2-local.sh new file mode 100755 index 000000000..491d182ce --- /dev/null +++ b/local-scripts/osbv2-local.sh @@ -0,0 +1,228 @@ +#!/bin/bash + +# Copyright 2025 OSBv2 contributors +# Author: Ankur Sinha +# File : osbv2-local.sh +# +# Script to help with local deployments of OSBv2 +# To be executed from the root folder of the OSBv2 repository. + +# depends on how you install it, by default in the parent folder from where +# this script is called +CLOUD_HARNESS_URL="https://github.com/MetaCell/cloud-harness.git" +CLOUD_HARNESS_DIR_LOCATION="../" +CLOUD_HARNESS_DIR="${CLOUD_HARNESS_DIR_LOCATION}/cloud-harness" +CLOUD_HARNESS_DEFAULT="release/2.5.0" +CLOUD_HARNESS_BRANCH="" +SKAFFOLD="skaffold" + +# Application to deploy +DEPLOYMENT_APP="" +DEFAULT_DEPLOYMENT_APP="osb-portal" + +# Py version +# Cloud harness doesn't always work on newer versions +PY_VERSION="python3.12" +# +# if not, specify location of virtualenv here +OSB_DIR="./" +VENV_DIR="${OSB_DIR}/.venv" + +deploy () { + if ! command -v helm >/dev/null || ! command -v $SKAFFOLD >/dev/null || ! command -v harness-deployment >/dev/null ; then + echo "helm, skaffold, and cloud-harness are required but were not found." 
+ echo + echo "Please install helm and skaffold as noted in their documentation:" + echo "- https://helm.sh/docs/intro/install/" + echo "- https://skaffold.dev/docs/install/" + echo + echo "To install cloud-harness, please see the -u/-U options" + exit 1 + fi + + pushd $OSB_DIR + echo "-> deploying" + echo "-> checking (and starting) docker daemon" + systemctl is-active docker --quiet || sudo systemctl start docker.service + echo "-> starting minkube" + minikube start --memory="10000mb" --cpus=8 --disk-size="60000mb" --kubernetes-version=v1.32 --driver=docker || notify_fail "Failed: minikube start" + echo "-> enabling ingress addon" + minikube addons enable ingress || notify_fail "Failed: ingress add on" + echo "-> setting up osblocal namespace" + kubectl get ns osblocal || kubectl create ns osblocal || notify_fail "Failed: ns set up" + echo "-> setting up minikube docker env" + eval $(minikube docker-env) || notify_fail "Failed: env setup" + echo "-> harnessing deployment" + harness_deployment + echo "-> running skaffold" + $SKAFFOLD dev --cleanup=false || { notify_fail "Failed: skaffold" ; minikube stop; } + #$SKAFFOLD dev || notify_fail "Failed: skaffold" + popd +} + +function harness_deployment() { + # `-e local` does not build nwbexplorer/netpyne + # use -e dev for that, but that will send e-mails to Filippo and Zoraan + # suggested: create a new file in deploy/values-ankur.yaml where you use + # your e-mail address, and then use `-e ankur` to use these values. + pushd $OSB_DIR + harness-deployment ../cloud-harness . -l -n osblocal -d osb.local -dtls -m build -e local -i $DEPLOYMENT_APP || notify_fail "Failed: harness-deployment" + #harness-deployment ../cloud-harness . -l -n osblocal -d osb.local -u -dtls -m build -e local -i workspaces || notify_fail "Failed: harness-deployment" + popd +} + +notify_fail () { + if ! 
command -v notify-send >/dev/null + then + echo "-> $1" + else + notify-send -t 1000 -i "org.gnome.Terminal" -a "Terminal" "OSBv2 deployment" "$1" + fi + exit 1 +} + +function update_cloud_harness() { + echo "Updating cloud harness" + CLOUD_HARNESS_PACKAGES=$(pip list | grep cloud | tr -s " " | cut -d " " -f1 | tr '\n' ' ') + pip uninstall ${CLOUD_HARNESS_PACKAGES} -y || echo "No cloud harness packages installed" + if ! [ -d "${CLOUD_HARNESS_DIR}" ] + then + echo "Cloud harness folder does not exist. Cloning" + pushd "${CLOUD_HARNESS_DIR_LOCATION}" && git clone "${CLOUD_HARNESS_URL}" && popd + fi + pushd "$CLOUD_HARNESS_DIR" && git clean -dfx && git fetch && git checkout ${CLOUD_HARNESS_BRANCH} && git pull && pip install -r requirements.txt && popd +} + +function activate_venv() { + if [ -f "${VENV_DIR}/bin/activate" ] + then + source "${VENV_DIR}/bin/activate" + else + echo "No virtual environment found at ${VENV_DIR}. Creating" + ${PY_VERSION} -m venv "${VENV_DIR}" && source "${VENV_DIR}/bin/activate" + fi +} + +# don't actually need this because when the script exists, the environment is +# lost anyway +function deactivate_venv() { + deactivate +} + +function print_versions() { + echo "** docker **" + docker version + echo "\n** minikube **" + minikube version + echo "\n** cloud harness **" + pushd "${CLOUD_HARNESS_DIR}" && git log --oneline | head -1 && popd + echo "\n** helm **" + helm version + echo "\n** skaffold **" + $SKAFFOLD version + echo "\n** python **" + python --version + echo "\n** git **" + git --version +} + +clean () { + pushd $OSB_DIR + echo "-> Cleaning up all images." 
+ docker image prune --all + docker builder prune --all + $SKAFFOLD delete + minikube stop + minikube delete + docker image prune --all + docker builder prune --all + popd +} + +usage () { + echo "Script for automating local deployments of OSBv2" + echo + echo "USAGE $0 -[dDbBvuUch]" + echo + echo "-d: deploy" + echo "-D: deploy " + echo "-b: run 'harness-deployment': required when you have made changes and want to refresh the deployment" + echo "-B: run 'harness-deployment ': required when you have made changes and want to refresh the deployment" + echo "-v: print version information" + echo "-u branch: update and install provided cloud_harness branch $CLOUD_HARNESS_DEFAULT" + echo "-U branch: update and install specified cloud_harness branch ($CLOUD_HARNESS_DEFAULT)" + echo "-c: clean up minikube and docker: sometimes needed with an outdated cache" + echo "-h: print this and exit" +} + +if [ $# -lt 1 ] +then + usage + exit 1 +fi + + +# parse options +while getopts ":vdD:uU:hbB:c" OPTION +do + case $OPTION in + v) + activate_venv + print_versions + deactivate_venv + exit 0 + ;; + b) + DEPLOYMENT_APP="${DEFAULT_DEPLOYMENT_APP}" + activate_venv + harness_deployment + deactivate_venv + exit 0 + ;; + B) + DEPLOYMENT_APP="${OPTARG}" + activate_venv + harness_deployment + deactivate_venv + exit 0 + ;; + d) + DEPLOYMENT_APP="${DEFAULT_DEPLOYMENT_APP}" + activate_venv + deploy + exit 0 + ;; + D) + DEPLOYMENT_APP="${OPTARG}" + activate_venv + deploy + exit 0 + ;; + c) + clean + exit 0 + ;; + u) + CLOUD_HARNESS_BRANCH="${CLOUD_HARNESS_DEFAULT}" + activate_venv + update_cloud_harness + deactivate_venv + exit 0 + ;; + U) + CLOUD_HARNESS_BRANCH="${OPTARG}" + activate_venv + update_cloud_harness + deactivate_venv + exit 0 + ;; + h) + usage + exit 0 + ;; + ?) + usage + exit 1 + ;; + esac +done