diff --git a/api/openapi.json b/api/openapi.json index dcb7d219..3a4e8c74 100644 --- a/api/openapi.json +++ b/api/openapi.json @@ -251,7 +251,7 @@ "files" ], "summary": "Get Upload Links", - "description": "Get upload links for uploading a file to storage\n\nArguments:\n request -- Request object\n client_file -- ClientFile object\n user_id -- User Id\n\nReturns:\n FileUploadSchema", + "description": "Get upload links for uploading a file to storage", "operationId": "get_upload_links", "requestBody": { "content": { @@ -292,14 +292,14 @@ ] } }, - "/v0/files/{file_id}:complete": { - "post": { + "/v0/files/{file_id}": { + "get": { "tags": [ "files" ], - "summary": "Complete Multipart Upload", - "description": "Complete multipart upload\n\nArguments:\n request: The Request object\n file_id: The Storage id\n file: The File object which is to be completed\n uploaded_parts: The uploaded parts\n completion_link: The completion link\n user_id: The user id\n\nReturns:\n The completed File object", - "operationId": "complete_multipart_upload", + "summary": "Get File", + "description": "Gets metadata for a given file resource", + "operationId": "get_file", "parameters": [ { "required": true, @@ -312,16 +312,6 @@ "in": "path" } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_complete_multipart_upload_v0_files__file_id__complete_post" - } - } - }, - "required": true - }, "responses": { "200": { "description": "Successful Response", @@ -333,6 +323,16 @@ } } }, + "404": { + "description": "File not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { "description": "Validation Error", "content": { @@ -349,16 +349,13 @@ "HTTPBasic": [] } ] - } - }, - "/v0/files/{file_id}:abort": { - "post": { + }, + "delete": { "tags": [ "files" ], - "summary": "Abort Multipart Upload", - "description": "Abort a multipart upload\n\nArguments:\n request: The 
Request\n file_id: The StorageFileID\n upload_links: The FileUploadSchema\n user_id: The user id", - "operationId": "abort_multipart_upload", + "summary": "Delete File", + "operationId": "delete_file", "parameters": [ { "required": true, @@ -371,22 +368,17 @@ "in": "path" } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Body_abort_multipart_upload_v0_files__file_id__abort_post" - } - } - }, - "required": true - }, "responses": { - "200": { - "description": "Successful Response", + "204": { + "description": "Successful Response" + }, + "404": { + "description": "File not found", "content": { "application/json": { - "schema": {} + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } } } }, @@ -408,14 +400,13 @@ ] } }, - "/v0/files/{file_id}": { - "get": { + "/v0/files/{file_id}:abort": { + "post": { "tags": [ "files" ], - "summary": "Get File", - "description": "Gets metadata for a given file resource", - "operationId": "get_file", + "summary": "Abort Multipart Upload", + "operationId": "abort_multipart_upload", "parameters": [ { "required": true, @@ -428,24 +419,22 @@ "in": "path" } ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/File" - } + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_abort_multipart_upload_v0_files__file_id__abort_post" } } }, - "404": { - "description": "File not found", + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/ErrorGet" - } + "schema": {} } } }, @@ -465,13 +454,15 @@ "HTTPBasic": [] } ] - }, - "delete": { + } + }, + "/v0/files/{file_id}:complete": { + "post": { "tags": [ "files" ], - "summary": "Delete File", - "operationId": "delete_file", + "summary": "Complete Multipart Upload", + 
"operationId": "complete_multipart_upload", "parameters": [ { "required": true, @@ -484,16 +475,23 @@ "in": "path" } ], - "responses": { - "204": { - "description": "Successful Response" + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_complete_multipart_upload_v0_files__file_id__complete_post" + } + } }, - "404": { - "description": "File not found", + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ErrorGet" + "$ref": "#/components/schemas/File" } } } @@ -1145,7 +1143,7 @@ "required": true }, "responses": { - "200": { + "201": { "description": "Successful Response", "content": { "application/json": { @@ -1179,7 +1177,7 @@ "solvers" ], "summary": "Get Jobs Page", - "description": "List of jobs on a specific released solver (includes pagination)\n\n\nBreaking change in *version 0.5*: response model changed from list[Job] to pagination Page[Job].", + "description": "List of jobs on a specific released solver (includes pagination)", "operationId": "get_jobs_page", "parameters": [ { @@ -2051,6 +2049,64 @@ ] } }, + "/v0/studies/{study_id}:clone": { + "post": { + "tags": [ + "studies" + ], + "summary": "Clone Study", + "operationId": "clone_study", + "parameters": [ + { + "required": true, + "schema": { + "title": "Study Id", + "type": "string", + "format": "uuid" + }, + "name": "study_id", + "in": "path" + } + ], + "responses": { + "201": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Study" + } + } + } + }, + "404": { + "description": "Study not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] + } + }, "/v0/studies/{study_id}/ports": { "get": { "tags": [ @@ -2194,7 +2250,7 @@ } ], "responses": { - "200": { + "201": { "description": "Successful Response", "content": { "application/json": { @@ -2788,7 +2844,7 @@ "file_id": { "title": "File Id", "type": "string", - "description": "The file id", + "description": "The file resource id", "format": "uuid" }, "upload_schema": { diff --git a/clients/python/docs/BasicTutorial.ipynb b/clients/python/docs/BasicTutorial.ipynb index b5112f08..9c87e1f1 100755 --- a/clients/python/docs/BasicTutorial.ipynb +++ b/clients/python/docs/BasicTutorial.ipynb @@ -28,10 +28,15 @@ }, "outputs": [], "source": [ - "import importlib\n", - "if importlib.util.find_spec('osparc') is not None:\n", + "import importlib.util\n", + "if importlib.util.find_spec('osparc') is None:\n", " ! pip install osparc\n", - "! python -c \"import osparc; print(osparc.__version__)\"" + "! 
python -c \"import osparc; print(osparc.__version__)\"\n", + "\n", + "import osparc\n", + "# this tutorial is compatible with version >= 0.5.0\n", + "CLIENT_VERSION = tuple(map(int, osparc.__version__.split(\".\")))\n", + "assert tuple(map(int, osparc.__version__.split(\".\"))) >= (0, 5, 0)" ] }, { @@ -140,9 +145,6 @@ "\n", "import osparc\n", "\n", - "CLIENT_VERSION = tuple(map(int, osparc.__version__.split(\".\")))\n", - "assert CLIENT_VERSION >= (0, 4, 3)\n", - "\n", "Path(\"file_with_number.txt\").write_text(\"3\")\n", "\n", "with osparc.ApiClient(cfg) as api_client:\n", @@ -191,17 +193,16 @@ " # 'id': '9fb4f70e-3589-3e9e-991e-3059086c3aae'}\n", " # output_2 = 4.0\n", "\n", - " if CLIENT_VERSION >= (0, 5, 0):\n", - " logfile_path: str = solvers_api.get_job_output_logfile(\n", - " solver.id, solver.version, job.id\n", - " )\n", - " zip_path = Path(logfile_path)\n", + " logfile_path: str = solvers_api.get_job_output_logfile(\n", + " solver.id, solver.version, job.id\n", + " )\n", + " zip_path = Path(logfile_path)\n", "\n", - " with TemporaryDirectory() as tmp_dir:\n", - " with ZipFile(f\"{zip_path}\") as fzip:\n", - " fzip.extractall(tmp_dir)\n", - " logfiles = list(Path(tmp_dir).glob(\"*.log*\"))\n", - " print(\"Unzipped\", logfiles[0], \"contains:\\n\", logfiles[0].read_text())\n", + " with TemporaryDirectory() as tmp_dir:\n", + " with ZipFile(f\"{zip_path}\") as fzip:\n", + " fzip.extractall(tmp_dir)\n", + " logfiles = list(Path(tmp_dir).glob(\"*.log*\"))\n", + " print(\"Unzipped\", logfiles[0], \"contains:\\n\", logfiles[0].read_text())\n", " #\n", " # Unzipped extracted/sleeper_2.0.2.logs contains:\n", " # 2022-06-01T18:15:00.405035847+02:00 Entrypoint for stage production ...\n", diff --git a/clients/python/docs/StudyTutorial.ipynb b/clients/python/docs/StudyTutorial.ipynb new file mode 100755 index 00000000..4c434d8a --- /dev/null +++ b/clients/python/docs/StudyTutorial.ipynb @@ -0,0 +1,181 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": 
"markdown", + "id": "f15de720", + "metadata": {}, + "source": [ + "# Study Tutorial\n", + "\n", + "\n", + "\n", + "## Installation and configuration\n", + "The installation and configuration process is discussed in detail in the `BasicTutorial`. Of course, if we don't already have `osparc` installed we need to do so and we need to setup the `osparc.Configuration`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dbeace4c", + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "print(sys.executable)" + ] + }, + { + "cell_type": "markdown", + "id": "27ed012e", + "metadata": {}, + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "992916f5", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import importlib.util\n", + "if importlib.util.find_spec('osparc') is None:\n", + " ! pip install osparc\n", + "import osparc\n", + "from osparc import Configuration\n", + "cfg: Configuration = Configuration(\n", + " host=os.environ[\"OSPARC_API_HOST\"],\n", + " username=os.environ[\"OSPARC_API_KEY\"],\n", + " password=os.environ[\"OSPARC_API_SECRET\"]\n", + ")\n", + "\n", + "# this tutorial is compatible with version > 0.5.0\n", + "assert tuple(map(int, osparc.__version__.split(\".\"))) > (0, 5, 0)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "6912889e", + "metadata": {}, + "source": [ + "## Study Workflow\n", + "\n", + "Studies can connect computational resources as well as pre- and postprocess data. Studies must be created through the UI (where you generated the `OSPARC_API_KEY` and the `OSPARC_API_SECRET`). Once a study has been created via the UI one can interact with it (modify, run, read and write data to it etc.) using the `osparc` python package. Here we will demonstrate the typical workflow. As an illustration we will use the `Single Sleeper` study. This study is available as a template through the UI. 
Depending on which version of OSPARC you are using, the study id might differ, so make sure to insert the id of your own study in the cell below.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8707f005", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "from typing import Optional\n", + "SINGLE_SLEEPER_STUDY_ID: Optional[str] = \"6dda87ae-26e3-11ee-a43a-02420a0f0717\" # https://osparc-master.speag.com/study/6dda87ae-26e3-11ee-a43a-02420a0f0717" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "398bcd09", + "metadata": {}, + "outputs": [], + "source": [ + "import time\n", + "from pathlib import Path\n", + "from zipfile import ZipFile\n", + "from tempfile import TemporaryDirectory\n", + "\n", + "import osparc\n", + "\n", + "# Get the ID of a study under the study information\n", + "assert SINGLE_SLEEPER_STUDY_ID is not None, \"The SINGLE_SLEEPER_STUDY_ID must be set before running the tutorial\"\n", + "\n", + "with osparc.ApiClient(cfg) as api_client:\n", + "\n", + " studies_api: osparc.StudiesApi = osparc.StudiesApi(api_client)\n", + " study: osparc.Study = studies_api.clone_study(SINGLE_SLEEPER_STUDY_ID)\n", + "\n", + " job: osparc.Job = studies_api.create_study_job(study_id=study.uid, \n", + " job_inputs=osparc.JobInputs({\n", + " \"X\": 1\n", + " }))\n", + "\n", + " status: osparc.JobStatus = studies_api.start_study_job(study.uid, job_id=job.id)\n", + " while not status.stopped_at:\n", + " time.sleep(3)\n", + " status = studies_api.inspect_study_job(study.uid, job.id)\n", + " print(\"Solver progress\", f\"{status.progress}/100\", flush=True)\n", + "\n", + " outputs: osparc.JobOutputs = studies_api.get_study_job_outputs(study_id=study.uid, job_id=job.id)\n", + "\n", + " print(f\"Job {outputs.job_id} got these results:\")\n", + " for output_name, result in outputs.results.items():\n", + " print(output_name, \"=\", result)\n", + "\n", + " logfile_path: str = studies_api.get_study_job_output_logfile(study.uid, job.id)\n", + " zip_path = Path(logfile_path)\n", + "\n", + " with TemporaryDirectory() as 
tmp_dir:\n", + " with ZipFile(f\"{zip_path}\") as fzip:\n", + " fzip.extractall(tmp_dir)\n", + " logfiles = list(Path(tmp_dir).glob(\"*.log*\"))\n", + " print(\"Unzipped\", logfiles[0], \"contains:\\n\", logfiles[0].read_text())\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "c0092d84", + "metadata": {}, + "source": [ + "The script above\n", + "\n", + "1. Gets the sleeper study\n", + "2. Submits a job to the sleeper study (asking it to sleep for `X=1` second)\n", + "3. When the execution completes, it checks the outputs\n", + "4. The logs are downloaded and extracted\n" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "id": "72d60050", + "metadata": {}, + "source": [ + "## References\n", + "\n", + "- [osparc API python client] documentation\n", + "- [osparc API] documentation\n", + "\n", + "[osparc API python client]:https://itisfoundation.github.io/osparc-simcore-python-client\n", + "[osparc API]:https://api.osparc.io/doc" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "", + "language": "python", + "name": "" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/clients/python/test/e2e/data/tutorial_client_compatibility.json b/clients/python/test/e2e/data/tutorial_client_compatibility.json new file mode 100644 index 00000000..228a1db2 --- /dev/null +++ b/clients/python/test/e2e/data/tutorial_client_compatibility.json @@ -0,0 +1,9 @@ +{ + "__comment": "This document describes which tutorial notebooks are compatible with which version of the osparc python package", + "versions": + { + "0.5.0": ["BasicTutorial.ipynb"], + "0.6.0": ["BasicTutorial.ipynb", + "StudyTutorial.ipynb"] + } +} diff --git a/clients/python/test/e2e/test_notebooks.py 
b/clients/python/test/e2e/test_notebooks.py index f976d741..11cb5829 100644 --- a/clients/python/test/e2e/test_notebooks.py +++ b/clients/python/test/e2e/test_notebooks.py @@ -1,7 +1,8 @@ +import json import shutil import sys from pathlib import Path -from typing import Any, List +from typing import Any, Dict, List, Optional, Set import osparc import papermill as pm @@ -10,26 +11,28 @@ docs_dir: Path = Path(__file__).parent.parent.parent / "docs" all_notebooks: List[Path] = list(docs_dir.rglob("*.ipynb")) +DOCS_DIR: Path = Path(__file__).parent.parent.parent / "docs" +DATA_DIR: Path = Path(__file__).parent / "data" +TUTORIAL_CLIENT_COMPATIBILITY_JSON: Path = ( + DATA_DIR / "tutorial_client_compatibility.json" +) +API_DIR: Path = DOCS_DIR.parent.parent.parent / "api" -def test_notebook_config(tmp_path: Path): - """Checks the jupyter environment is configured correctly""" - config_test_nb: Path = Path(__file__).parent / "data" / "config_test.ipynb" - assert config_test_nb.is_file() - test_run_notebooks( - tmp_path, - config_test_nb, - { - "expected_python_bin": sys.executable, - "expected_osparc_version": str(osparc.__version__), - "expected_osparc_file": osparc.__file__, - }, - ) - assert len(all_notebooks) > 0, f"Did not find any notebooks in {docs_dir}" +assert DOCS_DIR.is_dir() +assert DATA_DIR.is_dir() +assert TUTORIAL_CLIENT_COMPATIBILITY_JSON.is_file() +assert API_DIR.is_dir() -@pytest.mark.parametrize("notebook", all_notebooks) -def test_run_notebooks(tmp_path: Path, notebook: Path, params: dict[str, Any] = {}): - """Run all notebooks in the documentation""" +def _run_notebook(tmp_path: Path, notebook: Path, params: dict[str, Any] = {}): + """Run a jupyter notebook using papermill + + Args: + tmp_path (Path): temporary directory + notebook (Path): path to notebook to run + params (dict[str, Any], optional): parameters to pass to notebook. + Defaults to {}. 
+ """ print(f"Running {notebook.name} with parameters {params}") assert ( notebook.is_file() @@ -44,3 +47,99 @@ def test_run_notebooks(tmp_path: Path, notebook: Path, params: dict[str, Any] = kernel_name="python3", parameters=params, ) + + +def _get_tutorials(osparc_version: Optional[str] = None) -> List[Path]: + """Returns the tutorial notebooks compatible with a given osparc client version + + Args: + osparc_version (str): osparc.__version__ + + Returns: + List[Path]: A list of *Path*s to the tutorial notebooks + """ + compatibility_dict: Dict[str, Any] = json.loads( + TUTORIAL_CLIENT_COMPATIBILITY_JSON.read_text() + ) + tutorial_names: Set[str] = set() + if osparc_version is not None: + assert ( + osparc_version in compatibility_dict["versions"] + ), f"{osparc_version} does not exist in {TUTORIAL_CLIENT_COMPATIBILITY_JSON}" + tutorial_names = set(compatibility_dict["versions"][osparc_version]) + else: + for v in compatibility_dict["versions"]: + tutorial_names = tutorial_names.union( + set(compatibility_dict["versions"][v]) + ) + result: List[Path] = [] + for name in tutorial_names: + result += list(DOCS_DIR.rglob(f"*{name}")) + + return result + + +# Tests ------------------------------------------------------------------------------- + + +def test_notebook_config(tmp_path: Path): + """Test configuration of test setup. + Make sanity checks (ensure all files are discovered, + correct installations are on path etc) + + Args: + tmp_path (Path): Temporary path pytest fixture + """ + # sanity check configuration of jupyter environment + config_test_nb: Path = DATA_DIR / "config_test.ipynb" + assert config_test_nb.is_file() + _run_notebook( + tmp_path, + config_test_nb, + { + "expected_python_bin": sys.executable, + "expected_osparc_version": str(osparc.__version__), + "expected_osparc_file": osparc.__file__, + }, + ) + + # sanity check paths and jsons: are we collecting all notebooks? 
 + tutorials: Set[Path] = set(DOCS_DIR.glob("*.ipynb")) + json_notebooks: Set[Path] = set(_get_tutorials()) + assert len(tutorials) > 0, f"Did not find any tutorial notebooks in {DOCS_DIR}" + assert len(tutorials.difference(json_notebooks)) == 0, ( + "Some tutorial notebooks are " + f"not present in {TUTORIAL_CLIENT_COMPATIBILITY_JSON}" + ) + + # check that version of this repo is present in compatibility json + current_version: str = json.loads((API_DIR / "config.json").read_text())["python"][ + "version" + ] + compatible_versions: Set[str] = json.loads( + TUTORIAL_CLIENT_COMPATIBILITY_JSON.read_text() + )["versions"].keys() + assert current_version in compatible_versions, ( + f"The version defined in {API_DIR/'config.json'} " + f"is not present in {TUTORIAL_CLIENT_COMPATIBILITY_JSON}" + ) + + +@pytest.mark.parametrize("tutorial", _get_tutorials(), ids=lambda p: p.name) +def test_run_tutorials(tmp_path: Path, tutorial: Path): + """Run all tutorials compatible with the installed + version of osparc + + Args: + tmp_path (Path): pytest tmp_path fixture + tutorial (Path): tutorial notebook to run + """ + if tutorial not in _get_tutorials(osparc.__version__): + pytest.skip( + ( + f"{tutorial.relative_to(DOCS_DIR)} is not compatible " + f"with osparc.__version__=={osparc.__version__}. " + f"See {TUTORIAL_CLIENT_COMPATIBILITY_JSON.name}" + ) + ) + _run_notebook(tmp_path, tutorial)