diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 392320ae..037bcc0d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -26,7 +26,7 @@ /services/dy-modeling @sanderegg /services/dy-raw-graphs @odeimaiz -/services/dy-tissue-properties @odeimaiz +/services/tissue-properties @odeimaiz /services/dy-csv-table @odeimaiz /services/sleeper @GitHK /services/dy-mapcore-widget @odeimaiz diff --git a/.github/workflows/dy-tissue-properties.yml b/.github/workflows/dy-tissue-properties.yml index 5e2998ac..a3d231aa 100644 --- a/.github/workflows/dy-tissue-properties.yml +++ b/.github/workflows/dy-tissue-properties.yml @@ -1,15 +1,15 @@ -name: dy-tissue-properties +name: tissue-properties on: push: paths: - - ".github/workflows/dy-tissue-properties.yml" - - "services/dy-tissue-properties/**" + - ".github/workflows/dy-tissue-properties.yml" + - "services/tissue-properties/**" - "!**.md" pull_request: paths: - - ".github/workflows/dy-tissue-properties.yml" - - "services/dy-tissue-properties/**" + - ".github/workflows/dy-tissue-properties.yml" + - "services/tissue-properties/**" - "!**.md" env: @@ -20,7 +20,7 @@ jobs: build: - name: building dy-tissue-properties + name: building tissue-properties runs-on: ${{ matrix.os }} strategy: matrix: @@ -52,24 +52,24 @@ jobs: name: set docker registry env run: echo ::set-env name=DOCKER_REGISTRY::itisfoundation - name: set dev environs - working-directory: services/dy-tissue-properties + working-directory: services/tissue-properties run: make .venv - name: get current image if available - working-directory: services/dy-tissue-properties + working-directory: services/tissue-properties run: make pull || true - name: build - working-directory: services/dy-tissue-properties + working-directory: services/tissue-properties run: | source .venv/bin/activate make build - name: test - working-directory: services/dy-tissue-properties + working-directory: services/tissue-properties run: | source .venv/bin/activate make test - if: 
github.event_name == 'push' name: deploy - working-directory: services/dy-tissue-properties + working-directory: services/tissue-properties run: | ./../../ci/helpers/dockerhub_login.bash make push diff --git a/.gitignore b/.gitignore index 88ffe2d1..fcea14c9 100644 --- a/.gitignore +++ b/.gitignore @@ -138,3 +138,6 @@ pytest.ini # pylint-profile output prof/ !ci/**/build + +docker-compose.yml +docker-compose.dev.yml diff --git a/.osparc/3d-viewer-gpu/Dockerfile b/.osparc/3d-viewer-gpu/Dockerfile new file mode 100644 index 00000000..3747c70b --- /dev/null +++ b/.osparc/3d-viewer-gpu/Dockerfile @@ -0,0 +1,101 @@ +# this must be the first line +ARG BASE_IMAGE=kitware/paraviewweb:pv-v5.6.0-egl +# apply patch to visualizer +FROM node:8.12.0 AS visualizer +WORKDIR /home/node +ARG VISUALIZER_VERSION=v3.1.10 +RUN git clone --branch ${VISUALIZER_VERSION} https://github.com/Kitware/visualizer.git +WORKDIR /home/node/visualizer +COPY services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied . 
+COPY services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/rebrand-osparc.patch rebrand-osparc.patch +RUN git apply rebrand-osparc.patch && \ + npm install && \ + npm run build:release && \ + npm link && \ + cp src/*.png dist +# ---------------------------------------------------------------- +# build visualizer service +FROM ${BASE_IMAGE} AS common + +LABEL maintainer="sanderegg" +ENV PARAVIEW_INPUT_PATH="/data/A" + +USER root +# install git, pip +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + python3-pip &&\ + rm -rf /var/lib/apt/lists/* + +# install python 3.6 for root and also for www-data +ENV PYENV_ROOT=/pyenv +ENV NVIDIA_DRIVER_CAPABILITIES=all +# necessary to prevent some dependencies to ask questions while installing +ENV DEBIAN_FRONTEND=noninteractive +RUN curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash && \ + export PATH="${PYENV_ROOT}/bin:$PATH" && \ + eval "$(pyenv init -)" && \ + eval "$(pyenv virtualenv-init -)" && \ + apt-get update && \ + apt-get install -y --no-install-recommends \ + make build-essential libssl-dev zlib1g-dev libbz2-dev \ + libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ + xz-utils tk-dev libffi-dev liblzma-dev && \ + pyenv install 3.6.7 && \ + pyenv global 3.6.7 && \ + pip install --upgrade pip wheel setuptools &&\ + apt-get remove -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ + libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ + xz-utils tk-dev libffi-dev liblzma-dev &&\ + apt-get autoremove -y &&\ + apt-get clean -y &&\ + rm -rf /var/lib/apt/lists/* + +# get the patched visualizer +COPY --from=visualizer /home/node/visualizer/dist /opt/paraview/share/paraview-5.6/web/visualizer/www +WORKDIR /home/root +# ---------------------------------------------------------------- +# set up oSparc env variables +ENV SIMCORE_NODE_UUID="-1" \ + SIMCORE_USER_ID="-1" \ + SIMCORE_NODE_BASEPATH="" 
\ + SIMCORE_NODE_APP_STATE_PATH=${PARAVIEW_INPUT_PATH} \ + SIMCORE_HOST_NAME="localhost" \ + STORAGE_ENDPOINT="-1" \ + S3_ENDPOINT="-1" \ + S3_ACCESS_KEY="-1" \ + S3_SECRET_KEY="-1" \ + S3_BUCKET_NAME="-1" \ + POSTGRES_ENDPOINT="-1" \ + POSTGRES_USER="-1" \ + POSTGRES_PASSWORD="-1" \ + POSTGRES_DB="-1" + +# special paraview variables +ENV SERVER_PORT="80" \ + SIMCORE_STATE_FILE="simcore-state.pvsm" \ + PARAVIEW_DEBUG=0 + +RUN mkdir /home/root/trigger && \ + chmod 777 /home/root/trigger + + +#------------------------------------------ +FROM common AS production +# will start to run after interval every interval. fails after timeout. fail do not count if during start-period. will do # retries +HEALTHCHECK --interval=10s --timeout=30s --start-period=1s --retries=3 CMD [ "python3", "/home/root/docker/healthcheck_curl_host.py", "http://localhost", "/" ] +# copy scripts +COPY services/3d-viewer-gpu/docker/custom /home/root/docker +COPY services/3d-viewer-gpu/src/3d-viewer/utils /home/root/utils +COPY services/3d-viewer-gpu/src/3d-viewer/config /home/root/config +COPY services/3d-viewer-gpu/src/3d-viewer/handlers/*.rpy /opt/paraview/share/paraview-5.6/web/visualizer/www/ + +ENTRYPOINT [ "/bin/bash", "docker/entrypoint.bash" ] +#------------------------------------------ +FROM common as development +ENV CREATE_DUMMY_TABLE 1 +ENV USE_CASE_CONFIG_FILE="devel/port_config.json" +ENV TEST_DATA_PATH="/test-data" +# Need to use entrypoint as base image's entrypoint must be overriden +ENTRYPOINT [ "/bin/bash", "docker/entrypoint.bash" ] + diff --git a/.osparc/3d-viewer-gpu/Makefile b/.osparc/3d-viewer-gpu/Makefile new file mode 100644 index 00000000..bb8a6623 --- /dev/null +++ b/.osparc/3d-viewer-gpu/Makefile @@ -0,0 +1,5 @@ +REPO_BASE_DIR := $(realpath $(dir $(abspath $(lastword $(MAKEFILE_LIST)../../..)))) + +# TARGETS -------------------------------------------------- +include ${REPO_BASE_DIR}/scripts/common.Makefile +.DEFAULT_GOAL := help diff --git 
a/.osparc/3d-viewer-gpu/docker-compose.overwrite.yml b/.osparc/3d-viewer-gpu/docker-compose.overwrite.yml new file mode 100644 index 00000000..48b8afaa --- /dev/null +++ b/.osparc/3d-viewer-gpu/docker-compose.overwrite.yml @@ -0,0 +1,9 @@ +services: + "3d-viewer-gpu": # Add all service names here + build: + context: . + dockerfile: .osparc/3d-viewer-gpu/Dockerfile + target: production # Adjust this to match the dockerfile, might be 'production' or 'prod' or even no target at all + args: + BASE_IMAGE: kitware/paraviewweb:pv-v5.6.0-egl + VISUALIZER_VERSION: v3.1.6 diff --git a/.osparc/3d-viewer-gpu/metadata.yml b/.osparc/3d-viewer-gpu/metadata.yml new file mode 100644 index 00000000..a07dda4a --- /dev/null +++ b/.osparc/3d-viewer-gpu/metadata.yml @@ -0,0 +1,36 @@ +name: 3d-viewer-gpu +thumbnail: +description: "Paraview Web-based Visualizer" +key: simcore/services/dynamic/3d-viewer-gpu +version: 3.0.5 +integration-version: 2.0.0 +version_display: 3.0.5 +type: dynamic +authors: + - name: "Sylvain Anderegg" + email: "anderegg@itis.swiss" + affiliation: "IT'IS Foundation" +contact: anderegg@itis.swiss +inputs: + A: + displayOrder: 1.0 + label: "A" + description: "Any paraview compatible dataset. 1 dataset or N dataset compressed in a zip file." + type: "data:*/*" + B: + displayOrder: 1.0 + label: "B" + description: "Any paraview compatible dataset. 1 dataset or N dataset compressed in a zip file." 
+ type: "data:*/*" + +outputs: {} + +boot-options: + boot_mode: + label: Boot mode + description: Boot mode + default: "0" + items: + "0": + label: default + description: default diff --git a/.osparc/3d-viewer-gpu/metadata.yml.j2 b/.osparc/3d-viewer-gpu/metadata.yml.j2 new file mode 100644 index 00000000..f41410d8 --- /dev/null +++ b/.osparc/3d-viewer-gpu/metadata.yml.j2 @@ -0,0 +1,56 @@ +name: {{ compose.services[service_name].build.labels["io.simcore.name"]["name"]}} +thumbnail: {{""}} +description: "{{ compose.services[service_name].build.labels["io.simcore.description"]["description"]}}" +key: {{ compose.services[service_name].build.labels["io.simcore.key"]["key"]}} +version: {{ desired_version }} +integration-version: 2.0.0 +version_display: {{ desired_version }} +type: dynamic +authors: +{% for author in compose.services[service_name].build.labels["io.simcore.authors"]["authors"] %} + - name: "{{ author.name }}" + email: "{{ author.email }}" + affiliation: "{{ author.affiliation }}" +{% endfor %} +contact: {{ compose.services[service_name].build.labels["io.simcore.authors"]["authors"][0]["email"]}} +{% if compose.services[service_name].build.labels["io.simcore.inputs"]["inputs"] %} +inputs: + {% for name, input_def in compose.services[service_name].build.labels["io.simcore.inputs"]["inputs"].items() %} + {{ name }}: + displayOrder: {{ input_def.displayOrder | float }} + label: "{{ input_def.label }}" + description: "{{ input_def.description }}" + type: "{{ input_def.type }}" + {% endfor %} +{% else %} +inputs: {} +{% endif %} + +{% if compose.services[service_name].build.labels["io.simcore.outputs"]["outputs"] %} +outputs: + {% for name, details in compose.services[service_name].build.labels["io.simcore.outputs"]["outputs"].items() %} + {{ name }}: + displayOrder: {{ details.displayOrder | float }} + label: "{{ details.label }}" + description: "{{ details.description }}" + type: "{{ details.type }}" + {% if details.fileToKeyMap %} + fileToKeyMap: + {% for 
file, key in details.fileToKeyMap.items() %} + {{ file }}: {{ key }} + {% endfor %} + {% endif %} + {% endfor %} +{% else %} +outputs: {} +{% endif %} + +boot-options: + boot_mode: + label: Boot mode + description: Boot mode + default: "0" + items: + "0": + label: default + description: default diff --git a/.osparc/3d-viewer-gpu/runtime.yml b/.osparc/3d-viewer-gpu/runtime.yml new file mode 100644 index 00000000..d8c939f9 --- /dev/null +++ b/.osparc/3d-viewer-gpu/runtime.yml @@ -0,0 +1,25 @@ +settings: + - name: "ports" + type: "int" + value: + 80 + - name: "constraints" + type: "string" + value: + - node.platform.os == linux + - name: "Resources" + type: "Resources" + value: + Limits: {'NanoCPUs': 4000000000, 'MemoryBytes': 17179869184} + Reservations: {'NanoCPUs': 100000000, 'MemoryBytes': 536870912, 'GenericResources': [{'DiscreteResourceSpec': {'Kind': 'VRAM', 'Value': 1}}]} +paths-mapping: # Watch this carefully and potentially adjust + outputs_path: "/home/scu/output" + inputs_path: "/data" + state_paths: [] +compose-spec: + version: "3.7" + services: + 3d-viewer-gpu: + image: $${SIMCORE_REGISTRY}/simcore/services/dynamic/3d-viewer-gpu:$${SERVICE_VERSION} + environment: +container-http-entrypoint: 3d-viewer-gpu diff --git a/.osparc/3d-viewer-gpu/runtime.yml.j2 b/.osparc/3d-viewer-gpu/runtime.yml.j2 new file mode 100644 index 00000000..d293829c --- /dev/null +++ b/.osparc/3d-viewer-gpu/runtime.yml.j2 @@ -0,0 +1,35 @@ +{% if compose.services[service_name].build.labels["simcore.service.settings"] %} +settings: +{% for setting in compose.services[service_name].build.labels["simcore.service.settings"] %} + - name: "{{ setting.name }}" + type: "{{ setting.type }}" + value: + {% if setting.value is mapping %} + {% for k, v in setting.value.items() %} + {{ k }}: {{ v }} + {% endfor %} + {% elif setting.value is iterable and not setting.value is string %} + {% for v in setting.value %} + - {{ v }} + {% endfor %} + {% else %} + {{ setting.value }} + {% endif %} +{% 
endfor %} +{% else %} +settings: [] +{% endif %} +paths-mapping: # Watch this carefully and potentially adjust + outputs_path: "/home/scu/output" + inputs_path: "/data" + state_paths: [] +compose-spec: + version: "3.7" + services: + {{ service_name }}: + image: $${SIMCORE_REGISTRY}/simcore/services/dynamic/{{ service_name }}:$${SERVICE_VERSION} + environment: + {% for key, value in compose.services[service_name].environment.items() %} + {{ key }}: "{{ value | replace('$${', '{{ ') | replace('}', '}') }}" + {% endfor %} +container-http-entrypoint: {{ service_name }} diff --git a/services/dy-jupyter/Dockerfile b/.osparc/jupyter-base-notebook/Dockerfile similarity index 52% rename from services/dy-jupyter/Dockerfile rename to .osparc/jupyter-base-notebook/Dockerfile index 30496ad1..09c5f57a 100644 --- a/services/dy-jupyter/Dockerfile +++ b/.osparc/jupyter-base-notebook/Dockerfile @@ -1,21 +1,13 @@ -ARG BASE_IMAGE=jupyter/base-notebook -FROM ${BASE_IMAGE} AS base +ARG BASE_IMAGE=jupyter/base-notebook:notebook-6.2.0 +FROM ${BASE_IMAGE} AS base LABEL maintainer="sanderegg" EXPOSE 8888 -# ---------------------------------------------------------------- -# Add a python2 kernel in the notebook -# Create a Python 2.x environment using conda including at least the ipython kernel -# and the kernda utility. Add any additional packages you want available for use -# in a Python 2 notebook to the first line here (e.g., pandas, matplotlib, etc.) -ARG INSTALL_PYTHON2 -RUN if [ "x${INSTALL_PYTHON2}" = "1" ] ; then conda create --quiet --yes -p $CONDA_DIR/envs/python2 python=2.7 ipython ipykernel kernda && \ - conda clean -tipsy ; fi + USER root -# Create a global kernelspec in the image and modify it so that it properly activates -# the python2 conda environment. 
-RUN if [ "x${INSTALL_PYTHON2}" = "1" ] ; then $CONDA_DIR/envs/python2/bin/python -m ipykernel install && \ - $CONDA_DIR/envs/python2/bin/kernda -o -y /usr/local/share/jupyter/kernels/python2/kernel.json ; fi +ENV NB_UID=8004 +ENV NB_GID=100 +RUN usermod -u $NB_UID $NB_USER RUN apt-get update &&\ apt-get install -y \ git \ @@ -39,7 +31,7 @@ ENV SIMCORE_NODE_UUID="-1" \ # specific notebook env ENV NOTEBOOK_TOKEN="simcore" # ---------------------------------------------------------------- -COPY --chown=jovyan:users scripts/docker/healthcheck_curl_host.py /healthcheck/healthcheck_curl_host.py +COPY --chown=1000:100 scripts/docker/healthcheck_curl_host.py /healthcheck/healthcheck_curl_host.py # will start to run after interval every interval. fails after timeout. fail do not count if during start-period. will do # retries HEALTHCHECK --interval=10s --timeout=30s --start-period=1s --retries=3 CMD [ "python3", "/healthcheck/healthcheck_curl_host.py", "http://localhost:8888" ] # configure notebook @@ -53,16 +45,13 @@ RUN pip install --upgrade pip && \ # ---------------------------------------------------------------- ENV PYTHONPATH=/home/jovyan/handlers # ---------------------------------------------------------------- -# install simcore-sdk -RUN pip install git+https://github.com/ITISFoundation/osparc-simcore.git@a208dce53a2f779195a878fda1cd59ec8532bf8e#subdirectory=packages/service-library &&\ - pip install git+https://github.com/ITISFoundation/osparc-simcore.git@a208dce53a2f779195a878fda1cd59ec8532bf8e#subdirectory=packages/models-library &&\ - pip install git+https://github.com/ITISFoundation/osparc-simcore.git@a208dce53a2f779195a878fda1cd59ec8532bf8e#subdirectory=packages/simcore-sdk &&\ - pip install git+https://github.com/ITISFoundation/osparc-simcore.git@a208dce53a2f779195a878fda1cd59ec8532bf8e#subdirectory=services/storage/client-sdk/python &&\ - pip install 
git+https://github.com/ITISFoundation/osparc-simcore.git@a208dce53a2f779195a878fda1cd59ec8532bf8e#subdirectory=packages/postgres-database &&\ - pip install blackfynn -ENV NOTEBOOK_URL="notebooks/notebook.ipynb" +RUN pip install --no-cache-dir markupsafe==2.0.1 + +ENV INPUTS_FOLDER=/home/jovyan/inputs +ENV OUTPUTS_FOLDER=/home/jovyan/outputs + # ---------------------------------------------------------------- FROM base AS development VOLUME /home/jovyan/packages @@ -72,7 +61,7 @@ VOLUME /home/root/scripts/dy_services_helpers VOLUME /home/jovyan/docker VOLUME /home/jovyan/handlers # increase development speed by pre-installing libraries -COPY --chown=jovyan:users services/dy-jupyter/devel/requirements.txt /home/jovyan/devel/requirements.txt +COPY --chown=1000:100 services/jupyter-base-notebook/devel/requirements.txt /home/jovyan/devel/requirements.txt RUN pip install -r /home/jovyan/devel/requirements.txt ENV CREATE_DUMMY_TABLE=1 ENV USE_CASE_CONFIG_FILE="devel/port_config.json" @@ -83,12 +72,17 @@ CMD [ "/bin/bash", "/docker/boot.sh" ] # ---------------------------------------------------------------- FROM base AS production # prepare for booting -COPY --chown=jovyan:users services/dy-jupyter/docker /docker +COPY --chown=1000:100 services/jupyter-base-notebook/docker /docker # install simcore packages. 
docker seems to set root:root permission to the implicitely created destination folders -COPY --chown=jovyan:users services/dy-jupyter/handlers/* /home/jovyan/handlers/ +COPY --chown=1000:100 services/jupyter-base-notebook/handlers/* /home/jovyan/handlers/ # copy the default notebook -COPY --chown=jovyan:users services/dy-jupyter/notebooks /home/jovyan/notebooks +COPY --chown=1000:100 services/jupyter-base-notebook/notebooks /home/jovyan/notebooks +USER root +RUN mkdir -p /home/jovyan/inputs && mkdir -p /home/jovyan/outputs +RUN chmod -R 777 /home/jovyan/notebooks && chmod -R 777 /home/jovyan/inputs && chmod -R 777 /home/jovyan/outputs +USER $NB_USER # cleanup RUN rm -rf /home/jovyan/packages && \ rm -rf /home/jovyan/services + ENTRYPOINT [ "/bin/bash", "/docker/boot.sh" ] diff --git a/.osparc/jupyter-base-notebook/Makefile b/.osparc/jupyter-base-notebook/Makefile new file mode 100644 index 00000000..bb8a6623 --- /dev/null +++ b/.osparc/jupyter-base-notebook/Makefile @@ -0,0 +1,5 @@ +REPO_BASE_DIR := $(realpath $(dir $(abspath $(lastword $(MAKEFILE_LIST)../../..)))) + +# TARGETS -------------------------------------------------- +include ${REPO_BASE_DIR}/scripts/common.Makefile +.DEFAULT_GOAL := help diff --git a/.osparc/jupyter-base-notebook/docker-compose.overwrite.yml b/.osparc/jupyter-base-notebook/docker-compose.overwrite.yml new file mode 100644 index 00000000..a3d4c6b0 --- /dev/null +++ b/.osparc/jupyter-base-notebook/docker-compose.overwrite.yml @@ -0,0 +1,6 @@ +services: + "jupyter-base-notebook": # Add all service names here + build: + context: . 
+ dockerfile: .osparc/jupyter-base-notebook/Dockerfile + target: production # Adjust this to match the dockerfile, might be 'production' or 'prod' or even no target at all diff --git a/.osparc/jupyter-base-notebook/metadata.yml b/.osparc/jupyter-base-notebook/metadata.yml new file mode 100644 index 00000000..8abfbd2f --- /dev/null +++ b/.osparc/jupyter-base-notebook/metadata.yml @@ -0,0 +1,46 @@ +name: jupyter-base-notebook +thumbnail: +description: "Jupyter notebook" +key: simcore/services/dynamic/jupyter-base-notebook +version: 2.14.2 +integration-version: 2.0.0 +version_display: 2.14.2 +type: dynamic +authors: + - name: "sanderegg" + email: "anderegg@itis.swiss" + affiliation: "ITIS Foundation" +contact: anderegg@itis.swiss +inputs: + input_1: + displayOrder: 0.0 + label: "input 1" + description: "Input 1" + type: "data:*/*" + input_2: + displayOrder: 1.0 + label: "input 2" + description: "Input 2" + type: "data:*/*" + +outputs: + output_1: + displayOrder: 0.0 + label: "output 1" + description: "Output 1" + type: "data:*/*" + output_2: + displayOrder: 1.0 + label: "output 2" + description: "Output 2" + type: "data:*/*" + +boot-options: + boot_mode: + label: Boot mode + description: Boot mode + default: "0" + items: + "0": + label: default + description: default diff --git a/.osparc/jupyter-base-notebook/metadata.yml.j2 b/.osparc/jupyter-base-notebook/metadata.yml.j2 new file mode 100644 index 00000000..f41410d8 --- /dev/null +++ b/.osparc/jupyter-base-notebook/metadata.yml.j2 @@ -0,0 +1,56 @@ +name: {{ compose.services[service_name].build.labels["io.simcore.name"]["name"]}} +thumbnail: {{""}} +description: "{{ compose.services[service_name].build.labels["io.simcore.description"]["description"]}}" +key: {{ compose.services[service_name].build.labels["io.simcore.key"]["key"]}} +version: {{ desired_version }} +integration-version: 2.0.0 +version_display: {{ desired_version }} +type: dynamic +authors: +{% for author in 
compose.services[service_name].build.labels["io.simcore.authors"]["authors"] %} + - name: "{{ author.name }}" + email: "{{ author.email }}" + affiliation: "{{ author.affiliation }}" +{% endfor %} +contact: {{ compose.services[service_name].build.labels["io.simcore.authors"]["authors"][0]["email"]}} +{% if compose.services[service_name].build.labels["io.simcore.inputs"]["inputs"] %} +inputs: + {% for name, input_def in compose.services[service_name].build.labels["io.simcore.inputs"]["inputs"].items() %} + {{ name }}: + displayOrder: {{ input_def.displayOrder | float }} + label: "{{ input_def.label }}" + description: "{{ input_def.description }}" + type: "{{ input_def.type }}" + {% endfor %} +{% else %} +inputs: {} +{% endif %} + +{% if compose.services[service_name].build.labels["io.simcore.outputs"]["outputs"] %} +outputs: + {% for name, details in compose.services[service_name].build.labels["io.simcore.outputs"]["outputs"].items() %} + {{ name }}: + displayOrder: {{ details.displayOrder | float }} + label: "{{ details.label }}" + description: "{{ details.description }}" + type: "{{ details.type }}" + {% if details.fileToKeyMap %} + fileToKeyMap: + {% for file, key in details.fileToKeyMap.items() %} + {{ file }}: {{ key }} + {% endfor %} + {% endif %} + {% endfor %} +{% else %} +outputs: {} +{% endif %} + +boot-options: + boot_mode: + label: Boot mode + description: Boot mode + default: "0" + items: + "0": + label: default + description: default diff --git a/.osparc/jupyter-base-notebook/runtime.yml b/.osparc/jupyter-base-notebook/runtime.yml new file mode 100644 index 00000000..f8db3bc7 --- /dev/null +++ b/.osparc/jupyter-base-notebook/runtime.yml @@ -0,0 +1,18 @@ +settings: + - name: "ports" + type: "int" + value: + 8888 + - name: "constraints" + type: "string" + value: + - node.platform.os == linux + - name: "resources" + type: "resources" + value: + Limits: {'NanoCPUs': 4000000000, 'MemoryBytes': 8589934592} +paths-mapping: # Watch this carefully and 
potentially adjust + outputs_path: "/home/jovyan/outputs" + inputs_path: "/home/jovyan/inputs" + state_paths: ["/home/jovyan/notebooks"] + diff --git a/.osparc/jupyter-base-notebook/runtime.yml.j2 b/.osparc/jupyter-base-notebook/runtime.yml.j2 new file mode 100644 index 00000000..d0bf6630 --- /dev/null +++ b/.osparc/jupyter-base-notebook/runtime.yml.j2 @@ -0,0 +1,37 @@ +{% if compose.services[service_name].build.labels["simcore.service.settings"] %} +settings: +{% for setting in compose.services[service_name].build.labels["simcore.service.settings"] %} + - name: "{{ setting.name }}" + type: "{{ setting.type }}" + value: + {% if setting.value is mapping %} + {% for k, v in setting.value.items() %} + {{ k }}: {{ v }} + {% endfor %} + {% elif setting.value is iterable and not setting.value is string %} + {% for v in setting.value %} + - {{ v }} + {% endfor %} + {% else %} + {{ setting.value }} + {% endif %} +{% endfor %} +{% else %} +settings: [] +{% endif %} +paths-mapping: # Watch this carefully and potentially adjust + outputs_path: "/home/jovyan/outputs" + inputs_path: "/home/jovyan/inputs" + state_paths: ["/home/jovyan/notebooks"] +{% if compose.services[service_name].environment.items() %} +compose-spec: + version: "3.7" + services: + {{ service_name }}: + image: $${SIMCORE_REGISTRY}/simcore/services/dynamic/{{ service_name }}:$${SERVICE_VERSION} + environment: + {% for key, value in compose.services[service_name].environment.items() %} + {{ key }}: "{{ value | replace('$${', '{{ ') | replace('}', '}') }}" + {% endfor %} +container-http-entrypoint: {{ service_name }} +{% endif %} diff --git a/.osparc/raw-graphs/Dockerfile b/.osparc/raw-graphs/Dockerfile new file mode 100644 index 00000000..848fe9bb --- /dev/null +++ b/.osparc/raw-graphs/Dockerfile @@ -0,0 +1,109 @@ +FROM node:8.15.0-alpine as base + +LABEL maintainer=odeimaiz + +# non-root user 'scu' +RUN adduser -D -u 8004 scu + +RUN apk add --no-cache \ + su-exec + +ENV HOME /home/scu + +EXPOSE 4000 + +WORKDIR 
$HOME + +RUN apk update &&\ + apk add --no-cache \ + python3 \ + bash + + +RUN pip3 install --upgrade pip wheel setuptools + +# ---------------------------------------------------------------- +# install simcore-sdk +RUN apk add --no-cache --virtual .build-deps \ + g++ \ + git \ + python3-dev \ + postgresql-dev && \ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@ba8279cc7927929c7aa96232bc3ce1fd6ea6be55#subdirectory=packages/service-library &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@ba8279cc7927929c7aa96232bc3ce1fd6ea6be55#subdirectory=packages/simcore-sdk &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@ba8279cc7927929c7aa96232bc3ce1fd6ea6be55#subdirectory=services/storage/client-sdk/python &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@ba8279cc7927929c7aa96232bc3ce1fd6ea6be55#subdirectory=packages/postgres-database &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@ba8279cc7927929c7aa96232bc3ce1fd6ea6be55#subdirectory=packages/models-library &&\ + apk del --no-cache .build-deps + +RUN npm install express@4.16.4 + +# ---------------------------------------------------------------- +# set up oSparc env variables +ENV SIMCORE_NODE_UUID="-1" \ + SIMCORE_USER_ID="-1" \ + SIMCORE_NODE_BASEPATH="/raw" \ + STORAGE_ENDPOINT="-1" \ + RAWGRAPHS_INPUT_PATH="../inputs" \ + RAWGRAPHS_OUTPUT_PATH="../outputs" \ + S3_ENDPOINT="-1" \ + S3_ACCESS_KEY="-1" \ + S3_SECRET_KEY="-1" \ + S3_BUCKET_NAME="-1" \ + POSTGRES_ENDPOINT="-1" \ + POSTGRES_USER="-1" \ + POSTGRES_PASSWORD="-1" \ + POSTGRES_DB="-1" + +#-----------------Production---------------------- +FROM base AS production + +# https://github.com/npm/uid-number/issues/3 +RUN npm config set unsafe-perm true + + +RUN apk add --no-cache --virtual .build-deps \ + git &&\ + git clone https://github.com/ITISFoundation/raw.git 
--branch master-osparc &&\ + cd raw &&\ + npm install -g bower &&\ + bower install --force --allow-root &&\ + apk del --no-cache .build-deps + +RUN apk add --no-cache \ + postgresql-libs + +# --------------- Healthcheck ------------------- +COPY --chown=scu:scu scripts/docker/healthcheck.py $HOME/healthcheck/healthcheck.py +# will start to run after interval every interval. fails after timeout. fail do not count if during start-period. will do # retries +HEALTHCHECK --interval=10s --timeout=30s --start-period=1s --retries=3 CMD [ "python3", "/home/scu/healthcheck/healthcheck.py", "http://localhost:4000" ] + +WORKDIR $HOME +COPY --chown=scu:scu services/raw-graphs/docker $HOME/docker +COPY --chown=scu:scu services/raw-graphs/server $HOME/server +ENV TEST_ENV="test" +WORKDIR $HOME/raw +CMD ["/bin/bash", "../docker/boot.sh"] + +#-----------------Development----------------------- +FROM base as development + +VOLUME $HOME/scripts +VOLUME $HOME/services/storage/client-sdk + +VOLUME $HOME/raw +VOLUME $HOME/docker +VOLUME $HOME/inputs +VOLUME $HOME/server +VOLUME $HOME/devel + +COPY --chown=scu /scripts/dy_services_helpers/requirements.txt $HOME/scripts/dy_services_helpers/requirements.txt +RUN pip3 install -r $HOME/scripts/dy_services_helpers/requirements.txt + +ENV CREATE_DUMMY_TABLE 1 + +ENV USE_CASE_CONFIG_FILE="/home/scu/devel/port_config.json" +ENV INIT_OPTIONS="50 6 1 tab" + +WORKDIR $HOME/raw +ENTRYPOINT ["/bin/bash", "../docker/boot.sh"] diff --git a/.osparc/raw-graphs/Makefile b/.osparc/raw-graphs/Makefile new file mode 100644 index 00000000..bb8a6623 --- /dev/null +++ b/.osparc/raw-graphs/Makefile @@ -0,0 +1,5 @@ +REPO_BASE_DIR := $(realpath $(dir $(abspath $(lastword $(MAKEFILE_LIST)../../..)))) + +# TARGETS -------------------------------------------------- +include ${REPO_BASE_DIR}/scripts/common.Makefile +.DEFAULT_GOAL := help diff --git a/.osparc/raw-graphs/docker-compose.overwrite.yml b/.osparc/raw-graphs/docker-compose.overwrite.yml new file mode 100644 
index 00000000..8c4092ac --- /dev/null +++ b/.osparc/raw-graphs/docker-compose.overwrite.yml @@ -0,0 +1,6 @@ +services: + "raw-graphs": # Add all service names here + build: + context: . + dockerfile: .osparc/raw-graphs/Dockerfile + target: production # Adjust this to match the dockerfile, might be 'production' or 'prod' or even no target at all diff --git a/.osparc/raw-graphs/metadata.yml b/.osparc/raw-graphs/metadata.yml new file mode 100644 index 00000000..0c231f67 --- /dev/null +++ b/.osparc/raw-graphs/metadata.yml @@ -0,0 +1,36 @@ +name: 2D plot +thumbnail: +description: "2D plots powered by RAW Graphs" +key: simcore/services/dynamic/raw-graphs +version: 2.11.3 +integration-version: 2.0.0 +version_display: 2.11.3 +type: dynamic +authors: + - name: "odeimaiz" + email: "maiz@itis.swiss" + affiliation: "ITIS Foundation" +contact: maiz@itis.swiss +inputs: + input_1: + displayOrder: 0.0 + label: "input 1" + description: "Input 1" + type: "data:*/*" + +outputs: + output_1: + displayOrder: 0.0 + label: "Output Graph" + description: "Output Graph" + type: "data:image/svg+xml" + +boot-options: + boot_mode: + label: Boot mode + description: Boot mode + default: "0" + items: + "0": + label: default + description: default diff --git a/.osparc/raw-graphs/metadata.yml.j2 b/.osparc/raw-graphs/metadata.yml.j2 new file mode 100644 index 00000000..f41410d8 --- /dev/null +++ b/.osparc/raw-graphs/metadata.yml.j2 @@ -0,0 +1,56 @@ +name: {{ compose.services[service_name].build.labels["io.simcore.name"]["name"]}} +thumbnail: {{""}} +description: "{{ compose.services[service_name].build.labels["io.simcore.description"]["description"]}}" +key: {{ compose.services[service_name].build.labels["io.simcore.key"]["key"]}} +version: {{ desired_version }} +integration-version: 2.0.0 +version_display: {{ desired_version }} +type: dynamic +authors: +{% for author in compose.services[service_name].build.labels["io.simcore.authors"]["authors"] %} + - name: "{{ author.name }}" + email: "{{ 
author.email }}" + affiliation: "{{ author.affiliation }}" +{% endfor %} +contact: {{ compose.services[service_name].build.labels["io.simcore.authors"]["authors"][0]["email"]}} +{% if compose.services[service_name].build.labels["io.simcore.inputs"]["inputs"] %} +inputs: + {% for name, input_def in compose.services[service_name].build.labels["io.simcore.inputs"]["inputs"].items() %} + {{ name }}: + displayOrder: {{ input_def.displayOrder | float }} + label: "{{ input_def.label }}" + description: "{{ input_def.description }}" + type: "{{ input_def.type }}" + {% endfor %} +{% else %} +inputs: {} +{% endif %} + +{% if compose.services[service_name].build.labels["io.simcore.outputs"]["outputs"] %} +outputs: + {% for name, details in compose.services[service_name].build.labels["io.simcore.outputs"]["outputs"].items() %} + {{ name }}: + displayOrder: {{ details.displayOrder | float }} + label: "{{ details.label }}" + description: "{{ details.description }}" + type: "{{ details.type }}" + {% if details.fileToKeyMap %} + fileToKeyMap: + {% for file, key in details.fileToKeyMap.items() %} + {{ file }}: {{ key }} + {% endfor %} + {% endif %} + {% endfor %} +{% else %} +outputs: {} +{% endif %} + +boot-options: + boot_mode: + label: Boot mode + description: Boot mode + default: "0" + items: + "0": + label: default + description: default diff --git a/.osparc/raw-graphs/runtime.yml b/.osparc/raw-graphs/runtime.yml new file mode 100644 index 00000000..fae0eaf6 --- /dev/null +++ b/.osparc/raw-graphs/runtime.yml @@ -0,0 +1,27 @@ +settings: + - name: "resources" + type: "Resources" + value: + mem_limit: 17179869184 + cpu_limit: 4000000000 + - name: "ports" + type: "int" + value: + 4000 + - name: "constraints" + type: "string" + value: + - node.platform.os == linux +paths-mapping: # Watch this carefully and potentially adjust + outputs_path: "/home/scu/output" + inputs_path: "/home/scu/input" + state_paths: [] +compose-spec: + version: "3.7" + services: + raw-graphs: + image: 
$${SIMCORE_REGISTRY}/simcore/services/dynamic/raw-graphs:$${SERVICE_VERSION} + environment: + SIMCORE_NODE_BASEPATH: "${SIMCORE_NODE_BASEPATH}" + RAWGRAPHS_INPUT_PATH: "${RAWGRAPHS_INPUT_PATH}" +container-http-entrypoint: raw-graphs diff --git a/.osparc/raw-graphs/runtime.yml.j2 b/.osparc/raw-graphs/runtime.yml.j2 new file mode 100644 index 00000000..a3eb3105 --- /dev/null +++ b/.osparc/raw-graphs/runtime.yml.j2 @@ -0,0 +1,35 @@ +{% if compose.services[service_name].build.labels["simcore.service.settings"] %} +settings: +{% for setting in compose.services[service_name].build.labels["simcore.service.settings"] %} + - name: "{{ setting.name }}" + type: "{{ setting.type }}" + value: + {% if setting.value is mapping %} + {% for k, v in setting.value.items() %} + {{ k }}: {{ v }} + {% endfor %} + {% elif setting.value is iterable and not setting.value is string %} + {% for v in setting.value %} + - {{ v }} + {% endfor %} + {% else %} + {{ setting.value }} + {% endif %} +{% endfor %} +{% else %} +settings: [] +{% endif %} +paths-mapping: # Watch this carefully and potentially adjust + outputs_path: "/home/scu/output" + inputs_path: "/home/scu/input" + state_paths: [] +compose-spec: + version: "3.7" + services: + {{ service_name }}: + image: $${SIMCORE_REGISTRY}/simcore/services/dynamic/{{ service_name }}:$${SERVICE_VERSION} + environment: + {% for key, value in compose.services[service_name].environment.items() %} + {{ key }}: "{{ value | replace('$${', '{{ ') | replace('}', '}') }}" + {% endfor %} +container-http-entrypoint: {{ service_name }} diff --git a/.osparc/tissue-properties/Dockerfile b/.osparc/tissue-properties/Dockerfile new file mode 100644 index 00000000..28d1ce5c --- /dev/null +++ b/.osparc/tissue-properties/Dockerfile @@ -0,0 +1,98 @@ +FROM node:8.15.0-alpine as base + +LABEL maintainer=odeimaiz + +# non-root user 'scu' +RUN adduser -D -u 8004 scu + +RUN apk add --no-cache \ + su-exec + +ENV HOME /home/scu + +EXPOSE 4000 + +WORKDIR $HOME + +RUN apk update 
&&\ + apk add --no-cache \ + python3 \ + bash + + +RUN pip3 install --upgrade pip wheel setuptools + +# ---------------------------------------------------------------- +# install simcore-sdk +RUN apk add --no-cache --virtual .build-deps \ + g++ \ + git \ + python3-dev \ + postgresql-dev && \ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@4159b895fc1e3743777aadb79f1209735582cfab#subdirectory=packages/service-library &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@4159b895fc1e3743777aadb79f1209735582cfab#subdirectory=packages/simcore-sdk &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@4159b895fc1e3743777aadb79f1209735582cfab#subdirectory=services/storage/client-sdk/python &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@4159b895fc1e3743777aadb79f1209735582cfab#subdirectory=packages/postgres-database &&\ + apk del --no-cache .build-deps + + +RUN npm install express@4.16.4 + +# ---------------------------------------------------------------- +# set up oSparc env variables +ENV SIMCORE_NODE_UUID="-1" \ + SIMCORE_USER_ID="-1" \ + SIMCORE_NODE_BASEPATH="/tissue-properties" \ + TISSUEPROPS_OUTPUT_PATH="../outputs" \ + STORAGE_ENDPOINT="=1" \ + S3_ENDPOINT="=1" \ + S3_ACCESS_KEY="-1" \ + S3_SECRET_KEY="-1" \ + S3_BUCKET_NAME="-1" \ + POSTGRES_ENDPOINT="-1" \ + POSTGRES_USER="-1" \ + POSTGRES_PASSWORD="-1" \ + POSTGRES_DB="-1" + +#-----------------Production---------------------- +FROM base AS production + +COPY --chown=scu:scu services/tissue-properties/csv-to-html-table $HOME/csv-to-html-table +COPY --chown=scu:scu services/tissue-properties/inputs $HOME/inputs +COPY --chown=scu:scu services/tissue-properties/inputs/TissueProperties.csv $HOME/csv-to-html-table/data/TissueProperties.csv + +RUN apk add --no-cache \ + postgresql-libs + +# --------------- Healthcheck ------------------- +COPY --chown=scu:scu 
scripts/docker/healthcheck.py $HOME/healthcheck/healthcheck.py +# will start to run after interval every interval. fails after timeout. fail do not count if during start-period. will do # retries +HEALTHCHECK --interval=10s --timeout=30s --start-period=1s --retries=3 CMD [ "python3", "/home/scu/healthcheck/healthcheck.py", "http://localhost:4000" ] + +WORKDIR $HOME +COPY --chown=scu:scu services/tissue-properties/docker $HOME/docker +COPY --chown=scu:scu services/tissue-properties/server $HOME/server + +WORKDIR $HOME/csv-to-html-table +CMD ["/bin/bash", "../docker/boot.sh"] + +#-----------------Development----------------------- +FROM base as development + +VOLUME $HOME/scripts +VOLUME $HOME/services/storage/client-sdk + +VOLUME $HOME/csv-to-html-table +VOLUME $HOME/docker +VOLUME $HOME/server +VOLUME $HOME/devel + +COPY --chown=scu /scripts/dy_services_helpers/requirements.txt $HOME/scripts/dy_services_helpers/requirements.txt +RUN pip3 install -r $HOME/scripts/dy_services_helpers/requirements.txt + +ENV CREATE_DUMMY_TABLE 1 +ENV USE_CASE_CONFIG_FILE="/home/scu/devel/port_config.json" +ENV INIT_OPTIONS="50 6 1 tab" + +WORKDIR $HOME/csv-to-html-table +ENTRYPOINT ["/bin/bash", "../docker/boot.sh"] diff --git a/.osparc/tissue-properties/Makefile b/.osparc/tissue-properties/Makefile new file mode 100644 index 00000000..bb8a6623 --- /dev/null +++ b/.osparc/tissue-properties/Makefile @@ -0,0 +1,5 @@ +REPO_BASE_DIR := $(realpath $(dir $(abspath $(lastword $(MAKEFILE_LIST)../../..)))) + +# TARGETS -------------------------------------------------- +include ${REPO_BASE_DIR}/scripts/common.Makefile +.DEFAULT_GOAL := help diff --git a/.osparc/tissue-properties/docker-compose.overwrite.yml b/.osparc/tissue-properties/docker-compose.overwrite.yml new file mode 100644 index 00000000..a962ad59 --- /dev/null +++ b/.osparc/tissue-properties/docker-compose.overwrite.yml @@ -0,0 +1,6 @@ +services: + "tissue-properties": + build: + context: . 
+ dockerfile: .osparc/tissue-properties/Dockerfile + target: production # Adjust this to match the dockerfile, might be 'production' or 'prod' or even no target at all diff --git a/.osparc/tissue-properties/metadata.yml b/.osparc/tissue-properties/metadata.yml new file mode 100644 index 00000000..3e5700bb --- /dev/null +++ b/.osparc/tissue-properties/metadata.yml @@ -0,0 +1,33 @@ +name: Tissue properties +thumbnail: +description: "Tissue properties compiled in an extensive, critical literature review by the ITIS Foundation. Visit [itis.swiss/database](https://itis.swiss/database) for additional information, e.g., on tissue parameter variability/uncertainty, quality assurance, and the explored sources. Please use the following citation when referring to the database: Hasgall PA, Di Gennaro F, Baumgartner C, Neufeld E, Lloyd B, Gosselin MC, Payne D, Klingenböck A, Kuster N, ITIS Database for thermal and electromagnetic parameters of biological tissues, Version 4.0, May 15, 2018, DOI: 10.13099/VIP21000-04-0. [itis.swiss/database](https://itis.swiss/database). 
Powered by [csv-to-html-table](https://github.com/derekeder/csv-to-html-table)" +key: simcore/services/dynamic/tissue-properties +version: 1.0.4 +integration-version: 2.0.0 +version_display: 1.0.4 +type: dynamic +authors: + - name: "odeimaiz" + email: "maiz@itis.swiss" + affiliation: "ITIS Foundation" +contact: maiz@itis.swiss +inputs: {} + +outputs: + output_1: + displayOrder: 0.0 + label: "Tissue Properties" + description: "Input data csv" + type: "data:text/csv" + fileToKeyMap: + input.csv: output_1 + +boot-options: + boot_mode: + label: Boot mode + description: Boot mode + default: "0" + items: + "0": + label: default + description: default diff --git a/.osparc/tissue-properties/metadata.yml.j2 b/.osparc/tissue-properties/metadata.yml.j2 new file mode 100644 index 00000000..f41410d8 --- /dev/null +++ b/.osparc/tissue-properties/metadata.yml.j2 @@ -0,0 +1,56 @@ +name: {{ compose.services[service_name].build.labels["io.simcore.name"]["name"]}} +thumbnail: {{""}} +description: "{{ compose.services[service_name].build.labels["io.simcore.description"]["description"]}}" +key: {{ compose.services[service_name].build.labels["io.simcore.key"]["key"]}} +version: {{ desired_version }} +integration-version: 2.0.0 +version_display: {{ desired_version }} +type: dynamic +authors: +{% for author in compose.services[service_name].build.labels["io.simcore.authors"]["authors"] %} + - name: "{{ author.name }}" + email: "{{ author.email }}" + affiliation: "{{ author.affiliation }}" +{% endfor %} +contact: {{ compose.services[service_name].build.labels["io.simcore.authors"]["authors"][0]["email"]}} +{% if compose.services[service_name].build.labels["io.simcore.inputs"]["inputs"] %} +inputs: + {% for name, input_def in compose.services[service_name].build.labels["io.simcore.inputs"]["inputs"].items() %} + {{ name }}: + displayOrder: {{ input_def.displayOrder | float }} + label: "{{ input_def.label }}" + description: "{{ input_def.description }}" + type: "{{ input_def.type }}" + {% 
endfor %} +{% else %} +inputs: {} +{% endif %} + +{% if compose.services[service_name].build.labels["io.simcore.outputs"]["outputs"] %} +outputs: + {% for name, details in compose.services[service_name].build.labels["io.simcore.outputs"]["outputs"].items() %} + {{ name }}: + displayOrder: {{ details.displayOrder | float }} + label: "{{ details.label }}" + description: "{{ details.description }}" + type: "{{ details.type }}" + {% if details.fileToKeyMap %} + fileToKeyMap: + {% for file, key in details.fileToKeyMap.items() %} + {{ file }}: {{ key }} + {% endfor %} + {% endif %} + {% endfor %} +{% else %} +outputs: {} +{% endif %} + +boot-options: + boot_mode: + label: Boot mode + description: Boot mode + default: "0" + items: + "0": + label: default + description: default diff --git a/.osparc/tissue-properties/runtime.yml b/.osparc/tissue-properties/runtime.yml new file mode 100644 index 00000000..298b8338 --- /dev/null +++ b/.osparc/tissue-properties/runtime.yml @@ -0,0 +1,28 @@ +settings: + - name: "resources" + type: "Resources" + value: + mem_limit: 17179869184 + cpu_limit: 4000000000 + - name: "ports" + type: "int" + value: + 4000 + - name: "constraints" + type: "string" + value: + - node.platform.os == linux +paths-mapping: # Watch this carefully and potentially adjust + outputs_path: "/home/scu/output" + inputs_path: "/home/scu/input" + state_paths: [] +compose-spec: + version: "3.7" + services: + tissue-properties: + image: $${SIMCORE_REGISTRY}/simcore/services/dynamic/tissue-properties:$${SERVICE_VERSION} + environment: + SIMCORE_NODE_BASEPATH: "${SIMCORE_NODE_BASEPATH}" + TISSUEPROPS_INPUT_PATH: "${TISSUEPROPS_INPUT_PATH}" + TISSUEPROPS_OUTPUT_PATH: "${TISSUEPROPS_OUTPUT_PATH}" +container-http-entrypoint: tissue-properties diff --git a/.osparc/tissue-properties/runtime.yml.j2 b/.osparc/tissue-properties/runtime.yml.j2 new file mode 100644 index 00000000..a3eb3105 --- /dev/null +++ b/.osparc/tissue-properties/runtime.yml.j2 @@ -0,0 +1,35 @@ +{% if 
compose.services[service_name].build.labels["simcore.service.settings"] %} +settings: +{% for setting in compose.services[service_name].build.labels["simcore.service.settings"] %} + - name: "{{ setting.name }}" + type: "{{ setting.type }}" + value: + {% if setting.value is mapping %} + {% for k, v in setting.value.items() %} + {{ k }}: {{ v }} + {% endfor %} + {% elif setting.value is iterable and not setting.value is string %} + {% for v in setting.value %} + - {{ v }} + {% endfor %} + {% else %} + {{ setting.value }} + {% endif %} +{% endfor %} +{% else %} +settings: [] +{% endif %} +paths-mapping: # Watch this carefully and potentially adjust + outputs_path: "/home/scu/output" + inputs_path: "/home/scu/input" + state_paths: [] +compose-spec: + version: "3.7" + services: + {{ service_name }}: + image: $${SIMCORE_REGISTRY}/simcore/services/dynamic/{{ service_name }}:$${SERVICE_VERSION} + environment: + {% for key, value in compose.services[service_name].environment.items() %} + {{ key }}: "{{ value | replace('$${', '{{ ') | replace('}', '}') }}" + {% endfor %} +container-http-entrypoint: {{ service_name }} diff --git a/DEV_README.md b/DEV_README.md new file mode 100644 index 00000000..e08b34c1 --- /dev/null +++ b/DEV_README.md @@ -0,0 +1,15 @@ +# Developer README + +### To develop locally: + +``` +cd .osparc/YOUR_SERVICE +make run-local +``` + +### To build the production image + +``` +cd .osparc/YOUR_SERVICE +make build +``` diff --git a/Makefile b/Makefile index aa8463f7..0f2b5379 100644 --- a/Makefile +++ b/Makefile @@ -1,55 +1,7 @@ -# author: Sylvain Anderegg - -VERSION := $(shell uname -a) - -export VCS_URL:=$(shell git config --get remote.origin.url) -export VCS_REF:=$(shell git rev-parse --short HEAD) -export VCS_STATUS_CLIENT:=$(if $(shell git status -s),'modified/untracked','clean') -export BUILD_DATE:=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") - +# TARGETS -------------------------------------------------- .PHONY: help -help: ## help on rule's targets - @awk 
--posix 'BEGIN {FS = ":.*?## "} /^[[:alpha:][:space:]_-]+:.*?## / {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) +help: ## this help + @cat ./DEV_README.md .DEFAULT_GOAL := help - - -.PHONY: new-service -new-service: ## Bakes a new project from cookiecutter-simcore-pyservice and drops it under services/ - .venv/bin/cookiecutter gh:ITISFoundation/cookiecutter-osparc-service --output-dir $(CURDIR)/services - - -.PHONY: info -info: ## Displays some parameters of makefile environments - @echo '+ VCS_* ' - @echo ' - ULR : ${VCS_URL}' - @echo ' - REF : ${VCS_REF}' - @echo ' - (STATUS)REF_CLIENT : (${VCS_STATUS_CLIENT})' - @echo '+ BUILD_DATE : ${BUILD_DATE}' - @echo '+ VERSION : ${VERSION}' - @echo '+ DOCKER_REGISTRY : ${DOCKER_REGISTRY}' - - -.PHONY: devenv -devenv: .venv -.venv: ## Creates a python virtual environment with dev tools (pip, pylint, ...) - python3 -m venv .venv - .venv/bin/pip3 install --upgrade pip wheel setuptools - .venv/bin/pip3 install pylint black cookiecutter semver - .venv/bin/pip3 install -r scripts/auto-doc/requirements.txt - @echo "To activate the venv, execute 'source .venv/bin/activate'" - - -.PHONY: toc -toc: .venv ## Updates README.txt with a ToC of all services - @.venv/bin/python ${CURDIR}/scripts/auto-doc/create-toc.py - -.PHONY: resource-upgrade-stubs -resource-upgrade-stubs: ## retag images - @.venv/bin/python ${CURDIR}/scripts/resources/create_update_stubs.py --toc=${CURDIR}/toc.json - - -.PHONY: clean -clean: ## Cleans all unversioned files in project - @git clean -dxf -e .vscode/ diff --git a/README.md b/README.md index c10ac4c8..a84a53d9 100644 --- a/README.md +++ b/README.md @@ -41,7 +41,7 @@ Collection of some open-source services for the osparc simcore platform: | [sleeper](services/sleeper/docker/custom/Dockerfile) | A service which awaits for time to pass, two times. 
| computational | [![](https://images.microbadger.com/badges/version/itisfoundation/sleeper:2.2.1.svg)](https://microbadger.com/images/itisfoundation/sleeper:2.2.1 'See Image Version') | ![sleeper](https://github.com/ITISFoundation/osparc-services/workflows/sleeper/badge.svg?branch=master) | | [sleeper-gpu](services/sleeper/docker/custom/Dockerfile) | A service which awaits for time to pass, two times. | computational | [![](https://images.microbadger.com/badges/version/itisfoundation/sleeper-gpu:2.2.1.svg)](https://microbadger.com/images/itisfoundation/sleeper-gpu:2.2.1 'See Image Version') | | | [sleeper-mpi](services/sleeper/docker/custom/Dockerfile) | A service which awaits for time to pass, two times. | computational | [![](https://images.microbadger.com/badges/version/itisfoundation/sleeper-mpi:2.2.1.svg)](https://microbadger.com/images/itisfoundation/sleeper-mpi:2.2.1 'See Image Version') | | -| [Tissue properties](services/dy-tissue-properties/Dockerfile) | Tissue properties compiled in an extensive, critical literature review by the ITIS Foundation. Visit [itis.swiss/database](https://itis.swiss/database) for additional information, e.g., on tissue parameter variability/uncertainty, quality assurance, and the explored sources. Please use the following citation when referring to the database: Hasgall PA, Di Gennaro F, Baumgartner C, Neufeld E, Lloyd B, Gosselin MC, Payne D, Klingenböck A, Kuster N, ITIS Database for thermal and electromagnetic parameters of biological tissues, Version 4.0, May 15, 2018, DOI: 10.13099/VIP21000-04-0. [itis.swiss/database](https://itis.swiss/database). 
Powered by [csv-to-html-table](https://github.com/derekeder/csv-to-html-table) | dynamic | [![](https://images.microbadger.com/badges/version/itisfoundation/tissue-properties:1.0.1.svg)](https://microbadger.com/images/itisfoundation/tissue-properties:1.0.1 'See Image Version') | ![Tissue properties](https://github.com/ITISFoundation/osparc-services/workflows/dy-tissue-properties/badge.svg?branch=master) | +| [Tissue properties](services/tissue-properties/Dockerfile) | Tissue properties compiled in an extensive, critical literature review by the ITIS Foundation. Visit [itis.swiss/database](https://itis.swiss/database) for additional information, e.g., on tissue parameter variability/uncertainty, quality assurance, and the explored sources. Please use the following citation when referring to the database: Hasgall PA, Di Gennaro F, Baumgartner C, Neufeld E, Lloyd B, Gosselin MC, Payne D, Klingenböck A, Kuster N, ITIS Database for thermal and electromagnetic parameters of biological tissues, Version 4.0, May 15, 2018, DOI: 10.13099/VIP21000-04-0. [itis.swiss/database](https://itis.swiss/database). 
Powered by [csv-to-html-table](https://github.com/derekeder/csv-to-html-table) | dynamic | [![](https://images.microbadger.com/badges/version/itisfoundation/tissue-properties:1.0.1.svg)](https://microbadger.com/images/itisfoundation/tissue-properties:1.0.1 'See Image Version') | ![Tissue properties](https://github.com/ITISFoundation/osparc-services/workflows/tissue-properties/badge.svg?branch=master) | *Updated on 2024-11-15T10:33:51Z* diff --git a/requirements/devenv.txt b/requirements/devenv.txt new file mode 100644 index 00000000..37787a8f --- /dev/null +++ b/requirements/devenv.txt @@ -0,0 +1,11 @@ +pre-commit +black +isort +pylint +ruff +jinja2 +setuptools +j2cli[yaml] +simcore-models-library @ git+https://github.com/itisfoundation/osparc-simcore.git@master#subdirectory=packages/models-library +simcore-service-integration @ git+https://github.com/itisfoundation/osparc-simcore.git@master#subdirectory=packages/service-integration +simcore-common-library @ git+https://github.com/itisfoundation/osparc-simcore.git@master#subdirectory=packages/common-library diff --git a/scripts/classic_upgrader.py b/scripts/classic_upgrader.py new file mode 100755 index 00000000..6388441e --- /dev/null +++ b/scripts/classic_upgrader.py @@ -0,0 +1,120 @@ +#!/usr/bin/env -S uv --quiet run --script +# /// script +# requires-python = ">=3.13" +# dependencies = [ +# "jinja2", "pyyaml", "pydantic>2.0.0", +# "typer", "tqdm" +# ] +# /// +from pathlib import Path +import json +from typing import Any, Optional, Union, List +import typer +from pydantic import BaseModel, Field, validator +from jinja2 import Environment, FileSystemLoader, select_autoescape + +app = typer.Typer(help="Docker Compose Parser and Template Generator") +from pathlib import Path +from typing import Any, Optional +import json +import typer +from pydantic import BaseModel, Field, field_validator +import yaml +from jinja2 import Environment, FileSystemLoader, select_autoescape + +class NestedLabels(BaseModel): + class 
Config: + extra = "allow" + +from pydantic import field_validator, ValidationInfo + +class BuildConfig(BaseModel): + context: str + dockerfile: str + target: str + labels: dict[str, str] + + @field_validator('labels', mode='after') + @classmethod + def parse_json_labels(cls, labels: dict, info: ValidationInfo) -> dict: + json_labels = [ + 'io.simcore.key', 'io.simcore.version', 'io.simcore.type', + 'io.simcore.name', 'io.simcore.description', 'io.simcore.authors', + 'io.simcore.contact', 'io.simcore.inputs', 'io.simcore.outputs', + 'simcore.service.settings' + ] + + parsed = {} + for key, value in labels.items(): + if key in json_labels: + try: + parsed[key] = json.loads(value) + except json.JSONDecodeError: + parsed[key] = {"error": "Invalid JSON"} + else: + parsed[key] = value + return parsed + +class EnhancedService(BaseModel): + image: str + build: BuildConfig + environment: dict[str, str] = Field(default={}) + ports: list[str] + + @field_validator('environment', mode='before') + @classmethod + def parse_environment(cls, v: list[str]) -> dict: + return dict(item.split("=", 1) for item in v) + +class EnhancedComposeSpecification(BaseModel): + version: str + services: dict[str, EnhancedService] + +def render_template_( + template_path: Path, + compose_data: EnhancedComposeSpecification, + service_name: str,desired_version: str, + output_path: Optional[Path] = None, +) -> str: + env = Environment( + loader=FileSystemLoader(template_path.parent), + autoescape=select_autoescape(), + trim_blocks=True, + lstrip_blocks=True + ) + template = env.get_template(template_path.name) + + context = { + "service": compose_data.services[service_name], + "compose": compose_data, + "desired_version": desired_version, + "service_name": service_name + } + + rendered = template.render(context) + + if output_path: + output_path.write_text(rendered) + return rendered + +@app.command() +def render_template( + template_path: Path = typer.Argument(..., exists=True), + compose_path: Path = 
typer.Argument(..., exists=True), + service_name: str = typer.Argument(...), + desired_version: str = typer.Argument(..., help="Semantic version in format X.Y.Z"), + output_path: Path = typer.Option(None), +): + with compose_path.open() as f: + raw_data = yaml.safe_load(f) + compose_data = EnhancedComposeSpecification.model_validate(raw_data) + if service_name not in compose_data.services: + raise ValueError(f"Service '{service_name}' not found in compose file") + + result = render_template_(template_path, compose_data, service_name,desired_version, output_path) + + if not output_path: + typer.echo(result) + +if __name__ == "__main__": + app() diff --git a/scripts/common.Makefile b/scripts/common.Makefile new file mode 100644 index 00000000..32539972 --- /dev/null +++ b/scripts/common.Makefile @@ -0,0 +1,110 @@ +# HELPER Makefile that contains all the recipes that will be used by every service. Please include it in your Makefile if you add a new service +SHELL := /bin/bash +REPO_BASE_DIR := $(abspath $(dir $(abspath $(lastword $(MAKEFILE_LIST))))..) +CUR_TARGET := $(notdir $(CURDIR)) + +# Check that given variables are set and all have non-empty values, +# die with an error otherwise. +# +# Params: +# 1. Variable name(s) to test. +# 2. (optional) Error message to print. +guard-%: + @ if [ "${${*}}" = "" ]; then \ + echo "Argument '$*' is missing. TIP: make $*="; \ + exit 1; \ + fi + +# +# Automatic VENV management +# +# Inspired from https://potyarkin.com/posts/2019/manage-python-virtual-environment-from-your-makefile/ +DOCKER_IMAGE_NAME ?= $(notdir $(CURDIR)) +VENVDIR=$(REPO_BASE_DIR)/.venv +VENV_BIN=$(VENVDIR)/bin +UV := $$HOME/.local/bin/uv +$(UV): + @if [ ! -f $@ ]; then \ + echo "Installing uv..."; \ + curl -LsSf https://astral.sh/uv/install.sh | sh; \ + fi + +# Use venv for any target that requires virtual environment to be created and configured +venv: $(VENVDIR) ## configure repo's virtual environment +$(VENV_BIN): $(VENVDIR) + +$(VENVDIR): $(UV) + @if [ ! 
-d $@ ]; then \ + $< venv $@; \ + VIRTUAL_ENV=$@ $< pip install --quiet -r ${REPO_BASE_DIR}/requirements/devenv.txt; \ + $(VENV_BIN)/pre-commit install > /dev/null 2>&1; \ + fi + -@$(UV) self --quiet update + + +$(VENV_BIN)/%: $(VENVDIR) + @if [ ! -f "$@" ]; then \ + echo "ERROR: '$*' is not found in $(VENV_BIN)"; \ + exit 1; \ + fi + +.PHONY: show-venv +show-venv: venv ## show venv info + @$(VENV_BIN)/python -c "import sys; print('Python ' + sys.version.replace('\n',''))" + @$(UV) --version + @echo venv: $(VENVDIR) + +.PHONY: install +install: ${REPO_BASE_DIR}/requirements/devenv.txt venv + @VIRTUAL_ENV=$(VENVDIR) $(UV) pip install --requirement $< + +# +# HELPERS +# +MAKE_C := $(MAKE) --no-print-directory --directory + +.PHONY: clean check_clean +clean: .check_clean ## Cleans all outputs + # cleaning unversioned files in $(CURDIR) + @git clean -dxf + +.check_clean: + @echo -n "Are you sure? [y/N] " && read ans && [ $${ans:-N} = y ] + @echo -n "$(shell whoami), are you REALLY sure? [y/N] " && read ans && [ $${ans:-N} = y ] + +# Helpers ------------------------------------------------- +# NOTE: be careful that GNU Make replaces newlines with space which is why this command cannot work using a Make function +define upgrader + @$(REPO_BASE_DIR)/scripts/classic_upgrader.py $(1) $(2) $(3) $(4) > $(5) +endef + +metadata.yml: metadata.yml.j2 guard-VERSION venv ## Create metadata.yml file + # generate $@ + $(call upgrader, $<, $(REPO_BASE_DIR)/services/$(DOCKER_IMAGE_NAME)/docker-compose.yml,$(DOCKER_IMAGE_NAME),$(VERSION),$@) + +runtime.yml: runtime.yml.j2 guard-VERSION venv ## Create metadata.yml file + # generate $@ + $(call upgrader, $<, $(REPO_BASE_DIR)/services/$(DOCKER_IMAGE_NAME)/docker-compose.yml,$(DOCKER_IMAGE_NAME),$(VERSION),$@) + + +.PHONY: help +help: ## this help + @echo "usage: make [target] ..." 
+ @echo "" + @echo "Targets for '$(notdir $(CURDIR))':" + @echo "" + @awk 'BEGIN {FS = ":.*?## "}; /^[^.[:space:]].*?:.*?## / {if ($$1 != "help" && NF == 2) {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}}' $(MAKEFILE_LIST) + @echo "" + +.PHONY: docker-compose.yml +docker-compose.yml: guard-VERSION ## runs ooil to assemble the docker-compose.yml file + @echo "----- -----" + @docker run -it --rm -v $(REPO_BASE_DIR):/mnt \ + -u $(shell id -u):$(shell id -g) \ + itisfoundation/ci-service-integration-library:v2.0.9-dev \ + bash -c "cd /mnt && ooil compose --metadata /mnt/.osparc --to-spec-file /mnt/docker-compose.yml" + @echo "----- -----" + +.PHONY: build +build: | metadata.yml runtime.yml docker-compose.yml ## build docker image + @docker compose --file $(REPO_BASE_DIR)/docker-compose.yml build $(DOCKER_IMAGE_NAME) diff --git a/services/dy-3dvis/.cookiecutterrc b/services/3d-viewer-gpu/.cookiecutterrc similarity index 100% rename from services/dy-3dvis/.cookiecutterrc rename to services/3d-viewer-gpu/.cookiecutterrc diff --git a/services/dy-3dvis/.dockerignore b/services/3d-viewer-gpu/.dockerignore similarity index 100% rename from services/dy-3dvis/.dockerignore rename to services/3d-viewer-gpu/.dockerignore diff --git a/services/dy-3dvis/.env-devel b/services/3d-viewer-gpu/.env-devel similarity index 100% rename from services/dy-3dvis/.env-devel rename to services/3d-viewer-gpu/.env-devel diff --git a/services/dy-3dvis/.gitignore b/services/3d-viewer-gpu/.gitignore similarity index 100% rename from services/dy-3dvis/.gitignore rename to services/3d-viewer-gpu/.gitignore diff --git a/services/dy-3dvis/.pylintrc b/services/3d-viewer-gpu/.pylintrc similarity index 100% rename from services/dy-3dvis/.pylintrc rename to services/3d-viewer-gpu/.pylintrc diff --git a/services/dy-3dvis/Makefile b/services/3d-viewer-gpu/Makefile similarity index 100% rename from services/dy-3dvis/Makefile rename to services/3d-viewer-gpu/Makefile diff --git a/services/dy-3dvis/README.md 
b/services/3d-viewer-gpu/README.md similarity index 100% rename from services/dy-3dvis/README.md rename to services/3d-viewer-gpu/README.md diff --git a/services/dy-3dvis/VERSION b/services/3d-viewer-gpu/VERSION similarity index 100% rename from services/dy-3dvis/VERSION rename to services/3d-viewer-gpu/VERSION diff --git a/services/dy-3dvis/VERSION_INTEGRATION b/services/3d-viewer-gpu/VERSION_INTEGRATION similarity index 100% rename from services/dy-3dvis/VERSION_INTEGRATION rename to services/3d-viewer-gpu/VERSION_INTEGRATION diff --git a/services/dy-3dvis/docker-compose-build.yml b/services/3d-viewer-gpu/docker-compose-build.yml similarity index 100% rename from services/dy-3dvis/docker-compose-build.yml rename to services/3d-viewer-gpu/docker-compose-build.yml diff --git a/services/dy-3dvis/docker-compose.devel.yml b/services/3d-viewer-gpu/docker-compose.devel.yml similarity index 98% rename from services/dy-3dvis/docker-compose.devel.yml rename to services/3d-viewer-gpu/docker-compose.devel.yml index cc61c698..94453ebe 100644 --- a/services/dy-3dvis/docker-compose.devel.yml +++ b/services/3d-viewer-gpu/docker-compose.devel.yml @@ -18,7 +18,7 @@ services: - SIMCORE_NODE_BASEPATH=${SIMCORE_NODE_BASEPATH} - SIMCORE_HOST_NAME=${SIMCORE_HOST_NAME} - TEST_DATA_PATH=/home/root/test-data - - SERVER_PORT=${SERVER_PORT} + - SERVER_PORT=80 - PARAVIEW_DEBUG=${PARAVIEW_DEBUG} depends_on: - postgres @@ -38,7 +38,7 @@ services: labels: - traefik.enable=true - traefik.docker.network=3d-viewer_default - - traefik.http.services.3d-viewer.loadbalancer.server.port=${SERVER_PORT} + - traefik.http.services.3d-viewer.loadbalancer.server.port=80 - traefik.http.routers.3d-viewer.rule=hostregexp(`{host:.+}`) && PathPrefix(`${SIMCORE_NODE_BASEPATH}`) - traefik.http.routers.3d-viewer.entrypoints=http # the stipprefixregex is necessary diff --git a/services/dy-3dvis/docker-compose-meta.yml b/services/3d-viewer-gpu/docker-compose.yml similarity index 82% rename from 
services/dy-3dvis/docker-compose-meta.yml rename to services/3d-viewer-gpu/docker-compose.yml index a0332fb8..4017ecfc 100644 --- a/services/dy-3dvis/docker-compose-meta.yml +++ b/services/3d-viewer-gpu/docker-compose.yml @@ -1,6 +1,16 @@ services: 3d-viewer: + init: true + image: local/${DOCKER_IMAGE_NAME}-gpu:${DOCKER_BUILD_TARGET} + ports: + - "8778:80" build: + context: ../../ + dockerfile: services/dy-3dvis/docker/custom/Dockerfile + target: ${DOCKER_BUILD_TARGET} + args: + BASE_IMAGE: kitware/paraviewweb:pv-v5.6.0-osmesa + VISUALIZER_VERSION: v3.1.6 labels: io.simcore.authors: '{"authors": [{"name": "Sylvain Anderegg", "email": "anderegg@itis.swiss", @@ -32,7 +42,18 @@ services: { "NanoCPUs": 4000000000, "MemoryBytes": 17179869184 }, "Reservations": { "NanoCPUs": 100000000, "MemoryBytes": 536870912 } } } ]' 3d-viewer-gpu: + init: true + image: ${DOCKER_REGISTRY}/${DOCKER_IMAGE_NAME}:${DOCKER_IMAGE_TAG} + # some defaults to test the service + ports: + - "8777:80" build: + context: ../../ + dockerfile: services/dy-3dvis/docker/custom/Dockerfile + target: ${DOCKER_BUILD_TARGET} + args: + BASE_IMAGE: kitware/paraviewweb:pv-v5.6.0-egl + VISUALIZER_VERSION: v3.1.6 labels: io.simcore.authors: '{"authors": [{"name": "Sylvain Anderegg", "email": "anderegg@itis.swiss", @@ -58,7 +79,7 @@ services: simcore.service.bootsettings: "[]" simcore.service.reverse-proxy-settings: '{"strip_path":true}' simcore.service.settings: - '[ {"name": "ports", "type": "int", "value": ${SERVER_PORT:-80}}, + '[ {"name": "ports", "type": "int", "value": 80}, {"name": "constraints", "type": "string", "value": [ "node.platform.os == linux" ]}, {"name": "Resources", "type": "Resources", "value": { "Limits": { "NanoCPUs": 4000000000, "MemoryBytes": 17179869184 }, "Reservations": diff --git a/services/dy-3dvis/docker/custom/Dockerfile b/services/3d-viewer-gpu/docker/custom/Dockerfile similarity index 100% rename from services/dy-3dvis/docker/custom/Dockerfile rename to 
services/3d-viewer-gpu/docker/custom/Dockerfile diff --git a/services/dy-3dvis/docker/custom/entrypoint.bash b/services/3d-viewer-gpu/docker/custom/entrypoint.bash similarity index 98% rename from services/dy-3dvis/docker/custom/entrypoint.bash rename to services/3d-viewer-gpu/docker/custom/entrypoint.bash index 1983a569..8572bed0 100755 --- a/services/dy-3dvis/docker/custom/entrypoint.bash +++ b/services/3d-viewer-gpu/docker/custom/entrypoint.bash @@ -51,7 +51,7 @@ echo "setting up visualizer options..." visualizer_options=(--content /opt/paraview/share/paraview-5.6/web/visualizer/www/ \ --data "${PARAVIEW_INPUT_PATH}" \ --host 0.0.0.0"${SIMCORE_NODE_BASEPATH}" \ - --port "${SERVER_PORT}" \ + --port "80" \ # --ws-endpoint ${SIMCORE_NODE_BASEPATH}/ws \ --timeout 20000 \ --no-built-in-palette \ diff --git a/services/dy-3dvis/docker/custom/healthcheck_curl_host.py b/services/3d-viewer-gpu/docker/custom/healthcheck_curl_host.py similarity index 100% rename from services/dy-3dvis/docker/custom/healthcheck_curl_host.py rename to services/3d-viewer-gpu/docker/custom/healthcheck_curl_host.py diff --git a/services/dy-3dvis/docker/custom/patch_paraview.bash b/services/3d-viewer-gpu/docker/custom/patch_paraview.bash similarity index 100% rename from services/dy-3dvis/docker/custom/patch_paraview.bash rename to services/3d-viewer-gpu/docker/custom/patch_paraview.bash diff --git a/services/dy-3dvis/metadata/metadata-gpu.yml b/services/3d-viewer-gpu/metadata/metadata-gpu.yml similarity index 100% rename from services/dy-3dvis/metadata/metadata-gpu.yml rename to services/3d-viewer-gpu/metadata/metadata-gpu.yml diff --git a/services/dy-3dvis/metadata/metadata.yml b/services/3d-viewer-gpu/metadata/metadata.yml similarity index 100% rename from services/dy-3dvis/metadata/metadata.yml rename to services/3d-viewer-gpu/metadata/metadata.yml diff --git a/services/dy-3dvis/requirements.in b/services/3d-viewer-gpu/requirements.in similarity index 100% rename from 
services/dy-3dvis/requirements.in rename to services/3d-viewer-gpu/requirements.in diff --git a/services/3d-viewer-gpu/requirements.txt b/services/3d-viewer-gpu/requirements.txt new file mode 100644 index 00000000..9a918a74 --- /dev/null +++ b/services/3d-viewer-gpu/requirements.txt @@ -0,0 +1,116 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --output-file=requirements.txt +# +arrow==1.3.0 + # via cookiecutter +attrs==25.3.0 + # via + # jsonschema + # referencing +binaryornot==0.4.4 + # via cookiecutter +certifi==2025.4.26 + # via requests +chardet==5.2.0 + # via binaryornot +charset-normalizer==3.4.2 + # via requests +click==8.1.8 + # via cookiecutter +cookiecutter==2.6.0 + # via pytest-cookies +coverage[toml]==7.8.0 + # via + # -r requirements.in + # pytest-cov +docker==7.1.0 + # via -r requirements.in +exceptiongroup==1.2.2 + # via pytest +idna==3.10 + # via requests +iniconfig==2.1.0 + # via pytest +jinja2==3.1.6 + # via cookiecutter +jsonschema==4.23.0 + # via -r requirements.in +jsonschema-specifications==2025.4.1 + # via jsonschema +markdown-it-py==3.0.0 + # via rich +markupsafe==3.0.2 + # via jinja2 +mdurl==0.1.2 + # via markdown-it-py +packaging==25.0 + # via + # pytest + # pytest-sugar +pluggy==1.5.0 + # via pytest +pygments==2.19.1 + # via rich +pytest==8.3.5 + # via + # -r requirements.in + # pytest-cookies + # pytest-cov + # pytest-instafail + # pytest-mock + # pytest-sugar +pytest-cookies==0.7.0 + # via -r requirements.in +pytest-cov==6.1.1 + # via -r requirements.in +pytest-instafail==0.5.0 + # via -r requirements.in +pytest-mock==3.14.0 + # via -r requirements.in +pytest-sugar==1.0.0 + # via -r requirements.in +python-dateutil==2.9.0.post0 + # via arrow +python-slugify==8.0.4 + # via cookiecutter +pyyaml==6.0.2 + # via + # -r requirements.in + # cookiecutter +referencing==0.36.2 + # via + # jsonschema + # jsonschema-specifications +requests==2.32.3 + # via + # cookiecutter + # 
docker +rich==14.0.0 + # via cookiecutter +rpds-py==0.24.0 + # via + # jsonschema + # referencing +six==1.17.0 + # via python-dateutil +termcolor==3.1.0 + # via pytest-sugar +text-unidecode==1.3 + # via python-slugify +tomli==2.2.1 + # via + # coverage + # pytest +types-python-dateutil==2.9.0.20241206 + # via arrow +typing-extensions==4.13.2 + # via + # referencing + # rich +urllib3==2.4.0 + # via + # docker + # requests diff --git a/services/dy-3dvis/src/3d-viewer/config/s4lColorMap.json b/services/3d-viewer-gpu/src/3d-viewer/config/s4lColorMap.json similarity index 100% rename from services/dy-3dvis/src/3d-viewer/config/s4lColorMap.json rename to services/3d-viewer-gpu/src/3d-viewer/config/s4lColorMap.json diff --git a/services/dy-3dvis/src/3d-viewer/config/s4lColorMap.xml b/services/3d-viewer-gpu/src/3d-viewer/config/s4lColorMap.xml similarity index 100% rename from services/dy-3dvis/src/3d-viewer/config/s4lColorMap.xml rename to services/3d-viewer-gpu/src/3d-viewer/config/s4lColorMap.xml diff --git a/services/dy-3dvis/src/3d-viewer/config/visualizer_config.json b/services/3d-viewer-gpu/src/3d-viewer/config/visualizer_config.json similarity index 100% rename from services/dy-3dvis/src/3d-viewer/config/visualizer_config.json rename to services/3d-viewer-gpu/src/3d-viewer/config/visualizer_config.json diff --git a/services/dy-3dvis/src/3d-viewer/devel/port_config.json b/services/3d-viewer-gpu/src/3d-viewer/devel/port_config.json similarity index 100% rename from services/dy-3dvis/src/3d-viewer/devel/port_config.json rename to services/3d-viewer-gpu/src/3d-viewer/devel/port_config.json diff --git a/services/dy-3dvis/src/3d-viewer/handlers/retrieve.rpy b/services/3d-viewer-gpu/src/3d-viewer/handlers/retrieve.rpy similarity index 100% rename from services/dy-3dvis/src/3d-viewer/handlers/retrieve.rpy rename to services/3d-viewer-gpu/src/3d-viewer/handlers/retrieve.rpy diff --git a/services/dy-3dvis/src/3d-viewer/handlers/state.rpy 
b/services/3d-viewer-gpu/src/3d-viewer/handlers/state.rpy similarity index 100% rename from services/dy-3dvis/src/3d-viewer/handlers/state.rpy rename to services/3d-viewer-gpu/src/3d-viewer/handlers/state.rpy diff --git a/services/dy-3dvis/src/3d-viewer/rebrand-osparc/rebrand-osparc.patch b/services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/rebrand-osparc.patch similarity index 100% rename from services/dy-3dvis/src/3d-viewer/rebrand-osparc/rebrand-osparc.patch rename to services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/rebrand-osparc.patch diff --git a/services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/logo.png b/services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/logo.png similarity index 100% rename from services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/logo.png rename to services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/logo.png diff --git a/services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/logo_old.png b/services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/logo_old.png similarity index 100% rename from services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/logo_old.png rename to services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/logo_old.png diff --git a/services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/osparc-icon-black-S.png b/services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/osparc-icon-black-S.png similarity index 100% rename from services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/osparc-icon-black-S.png rename to services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/osparc-icon-black-S.png diff --git a/services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/osparc-white-small.png b/services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/osparc-white-small.png similarity index 100% rename from 
services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/osparc-white-small.png rename to services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/osparc-white-small.png diff --git a/services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/ProgressLoaderWidgetOsparc.mcss b/services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/ProgressLoaderWidgetOsparc.mcss similarity index 100% rename from services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/ProgressLoaderWidgetOsparc.mcss rename to services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/ProgressLoaderWidgetOsparc.mcss diff --git a/services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/index.js b/services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/index.js similarity index 100% rename from services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/index.js rename to services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/index.js diff --git a/services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/osparc-icon-black-S.png b/services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/osparc-icon-black-S.png similarity index 100% rename from services/dy-3dvis/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/osparc-icon-black-S.png rename to services/3d-viewer-gpu/src/3d-viewer/rebrand-osparc/to-be-copied/src/panels/ProgressLoaderWidgetOsparc/osparc-icon-black-S.png diff --git a/services/3d-viewer-gpu/src/3d-viewer/utils/input-retriever.py b/services/3d-viewer-gpu/src/3d-viewer/utils/input-retriever.py new file mode 100644 index 
00000000..1ded2ab9 --- /dev/null +++ b/services/3d-viewer-gpu/src/3d-viewer/utils/input-retriever.py @@ -0,0 +1,46 @@ +#!/usr/bin/python + +import argparse +import asyncio +import json +import logging +import os +import shutil +import sys +import tempfile +import time +import zipfile +from enum import IntEnum +from pathlib import Path +from typing import Dict, List + +logging.basicConfig(level=logging.INFO) +log = logging.getLogger(__file__ if __name__ == "__main__" else __name__) + +CACHE_FILE_PATH = Path(tempfile.gettempdir()) / "input_retriever.cache" + + +class ExitCode(IntEnum): + SUCCESS = 0 + FAIL = 1 + + +def input_path() -> Path: + path = os.environ.get("PARAVIEW_INPUT_PATH", "undefined") + assert path != "undefined", "PARAVIEW_INPUT_PATH is not defined!" + return Path(path) + + +async def task(node_key: str, fct, *args, **kwargs): + return (node_key, await fct(*args, *kwargs)) + + +async def retrieve_data(ports: List[str], cache: Dict) -> int: + return 0 + + +def main(args=None) -> int: + return ExitCode.SUCCESS + +if __name__ == "__main__": + sys.exit(main()) diff --git a/services/3d-viewer-gpu/src/3d-viewer/utils/state_manager.py b/services/3d-viewer-gpu/src/3d-viewer/utils/state_manager.py new file mode 100644 index 00000000..4241bec6 --- /dev/null +++ b/services/3d-viewer-gpu/src/3d-viewer/utils/state_manager.py @@ -0,0 +1,40 @@ +#!/usr/bin/python + +""" Tries to pull the node data from S3. Will return error code. 
+ + Usage python state_puller.py PATH_OR_FILE +:return: error code +""" + +import argparse +import asyncio +import logging +import os +import sys +import time +from enum import IntEnum +from pathlib import Path + + +logging.basicConfig(level=logging.INFO) +log = logging.getLogger(__file__ if __name__ == "__main__" else __name__) + + +class ExitCode(IntEnum): + SUCCESS = 0 + FAIL = 1 + + +def state_path() -> Path: + path = os.environ.get("SIMCORE_NODE_APP_STATE_PATH", "undefined") + assert path != "undefined", "SIMCORE_NODE_APP_STATE_PATH is not defined!" + return Path(path) + +async def push_pull_state(path, op_type) -> None: + return + +def main(args=None) -> int: + return ExitCode.SUCCESS + +if __name__ == "__main__": + sys.exit(main()) diff --git a/services/dy-3dvis/tests/conftest.py b/services/3d-viewer-gpu/tests/conftest.py similarity index 100% rename from services/dy-3dvis/tests/conftest.py rename to services/3d-viewer-gpu/tests/conftest.py diff --git a/services/dy-3dvis/tests/integration/conftest.py b/services/3d-viewer-gpu/tests/integration/conftest.py similarity index 100% rename from services/dy-3dvis/tests/integration/conftest.py rename to services/3d-viewer-gpu/tests/integration/conftest.py diff --git a/services/dy-3dvis/tests/integration/test_docker_image.py b/services/3d-viewer-gpu/tests/integration/test_docker_image.py similarity index 100% rename from services/dy-3dvis/tests/integration/test_docker_image.py rename to services/3d-viewer-gpu/tests/integration/test_docker_image.py diff --git a/services/dy-3dvis/tests/unit/test_folder_structure.py b/services/3d-viewer-gpu/tests/unit/test_folder_structure.py similarity index 100% rename from services/dy-3dvis/tests/unit/test_folder_structure.py rename to services/3d-viewer-gpu/tests/unit/test_folder_structure.py diff --git a/services/dy-3dvis/tests/unit/test_validation_data.py b/services/3d-viewer-gpu/tests/unit/test_validation_data.py similarity index 100% rename from 
services/dy-3dvis/tests/unit/test_validation_data.py rename to services/3d-viewer-gpu/tests/unit/test_validation_data.py diff --git a/services/dy-3dvis/tools/update_compose_labels.py b/services/3d-viewer-gpu/tools/update_compose_labels.py similarity index 100% rename from services/dy-3dvis/tools/update_compose_labels.py rename to services/3d-viewer-gpu/tools/update_compose_labels.py diff --git a/services/dy-3dvis/validation/input/A.vtp b/services/3d-viewer-gpu/validation/input/A.vtp similarity index 100% rename from services/dy-3dvis/validation/input/A.vtp rename to services/3d-viewer-gpu/validation/input/A.vtp diff --git a/services/dy-3dvis/validation/input/B.vtp b/services/3d-viewer-gpu/validation/input/B.vtp similarity index 100% rename from services/dy-3dvis/validation/input/B.vtp rename to services/3d-viewer-gpu/validation/input/B.vtp diff --git a/services/dy-3dvis/validation/output/.gitkeep b/services/3d-viewer-gpu/validation/output/.gitkeep similarity index 100% rename from services/dy-3dvis/validation/output/.gitkeep rename to services/3d-viewer-gpu/validation/output/.gitkeep diff --git a/services/dy-3dvis/versioning/integration.cfg b/services/3d-viewer-gpu/versioning/integration.cfg similarity index 100% rename from services/dy-3dvis/versioning/integration.cfg rename to services/3d-viewer-gpu/versioning/integration.cfg diff --git a/services/dy-3dvis/versioning/service.cfg b/services/3d-viewer-gpu/versioning/service.cfg similarity index 100% rename from services/dy-3dvis/versioning/service.cfg rename to services/3d-viewer-gpu/versioning/service.cfg diff --git a/services/dy-3dvis/docker-compose.yml b/services/dy-3dvis/docker-compose.yml deleted file mode 100644 index 3c851d47..00000000 --- a/services/dy-3dvis/docker-compose.yml +++ /dev/null @@ -1,15 +0,0 @@ -version: "3.7" -services: - 3d-viewer: - init: true - image: ${DOCKER_REGISTRY}/${DOCKER_IMAGE_NAME}:${DOCKER_IMAGE_TAG} - # some defaults to test the service - ports: - - "8777:${SERVER_PORT}" - 
#-------------------------------------------------------------------- - 3d-viewer-gpu: - init: true - image: local/${DOCKER_IMAGE_NAME}-gpu:${DOCKER_BUILD_TARGET} - ports: - - "8778:${SERVER_PORT}" - # runtime: nvidia diff --git a/services/dy-3dvis/pytest.ini b/services/dy-3dvis/pytest.ini deleted file mode 100644 index bcab02bd..00000000 --- a/services/dy-3dvis/pytest.ini +++ /dev/null @@ -1,2 +0,0 @@ -[pytest] -junit_family = xunit2 \ No newline at end of file diff --git a/services/dy-3dvis/requirements.txt b/services/dy-3dvis/requirements.txt deleted file mode 100644 index 63c59da2..00000000 --- a/services/dy-3dvis/requirements.txt +++ /dev/null @@ -1,119 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements.txt -# -arrow==0.13.2 - # via - # jinja2-time - # pytest-cookies -attrs==20.3.0 - # via - # jsonschema - # pytest -binaryornot==0.4.4 - # via cookiecutter -certifi==2020.12.5 - # via requests -chardet==4.0.0 - # via - # binaryornot - # requests -click==7.1.2 - # via cookiecutter -cookiecutter==1.7.2 - # via pytest-cookies -coverage==5.5 - # via - # -r requirements.in - # pytest-cov -docker==4.4.4 - # via -r requirements.in -idna==2.10 - # via requests -importlib-metadata==3.7.3 - # via - # jsonschema - # pluggy - # pytest -jinja2-time==0.2.0 - # via cookiecutter -jinja2==2.11.3 - # via - # cookiecutter - # jinja2-time -jsonschema==3.2.0 - # via -r requirements.in -markupsafe==1.1.1 - # via - # cookiecutter - # jinja2 -more-itertools==8.7.0 - # via pytest -packaging==20.9 - # via - # pytest - # pytest-sugar -pluggy==0.13.1 - # via pytest -poyo==0.5.0 - # via cookiecutter -py==1.10.0 - # via pytest -pyparsing==2.4.7 - # via packaging -pyrsistent==0.17.3 - # via jsonschema -pytest-cookies==0.5.1 - # via -r requirements.in -pytest-cov==2.11.1 - # via -r requirements.in -pytest-instafail==0.4.2 - # via -r requirements.in -pytest-mock==3.5.1 - # via -r requirements.in -pytest-sugar==0.9.4 - # via -r 
requirements.in -pytest==5.4.3 - # via - # -r requirements.in - # pytest-cookies - # pytest-cov - # pytest-instafail - # pytest-mock - # pytest-sugar -python-dateutil==2.8.1 - # via arrow -python-slugify==4.0.1 - # via cookiecutter -pyyaml==5.4.1 - # via -r requirements.in -requests==2.25.1 - # via - # cookiecutter - # docker -six==1.15.0 - # via - # cookiecutter - # docker - # jsonschema - # python-dateutil - # websocket-client -termcolor==1.1.0 - # via pytest-sugar -text-unidecode==1.3 - # via python-slugify -typing-extensions==3.7.4.3 - # via importlib-metadata -urllib3==1.26.4 - # via requests -wcwidth==0.2.5 - # via pytest -websocket-client==0.58.0 - # via docker -zipp==3.4.1 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/services/dy-3dvis/src/3d-viewer/utils/input-retriever.py b/services/dy-3dvis/src/3d-viewer/utils/input-retriever.py deleted file mode 100644 index 9b57c066..00000000 --- a/services/dy-3dvis/src/3d-viewer/utils/input-retriever.py +++ /dev/null @@ -1,131 +0,0 @@ -#!/usr/bin/python - -import argparse -import asyncio -import json -import logging -import os -import shutil -import sys -import tempfile -import time -import zipfile -from enum import IntEnum -from pathlib import Path -from typing import Dict, List - -from simcore_sdk import node_ports - -logging.basicConfig(level=logging.INFO) -log = logging.getLogger(__file__ if __name__ == "__main__" else __name__) - -CACHE_FILE_PATH = Path(tempfile.gettempdir()) / "input_retriever.cache" - - -class ExitCode(IntEnum): - SUCCESS = 0 - FAIL = 1 - - -def input_path() -> Path: - path = os.environ.get("PARAVIEW_INPUT_PATH", "undefined") - assert path != "undefined", "PARAVIEW_INPUT_PATH is not defined!" 
- return Path(path) - - -async def task(node_key: str, fct, *args, **kwargs): - return (node_key, await fct(*args, *kwargs)) - - -async def retrieve_data(ports: List[str], cache: Dict) -> int: - # get all files in the local system and copy them to the input folder - start_time = time.clock() - PORTS = await node_ports.ports() - download_tasks = [] - for node_input in await PORTS.inputs: - # if ports contains some keys only download them - log.info("Checking node %s", node_input.key) - if ports and node_input.key not in ports: - continue - # delete the corresponding file(s) if applicable - if node_input.key in cache: - log.info("Deleting files from %s: %s", - node_input.key, cache[node_input.key]) - for file_path in cache[node_input.key]: - Path(file_path).unlink() - del cache[node_input.key] - if not node_input or node_input.value is None: - continue - # collect coroutines - download_tasks.append(task(node_input.key, node_input.get)) - log.info("retrieving %s data", len(download_tasks)) - - transfer_bytes = 0 - if download_tasks: - download_results = await asyncio.gather(*download_tasks) - log.info("completed download, extracting/moving data to final folder...") - for node_key, local_path in download_results: - if local_path is None: - continue - - if not local_path.exists(): - continue - transfer_bytes = transfer_bytes + local_path.stat().st_size - if zipfile.is_zipfile(str(local_path)): - log.info("extracting %s to %s", local_path, input_path()) - zip_ref = zipfile.ZipFile(str(local_path), 'r') - zip_ref.extractall(str(input_path())) - cache[node_key] = \ - [str(input_path() / zipped_file) - for zipped_file in zip_ref.namelist()] - zip_ref.close() - log.info("extraction completed") - else: - log.info("moving %s to input path %s", - local_path, input_path()) - dest_path = input_path() / local_path.name - shutil.move(str(local_path), str(dest_path)) - cache[node_key] = [str(dest_path)] - log.info("move completed") - end_time = time.clock() - log.info("retrieval 
complete: took %.2fseconds for %s bytes", - end_time - start_time, transfer_bytes) - return transfer_bytes - - -def main(args=None) -> int: - try: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("--port_keys", help="The port keys to push/pull", - type=str, nargs="*", required=False) - options = parser.parse_args(args) - log.info("has to retrieve the following ports: %s", - options.port_keys if options.port_keys else "all" - ) - - if not input_path().exists(): - input_path().mkdir() - log.info("Created input folder at %s", input_path()) - - file_transfer_history = {} - if CACHE_FILE_PATH.exists(): - with CACHE_FILE_PATH.open() as fp: - file_transfer_history = json.load(fp) - - loop = asyncio.get_event_loop() - transfer_bytes = \ - loop.run_until_complete(retrieve_data( - options.port_keys, file_transfer_history)) - log.info("saving cache: %s", file_transfer_history) - with CACHE_FILE_PATH.open("w") as fp: - json.dump(file_transfer_history, fp) - - print(transfer_bytes) - return ExitCode.SUCCESS - except: # pylint: disable=bare-except - log.exception("Unexpected error when retrieving data") - return ExitCode.FAIL - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/services/dy-3dvis/src/3d-viewer/utils/state_manager.py b/services/dy-3dvis/src/3d-viewer/utils/state_manager.py deleted file mode 100644 index 4a6a3282..00000000 --- a/services/dy-3dvis/src/3d-viewer/utils/state_manager.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/python - -""" Tries to pull the node data from S3. Will return error code. 
- - Usage python state_puller.py PATH_OR_FILE -:return: error code -""" - -import argparse -import asyncio -import logging -import os -import sys -import time -from enum import IntEnum -from pathlib import Path - -from simcore_sdk.node_data import data_manager - -logging.basicConfig(level=logging.INFO) -log = logging.getLogger(__file__ if __name__ == "__main__" else __name__) - - -class ExitCode(IntEnum): - SUCCESS = 0 - FAIL = 1 - - -def state_path() -> Path: - path = os.environ.get("SIMCORE_NODE_APP_STATE_PATH", "undefined") - assert path != "undefined", "SIMCORE_NODE_APP_STATE_PATH is not defined!" - return Path(path) - -async def push_pull_state(path, op_type) -> None: - if op_type == "pull": - if not await data_manager.is_file_present_in_storage(path): - log.info("File '%s' is not present in storage service, will skip.", str(path)) - return - await getattr(data_manager, op_type)(path) - -def main(args=None) -> int: - try: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("--path", help="The folder or file to get for the node", - type=Path, default=state_path(), required=False) - parser.add_argument("type", help="push or pull", - choices=["push", "pull"]) - options = parser.parse_args(args) - - loop = asyncio.get_event_loop() - - # push or pull state - start_time = time.clock() - loop.run_until_complete(push_pull_state(options.path, options.type)) - end_time = time.clock() - log.info("time to %s: %.2fseconds", options.type, end_time - start_time) - return ExitCode.SUCCESS - - except Exception: # pylint: disable=broad-except - log.exception("Could not %s state from S3 for %s", options.type, options.path) - return ExitCode.FAIL - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/services/dy-jupyter/docker-compose.yml b/services/dy-jupyter/docker-compose.yml deleted file mode 100644 index 304d66e6..00000000 --- a/services/dy-jupyter/docker-compose.yml +++ /dev/null @@ -1,140 +0,0 @@ -version: "3.7" -services: - 
jupyter-base-notebook: - image: ${DOCKER_REGISTRY}/jupyter-base-notebook:${DOCKER_IMAGE_TAG} - # init: true - build: - context: ../../ - dockerfile: services/dy-jupyter/Dockerfile - target: production - args: - BASE_IMAGE: jupyter/base-notebook:python-3.7.4 - INSTALL_PYTHON2: 1 - labels: - io.simcore.key: '{"key": "simcore/services/dynamic/jupyter-base-notebook"}' - io.simcore.version: '{"version": "${DOCKER_IMAGE_TAG}"}' - io.simcore.type: '{"type": "dynamic"}' - io.simcore.name: '{"name": "jupyter-base-notebook"}' - io.simcore.description: '{"description": "Jupyter notebook"}' - io.simcore.authors: '{"authors": [{"name": "sanderegg", "email": "anderegg@itis.swiss", "affiliation": "ITIS Foundation"}]}' - io.simcore.contact: '{"contact": "anderegg@itis.swiss"}' - io.simcore.inputs: '{"inputs": { - "input_1":{"label": "input 1", "displayOrder":0, "description": "Input 1", "type": "data:*/*"}, - "input_2":{"label": "input 2", "displayOrder":1, "description": "Input 2", "type": "data:*/*"} - }}' - io.simcore.outputs: '{"outputs": { - "output_1":{"label": "output 1", "displayOrder":0, "description": "Output 1", "type": "data:*/*"}, - "output_2":{"label": "output 2", "displayOrder":1, "description": "Output 2", "type": "data:*/*"} - }}' - simcore.service.settings: '[ - {"name": "ports", "type": "int", "value": 8888}, - {"name": "constraints", "type": "string", "value": ["node.platform.os == linux"]}, - {"name": "resources", "type": "resources", "value": - { - "Limits": { - "NanoCPUs": 4000000000, - "MemoryBytes": 8589934592 - } - } - } - ]' - # simcore.service.bootsettings: '[ - # {"name": "entry_point", "type": "string", "value": "?token=simcore"}]' - org.label-schema.schema-version: "1.0" - org.label-schema.build-date: "${BUILD_DATE}" - org.label-schema.vcs-url: "${VCS_URL}" - org.label-schema.vcs-ref: "${VCS_REF}" - ports: - - "8888:8888" - jupyter-scipy-notebook: - image: ${DOCKER_REGISTRY}/jupyter-scipy-notebook:${DOCKER_IMAGE_TAG} - init: true - build: - context: 
../../ - dockerfile: services/dy-jupyter/Dockerfile - target: production - args: - BASE_IMAGE: jupyter/scipy-notebook:31b807ec9e83 - INSTALL_PYTHON2: 1 - labels: - io.simcore.key: '{"key": "simcore/services/dynamic/jupyter-scipy-notebook"}' - io.simcore.version: '{"version": "${DOCKER_IMAGE_TAG}"}' - io.simcore.type: '{"type": "dynamic"}' - io.simcore.name: '{"name": "jupyter-scipy-notebook"}' - io.simcore.description: '{"description": "Jupyter scipy notebook"}' - io.simcore.authors: '{"authors": [{"name": "sanderegg", "email": "anderegg@itis.swiss", "affiliation": "ITIS Foundation"}]}' - io.simcore.contact: '{"contact": "anderegg@itis.swiss"}' - io.simcore.inputs: '{"inputs": { - "input_1":{"label": "input 1", "displayOrder":0, "description": "Input 1", "type": "data:*/*"}, - "input_2":{"label": "input 2", "displayOrder":1, "description": "Input 2", "type": "data:*/*"} - }}' - io.simcore.outputs: '{"outputs": { - "output_1":{"label": "output 1", "displayOrder":0, "description": "Output 1", "type": "data:*/*"}, - "output_2":{"label": "output 2", "displayOrder":1, "description": "Output 2", "type": "data:*/*"} - }}' - simcore.service.settings: '[ - {"name": "ports", "type": "int", "value": 8888}, - {"name": "constraints", "type": "string", "value": ["node.platform.os == linux"]}, - {"name": "resources", "type": "resources", "value": - { - "Limits": { - "NanoCPUs": 4000000000, - "MemoryBytes": 8589934592 - } - } - } - ]' - # simcore.service.bootsettings: '[ - # {"name": "entry_point", "type": "string", "value": "?token=${NOTEBOOK_TOKEN}"}]' - org.label-schema.schema-version: "1.0" - org.label-schema.build-date: "${BUILD_DATE}" - org.label-schema.vcs-url: "${VCS_URL}" - org.label-schema.vcs-ref: "${VCS_REF}" - ports: - - "8889:8888" - jupyter-r-notebook: - image: ${DOCKER_REGISTRY}/jupyter-r-notebook:${DOCKER_IMAGE_TAG} - init: true - build: - context: ../../ - dockerfile: services/dy-jupyter/Dockerfile - target: production - args: - BASE_IMAGE: brajwa/r3.6.1-notebook 
- INSTALL_PYTHON2: 0 - labels: - io.simcore.key: '{"key": "simcore/services/dynamic/jupyter-r-notebook"}' - io.simcore.version: '{"version": "${DOCKER_IMAGE_TAG}"}' - io.simcore.type: '{"type": "dynamic"}' - io.simcore.name: '{"name": "jupyter-r-notebook"}' - io.simcore.description: '{"description": "Jupyter R notebook"}' - io.simcore.authors: '{"authors": [{"name": "sanderegg", "email": "anderegg@itis.swiss", "affiliation": "ITIS Foundation"}]}' - io.simcore.contact: '{"contact": "anderegg@itis.swiss"}' - io.simcore.inputs: '{"inputs": { - "input_1":{"label": "input 1", "displayOrder":0, "description": "Input 1", "type": "data:*/*"}, - "input_2":{"label": "input 2", "displayOrder":1, "description": "Input 2", "type": "data:*/*"} - }}' - io.simcore.outputs: '{"outputs": { - "output_1":{"label": "output 1", "displayOrder":0, "description": "Output 1", "type": "data:*/*"}, - "output_2":{"label": "output 2", "displayOrder":1, "description": "Output 2", "type": "data:*/*"} - }}' - simcore.service.settings: '[ - {"name": "ports", "type": "int", "value": 8888}, - {"name": "constraints", "type": "string", "value": ["node.platform.os == linux"]}, - {"name": "resources", "type": "resources", "value": - { - "Limits": { - "NanoCPUs": 4000000000, - "MemoryBytes": 8589934592 - } - } - } - ]' - # simcore.service.bootsettings: '[ - # {"name": "entry_point", "type": "string", "value": "?token=${NOTEBOOK_TOKEN}"}]' - org.label-schema.schema-version: "1.0" - org.label-schema.build-date: "${BUILD_DATE}" - org.label-schema.vcs-url: "${VCS_URL}" - org.label-schema.vcs-ref: "${VCS_REF}" - ports: - - "8890:8888" diff --git a/services/dy-jupyter/docker/state_puller.py b/services/dy-jupyter/docker/state_puller.py deleted file mode 100644 index 123a5634..00000000 --- a/services/dy-jupyter/docker/state_puller.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/python - -""" Tries to pull the node data from S3. Will return error code unless the --silent flag is on and only a warning will be output. 
- - Usage python state_puller.py PATH_OR_FILE --silent -:return: error code -""" - -import argparse -import asyncio -import logging -import sys -from enum import IntEnum -from pathlib import Path - -from simcore_sdk.node_data import data_manager - -log = logging.getLogger(__file__ if __name__ == "__main__" else __name__) -logging.basicConfig(level=logging.INFO) - - -class ExitCode(IntEnum): - SUCCESS = 0 - FAIL = 1 - - -async def pull_file_if_exists(path: Path) -> None: - """ - If the path already exist in storage pull it. Otherwise it is assumed - this is the first time the service starts. - - In each and every other case an error is raised and logged - """ - if not await data_manager.is_file_present_in_storage(path): - log.info("File '%s' is not present in storage service, will skip.", str(path)) - return - - await data_manager.pull(path) - log.info("Finished pulling and extracting %s", str(path)) - - -def main(args=None) -> int: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "path", help="The folder or file to get for the node", type=Path - ) - options = parser.parse_args(args) - try: - asyncio.get_event_loop().run_until_complete( - pull_file_if_exists(path=options.path) - ) - except Exception: # pylint: disable=broad-except - log.exception( - "Unexpected error when retrieving state from S3 for %s", options.path - ) - return ExitCode.FAIL - return ExitCode.SUCCESS - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/services/dy-jupyter/handlers/input_retriever.py b/services/dy-jupyter/handlers/input_retriever.py deleted file mode 100644 index aaf751f6..00000000 --- a/services/dy-jupyter/handlers/input_retriever.py +++ /dev/null @@ -1,200 +0,0 @@ -import asyncio -import json -import logging -import os -import shutil -import sys -import tempfile -import time -import zipfile -from pathlib import Path -from typing import List - -from simcore_sdk import node_ports - -logger = logging.getLogger(__name__) - -_INPUTS_FOLDER = 
os.environ.get("INPUTS_FOLDER", "~/inputs") -_OUTPUTS_FOLDER = os.environ.get("OUTPUTS_FOLDER", "~/outputs") -_FILE_TYPE_PREFIX = "data:" -_KEY_VALUE_FILE_NAME = "key_values.json" - - -def _compress_files_in_folder(folder: Path, one_file_not_compress: bool = True) -> Path: - list_files = list(folder.glob("*")) - - if list_files is None: - return None - - if one_file_not_compress and len(list_files) == 1: - return list_files[0] - - temp_file = tempfile.NamedTemporaryFile(suffix=".zip") - temp_file.close() - with zipfile.ZipFile(temp_file.name, mode="w") as zip_ptr: - for file_path in list_files: - zip_ptr.write(str(file_path), arcname=file_path.name) - - return Path(temp_file.name) - - -def _no_relative_path_zip(members: zipfile.ZipFile): - for zipinfo in members.infolist(): - path = Path(zipinfo.filename) - if path.is_absolute(): - # absolute path are not allowed - continue - if path.match("/../"): - # relative paths are not allowed - continue - yield zipinfo.filename - - -async def get_time_wrapped(port): - logger.info("transfer started for %s", port.key) - start_time = time.perf_counter() - ret = await port.get() - elapsed_time = time.perf_counter() - start_time - logger.info("transfer completed in %ss", elapsed_time) - if isinstance(ret, Path): - size_mb = ret.stat().st_size / 1024 / 1024 - logger.info("%s: data size: %sMB, transfer rate %sMB/s", - ret.name, size_mb, size_mb / elapsed_time) - return (port, ret) - - -async def set_time_wrapped(port, value): - logger.info("transfer started for %s", port.key) - start_time = time.perf_counter() - await port.set(value) - elapsed_time = time.perf_counter() - start_time - logger.info("transfer completed in %ss", elapsed_time) - if isinstance(value, Path): - size_bytes = value.stat().st_size - logger.info("%s: data size: %sMB, transfer rate %sMB/s", value.name, - size_bytes / 1024 / 1024, size_bytes / 1024 / 1024 / elapsed_time) - return size_bytes - return sys.getsizeof(value) - - -async def download_data(port_keys: 
List[str]) -> int: - logger.info("retrieving data from simcore...") - start_time = time.perf_counter() - PORTS = await node_ports.ports() - inputs_path = Path(_INPUTS_FOLDER).expanduser() - data = {} - - # let's gather all the data - download_tasks = [] - for node_input in await PORTS.inputs: - # if port_keys contains some keys only download them - logger.info("Checking node %s", node_input.key) - if port_keys and node_input.key not in port_keys: - continue - # collect coroutines - download_tasks.append(get_time_wrapped(node_input)) - logger.info("retrieving %s data", len(download_tasks)) - - transfer_bytes = 0 - if download_tasks: - results = await asyncio.gather(*download_tasks) - logger.info("completed download %s", results) - for port, value in results: - data[port.key] = {"key": port.key, "value": value} - - if _FILE_TYPE_PREFIX in port.type: - # if there are files, move them to the final destination - downloaded_file = value - dest_path = inputs_path / port.key - # first cleanup - if dest_path.exists(): - logger.info("removing %s", dest_path) - shutil.rmtree(dest_path) - if not downloaded_file or not downloaded_file.exists(): - # the link may be empty - continue - transfer_bytes = transfer_bytes + downloaded_file.stat().st_size - # in case of valid file, it is either uncompressed and/or moved to the final directory - logger.info("creating directory %s", dest_path) - dest_path.mkdir(exist_ok=True, parents=True) - data[port.key] = {"key": port.key, "value": str(dest_path)} - - if zipfile.is_zipfile(downloaded_file): - logger.info("unzipping %s", downloaded_file) - with zipfile.ZipFile(downloaded_file) as zip_file: - zip_file.extractall( - dest_path, members=_no_relative_path_zip(zip_file)) - logger.info("all unzipped in %s", dest_path) - else: - logger.info("moving %s", downloaded_file) - dest_path = dest_path / Path(downloaded_file).name - shutil.move(downloaded_file, dest_path) - logger.info("all moved to %s", dest_path) - else: - transfer_bytes = 
transfer_bytes + sys.getsizeof(value) - # create/update the json file with the new values - if data: - data_file = inputs_path / _KEY_VALUE_FILE_NAME - if data_file.exists(): - current_data = json.loads(data_file.read_text()) - # merge data - data = {**current_data, **data} - data_file.write_text(json.dumps(data)) - stop_time = time.perf_counter() - logger.info("all data retrieved from simcore in %sseconds: %s", - stop_time - start_time, data) - return transfer_bytes - - -async def upload_data(port_keys: List[str]) -> int: # pylint: disable=too-many-branches - logger.info("uploading data to simcore...") - start_time = time.perf_counter() - PORTS = await node_ports.ports() - outputs_path = Path(_OUTPUTS_FOLDER).expanduser() - - # let's gather the tasks - temp_files = [] - upload_tasks = [] - transfer_bytes = 0 - for port in await PORTS.outputs: - logger.info("Checking port %s", port.key) - if port_keys and port.key not in port_keys: - continue - logger.debug( - "uploading data to port '%s' with value '%s'...", port.key, port.value) - if _FILE_TYPE_PREFIX in port.type: - src_folder = outputs_path / port.key - list_files = list(src_folder.glob("*")) - if len(list_files) == 1: - # special case, direct upload - upload_tasks.append(set_time_wrapped(port, list_files[0])) - continue - # generic case let's create an archive - if len(list_files) > 1: - temp_file = tempfile.NamedTemporaryFile(suffix=".zip") - temp_file.close() - with zipfile.ZipFile(temp_file.name, mode="w") as zip_ptr: - for file_path in list_files: - zip_ptr.write(str(file_path), arcname=file_path.name) - - temp_files.append(temp_file.name) - upload_tasks.append(set_time_wrapped(port, temp_file.name)) - else: - data_file = outputs_path / _KEY_VALUE_FILE_NAME - if data_file.exists(): - data = json.loads(data_file.read_text()) - if port.key in data and data[port.key] is not None: - upload_tasks.append(set_time_wrapped(port, data[port.key])) - if upload_tasks: - try: - results = await 
asyncio.gather(*upload_tasks) - transfer_bytes = sum(results) - finally: - # clean up possible compressed files - for file_path in temp_files: - Path(file_path).unlink() - - stop_time = time.perf_counter() - logger.info("all data uploaded to simcore in %sseconds", - stop_time-start_time) - return transfer_bytes diff --git a/services/dy-jupyter/handlers/post_save_hook.py b/services/dy-jupyter/handlers/post_save_hook.py deleted file mode 100644 index e8c50cca..00000000 --- a/services/dy-jupyter/handlers/post_save_hook.py +++ /dev/null @@ -1,17 +0,0 @@ -import asyncio -import logging -import os -from pathlib import Path -from simcore_sdk.node_data import data_manager - -log = logging.getLogger(__file__ if __name__ == "__main__" else __name__) - -def export_to_osparc_hook(model, os_path, contents_manager): # pylint: disable=unused-argument - """export the notebooks to oSparc S3 when notebooks get saved - """ - - if model['type'] != 'notebook': - return - - notebooks_path = Path(os.environ.get("SIMCORE_NODE_APP_STATE_PATH", "undefined")) - asyncio.ensure_future(data_manager.push(notebooks_path)) diff --git a/services/dy-jupyter/handlers/retrieve.py b/services/dy-jupyter/handlers/retrieve.py deleted file mode 100644 index d566590a..00000000 --- a/services/dy-jupyter/handlers/retrieve.py +++ /dev/null @@ -1,62 +0,0 @@ -import asyncio -import json -import logging - -import input_retriever -from notebook.base.handlers import IPythonHandler -from notebook.utils import url_path_join - -logger = logging.getLogger(__name__) - - -class RetrieveHandler(IPythonHandler): - # deprecated: get download everything and upload everything - async def get(self): - try: - results = await asyncio.gather( - input_retriever.download_data(port_keys=[]), - input_retriever.upload_data(port_keys=[])) - transfered_size = sum(results) - self.write(json.dumps({ - "data": { - "size_bytes": transfered_size - } - })) - self.set_status(200) - except Exception as exc: #pylint: disable=broad-except - 
logger.exception("Unexpected problem when processing retrieve call") - self.set_status(500, reason=str(exc)) - finally: - self.finish('completed retrieve!') - - async def post(self): - request_contents = json.loads(self.request.body) - ports = request_contents["port_keys"] - logger.info("getting data of ports %s from previous node with POST request...", ports) - try: - transfered_size = await input_retriever.download_data(ports) - self.write(json.dumps({ - "data": { - "size_bytes": transfered_size - } - })) - self.set_status(200) - except Exception as exc: #pylint: disable=broad-except - logger.exception("Unexpected problem when processing retrieve call") - self.set_status(500, reason=str(exc)) - finally: - self.finish() - -def load_jupyter_server_extension(nb_server_app): - """ Called when the extension is loaded - - - Adds API to server - - :param nb_server_app: handle to the Notebook webserver instance. - :type nb_server_app: NotebookWebApplication - """ - web_app = nb_server_app.web_app - host_pattern = '.*$' - route_pattern = url_path_join(web_app.settings['base_url'], '/retrieve') - - web_app.add_handlers(host_pattern, [(route_pattern, RetrieveHandler)]) diff --git a/services/dy-jupyter/handlers/state.py b/services/dy-jupyter/handlers/state.py deleted file mode 100644 index 71ee18ce..00000000 --- a/services/dy-jupyter/handlers/state.py +++ /dev/null @@ -1,63 +0,0 @@ -import logging -import os -from pathlib import Path - -from notebook.base.handlers import IPythonHandler -from notebook.utils import url_path_join - -from simcore_sdk.node_ports import exceptions -from simcore_sdk.node_data import data_manager - -log = logging.getLogger(__name__) - -_STATE_PATH = os.environ.get("SIMCORE_NODE_APP_STATE_PATH", "undefined") - -def _state_path() -> Path: - assert _STATE_PATH != "undefined", "SIMCORE_NODE_APP_STATE_PATH is not defined!" 
- state_path = Path(_STATE_PATH) - return state_path - -class StateHandler(IPythonHandler): - def initialize(self): #pylint: disable=no-self-use - pass - - async def post(self): - log.info("started pushing current state to S3...") - try: - await data_manager.push(_state_path()) - self.set_status(204) - except exceptions.NodeportsException as exc: - log.exception("Unexpected error while pushing state") - self.set_status(500, reason=str(exc)) - finally: - self.finish() - - async def get(self): - log.info("started pulling state to S3...") - try: - await data_manager.pull(_state_path()) - self.set_status(204) - except exceptions.S3InvalidPathError as exc: - log.exception("Invalid path to S3 while retrieving state") - self.set_status(404, reason=str(exc)) - except exceptions.NodeportsException as exc: - log.exception("Unexpected error while retrieving state") - self.set_status(500, reason=str(exc)) - finally: - self.finish('completed pulling state') - - - -def load_jupyter_server_extension(nb_server_app): - """ Called when the extension is loaded - - - Adds API to server - - :param nb_server_app: handle to the Notebook webserver instance. 
- :type nb_server_app: NotebookWebApplication - """ - web_app = nb_server_app.web_app - host_pattern = '.*$' - route_pattern = url_path_join(web_app.settings['base_url'], '/state') - - web_app.add_handlers(host_pattern, [(route_pattern, StateHandler)]) diff --git a/services/dy-tissue-properties/Makefile b/services/dy-tissue-properties/Makefile deleted file mode 100644 index 8390c6e4..00000000 --- a/services/dy-tissue-properties/Makefile +++ /dev/null @@ -1,151 +0,0 @@ -VERSION := $(shell cat /proc/version) -# SAN this is a hack so that docker-compose works in the linux virtual environment under Windows -ifneq (,$(findstring Microsoft,$(VERSION))) -# executing from bash -$(echo 'detected WSL') -export DOCKER_COMPOSE=docker-compose -export DOCKER=docker -else ifeq ($(OS), Windows_NT) -$(echo 'detected Powershell/CMD') -# executing from powershell -export DOCKER_COMPOSE=docker-compose.exe -export DOCKER=docker.exe -else -$(echo 'detected native linux') -export DOCKER_COMPOSE=docker-compose -export DOCKER=docker -endif - - -# using ?= will only set if absent -export DOCKER_IMAGE_TAG ?= $(shell cat VERSION) -$(info DOCKER_IMAGE_TAG set to ${DOCKER_IMAGE_TAG}) - -# default to local (no registry) -export DOCKER_REGISTRY ?= itisfoundation -$(info DOCKER_REGISTRY set to ${DOCKER_REGISTRY}) - -export VCS_URL:=$(shell git config --get remote.origin.url) -export VCS_REF:=$(shell git rev-parse --short HEAD) -export BUILD_DATE:=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") - -## Tools ------------------------------------------------------------------------------------------------------ -# -tools = - -ifeq ($(shell uname -s),Darwin) - SED = gsed -else - SED = sed -endif - -ifeq ($(shell which ${SED}),) - tools += $(SED) -endif - -.PHONY: all -all: help info -ifdef tools - $(error "Can't find tools:${tools}") -endif - -.PHONY: build rebuild -# target: build, rebuild: – Builds all service images. Use `rebuild` to build w/o cache. 
-build: - ${DOCKER_COMPOSE} -f docker-compose.yml build --parallel - -.PHONY: rebuild -# target: rebuild: – Builds all service images without using cache. -rebuild: - ${DOCKER_COMPOSE} -f docker-compose.yml build --no-cache --parallel - -.PHONY: build-devel -# target: build-devel: – Builds images of core services for development. -build-devel: .env - ${DOCKER_COMPOSE} -f docker-compose.yml -f docker-compose.devel.yml build --parallel - -.PHONY: up up-devel down -# target: up, down: – starts/stops service -# target: up-devel: – deploys service in devel mode together with minimal swarm-like environment -up: .env - ${DOCKER_COMPOSE} -f docker-compose.yml up - -up-devel: .env - ${DOCKER_COMPOSE} -f docker-compose.yml -f docker-compose.devel.yml up - -down: - ${DOCKER_COMPOSE} -f docker-compose.yml down - ${DOCKER_COMPOSE} -f docker-compose.yml -f docker-compose.devel.yml down - -.PHONY: push pull -# target: push – Pushes the service images to ${DOCKER_REGISTRY} -# target: pull - Pulls the service images from ${DOCKER_REGISTRY} -push: - ${DOCKER_COMPOSE} -f docker-compose.yml push - -pull: - ${DOCKER_COMPOSE} -f docker-compose.yml pull - - -test: - # tests https://github.com/ITISFoundation/osparc-services/issues/15 - docker run ${DOCKER_REGISTRY}/tissue-properties:${DOCKER_IMAGE_TAG} python3 -c "import psycopg2; import simcore_sdk.models" - @echo 'test passed' - -## ------------------------------- -# Tools - -.PHONY: info -# target: info – Displays some parameters of makefile environments -info: .env - @echo '+ VCS_* ' - @echo ' - URL : ${VCS_URL}' - @echo ' - REF : ${VCS_REF}' - @echo '+ BUILD_DATE : ${BUILD_DATE}' - @echo '+ VERSION : ${VERSION}' - @echo '+ DOCKER_REGISTRY : ${DOCKER_REGISTRY}' - @echo '+ DOCKER_IMAGE_TAG : ${DOCKER_IMAGE_TAG}' - -PHONY: .env -.env: .env-devel - # first check if file exists, copies it - @if [ ! 
-f $@ ] ; then \ - echo "##### $@ does not exist, copying $< ############"; \ - cp $< $@; \ - else \ - echo "##### $< is newer than $@ ####"; \ - diff -uN $@ $<; \ - false; \ - fi - -.vscode/settings.json: .vscode-template/settings.json - $(info ##### $< is newer than $@ ####) - @diff -uN $@ $< - @false - -## ------------------------------- -# Virtual Environments -.venv: -# target: .venv – Creates a python virtual environment with dev tools (pip, pylint, ...) - python3 -m venv .venv - .venv/bin/pip3 install --upgrade pip wheel setuptools - .venv/bin/pip3 install pylint autopep8 virtualenv - @echo "To activate the venv, execute 'source .venv/bin/activate' or '.venv/bin/activate.bat' (WIN)" - -## ------------------------------- -# Auxiliary targets. - -.PHONY: clean -# target: clean – Cleans all unversioned files in project -clean: - @git clean -dxf -e .vscode/ - - -.PHONY: help -# target: help – Display all callable targets -help: - @echo "Make targets in osparc-simcore:" - @echo - @egrep "^\s*#\s*target\s*:\s*" [Mm]akefile \ - | $(SED) -r "s/^\s*#\s*target\s*:\s*//g" - @echo diff --git a/services/dy-jupyter/.env-devel b/services/jupyter-base-notebook/.env-devel similarity index 100% rename from services/dy-jupyter/.env-devel rename to services/jupyter-base-notebook/.env-devel diff --git a/services/dy-jupyter/Makefile b/services/jupyter-base-notebook/Makefile similarity index 100% rename from services/dy-jupyter/Makefile rename to services/jupyter-base-notebook/Makefile diff --git a/services/dy-jupyter/README.md b/services/jupyter-base-notebook/README.md similarity index 100% rename from services/dy-jupyter/README.md rename to services/jupyter-base-notebook/README.md diff --git a/services/dy-jupyter/VERSION b/services/jupyter-base-notebook/VERSION similarity index 100% rename from services/dy-jupyter/VERSION rename to services/jupyter-base-notebook/VERSION diff --git a/services/dy-jupyter/devel/port_config.json 
b/services/jupyter-base-notebook/devel/port_config.json similarity index 100% rename from services/dy-jupyter/devel/port_config.json rename to services/jupyter-base-notebook/devel/port_config.json diff --git a/services/dy-jupyter/devel/requirements.txt b/services/jupyter-base-notebook/devel/requirements.txt similarity index 100% rename from services/dy-jupyter/devel/requirements.txt rename to services/jupyter-base-notebook/devel/requirements.txt diff --git a/services/dy-jupyter/docker-compose.devel.yml b/services/jupyter-base-notebook/docker-compose.devel.yml similarity index 100% rename from services/dy-jupyter/docker-compose.devel.yml rename to services/jupyter-base-notebook/docker-compose.devel.yml diff --git a/services/dy-jupyter/docker/boot.sh b/services/jupyter-base-notebook/docker/boot.sh similarity index 100% rename from services/dy-jupyter/docker/boot.sh rename to services/jupyter-base-notebook/docker/boot.sh diff --git a/services/dy-jupyter/docker/boot_notebook.sh b/services/jupyter-base-notebook/docker/boot_notebook.sh similarity index 67% rename from services/dy-jupyter/docker/boot_notebook.sh rename to services/jupyter-base-notebook/docker/boot_notebook.sh index 80fdb787..aad9e9ff 100755 --- a/services/dy-jupyter/docker/boot_notebook.sh +++ b/services/jupyter-base-notebook/docker/boot_notebook.sh @@ -9,13 +9,8 @@ echo "current directory is ${PWD}" # create output folder echo echo "creating inputs/outputs folder" -mkdir -p "${INPUTS_FOLDER:-~/inputs}" -mkdir -p "${OUTPUTS_FOLDER:-~/outputs}" - -# try to pull data from S3 -echo -echo "trying to restore state..." 
-python /docker/state_puller.py "${SIMCORE_NODE_APP_STATE_PATH}" +mkdir -p "${INPUTS_FOLDER}" # must match dy-sidecar state-paths-whatever +mkdir -p "${OUTPUTS_FOLDER}" # must match dy-sidecar state-paths-whatever # the notebooks in the folder shall be trusted by default # jupyter trust ${SIMCORE_NODE_APP_STATE_PATH}/* @@ -23,7 +18,7 @@ python /docker/state_puller.py "${SIMCORE_NODE_APP_STATE_PATH}" # Trust all notebooks in the notbooks folder echo echo "trust all notebooks in path..." -find "${SIMCORE_NODE_APP_STATE_PATH}" -name '*.ipynb' -exec jupyter trust {} \; +find "/home/jovyan/notebooks" -name '*.ipynb' -exec jupyter trust {} \; # prevents notebook to open in separate tab cat > ~/.jupyter/custom/custom.js < jupyter_config.json < Path: + assert _STATE_PATH != "undefined", "SIMCORE_NODE_APP_STATE_PATH is not defined!" + state_path = Path(_STATE_PATH) + return state_path + +class StateHandler(IPythonHandler): + def initialize(self): #pylint: disable=no-self-use + pass + + async def post(self): + pass + + async def get(self): + pass + +def load_jupyter_server_extension(nb_server_app): + """ Called when the extension is loaded + + - Adds API to server + + :param nb_server_app: handle to the Notebook webserver instance. 
+ :type nb_server_app: NotebookWebApplication + """ + web_app = nb_server_app.web_app + host_pattern = '.*$' + route_pattern = url_path_join(web_app.settings['base_url'], '/state') + + web_app.add_handlers(host_pattern, [(route_pattern, StateHandler)]) diff --git a/services/dy-jupyter/notebook-inputs/key_values.json b/services/jupyter-base-notebook/notebook-inputs/key_values.json similarity index 100% rename from services/dy-jupyter/notebook-inputs/key_values.json rename to services/jupyter-base-notebook/notebook-inputs/key_values.json diff --git a/services/dy-jupyter/notebooks/notebook.ipynb b/services/jupyter-base-notebook/notebooks/notebook.ipynb similarity index 100% rename from services/dy-jupyter/notebooks/notebook.ipynb rename to services/jupyter-base-notebook/notebooks/notebook.ipynb diff --git a/services/dy-jupyter/pytype.cfg b/services/jupyter-base-notebook/pytype.cfg similarity index 100% rename from services/dy-jupyter/pytype.cfg rename to services/jupyter-base-notebook/pytype.cfg diff --git a/services/dy-raw-graphs/.env-devel b/services/raw-graphs/.env-devel similarity index 100% rename from services/dy-raw-graphs/.env-devel rename to services/raw-graphs/.env-devel diff --git a/services/dy-raw-graphs/Dockerfile b/services/raw-graphs/Dockerfile similarity index 96% rename from services/dy-raw-graphs/Dockerfile rename to services/raw-graphs/Dockerfile index 297e487e..a7a77a41 100644 --- a/services/dy-raw-graphs/Dockerfile +++ b/services/raw-graphs/Dockerfile @@ -80,8 +80,8 @@ COPY --chown=scu:scu scripts/docker/healthcheck.py $HOME/healthcheck/healthcheck HEALTHCHECK --interval=10s --timeout=30s --start-period=1s --retries=3 CMD [ "python3", "/home/scu/healthcheck/healthcheck.py", "http://localhost:4000" ] WORKDIR $HOME -COPY --chown=scu:scu services/dy-raw-graphs/docker $HOME/docker -COPY --chown=scu:scu services/dy-raw-graphs/server $HOME/server +COPY --chown=scu:scu services/raw-graphs/docker $HOME/docker +COPY --chown=scu:scu 
services/raw-graphs/server $HOME/server WORKDIR $HOME/raw CMD ["/bin/bash", "../docker/boot.sh"] diff --git a/services/dy-raw-graphs/Makefile b/services/raw-graphs/Makefile similarity index 100% rename from services/dy-raw-graphs/Makefile rename to services/raw-graphs/Makefile diff --git a/services/dy-raw-graphs/README.md b/services/raw-graphs/README.md similarity index 100% rename from services/dy-raw-graphs/README.md rename to services/raw-graphs/README.md diff --git a/services/dy-raw-graphs/VERSION b/services/raw-graphs/VERSION similarity index 100% rename from services/dy-raw-graphs/VERSION rename to services/raw-graphs/VERSION diff --git a/services/dy-raw-graphs/devel/port_config.json b/services/raw-graphs/devel/port_config.json similarity index 100% rename from services/dy-raw-graphs/devel/port_config.json rename to services/raw-graphs/devel/port_config.json diff --git a/services/dy-raw-graphs/docker-compose.devel.yml b/services/raw-graphs/docker-compose.devel.yml similarity index 100% rename from services/dy-raw-graphs/docker-compose.devel.yml rename to services/raw-graphs/docker-compose.devel.yml diff --git a/services/dy-raw-graphs/docker-compose.yml b/services/raw-graphs/docker-compose.yml similarity index 77% rename from services/dy-raw-graphs/docker-compose.yml rename to services/raw-graphs/docker-compose.yml index acbb460a..f9e2cd3f 100644 --- a/services/dy-raw-graphs/docker-compose.yml +++ b/services/raw-graphs/docker-compose.yml @@ -4,7 +4,7 @@ services: image: ${DOCKER_REGISTRY}/raw-graphs-table:${DOCKER_IMAGE_TAG} build: context: ../../ - dockerfile: services/dy-raw-graphs/Dockerfile + dockerfile: services/raw-graphs/Dockerfile target: production args: BRANCH_NAME: master-osparc-table @@ -16,7 +16,7 @@ services: io.simcore.description: '{"description": "Table view powered by RAW Graphs"}' io.simcore.authors: '{"authors": [{"name": "odeimaiz", "email": "maiz@itis.swiss", "affiliation": "ITIS Foundation"}]}' io.simcore.contact: '{"contact": 
"maiz@itis.swiss"}' - io.simcore.inputs: '{"inputs": {"input_1": {"label": "input 1", "displayOrder": 0, "description": "Input 1", "type": "data:*/*"}, "input_2": {"label": "input 2", "displayOrder": 1, "description": "Input 2", "type": "data:*/*"}, "input_3": {"label": "input 3", "displayOrder": 2, "description": "Input 3", "type": "data:*/*"}, "input_4": {"label": "input 4", "displayOrder": 3, "description": "Input 4", "type": "data:*/*"}, "input_5": {"label": "input 5", "displayOrder": 4, "description": "Input 5", "type": "data:*/*"}}}' + io.simcore.inputs: '{"inputs": {"input_1": {"label": "input 1", "displayOrder": 0, "description": "Input 1", "type": "data:*/*"}}}' io.simcore.outputs: '{"outputs": {}}' simcore.service.settings: '[{"name": "resources", "type": "Resources", "value": {"mem_limit":17179869184, "cpu_limit": 4000000000}}, {"name": "ports", "type": "int", "value": 4000}, {"name": "constraints", "type": "string", "value": ["node.platform.os == linux"]}]' org.label-schema.schema-version: "1.0" @@ -32,7 +32,7 @@ services: image: ${DOCKER_REGISTRY}/raw-graphs:${DOCKER_IMAGE_TAG} build: context: ../../ - dockerfile: services/dy-raw-graphs/Dockerfile + dockerfile: services/raw-graphs/Dockerfile target: production args: BRANCH_NAME: master-osparc @@ -44,7 +44,7 @@ services: io.simcore.description: '{"description": "2D plots powered by RAW Graphs"}' io.simcore.authors: '{"authors": [{"name": "odeimaiz", "email": "maiz@itis.swiss", "affiliation": "ITIS Foundation"}]}' io.simcore.contact: '{"contact": "maiz@itis.swiss"}' - io.simcore.inputs: '{"inputs": {"input_1": {"label": "input 1", "displayOrder": 0, "description": "Input 1", "type": "data:*/*"}, "input_2": {"label": "input 2", "displayOrder": 1, "description": "Input 2", "type": "data:*/*"}, "input_3": {"label": "input 3", "displayOrder": 2, "description": "Input 3", "type": "data:*/*"}, "input_4": {"label": "input 4", "displayOrder": 3, "description": "Input 4", "type": "data:*/*"}, "input_5": {"label": 
"input 5", "displayOrder": 4, "description": "Input 5", "type": "data:*/*"}}}' + io.simcore.inputs: '{"inputs": {"input_1": {"label": "input 1", "displayOrder": 0, "description": "Input 1", "type": "data:*/*"}}}' io.simcore.outputs: '{"outputs": {"output_1":{"label": "Output Graph", "displayOrder":0, "description": "Output Graph", "type": "data:image/svg+xml"}}}' simcore.service.settings: '[{"name": "resources", "type": "Resources", "value": {"mem_limit":17179869184, "cpu_limit": 4000000000}}, {"name": "ports", "type": "int", "value": 4000}, {"name": "constraints", "type": "string", "value": ["node.platform.os == linux"]}]' org.label-schema.schema-version: "1.0" diff --git a/services/dy-raw-graphs/docker/boot.sh b/services/raw-graphs/docker/boot.sh similarity index 100% rename from services/dy-raw-graphs/docker/boot.sh rename to services/raw-graphs/docker/boot.sh diff --git a/services/dy-raw-graphs/index.html b/services/raw-graphs/index.html similarity index 100% rename from services/dy-raw-graphs/index.html rename to services/raw-graphs/index.html diff --git a/services/dy-raw-graphs/server/config.js b/services/raw-graphs/server/config.js similarity index 100% rename from services/dy-raw-graphs/server/config.js rename to services/raw-graphs/server/config.js diff --git a/services/dy-raw-graphs/server/input-retriever.py b/services/raw-graphs/server/input-retriever.py similarity index 100% rename from services/dy-raw-graphs/server/input-retriever.py rename to services/raw-graphs/server/input-retriever.py diff --git a/services/dy-raw-graphs/server/routes.js b/services/raw-graphs/server/routes.js similarity index 95% rename from services/dy-raw-graphs/server/routes.js rename to services/raw-graphs/server/routes.js index 848fdb6d..31fb6cf9 100644 --- a/services/dy-raw-graphs/server/routes.js +++ b/services/raw-graphs/server/routes.js @@ -70,15 +70,20 @@ function callInputRetriever(request, response) { } function getInputDir() { - const inputsDir = '../inputs/'; + const 
inputsDir = '../input/'; if (!fs.existsSync(inputsDir)) { fs.mkdirSync(inputsDir); } - return inputsDir; + const port = "input_1/"; + const inputsDirPort = inputsDir + port; + if (!fs.existsSync(inputsDirPort)) { + fs.mkdirSync(inputsDirPort); + } + return inputsDirPort; } function getOutputDir() { - const outputsDir = '../outputs/'; + const outputsDir = '../output/'; if (!fs.existsSync(outputsDir)) { fs.mkdirSync(outputsDir); } diff --git a/services/dy-raw-graphs/server/server.js b/services/raw-graphs/server/server.js similarity index 100% rename from services/dy-raw-graphs/server/server.js rename to services/raw-graphs/server/server.js diff --git a/services/dy-tissue-properties/.env-devel b/services/tissue-properties/.env-devel similarity index 100% rename from services/dy-tissue-properties/.env-devel rename to services/tissue-properties/.env-devel diff --git a/services/dy-tissue-properties/Dockerfile b/services/tissue-properties/Dockerfile similarity index 74% rename from services/dy-tissue-properties/Dockerfile rename to services/tissue-properties/Dockerfile index 6f40f2ba..76af8c4c 100644 --- a/services/dy-tissue-properties/Dockerfile +++ b/services/tissue-properties/Dockerfile @@ -29,10 +29,10 @@ RUN apk add --no-cache --virtual .build-deps \ git \ python3-dev \ postgresql-dev && \ - pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@master#subdirectory=packages/service-library &&\ - pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@master#subdirectory=packages/simcore-sdk &&\ - pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@master#subdirectory=services/storage/client-sdk/python &&\ - pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@master#subdirectory=packages/postgres-database &&\ + pip install --no-cache-dir 
git+https://github.com/ITISFoundation/osparc-simcore.git@4159b895fc1e3743777aadb79f1209735582cfab#subdirectory=packages/service-library &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@4159b895fc1e3743777aadb79f1209735582cfab#subdirectory=packages/simcore-sdk &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@4159b895fc1e3743777aadb79f1209735582cfab#subdirectory=services/storage/client-sdk/python &&\ + pip install --no-cache-dir git+https://github.com/ITISFoundation/osparc-simcore.git@4159b895fc1e3743777aadb79f1209735582cfab#subdirectory=packages/postgres-database &&\ apk del --no-cache .build-deps @@ -57,9 +57,9 @@ ENV SIMCORE_NODE_UUID="-1" \ #-----------------Production---------------------- FROM base AS production -COPY --chown=scu:scu services/dy-tissue-properties/csv-to-html-table $HOME/csv-to-html-table -COPY --chown=scu:scu services/dy-tissue-properties/inputs $HOME/inputs -COPY --chown=scu:scu services/dy-tissue-properties/inputs/TissueProperties.csv $HOME/csv-to-html-table/data/TissueProperties.csv +COPY --chown=scu:scu services/tissue-properties/csv-to-html-table $HOME/csv-to-html-table +COPY --chown=scu:scu services/tissue-properties/inputs $HOME/inputs +COPY --chown=scu:scu services/tissue-properties/inputs/TissueProperties.csv $HOME/csv-to-html-table/data/TissueProperties.csv RUN apk add --no-cache \ postgresql-libs @@ -70,8 +70,8 @@ COPY --chown=scu:scu scripts/docker/healthcheck.py $HOME/healthcheck/healthcheck HEALTHCHECK --interval=10s --timeout=30s --start-period=1s --retries=3 CMD [ "python3", "/home/scu/healthcheck/healthcheck.py", "http://localhost:4000" ] WORKDIR $HOME -COPY --chown=scu:scu services/dy-tissue-properties/docker $HOME/docker -COPY --chown=scu:scu services/dy-tissue-properties/server $HOME/server +COPY --chown=scu:scu services/tissue-properties/docker $HOME/docker +COPY --chown=scu:scu services/tissue-properties/server $HOME/server WORKDIR 
$HOME/csv-to-html-table CMD ["/bin/bash", "../docker/boot.sh"] diff --git a/services/tissue-properties/Makefile b/services/tissue-properties/Makefile new file mode 100644 index 00000000..81a184ce --- /dev/null +++ b/services/tissue-properties/Makefile @@ -0,0 +1,38 @@ + +SHELL := /bin/bash +REPO_BASE_DIR := $(abspath $(dir $(abspath $(lastword $(MAKEFILE_LIST))))../../..) +CUR_TARGET := $(notdir $(CURDIR)) + +.PHONY: help +help: ## this help + @echo "usage: make [target] ..." + @echo "" + @echo "Targets for '$(notdir $(CURDIR))':" + @echo "" + @awk 'BEGIN {FS = ":.*?## "}; /^[^.[:space:]].*?:.*?## / {if ($$1 != "help" && NF == 2) {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}}' $(MAKEFILE_LIST) + @echo "" +DOCKER_IMAGE_NAME ?= $(notdir $(CURDIR)) +.PHONY: docker-compose.yml +docker-compose.yml: ## runs ooil to assemble the docker-compose.yml file + @echo "----- -----" + @docker run -it --rm -v $(CURDIR):/mnt \ + -u $(shell id -u):$(shell id -g) \ + itisfoundation/ci-service-integration-library:v2.0.9-dev \ + bash -c "cd /mnt && ooil compose --metadata /mnt/.osparc --to-spec-file /mnt/docker-compose.yml" + @echo "----- -----" + +.PHONY: docker-compose.dev.yml +docker-compose.dev.yml: ## runs ooil to assemble the docker-compose.yml file + @cd $(REPO_BASE_DIR) && $(VENV_BIN)/ooil compose --metadata .osparc --to-spec-file $(REPO_BASE_DIR)/docker-compose.dev.yml + +.PHONY: build +build: | metadata.yml docker-compose.yml ## build docker image + @docker compose --file $(REPO_BASE_DIR)/docker-compose.yml build $(DOCKER_IMAGE_NAME) + +.PHONY: run-local +run-local: | build-dev docker-compose.yml ## runs images with local configuration + @cd $(REPO_BASE_DIR) && docker compose --env-file $(REPO_BASE_DIR)/.osparc/$(DOCKER_IMAGE_NAME)/.env --file docker-compose.yml --file .osparc/$(DOCKER_IMAGE_NAME)/docker-compose.local.yml up --remove-orphans --force-recreate $(DOCKER_IMAGE_NAME) + +.PHONY: build-dev +build-dev: | venv metadata.yml install docker-compose.dev.yml ## build dev 
docker image + docker compose --env-file $(REPO_BASE_DIR)/.osparc/$(DOCKER_IMAGE_NAME)/.env --file $(REPO_BASE_DIR)/docker-compose.dev.yml --file $(REPO_BASE_DIR)/.osparc/$(DOCKER_IMAGE_NAME)/docker-compose.local.yml build $(DOCKER_IMAGE_NAME) diff --git a/services/dy-tissue-properties/README.md b/services/tissue-properties/README.md similarity index 100% rename from services/dy-tissue-properties/README.md rename to services/tissue-properties/README.md diff --git a/services/dy-tissue-properties/VERSION b/services/tissue-properties/VERSION similarity index 100% rename from services/dy-tissue-properties/VERSION rename to services/tissue-properties/VERSION diff --git a/services/dy-tissue-properties/csv-to-html-table/LICENSE b/services/tissue-properties/csv-to-html-table/LICENSE similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/LICENSE rename to services/tissue-properties/csv-to-html-table/LICENSE diff --git a/services/dy-tissue-properties/csv-to-html-table/README.md b/services/tissue-properties/csv-to-html-table/README.md similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/README.md rename to services/tissue-properties/csv-to-html-table/README.md diff --git a/services/dy-tissue-properties/csv-to-html-table/css/bootstrap.css.map b/services/tissue-properties/csv-to-html-table/css/bootstrap.css.map similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/css/bootstrap.css.map rename to services/tissue-properties/csv-to-html-table/css/bootstrap.css.map diff --git a/services/dy-tissue-properties/csv-to-html-table/css/bootstrap.min.css b/services/tissue-properties/csv-to-html-table/css/bootstrap.min.css similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/css/bootstrap.min.css rename to services/tissue-properties/csv-to-html-table/css/bootstrap.min.css diff --git a/services/dy-tissue-properties/csv-to-html-table/css/custom.css 
b/services/tissue-properties/csv-to-html-table/css/custom.css similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/css/custom.css rename to services/tissue-properties/csv-to-html-table/css/custom.css diff --git a/services/dy-tissue-properties/csv-to-html-table/css/dataTables.bootstrap.css b/services/tissue-properties/csv-to-html-table/css/dataTables.bootstrap.css similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/css/dataTables.bootstrap.css rename to services/tissue-properties/csv-to-html-table/css/dataTables.bootstrap.css diff --git a/services/dy-tissue-properties/csv-to-html-table/css/oSPARC.dark.css b/services/tissue-properties/csv-to-html-table/css/oSPARC.dark.css similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/css/oSPARC.dark.css rename to services/tissue-properties/csv-to-html-table/css/oSPARC.dark.css diff --git a/services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.eot b/services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.eot similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.eot rename to services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.eot diff --git a/services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.svg b/services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.svg similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.svg rename to services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.svg diff --git a/services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.ttf b/services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.ttf similarity index 100% rename from 
services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.ttf rename to services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.ttf diff --git a/services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.woff b/services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.woff similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.woff rename to services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.woff diff --git a/services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.woff2 b/services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.woff2 similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.woff2 rename to services/tissue-properties/csv-to-html-table/fonts/glyphicons-halflings-regular.woff2 diff --git a/services/dy-tissue-properties/csv-to-html-table/images/Sorting icons.psd b/services/tissue-properties/csv-to-html-table/images/Sorting icons.psd similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/images/Sorting icons.psd rename to services/tissue-properties/csv-to-html-table/images/Sorting icons.psd diff --git a/services/dy-tissue-properties/csv-to-html-table/images/favicon.ico b/services/tissue-properties/csv-to-html-table/images/favicon.ico similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/images/favicon.ico rename to services/tissue-properties/csv-to-html-table/images/favicon.ico diff --git a/services/dy-tissue-properties/csv-to-html-table/images/sort_asc.png b/services/tissue-properties/csv-to-html-table/images/sort_asc.png similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/images/sort_asc.png rename to services/tissue-properties/csv-to-html-table/images/sort_asc.png diff --git 
a/services/dy-tissue-properties/csv-to-html-table/images/sort_asc_disabled.png b/services/tissue-properties/csv-to-html-table/images/sort_asc_disabled.png similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/images/sort_asc_disabled.png rename to services/tissue-properties/csv-to-html-table/images/sort_asc_disabled.png diff --git a/services/dy-tissue-properties/csv-to-html-table/images/sort_both.png b/services/tissue-properties/csv-to-html-table/images/sort_both.png similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/images/sort_both.png rename to services/tissue-properties/csv-to-html-table/images/sort_both.png diff --git a/services/dy-tissue-properties/csv-to-html-table/images/sort_desc.png b/services/tissue-properties/csv-to-html-table/images/sort_desc.png similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/images/sort_desc.png rename to services/tissue-properties/csv-to-html-table/images/sort_desc.png diff --git a/services/dy-tissue-properties/csv-to-html-table/images/sort_desc_disabled.png b/services/tissue-properties/csv-to-html-table/images/sort_desc_disabled.png similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/images/sort_desc_disabled.png rename to services/tissue-properties/csv-to-html-table/images/sort_desc_disabled.png diff --git a/services/dy-tissue-properties/csv-to-html-table/index.html b/services/tissue-properties/csv-to-html-table/index.html similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/index.html rename to services/tissue-properties/csv-to-html-table/index.html diff --git a/services/dy-tissue-properties/csv-to-html-table/js/bootstrap.min.js b/services/tissue-properties/csv-to-html-table/js/bootstrap.min.js similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/js/bootstrap.min.js rename to services/tissue-properties/csv-to-html-table/js/bootstrap.min.js diff --git 
a/services/dy-tissue-properties/csv-to-html-table/js/csv_to_html_table.js b/services/tissue-properties/csv-to-html-table/js/csv_to_html_table.js similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/js/csv_to_html_table.js rename to services/tissue-properties/csv-to-html-table/js/csv_to_html_table.js diff --git a/services/dy-tissue-properties/csv-to-html-table/js/dataTables.bootstrap.js b/services/tissue-properties/csv-to-html-table/js/dataTables.bootstrap.js similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/js/dataTables.bootstrap.js rename to services/tissue-properties/csv-to-html-table/js/dataTables.bootstrap.js diff --git a/services/dy-tissue-properties/csv-to-html-table/js/jquery.csv.min.js b/services/tissue-properties/csv-to-html-table/js/jquery.csv.min.js similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/js/jquery.csv.min.js rename to services/tissue-properties/csv-to-html-table/js/jquery.csv.min.js diff --git a/services/dy-tissue-properties/csv-to-html-table/js/jquery.dataTables.min.js b/services/tissue-properties/csv-to-html-table/js/jquery.dataTables.min.js similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/js/jquery.dataTables.min.js rename to services/tissue-properties/csv-to-html-table/js/jquery.dataTables.min.js diff --git a/services/dy-tissue-properties/csv-to-html-table/js/jquery.min.js b/services/tissue-properties/csv-to-html-table/js/jquery.min.js similarity index 100% rename from services/dy-tissue-properties/csv-to-html-table/js/jquery.min.js rename to services/tissue-properties/csv-to-html-table/js/jquery.min.js diff --git a/services/dy-tissue-properties/devel/port_config.json b/services/tissue-properties/devel/port_config.json similarity index 100% rename from services/dy-tissue-properties/devel/port_config.json rename to services/tissue-properties/devel/port_config.json diff --git 
a/services/dy-tissue-properties/docker-compose.devel.yml b/services/tissue-properties/docker-compose.devel.yml similarity index 100% rename from services/dy-tissue-properties/docker-compose.devel.yml rename to services/tissue-properties/docker-compose.devel.yml diff --git a/services/dy-tissue-properties/docker-compose.yml b/services/tissue-properties/docker-compose.yml similarity index 97% rename from services/dy-tissue-properties/docker-compose.yml rename to services/tissue-properties/docker-compose.yml index 70a761cf..e5e72160 100644 --- a/services/dy-tissue-properties/docker-compose.yml +++ b/services/tissue-properties/docker-compose.yml @@ -4,7 +4,7 @@ services: image: ${DOCKER_REGISTRY}/tissue-properties:${DOCKER_IMAGE_TAG} build: context: ../../ - dockerfile: services/dy-tissue-properties/Dockerfile + dockerfile: services/tissue-properties/Dockerfile target: production labels: io.simcore.key: '{"key": "simcore/services/dynamic/tissue-properties"}' diff --git a/services/dy-tissue-properties/docker/boot.sh b/services/tissue-properties/docker/boot.sh similarity index 100% rename from services/dy-tissue-properties/docker/boot.sh rename to services/tissue-properties/docker/boot.sh diff --git a/services/dy-tissue-properties/index.html b/services/tissue-properties/index.html similarity index 100% rename from services/dy-tissue-properties/index.html rename to services/tissue-properties/index.html diff --git a/services/dy-tissue-properties/inputs/TissueProperties.csv b/services/tissue-properties/inputs/TissueProperties.csv similarity index 100% rename from services/dy-tissue-properties/inputs/TissueProperties.csv rename to services/tissue-properties/inputs/TissueProperties.csv diff --git a/services/dy-tissue-properties/server/config.js b/services/tissue-properties/server/config.js similarity index 100% rename from services/dy-tissue-properties/server/config.js rename to services/tissue-properties/server/config.js diff --git 
a/services/dy-tissue-properties/server/input-retriever.py b/services/tissue-properties/server/input-retriever.py similarity index 100% rename from services/dy-tissue-properties/server/input-retriever.py rename to services/tissue-properties/server/input-retriever.py diff --git a/services/dy-tissue-properties/server/routes.js b/services/tissue-properties/server/routes.js similarity index 92% rename from services/dy-tissue-properties/server/routes.js rename to services/tissue-properties/server/routes.js index 824156fc..bc4dc812 100644 --- a/services/dy-tissue-properties/server/routes.js +++ b/services/tissue-properties/server/routes.js @@ -36,11 +36,16 @@ function callInputRetriever(request, response) { } function getInputDir() { - const inputsDir = '../inputs/'; + const inputsDir = '../input/'; if (!fs.existsSync(inputsDir)) { fs.mkdirSync(inputsDir); } - return inputsDir; + const port = "input_1/"; + const inputsDirPort = inputsDir + port; + if (!fs.existsSync(inputsDirPort)) { + fs.mkdirSync(inputsDirPort); + } + return inputsDirPort; } function getOutputDir() { diff --git a/services/dy-tissue-properties/server/server.js b/services/tissue-properties/server/server.js similarity index 100% rename from services/dy-tissue-properties/server/server.js rename to services/tissue-properties/server/server.js diff --git a/toc.json b/toc.json index ec1fd405..8d1c985f 100644 --- a/toc.json +++ b/toc.json @@ -257,7 +257,7 @@ }, "tissue-properties": { "description": "Tissue properties compiled in an extensive, critical literature review by the ITIS Foundation. Visit [itis.swiss/database](https://itis.swiss/database) for additional information, e.g., on tissue parameter variability/uncertainty, quality assurance, and the explored sources. 
Please use the following citation when referring to the database: Hasgall PA, Di Gennaro F, Baumgartner C, Neufeld E, Lloyd B, Gosselin MC, Payne D, Klingenb\u00f6ck A, Kuster N, ITIS Database for thermal and electromagnetic parameters of biological tissues, Version 4.0, May 15, 2018, DOI: 10.13099/VIP21000-04-0. [itis.swiss/database](https://itis.swiss/database). Powered by [csv-to-html-table](https://github.com/derekeder/csv-to-html-table)", - "dockerfile": "dy-tissue-properties/Dockerfile", + "dockerfile": "tissue-properties/Dockerfile", "image": "${DOCKER_REGISTRY}/tissue-properties:${DOCKER_IMAGE_TAG}", "key": "simcore/services/dynamic/tissue-properties", "name": "Tissue properties",