diff --git a/.github/workflows/build-windows-executable-app.yaml b/.github/workflows/build-windows-executable-app.yaml index 21a10ec4c..db89edb04 100644 --- a/.github/workflows/build-windows-executable-app.yaml +++ b/.github/workflows/build-windows-executable-app.yaml @@ -258,6 +258,7 @@ jobs: - name: Create .bat file run: | + echo '@echo off' > ${{ env.APP_NAME }}.bat echo '' >> ${{ env.APP_NAME }}.bat echo 'REM Create .streamlit directory in user''s home if it doesn''t exist' >> ${{ env.APP_NAME }}.bat @@ -266,6 +267,8 @@ jobs: echo 'REM Create credentials.toml with empty email to disable email prompt' >> ${{ env.APP_NAME }}.bat echo 'copy /Y ".streamlit\credentials.toml" "%USERPROFILE%\.streamlit\credentials.toml" > nul' >> ${{ env.APP_NAME }}.bat echo '' >> ${{ env.APP_NAME }}.bat + echo 'start /min .\python-${{ env.PYTHON_VERSION }}\python -m redis-server' >> ${{ env.APP_NAME }}.bat + echo 'start /min .\python-${{ env.PYTHON_VERSION }}\python -m rq worker --with-scheduler' >> ${{ env.APP_NAME }}.bat echo 'start /min .\python-${{ env.PYTHON_VERSION }}\python -m streamlit run app.py local' >> ${{ env.APP_NAME }}.bat - name: Create All-in-one executable folder diff --git a/Dockerfile b/Dockerfile index 9153e87d7..f7a6b91a4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -141,6 +141,8 @@ RUN echo "0 3 * * * /root/miniforge3/envs/streamlit-env/bin/python /app/clean-up RUN echo "#!/bin/bash" > /app/entrypoint.sh && \ echo "source /root/miniforge3/bin/activate streamlit-env" >> /app/entrypoint.sh && \ echo "service cron start" >> /app/entrypoint.sh && \ + echo "redis-server --daemonize yes" >> /app/entrypoint.sh && \ + echo "rq worker --with-scheduler &" >> /app/entrypoint.sh && \ echo "streamlit run app.py" >> /app/entrypoint.sh # make the script executable RUN chmod +x /app/entrypoint.sh diff --git a/Dockerfile_simple b/Dockerfile_simple index 64c24d16f..07da643b0 100644 --- a/Dockerfile_simple +++ b/Dockerfile_simple @@ -87,6 +87,8 @@ RUN echo "0 3 * * * 
/root/miniforge3/envs/streamlit-env/bin/python /app/clean-up RUN echo "#!/bin/bash" > /app/entrypoint.sh RUN echo "source /root/miniforge3/bin/activate streamlit-env" >> /app/entrypoint.sh && \ echo "service cron start" >> /app/entrypoint.sh && \ + echo "redis-server --daemonize yes" >> /app/entrypoint.sh && \ + echo "rq worker --with-scheduler &" >> /app/entrypoint.sh && \ echo "streamlit run app.py" >> /app/entrypoint.sh # make the script executable RUN chmod +x /app/entrypoint.sh diff --git a/docs/installation.md b/docs/installation.md index f3e6c88af..22b06716c 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -43,7 +43,9 @@ Create and activate the conda environment: Run the app via streamlit command in the terminal. *local* and *online* mode can be toggled in the settings.json. Learn more about *local* and *online* mode in the documentation page 📖 **OpenMS Template App**. -`streamlit run app.py` +1. Open a new terminal and run `redis-server` +2. In another terminal, run `rq worker --with-scheduler` (multiple workers can be spawned) +3. 
Finally start the application `streamlit run app.py` ## Docker diff --git a/environment.yml b/environment.yml index 188d5c118..5fc05d763 100644 --- a/environment.yml +++ b/environment.yml @@ -15,4 +15,6 @@ dependencies: - captcha==0.5.0 - pyopenms_viz==1.0.0 - streamlit-js-eval - - psutil==7.0.0 \ No newline at end of file + - psutil==7.0.0 + - redis==5.2.1 + - rq==2.1.0 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 95615786e..f5a56027a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,4 +8,6 @@ plotly==5.22.0 captcha==0.5.0 pyopenms_viz==1.0.0 streamlit-js-eval -psutil==7.0.0 \ No newline at end of file +psutil==7.0.0 +redis==5.2.1 +rq==2.1.0 \ No newline at end of file diff --git a/src/Workflow.py b/src/Workflow.py index 7523a0cbf..8d70d9209 100644 --- a/src/Workflow.py +++ b/src/Workflow.py @@ -7,7 +7,6 @@ import plotly.express as px from src.common.common import show_fig - class Workflow(WorkflowManager): # Setup pages for upload, parameter, execution and results. # For layout use any streamlit components such as tabs (as shown in example), columns, or even expanders. @@ -70,8 +69,10 @@ def execution(self) -> None: # Run FeatureFinderMetabo tool with input and output files. 
self.logger.log("Detecting features...") - self.executor.run_topp( - "FeatureFinderMetabo", input_output={"in": in_mzML, "out": out_ffm} + featureFinderMetaboJob = self.queue.enqueue( + self.executor.run_topp, + "FeatureFinderMetabo", + input_output={"in": in_mzML, "out": out_ffm} ) # Prepare input and output files for feature linking @@ -82,17 +83,28 @@ def execution(self) -> None: # Run FeatureLinkerUnlabaeledKD with all feature maps passed at once self.logger.log("Linking features...") - self.executor.run_topp( - "FeatureLinkerUnlabeledKD", input_output={"in": in_fl, "out": out_fl} + featureLinkerUnlabeledKDJob = self.queue.enqueue( + self.executor.run_topp, + "FeatureLinkerUnlabeledKD", + input_output={"in": in_fl, "out": out_fl}, + depends_on=featureFinderMetaboJob ) self.logger.log("Exporting consensus features to pandas DataFrame...") - self.executor.run_python( - "export_consensus_feature_df", input_output={"in": out_fl[0]} + exportConsensusFeatureJob = self.queue.enqueue( + self.executor.run_python, + "export_consensus_feature_df", + input_output={"in": out_fl[0]}, + depends_on=featureLinkerUnlabeledKDJob + ) + # Delete pid dir path to indicate workflow is done + self.queue.enqueue( + self.executor.end_run, + depends_on=exportConsensusFeatureJob ) # Check if adduct detection should be run. if self.params["run-python-script"]: # Example for a custom Python tool, which is located in src/python-tools. 
- self.executor.run_python("example", {"in": in_mzML}) + self.queue.enqueue(self.executor.run_python, "example", {"in": in_mzML}) @st.fragment def results(self) -> None: diff --git a/src/workflow/CommandExecutor.py b/src/workflow/CommandExecutor.py index 6cc493014..51d599ca4 100644 --- a/src/workflow/CommandExecutor.py +++ b/src/workflow/CommandExecutor.py @@ -90,7 +90,8 @@ def run_command(self, command: list[str]) -> None: stdout, stderr = process.communicate() # Cleanup PID file - pid_file_path.unlink() + if pid_file_path.exists(): + pid_file_path.unlink() end_time = time.time() execution_time = end_time - start_time @@ -210,6 +211,12 @@ def stop(self) -> None: shutil.rmtree(self.pid_dir, ignore_errors=True) self.logger.log("Workflow stopped.") + def end_run(self) -> None: + """ + Cleans up the PID directory by removing all PID files. + """ + shutil.rmtree(self.pid_dir, ignore_errors=True) + def run_python(self, script_file: str, input_output: dict = {}) -> None: """ Executes a specified Python script with dynamic input and output parameters, diff --git a/src/workflow/WorkflowManager.py b/src/workflow/WorkflowManager.py index a299f5c2a..56378877d 100644 --- a/src/workflow/WorkflowManager.py +++ b/src/workflow/WorkflowManager.py @@ -8,11 +8,14 @@ import streamlit as st import shutil import time +from rq import Queue +from redis import Redis class WorkflowManager: # Core workflow logic using the above classes def __init__(self, name: str, workspace: str): self.name = name + self.queue = Queue(connection=Redis()) self.workflow_dir = Path(workspace, name.replace(" ", "-").lower()) self.file_manager = FileManager(self.workflow_dir) self.logger = Logger(self.workflow_dir) @@ -51,8 +54,6 @@ def workflow_process(self) -> None: self.logger.log("WORKFLOW FINISHED") except Exception as e: self.logger.log(f"ERROR: {e}") - # Delete pid dir path to indicate workflow is done - shutil.rmtree(self.executor.pid_dir, ignore_errors=True) def show_file_upload_section(self) -> None: 
""" diff --git a/test_gui.py b/test_gui.py index 101865cfc..0d56fe8aa 100644 --- a/test_gui.py +++ b/test_gui.py @@ -5,7 +5,6 @@ from pathlib import Path import shutil - @pytest.fixture def launch(request): test = AppTest.from_file(request.param)