|
4 | 4 |
|
5 | 5 | import shared.command_helper as cmd_hlp |
6 | 6 | import shared.helpers as hlp |
7 | | -from shared.sessionstate import retrieve_all_from_ss, ss_get, ss_set |
8 | 7 |
|
9 | | -# pull saved values if set, otherwise set to defaults |
10 | | -ss_values = retrieve_all_from_ss() |
11 | | -OK = ss_values["OK"] |
12 | | -MY_SSH = ss_values["MY_SSH"] |
13 | | -username = ss_values["user_name"] |
14 | | -server = ss_values["server"] |
15 | | -GROUP = ss_values["GROUP"] |
16 | | -GROUPS = ss_values["GROUPS"] |
17 | | -SCRATCH = ss_values["SCRATCH"] |
18 | | -RDS = ss_values["RDS"] |
19 | | -SAMPLE = ss_values["SAMPLE"] |
20 | | -PIPELINE = ss_values["PIPELINE"] |
21 | | -password = ss_values["password"] |
22 | | -PROJECT = ss_values["PROJECT"] |
23 | | -JOB_ID = ss_values["JOB_ID"] |
24 | | -WORKDIR = ss_values["WORK_DIR"] |
25 | | -OUTPUT_DIR = ss_values["OUTPUT_DIR"] |
26 | | -run_pipeline_clicked = ss_values["run_pipeline_clicked"] |
27 | 8 |
|
def tab(
    username,
    MY_SSH,
    selected_pipeline,
    selected_project,
    selected_samples="all",
    work_dir="work",
    output_dir="output",
    custom_sample_list=None,
):
    """Render the pipeline-control UI: submit a Nextflow run, view its
    SLURM logs, and inspect the cluster queue.

    Parameters
    ----------
    username : str
        Cluster username; editable in the UI and mirrored in session state.
    MY_SSH : object
        SSH helper exposing ``run_cmd(cmd)`` and ``read_file(path)``.
        (Exact contract defined elsewhere in the project — assumed here.)
    selected_pipeline, selected_project : str
        Pipeline / project choices forwarded to ``cmd_hlp.pipe_cmd``.
    selected_samples : str, optional
        Sample selection forwarded to ``cmd_hlp.pipe_cmd`` (default ``"all"``).
    work_dir, output_dir : str, optional
        Remote working / output directory names.
    custom_sample_list : list, optional
        Explicit sample list. Defaults to an empty list; ``None`` is used as
        the sentinel to avoid the shared-mutable-default pitfall.
    """
    # Normalise the sentinel: a literal [] default would be a single list
    # shared across every call to tab().
    if custom_sample_list is None:
        custom_sample_list = []

    # --- Initialize session state ---
    st.session_state.setdefault("username", username)
    # No job submitted yet. (A hard-coded placeholder id here would make the
    # "No job was launched yet" check in the logs tab unreachable.)
    st.session_state.setdefault("JOB_ID", None)
    st.session_state.setdefault("run_pipeline_clicked", False)

    # --- Display username input ---
    cols = st.columns([1])
    with cols[0]:
        st.session_state["username"] = st.text_input(
            "Username(s):", st.session_state["username"]
        )

    # --- Log display helper ---
    def display_log(title, log_path, output_container):
        """Fetch a remote log file over SSH and render it in *output_container*."""
        try:
            log_file = MY_SSH.read_file(log_path)
            # read_file may return a file-like object or plain text — handle both.
            log_content = log_file.read() if hasattr(log_file, "read") else str(log_file)
            output_container.code(log_content, language="bash")
        except Exception as e:
            st.error(f"❌ Failed to read {title.lower()} log: {e}")

    # --- Run pipeline logic ---
    def run_nextflow():
        """Submit the selected pipeline and record the SLURM job id.

        Returns the job id string parsed from sbatch's
        "Submitted batch job <id>" output, or None when no id was found.
        """
        cmd_pipeline = cmd_hlp.pipe_cmd(
            st.session_state["username"],
            selected_pipeline,
            selected_project,
            cmd_num=0,
            selected_samples=selected_samples,
            output_dir=output_dir,
            work_dir=work_dir,
            custom_sample_list=custom_sample_list,
        )
        st.code(cmd_pipeline)
        result = MY_SSH.run_cmd(cmd_pipeline)
        match = re.search(r"Submitted batch job (\d+)", result["output"])
        job_id = match.group(1) if match else None
        st.session_state["JOB_ID"] = job_id
        st.success(f"✅ Job submitted. ID: {job_id}")
        return job_id

    # --- Tabs ---
    tabP, tabL, tabQ = st.tabs(["Run pipeline", "Check logs", "Check queues"])

    # --- Pipeline tab ---
    with tabP:
        if st.button("Run the selected pipeline"):
            st.session_state["run_pipeline_clicked"] = True
            with st.spinner("Submitting pipeline..."):
                try:
                    run_nextflow()
                except Exception as e:
                    st.error(f"Pipeline error: {e}")

        if st.session_state["JOB_ID"]:
            st.success(f"Running Job ID: {st.session_state['JOB_ID']}")

    # --- Logs tab ---
    with tabL:
        if st.button("Get Logs"):
            job_id = st.session_state.get("JOB_ID")
            if not job_id:
                st.error("No job was launched yet")
            else:
                log_out = f"{work_dir}/logs/{job_id}.out"
                log_err = f"{work_dir}/logs/{job_id}.err"
                tO, tE = st.tabs(["Output", "Error"])
                outputO, outputE = tO.empty(), tE.empty()
                with st.spinner("Fetching logs..."):
                    display_log("Output", log_out, outputO)
                    display_log("Error", log_err, outputE)

    # --- Queues tab ---
    with tabQ:
        if st.button("Check slurm queues"):
            output = st.empty()
            with st.spinner("Checking queue..."):
                with hlp.st_capture(output.code):
                    cmd_pipeline = cmd_hlp.pipe_cmd(st.session_state["username"], cmd_num=1)
                    try:
                        results = MY_SSH.run_cmd(cmd_pipeline)
                        if results["err"] is not None:
                            st.error(results["err"])
                        else:
                            print(results["output"])
                    except Exception as e:
                        st.error(f"Error {e}")
0 commit comments