Skip to content

Commit c4192fc

Browse files
authored
Merge pull request #112 from AllenNeuralDynamics/han_improve_processed_query
Merge in PI name to data inventory
2 parents e9319e1 + ab9c2d3 commit c4192fc

File tree

2 files changed

+23
-4
lines changed

2 files changed

+23
-4
lines changed

code/Home.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -354,6 +354,7 @@ def init(if_load_bpod_data_override=None, if_load_docDB_override=None):
354354

355355
# Merge in PI name
356356
df_mouse_pi_mapping = load_mouse_PI_mapping()
357+
st.session_state.df_mouse_pi_mapping = df_mouse_pi_mapping # Save to session state for later use
357358
_df = _df.merge(df_mouse_pi_mapping, how='left', on='subject_id') # Merge in PI name
358359
_df.loc[_df["PI"].isnull(), "PI"] = _df.loc[
359360
_df["PI"].isnull() &

code/pages/0_Data inventory.py

Lines changed: 22 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ def load_presets():
6666

6767
QUERY_PRESET, VENN_PRESET = load_presets()
6868

69-
META_COLUMNS = [
69+
QUERY_COLUMNS = [
7070
"Han_temp_pipeline (bpod)",
7171
"Han_temp_pipeline (bonsai)",
7272
"VAST_raw_data_on_VAST",
@@ -79,6 +79,8 @@ def load_presets():
7979
"Quarterly": "M4",
8080
}
8181

82+
FIRST_SEVERAL_COLS = ["PI"]
83+
8284

8385
@st.cache_data(ttl=3600*12)
8486
def merge_queried_dfs(dfs, queries_to_merge):
@@ -368,6 +370,7 @@ def app():
368370
[
369371
"subject_id",
370372
"session_date",
373+
"PI", # Add PI name
371374
"Han_temp_pipeline (bpod)",
372375
"Han_temp_pipeline (bonsai)",
373376
]
@@ -414,6 +417,7 @@ def app():
414417
# Merging with df_merged (using the unique mouse-date dataframe)
415418
df_merged = df_merged.combine_first(df_raw_sessions_on_VAST_unique_mouse_date)
416419
df_merged.sort_index(level=["session_date", "subject_id"], ascending=[False, False], inplace=True)
420+
df_merged = df_merged[FIRST_SEVERAL_COLS + [col for col in df_merged.columns if col not in FIRST_SEVERAL_COLS]]
417421

418422
# --- Add sidebar ---
419423
add_sidebar(df_merged, dfs_docDB, df_Han_pipeline, dfs_raw_on_VAST, docDB_retrieve_time)
@@ -445,6 +449,9 @@ def add_venn_diagrms(df_merged):
445449
cols = st.columns([2, 1])
446450
cols[0].markdown("## Issues in dynamic foraging data inventory")
447451
cols[0].markdown("#### [Github discussion](https://github.com/AllenNeuralDynamics/aind-behavior-blog/discussions/851)")
452+
with cols[0].columns([1, 1])[0].expander('Venn diagram presets'):
453+
st.json(VENN_PRESET)
454+
448455
with cols[1].expander("Time view settings", expanded=True):
449456
cols_1 = st.columns([1, 1])
450457
if_separate_plots = cols_1[0].checkbox("Separate in subplots", value=True)
@@ -456,7 +463,7 @@ def add_venn_diagrms(df_merged):
456463
["Daily", "Weekly", "Monthly", "Quarterly"],
457464
index=0,
458465
)
459-
466+
460467
st.markdown("---")
461468
for section in VENN_PRESET:
462469
section_name, section_contents = section["section_name"], section["section_contents"]
@@ -494,8 +501,9 @@ def add_venn_diagrms(df_merged):
494501

495502
# Join in other extra columns
496503
df_this_preset = df_this_preset.join(
497-
df_merged[[col for col in df_merged.columns if col not in META_COLUMNS]], how="left"
504+
df_merged[[col for col in df_merged.columns if col not in QUERY_COLUMNS]], how="left"
498505
)
506+
df_this_preset = df_this_preset[FIRST_SEVERAL_COLS + [col for col in df_this_preset.columns if col not in FIRST_SEVERAL_COLS]]
499507

500508
with cols[0]:
501509
download_df(
@@ -504,7 +512,17 @@ def add_venn_diagrms(df_merged):
504512
file_name=f"df_{venn_preset['name']}.csv",
505513
)
506514
with st.expander(f"Show dataframe, n = {len(df_this_preset)}"):
507-
st.write(df_this_preset)
515+
aggrid_interactive_table_basic(
516+
df_this_preset.reset_index(),
517+
height=400,
518+
configure_columns=[
519+
dict(
520+
field="session_date",
521+
type=["customDateTimeFormat"],
522+
custom_format_string="yyyy-MM-dd",
523+
)
524+
],
525+
)
508526

509527
with cols[1]:
510528
# -- Show histogram over time --

0 commit comments

Comments (0)