
Commit fbb81dd

Update Subtype assignment and export
1 parent e2747e2 commit fbb81dd

8 files changed: +520 additions, −8 deletions

Lines changed: 55 additions & 0 deletions
@@ -0,0 +1,55 @@
[
    {
        "cochlea": "M_AMD_N180_L",
        "image_channel": [
            "CR",
            "Lypd1",
            "Ntng1",
            "SGN_merged"
        ],
        "segmentation_channel": "SGN_merged",
        "type": "sgn",
        "n_blocks": 6,
        "halo_size": [256, 256, 50],
        "component_list": [1],
        "crop_centers": [
            [578, 1095, 560],
            [728, 809, 463],
            [441, 660, 521],
            [510, 660, 850],
            [859, 575, 914],
            [1044, 269, 820]
        ],
        "max_edge_distance": 30
    }
]
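
Each entry pairs crop_centers with halo_size; below is a minimal sketch of how such a file could be consumed, assuming halo_size is a per-axis half-extent around each center and the file is saved as crop_config.json (the consuming pipeline is not part of this commit):

import json

with open("crop_config.json") as f:  # assumed filename
    entries = json.load(f)

for entry in entries:
    halo = entry["halo_size"]  # assumed per-axis half-extent around each center
    for center in entry["crop_centers"]:
        # Per-axis crop bounds, clamped to zero at the lower end.
        bounds = [(max(0, c - h), c + h) for c, h in zip(center, halo)]
        print(entry["cochlea"], entry["segmentation_channel"], bounds)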
Lines changed: 55 additions & 0 deletions
@@ -0,0 +1,55 @@
[
    {
        "cochlea": "M_AMD_N180_R",
        "image_channel": [
            "CR",
            "CTBP2",
            "Ntng1",
            "SGN_merged"
        ],
        "segmentation_channel": "SGN_merged",
        "type": "sgn",
        "n_blocks": 6,
        "halo_size": [256, 256, 50],
        "component_list": [1],
        "crop_centers": [
            [849, 1071, 660],
            [689, 847, 806],
            [637, 730, 512],
            [951, 656, 506],
            [976, 449, 789],
            [783, 169, 880]
        ],
        "max_edge_distance": 30
    }
]
Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
[
    {
        "cochlea": "M_AMD_N180_L",
        "image_channel": [
            "CR",
            "Lypd1",
            "Ntng1"
        ],
        "segmentation_channel": "SGN_merged",
        "component_list": [1]
    }
]
Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
[
    {
        "cochlea": "M_AMD_N180_R",
        "image_channel": [
            "CR",
            "Ntng1"
        ],
        "segmentation_channel": "SGN_merged",
        "component_list": [1]
    }
]

reproducibility/tonotopic_mapping/2025-07-SGN_subtypes.json

Lines changed: 11 additions & 1 deletion
@@ -18,5 +18,15 @@
         "cochlea": "M_LR_000214_L",
         "segmentation_channel": "PV_SGN_v2",
         "type": "sgn"
-    }
+    },
+    {
+        "cochlea": "M_AMD_N180_L",
+        "segmentation_channel": "SGN_merged",
+        "type": "sgn"
+    },
+    {
+        "cochlea": "M_AMD_N180_R",
+        "segmentation_channel": "SGN_merged",
+        "type": "sgn"
+    }
 ]

scripts/assign_subtypes.py

Lines changed: 161 additions & 0 deletions
@@ -0,0 +1,161 @@
import argparse
import os

import pandas as pd

from flamingo_tools.s3_utils import get_s3_path, BUCKET_NAME, SERVICE_ENDPOINT

# Maps each cochlea to its segmentation table and the subtype stains acquired for it.
COCHLEA_DICT = {
    "M_LR_000099_L": {"seg_data": "PV_SGN_v2", "subtype": ["Calb1", "Lypd1"]},
    "M_LR_000184_L": {"seg_data": "SGN_v2b", "subtype": ["Prph"]},
    "M_LR_000184_R": {"seg_data": "SGN_v2b", "subtype": ["Prph"]},
    "M_LR_000260_L": {"seg_data": "SGN_v2", "subtype": ["Prph", "Tuj1"]},
    "M_AMD_N180_L": {"seg_data": "SGN_merged", "subtype": ["CR", "Ntng1"]},
    "M_AMD_N180_R": {"seg_data": "SGN_merged", "subtype": ["CR", "Ntng1"]},
}


# Maps a marker expression pattern ("<stain>+" / "<stain>-") to an SGN subtype.
STAIN_TO_TYPE = {
    # Combinations of Calb1 and CR:
    "CR+/Calb1+": "Type Ib",
    "CR-/Calb1+": "Type IbIc",  # Calb1 is expressed in Type Ic less than Lypd1, but more than CR.
    "CR+/Calb1-": "Type Ia",
    "CR-/Calb1-": "Type II",

    # Combinations of Calb1 and Lypd1:
    "Calb1+/Lypd1+": "Type IbIc",
    "Calb1+/Lypd1-": "Type Ib",
    "Calb1-/Lypd1+": "Type Ic",
    "Calb1-/Lypd1-": "inconclusive",  # Can be Type Ia or Type II.

    # Combinations of Prph and Tuj1:
    "Prph+/Tuj1+": "Type II",
    "Prph+/Tuj1-": "Type II",
    "Prph-/Tuj1+": "Type I",
    "Prph-/Tuj1-": "inconclusive",

    # Prph in isolation:
    "Prph+": "Type II",
    "Prph-": "Type I",

    # Combinations of CR and Ntng1:
    "CR+/Ntng1+": "Type Ib",
    "CR+/Ntng1-": "Type Ia",
    "CR-/Ntng1+": "Type Ic",
    "CR-/Ntng1-": "inconclusive",
}
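
# For example, for the CR/Ntng1 pair used by M_AMD_N180_L and M_AMD_N180_R, the
# four +/- combinations above resolve to "Type Ia", "Type Ib", "Type Ic" and
# "inconclusive".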


def types_for_stain(stains):
    """Return the subtypes that can be distinguished with the given stain(s)."""
    stains.sort()
    assert len(stains) in (1, 2)
    if len(stains) == 1:
        combinations = [f"{stains[0]}+", f"{stains[0]}-"]
    else:
        combinations = [
            f"{stains[0]}+/{stains[1]}+",
            f"{stains[0]}+/{stains[1]}-",
            f"{stains[0]}-/{stains[1]}+",
            f"{stains[0]}-/{stains[1]}-",
        ]
    types = list(set(STAIN_TO_TYPE[combination] for combination in combinations))
    return types


def stain_expression_from_subtype(subtype, stains):
    """Return the expression pattern(s) of the given stains that map to the subtype.

    Each pattern is a dict mapping a stain to "+" or "-". Several patterns can
    map to the same subtype, so a list of dicts is returned.
    """
    assert len(stains) in (1, 2)
    dic_list = []
    if len(stains) == 1:
        # Single-stain keys have no "/" separator; pick the matching one.
        possible_key = [
            key for key in STAIN_TO_TYPE.keys()
            if STAIN_TO_TYPE[key] == subtype and len(key.split("/")) == 1 and stains[0] in key
        ][0]
        dic_list.append({stains[0]: possible_key[-1:]})
    else:
        possible_keys = [
            key for key in STAIN_TO_TYPE.keys()
            if STAIN_TO_TYPE[key] == subtype and len(key.split("/")) > 1 and all(stain in key for stain in stains)
        ]
        for key in possible_keys:
            # Split "<stain1><sign>/<stain2><sign>" into per-stain expressions.
            first, second = key.split("/")
            dic_list.append({first[:-1]: first[-1:], second[:-1]: second[-1:]})
    return dic_list
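
# Example: stain_expression_from_subtype("Type II", ["Prph", "Tuj1"]) returns
# [{"Prph": "+", "Tuj1": "+"}, {"Prph": "+", "Tuj1": "-"}], i.e. two alternative
# patterns that both map to Type II.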


def filter_subtypes(cochlea, seg_name, subtype, stains=None):
    """Filter the segmentation table for instances of the given subtype.

    In the marker columns, positive segmentation instances are set to 1,
    negative ones to 2.
    """
    internal_path = os.path.join(cochlea, "tables", seg_name, "default.tsv")
    tsv_path, fs = get_s3_path(internal_path, bucket_name=BUCKET_NAME, service_endpoint=SERVICE_ENDPOINT)
    with fs.open(tsv_path, "r") as f:
        table_seg = pd.read_csv(f, sep="\t")

    # Derive the stains from the marker columns if they were not passed explicitly.
    if stains is None:
        stains = [column.split("_")[1] for column in list(table_seg.columns) if "marker_" in column]
    stains.sort()

    stain_dicts = stain_expression_from_subtype(subtype, stains)
    if len(stain_dicts) == 0:
        raise ValueError("No expression pattern found for this subtype and stain combination. Check parameters.")

    # Several expression patterns can map to the same subtype, so take the union
    # of the label ids matched by each pattern rather than chaining the filters.
    label_ids_subtype = set()
    for dic in stain_dicts:
        subset = table_seg.copy()
        for stain, expression in dic.items():
            expression_value = 1 if expression == "+" else 2
            subset = subset.loc[subset[f"marker_{stain}"] == expression_value]
        label_ids_subtype.update(subset["label_id"])

    return sorted(label_ids_subtype)


def export_subtypes(args):
    cochlea = args.cochlea
    subtype_stains = COCHLEA_DICT[cochlea]["subtype"]
    subtype_stains.sort()
    seg_name = COCHLEA_DICT[cochlea]["seg_data"]

    out_path = os.path.join(args.output_folder, f"{cochlea}_subtypes.tsv")

    table_seg_path = f"{cochlea}/tables/{seg_name}/default.tsv"
    table_path_s3, fs = get_s3_path(table_seg_path)
    with fs.open(table_path_s3, "r") as f:
        table = pd.read_csv(f, sep="\t")

    print(f"Subtype stains: {subtype_stains}.")
    subtypes = types_for_stain(subtype_stains)
    subtypes.sort()

    # Assign a subtype label to every segmentation instance, defaulting to "None".
    table["subtype_label"] = ["None" for _ in range(len(table))]
    for subtype in subtypes:
        label_ids_subtype = filter_subtypes(cochlea, seg_name=seg_name, subtype=subtype, stains=subtype_stains)
        table.loc[table["label_id"].isin(label_ids_subtype), "subtype_label"] = subtype

    table.to_csv(out_path, sep="\t", index=False)


def main():
    parser = argparse.ArgumentParser(description="Assign SGN subtypes to a segmentation table and export it as TSV.")
    parser.add_argument("--cochlea", "-c", required=True)
    parser.add_argument("--output_folder", "-o", required=True)
    args = parser.parse_args()

    export_subtypes(args)


if __name__ == "__main__":
    main()
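
Running the script for one of the newly added cochleae writes a TSV with a subtype_label column. This assumes S3 access for flamingo_tools is configured (bucket and service endpoint) and that the output folder already exists:

python scripts/assign_subtypes.py --cochlea M_AMD_N180_L --output_folder ./subtype_tables
# writes ./subtype_tables/M_AMD_N180_L_subtypes.tsv; subtype_label is one of
# "Type Ia", "Type Ib", "Type Ic", "inconclusive", or "None"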
