Skip to content

Commit 04e58d4

Browse files
committed
WIP convert DLC predictions to COCO
1 parent a5fe7fa commit 04e58d4

File tree

4 files changed

+72098
-0
lines changed

4 files changed

+72098
-0
lines changed

MANIFEST.in

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,4 @@ recursive-exclude * __pycache__
66
recursive-exclude * *.py[co]
77
recursive-exclude docs *
88
recursive-exclude tests *
9+
recursive-exclude examples *
Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
"""Convert DeepLabCut predictions to COCO format
================================================

Use ``sleap-io`` to load keypoints predicted by DeepLabCut
and save them to COCO .json format.
"""

# %%
# Imports
# -------
from pathlib import Path

import sleap_io as sio

from movement import sample_data
from movement.io import load_poses, save_poses

# %%
# List available DeepLabCut sample datasets
# -----------------------------------------
# Let's see which DeepLabCut sample datasets are available
# through ``movement.sample_data``. We will also include
# sample datasets from LightningPose, because they use
# the same file formats.

print("Available DeepLabCut and LightningPose sample datasets:\n")

sample_ds_names = [
    name
    for name in sample_data.list_datasets()
    if name.startswith("DLC_") or name.startswith("LP_")
]
print(*sample_ds_names, sep="\n")

# %%
# Fetch path to a sample dataset
# -------------------------------
# We pick one from the Allen Institute for Neural Dynamics (AIND)

ds_name = "LP_mouse-face_AIND.predictions.csv"
file_path = sample_data.fetch_dataset_paths(ds_name)["poses"]
ds_path = file_path.resolve()
print(f"\nPath to sample dataset '{ds_name}':\n{ds_path}")

# %%
# The dataset's time coordinate contains the frame numbers (0, 1, 2, ...).
# We want to convert them into "fake" frame paths of the form
# "labeled-data/video/img00000.png" (matching the index format that
# DLC-style annotation files use), so let's build a list of such paths.

# ``fps=None`` so that the time coordinates are frame indices
ds = load_poses.from_dlc_file(ds_path, fps=None)
frame_ids = ds.coords["time"].values
num_frames = ds.sizes["time"]
# zero-padding width: enough digits for the largest frame index
pad_width = len(str(num_frames - 1))

frame_paths = [
    f"labeled-data/video/img{i:0{pad_width}d}.png" for i in frame_ids
]

print("\nFirst 5 frame paths:")
print("\n".join(frame_paths[:5]))


# %%
# Assign the frame paths to the dataset as time coordinates
ds = ds.assign_coords({"time": frame_paths})
print(ds.coords["time"].values[:5])

cwd = Path.cwd()

# HACK: relies on a hacked version of to_dlc_file that retains
# the frame paths as the csv index
out_file = cwd / "dlc_predictions_with_frame_paths.csv"
save_poses.to_dlc_file(
    ds,
    out_file,
    split_individuals=False,
)


# %%
# Let's load it with sleap-io

print(f"\nCurrent working directory: {cwd}")

# Load the file we just saved. (The original WIP code loaded
# "dlc_predictions_with_frame_paths_no-likelihood.csv", a file this
# script never creates, so the load step could not succeed.)
poses = sio.load_file(out_file, format="dlc")

# %%

0 commit comments

Comments
 (0)