# -*- coding: utf-8 -*-
"""
This file registers pre-defined datasets at hard-coded paths, and their metadata.
We hard-code metadata for common datasets. This will enable:
1. Consistency check when loading the datasets
2. Use models on these standard datasets directly and run demos,
without having to download the dataset annotations
We hard-code some paths to the dataset that's assumed to exist in "./datasets/".
Refer to the tutorial "detectron2/docs/DATASETS.md" to add new dataset.
"""
import os
from detectron2.data.datasets import register_coco_instances
from detectron2.data.catalog import DatasetCatalog, MetadataCatalog
from .builtin_meta import _get_builtin_metadata
from .hico import load_hico_json
from .vcoco import load_vcoco_json
_PREDEFINED_SPLITS_COCO = {}
# _PREDEFINED_SPLITS_COCO["coco_minus_vcoco"] = {
# "coco2014train_minus_vcocoval": (
# "coco/images/train2014", # path to images
# "coco/annotations/instances_train2014minusvcocoval.json" # path to the annotation file
# ),
# "coco2014val_minus_vcocotest": (
# "coco/images/val2014",
# "coco/annotations/instances_val2014minusvcocotest.json"
# ),
# }
# _PREDEFINED_SPLITS_COCO["coco_minus_vcoco_known"] = {
# "coco2014train_minus_vcocoval_known": (
# "coco/images/train2014",
# "coco/annotations/instances_train2014minusvcocoval_known.json"
# ),
# "coco2014val_minus_vcocotest_known": (
# "coco/images/val2014",
# "coco/annotations/instances_val2014minusvcocotest_known.json"
# ),
# }
_PREDEFINED_SPLITS_COCO["coco"] = {
"hico_debug": (
"hico/test2015",
"hico_20160224_det/annotations/instances_hico_debug.json",
),
"hico_train": (
"hico_20160224_det/images/train2015",
"hico_20160224_det/annotations/instances_hico_train.json",
),
"hico_test": (
"hico/test2015",
"hico/annotations/instances_hico_test.json",
),
"coco2014_train": (
"coco2014/train2014",
"coco2014/annotations/instances_train2014.json",
),
"coco2014_val": (
"coco2014/val2014",
"coco2014/annotations/instances_val2014.json",
)
}
_PREDEFINED_SPLITS_VCOCO = {}
_PREDEFINED_SPLITS_VCOCO["vcoco"] = {
"vcoco_train": (
"coco/images/train2014",
"vcoco/annotations/instances_vcocotrain.json"
),
"vcoco_val": (
"coco/images/train2014",
"vcoco/annotations/instances_vcocoval.json"
),
"vcoco_test": (
"coco/images/val2014",
"vcoco/annotations/instances_vcocotest.json"
),
"vcoco_val_only_interaction": (
"coco/images/train2014",
"vcoco/annotations/instances_vcocoval_only_active.json"
),
"vcoco_test_only_interaction": (
"coco/images/val2014",
"vcoco/annotations/instances_vcocotest_only_active.json"
),
}
_PREDEFINED_SPLITS_VCOCO["vcoco_known"] = {
"vcoco_train_known": (
"coco/images/train2014",
"vcoco/annotations/instances_vcocotrain_known.json"
),
"vcoco_val_known": (
"coco/images/train2014",
"vcoco/annotations/instances_vcocoval_known.json"
),
}
_PREDEFINED_SPLITS_HICO = {}
_PREDEFINED_SPLITS_HICO["hico-det"] = {
"hico-det_train": (
"hico_20160224_det/images/train2015",
"hico_20160224_det/annotations/instances_hico_train.json",
),
"hico-det_debug": (
"hico/test2015",
"hico_20160224_det/annotations/instances_hico_debug.json",
),
"hico-det_test": (
"hico/test2015",
"hico/annotations/instances_hico_test.json",
),
"hico-det_train_seen": (
"hico/train2015",
"hico/annotations/instances_hico_train_seen.json"
),
"hico-coco": (
# "coco2014/train2014",
"hico/train2015",
"coco2014/annotations/instances_train2014_hico.json"
),
"hico-coco1": (
# "coco2014/train2014",
"hico/train2015",
"coco2014/annotations/instances_train2014_hico_1.json"
),
"hico-coco2": (
# "coco2014/train2014",
"hico/train2015",
"coco2014/annotations/instances_train2014_hico_2.json"
),
"hico-val2017": (
# "coco2014/train2014",
"coco/val2017",
"coco/annotations/instances_val2017_hico.json"
),
}
def register_hico_instances(name, metadata, json_file, image_root, evaluator_type):
    """
    Register a HICO-DET dataset stored in COCO's json annotation format for
    human-object interaction detection (i.e., `instances_hico_*.json`).

    This is an example of how to register a new dataset; mimic it to add
    your own datasets.

    Args:
        name (str): identifier for the dataset, e.g. "hico-det_train".
        metadata (dict): extra metadata associated with the dataset
            (may be an empty dict).
        json_file (str): path to the json instance annotation file.
        image_root (str or path-like): directory containing all the images.
        evaluator_type (str): evaluator name stored on the metadata entry.
    """
    def _load():
        # Deferred loader: produces the list of dataset dicts on demand.
        return load_hico_json(json_file, image_root, name)

    # Register the dict-producing loader under `name`.
    DatasetCatalog.register(name, _load)
    # Attach metadata; useful for evaluation, visualization and logging.
    MetadataCatalog.get(name).set(
        json_file=json_file,
        image_root=image_root,
        evaluator_type=evaluator_type,
        **metadata,
    )
def register_vcoco_instances(name, metadata, json_file, image_root, evaluator_type):
    """
    Register a vcoco dataset in COCO's json annotation format for human-object
    interaction detection (i.e., `instances_vcoco*.json` in the dataset).
    Args:
        see `register_hico_instances`
    """
    # 1. register a function which returns dicts
    # (the lambda defers loading until the dataset is first requested;
    # `json_file`/`image_root`/`name` are bound per call, so registering
    # several datasets in a loop via this helper is safe)
    DatasetCatalog.register(name, lambda: load_vcoco_json(json_file, image_root, name))
    # 2. Optionally, add metadata about this dataset,
    # since they might be useful in evaluation, visualization or logging
    MetadataCatalog.get(name).set(
        json_file=json_file,
        image_root=image_root,
        evaluator_type=evaluator_type,
        **metadata
    )
def register_all_hico(root):
    """Register every predefined HICO-DET split, rooted at `root`."""
    for evaluator_name, splits in _PREDEFINED_SPLITS_HICO.items():
        for split_name, (image_dir, ann_file) in splits.items():
            # Leave remote URIs untouched; anchor local paths at `root`.
            ann_path = ann_file if "://" in ann_file else os.path.join(root, ann_file)
            register_hico_instances(
                split_name,
                _get_builtin_metadata(evaluator_name),
                ann_path,
                os.path.join(root, image_dir),
                evaluator_type=evaluator_name,
            )
def register_all_coco(root):
    """Register every predefined COCO-style split, rooted at `root`."""
    for metadata_name, splits in _PREDEFINED_SPLITS_COCO.items():
        for split_name, (image_dir, ann_file) in splits.items():
            # Leave remote URIs untouched; anchor local paths at `root`.
            ann_path = ann_file if "://" in ann_file else os.path.join(root, ann_file)
            register_coco_instances(
                split_name,
                _get_builtin_metadata(metadata_name),
                ann_path,
                os.path.join(root, image_dir),
            )
def register_all_vcoco(root):
    """Register every predefined V-COCO split, rooted at `root`."""
    for evaluator_name, splits in _PREDEFINED_SPLITS_VCOCO.items():
        for split_name, (image_dir, ann_file) in splits.items():
            # Leave remote URIs untouched; anchor local paths at `root`.
            ann_path = ann_file if "://" in ann_file else os.path.join(root, ann_file)
            register_vcoco_instances(
                split_name,
                _get_builtin_metadata(evaluator_name),
                ann_path,
                os.path.join(root, image_dir),
                evaluator_type=evaluator_name,
            )
# Register every predefined dataset under _root (path to the datasets
# directory; override with the DETECTRON2_DATASETS environment variable).
_root = os.getenv("DETECTRON2_DATASETS", "/public/data0/users/houzhi28/Code/zero_shot_hoi/datasets")
for _register in (register_all_hico, register_all_vcoco, register_all_coco):
    _register(_root)