18
18
load_json ,
19
19
save_json ,
20
20
create_file_if_missing ,
21
- json_dumps_pretty ,
21
+ json_dumps ,
22
22
set_readonly ,
23
23
is_readonly ,
24
24
get_datetime ,
25
25
)
26
+ from . import __version__
26
27
27
28
# Module-level logger, namespaced by this module's import path.
lgr = logging.getLogger(__name__)
28
29
40
41
("Description" , "md5 hash of UIDs" )])),
41
42
])
42
43
44
#: Key under which heudiconv embeds its own version into every freshly
#: produced .json sidecar file.
HEUDICONV_VERSION_JSON_KEY = 'HeudiconvVersion'
46
+
47
+
43
48
class BIDSError(Exception):
    """Base exception for BIDS-related failures raised by this module."""
45
50
46
51
52
# Version of the BIDS specification written into dataset_description.json
BIDS_VERSION = "1.4.1"
53
+
54
+
55
def maybe_na(val):
    """Normalize a value to the BIDS lowercase 'n/a' convention.

    ``None``, empty/whitespace-only strings, 'N/A', and 'NA' all become
    'n/a'; any other value is returned as a stripped string.
    """
    if val is None:
        return 'n/a'
    text = str(val).strip()
    if not text or text in ('N/A', 'NA'):
        return 'n/a'
    return text
65
+
66
+
67
def treat_age(age):
    """Convert an age value into a BIDS-friendly string.

    Handles DICOM-style values with a 'Y' (years) or 'M' (months)
    suffix as well as plain numbers.  Months are converted into
    (possibly fractional) years; leading zeros are stripped while a
    genuine newborn age of '0Y' is preserved as '0'.

    Parameters
    ----------
    age : str, int, float, or None
        Age value, possibly suffixed with 'Y' or 'M'.

    Returns
    -------
    str or None
        Normalized age string, or None when `age` is None so callers
        (e.g. ``maybe_na``) can map it to 'n/a'.
    """
    if age is None:
        # Bug fix: previously str(None) produced the literal string
        # 'None', which maybe_na() passed through into participants.tsv.
        return None
    age = str(age)
    if age.endswith('M'):
        age = age.rstrip('M')
        age = float(age) / 12
        age = ('%.2f' if age != int(age) else '%d') % age
    else:
        age = age.rstrip('Y')
    if age:
        # strip all leading 0s but allow to scan a newborn (age 0Y)
        age = '0' if not age.lstrip('0') else age.lstrip('0')
        if age.startswith('.'):
            # we had float point value, let's prepend 0
            age = '0' + age
    return age
83
+
84
+
47
85
def populate_bids_templates (path , defaults = {}):
48
86
"""Premake BIDS text files with templates"""
49
87
@@ -53,7 +91,7 @@ def populate_bids_templates(path, defaults={}):
53
91
save_json (descriptor ,
54
92
OrderedDict ([
55
93
('Name' , "TODO: name of the dataset" ),
56
- ('BIDSVersion' , "1.0.1" ),
94
+ ('BIDSVersion' , BIDS_VERSION ),
57
95
('License' , defaults .get ('License' ,
58
96
"TODO: choose a license, e.g. PDDL "
59
97
"(http://opendatacommons.org/licenses/pddl/)" )),
@@ -87,6 +125,9 @@ def populate_bids_templates(path, defaults={}):
87
125
create_file_if_missing (op .join (path , 'README' ),
88
126
"TODO: Provide description for the dataset -- basic details about the "
89
127
"study, possibly pointing to pre-registration (if public or embargoed)" )
128
+ create_file_if_missing (op .join (path , 'scans.json' ),
129
+ json_dumps (SCANS_FILE_FIELDS , sort_keys = False )
130
+ )
90
131
91
132
populate_aggregated_jsons (path )
92
133
@@ -111,7 +152,8 @@ def populate_aggregated_jsons(path):
111
152
# way too many -- let's just collect all which are the same!
112
153
# FIELDS_TO_TRACK = {'RepetitionTime', 'FlipAngle', 'EchoTime',
113
154
# 'Manufacturer', 'SliceTiming', ''}
114
- for fpath in find_files ('.*_task-.*\_bold\.json' , topdir = path ,
155
+ for fpath in find_files ('.*_task-.*\_bold\.json' ,
156
+ topdir = glob (op .join (path , 'sub-*' )),
115
157
exclude_vcs = True ,
116
158
exclude = "/\.(datalad|heudiconv)/" ):
117
159
#
@@ -120,7 +162,7 @@ def populate_aggregated_jsons(path):
120
162
# TODO: if we are to fix it, then old ones (without _acq) should be
121
163
# removed first
122
164
task = re .sub ('.*_(task-[^_\.]*(_acq-[^_\.]*)?)_.*' , r'\1' , fpath )
123
- json_ = load_json (fpath )
165
+ json_ = load_json (fpath , retry = 100 )
124
166
if task not in tasks :
125
167
tasks [task ] = json_
126
168
else :
@@ -172,10 +214,10 @@ def populate_aggregated_jsons(path):
172
214
placeholders = {
173
215
"TaskName" : ("TODO: full task name for %s" %
174
216
task_acq .split ('_' )[0 ].split ('-' )[1 ]),
175
- "CogAtlasID" : "TODO" ,
217
+ "CogAtlasID" : "http://www.cognitiveatlas.org/task/id/ TODO" ,
176
218
}
177
219
if op .lexists (task_file ):
178
- j = load_json (task_file )
220
+ j = load_json (task_file , retry = 100 )
179
221
# Retain possibly modified placeholder fields
180
222
for f in placeholders :
181
223
if f in j :
@@ -207,6 +249,10 @@ def tuneup_bids_json_files(json_files):
207
249
# Let's hope no word 'Date' comes within a study name or smth like
208
250
# that
209
251
raise ValueError ("There must be no dates in .json sidecar" )
252
+ # Those files should not have our version field already - should have been
253
+ # freshly produced
254
+ assert HEUDICONV_VERSION_JSON_KEY not in json_
255
+ json_ [HEUDICONV_VERSION_JSON_KEY ] = str (__version__ )
210
256
save_json (jsonfile , json_ )
211
257
212
258
# Load the beast
@@ -274,12 +320,13 @@ def add_participant_record(studydir, subject, age, sex):
274
320
"control group)" )])),
275
321
]),
276
322
sort_keys = False )
323
+
277
324
# Add a new participant
278
325
with open (participants_tsv , 'a' ) as f :
279
326
f .write (
280
327
'\t ' .join (map (str , [participant_id ,
281
- age . lstrip ( '0' ). rstrip ( 'Y' ) if age else 'N/A' ,
282
- sex ,
328
+ maybe_na ( treat_age ( age )) ,
329
+ maybe_na ( sex ) ,
283
330
'control' ])) + '\n ' )
284
331
285
332
@@ -369,11 +416,6 @@ def add_rows_to_scans_keys_file(fn, newrows):
369
416
os .unlink (fn )
370
417
else :
371
418
fnames2info = newrows
372
- # Populate _scans.json (an optional file to describe column names in
373
- # _scans.tsv). This auto generation will make BIDS-validator happy.
374
- scans_json = '.' .join (fn .split ('.' )[:- 1 ] + ['json' ])
375
- if not op .lexists (scans_json ):
376
- save_json (scans_json , SCANS_FILE_FIELDS , sort_keys = False )
377
419
378
420
header = SCANS_FILE_FIELDS
379
421
# prepare all the data rows
@@ -404,10 +446,10 @@ def get_formatted_scans_key_row(dcm_fn):
404
446
"""
405
447
dcm_data = dcm .read_file (dcm_fn , stop_before_pixels = True , force = True )
406
448
# we need to store filenames and acquisition times
407
- # parse date and time and get it into isoformat
449
+ # parse date and time of start of run acquisition and get it into isoformat
408
450
try :
409
- date = dcm_data .ContentDate
410
- time = dcm_data .ContentTime
451
+ date = dcm_data .AcquisitionDate
452
+ time = dcm_data .AcquisitionTime
411
453
acq_time = get_datetime (date , time )
412
454
except (AttributeError , ValueError ) as exc :
413
455
lgr .warning ("Failed to get date/time for the content: %s" , str (exc ))
0 commit comments