18 changes: 18 additions & 0 deletions .circleci/config.yml
@@ -394,6 +394,12 @@ jobs:
/tmp/data/ds005 /tmp/ds005/derivatives participant \
--sloppy --write-graph --mem_mb 4096 \
--nthreads 2 --anat-only -vv
- run:
name: Clean-up after anatomical run
command: |
rm -rf /tmp/ds005/work/fmriprep_wf/fsdir*
rm -rf /tmp/ds005/work/reportlets
rm -rf /tmp/ds005/derivatives/fmriprep
- save_cache:
key: ds005-anat-v14-{{ .Branch }}-{{ epoch }}
paths:
@@ -525,6 +531,12 @@ jobs:
/tmp/data/ds054 /tmp/ds054/derivatives participant \
--fs-no-reconall --sloppy --write-graph \
--mem_mb 4096 --nthreads 2 --anat-only -vv
- run:
name: Clean-up after anatomical run
command: |
rm -rf /tmp/ds054/work/fmriprep_wf/fsdir*
rm -rf /tmp/ds054/work/reportlets
rm -rf /tmp/ds054/derivatives/fmriprep
- save_cache:
key: ds054-anat-v12-{{ .Branch }}-{{ epoch }}
paths:
@@ -643,6 +655,12 @@ jobs:
/tmp/data/ds210 /tmp/ds210/derivatives participant \
--fs-no-reconall --sloppy --write-graph \
--mem_mb 4096 --nthreads 2 --anat-only -vv
- run:
name: Clean-up after anatomical run
command: |
rm -rf /tmp/ds210/work/fmriprep_wf/fsdir*
rm -rf /tmp/ds210/work/reportlets
rm -rf /tmp/ds210/derivatives/fmriprep
- save_cache:
key: ds210-anat-v10-{{ .Branch }}-{{ epoch }}
paths:
4 changes: 1 addition & 3 deletions docs/workflows.rst
@@ -70,17 +70,15 @@ T1w/T2w preprocessing
bids_root='.',
debug=False,
freesurfer=True,
fs_spaces=['T1w', 'fsnative',
'template', 'fsaverage5'],
hires=True,
longitudinal=False,
num_t1w=1,
omp_nthreads=1,
output_dir='.',
output_spaces={'MNI152NLin2009cAsym': {'res': 2}},
reportlets_dir='.',
skull_strip_template='MNI152NLin2009cAsym',
skull_strip_fixed_seed=False,
template='MNI152NLin2009cAsym',
)

The anatomical sub-workflow begins by constructing an average image by
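The documentation example above now describes volumetric output targets with a single output_spaces dictionary instead of the removed fs_spaces list and template string. A minimal sketch of the old versus new argument style, using only the values visible in this hunk:

    # Old style (removed in this hunk): target spaces and the volumetric
    # template were passed as two separate arguments.
    fs_spaces = ['T1w', 'fsnative', 'template', 'fsaverage5']
    template = 'MNI152NLin2009cAsym'

    # New style (added in this hunk): a dict keyed by template name, with a
    # per-template specification such as the output resolution.
    output_spaces = {'MNI152NLin2009cAsym': {'res': 2}}
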
4 changes: 2 additions & 2 deletions fmriprep/__about__.py
@@ -105,9 +105,9 @@

LINKS_REQUIRES = [
'git+https://github.com/poldracklab/niworkflows.git@'
'b7d111c8fd36a099c74be5e7671677eedb175533#egg=niworkflows',
'076aed98962b10d107c83110c05e42466a89bbc4#egg=niworkflows',
'git+https://github.com/poldracklab/smriprep.git@'
'423bcc43ab7300177eb3b98da62817b2cad8eb87#egg=smriprep-0.1.0',
'f1cfc37bcdc346549dbf1d037cdade3a3b32d5de#egg=smriprep-0.1.0',
]

TESTS_REQUIRES = [
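For context, each entry in LINKS_REQUIRES is built from two adjacent string literals, which Python concatenates into a single pinned VCS requirement; a small sketch with the updated niworkflows pin:

    # Adjacent string literals concatenate implicitly, so the entry above is
    # one continuous pinned link (illustration only).
    link = ('git+https://github.com/poldracklab/niworkflows.git@'
            '076aed98962b10d107c83110c05e42466a89bbc4#egg=niworkflows')
    print(link)  # a single URL ending in #egg=niworkflows
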
56 changes: 35 additions & 21 deletions fmriprep/cli/run.py
@@ -260,7 +260,6 @@ def main():
"""Entry point"""
from nipype import logging as nlogging
from multiprocessing import set_start_method, Process, Manager
from ..viz.reports import generate_reports
from ..utils.bids import write_derivative_description
set_start_method('forkserver')

@@ -384,8 +383,6 @@ def before_send(event, hints):
nlogging.getLogger('nipype.interface').setLevel(log_level)
nlogging.getLogger('nipype.utils').setLevel(log_level)

errno = 0

# Call build_workflow(opts, retval)
with Manager() as mgr:
retval = mgr.dict()
@@ -395,9 +392,9 @@ def before_send(event, hints):

retcode = p.exitcode or retval.get('return_code', 0)

bids_dir = retval.get('bids_dir')
output_dir = retval.get('output_dir')
work_dir = retval.get('work_dir')
bids_dir = Path(retval.get('bids_dir'))
output_dir = Path(retval.get('output_dir'))
work_dir = Path(retval.get('work_dir'))
plugin_settings = retval.get('plugin_settings', None)
subject_list = retval.get('subject_list', None)
fmriprep_wf = retval.get('workflow', None)
@@ -436,32 +433,48 @@ def before_send(event, hints):
sentry_sdk.add_breadcrumb(message='fMRIPrep started', level='info')
sentry_sdk.capture_message('fMRIPrep started', level='info')

errno = 1 # Default is error exit unless otherwise set
try:
fmriprep_wf.run(**plugin_settings)
except RuntimeError as e:
errno = 1
if "Workflow did not execute cleanly" not in str(e):
sentry_sdk.capture_exception(e)
raise
except Exception as e:
if not opts.notrack:
from ..utils.sentry import process_crashfile
crashfolders = [output_dir / 'fmriprep' / 'sub-{}'.format(s) / 'log' / run_uuid
for s in subject_list]
for crashfolder in crashfolders:
for crashfile in crashfolder.glob('crash*.*'):
process_crashfile(crashfile)

if "Workflow did not execute cleanly" not in str(e):
sentry_sdk.capture_exception(e)
logger.critical('fMRIPrep failed: %s', e)
raise
else:
if opts.run_reconall:
from templateflow import api
from niworkflows.utils.misc import _copy_any
dseg_tsv = str(api.get('fsaverage', suffix='dseg', extensions=['.tsv']))
_copy_any(dseg_tsv,
str(Path(output_dir) / 'fmriprep' / 'desc-aseg_dseg.tsv'))
str(output_dir / 'fmriprep' / 'desc-aseg_dseg.tsv'))
_copy_any(dseg_tsv,
str(Path(output_dir) / 'fmriprep' / 'desc-aparcaseg_dseg.tsv'))
str(output_dir / 'fmriprep' / 'desc-aparcaseg_dseg.tsv'))
errno = 0
logger.log(25, 'fMRIPrep finished without errors')
if not opts.notrack:
sentry_sdk.capture_message('fMRIPrep finished without errors',
level='info')
finally:
from niworkflows.reports import generate_reports
# Generate reports phase
errno += generate_reports(subject_list, output_dir, work_dir, run_uuid,
sentry_sdk=sentry_sdk)
write_derivative_description(bids_dir, str(Path(output_dir) / 'fmriprep'))
failed_reports = generate_reports(
subject_list, output_dir, work_dir, run_uuid, packagename='fmriprep')
write_derivative_description(bids_dir, output_dir / 'fmriprep')

if not opts.notrack and errno == 0:
sentry_sdk.capture_message('fMRIPrep finished without errors', level='info')
sys.exit(int(errno > 0))
if failed_reports and not opts.notrack:
sentry_sdk.capture_message(
'Report generation failed for %d subjects' % failed_reports,
level='error')
sys.exit(int((errno + failed_reports) > 0))


def validate_input_dir(exec_env, bids_dir, participant_label):
@@ -574,9 +587,9 @@ def build_workflow(opts, retval):

from nipype import logging, config as ncfg
from niworkflows.utils.bids import collect_participants
from niworkflows.reports import generate_reports
from ..__about__ import __version__
from ..workflows.base import init_fmriprep_wf
from ..viz.reports import generate_reports

logger = logging.getLogger('nipype.workflow')

@@ -734,7 +747,8 @@ def build_workflow(opts, retval):
run_uuid = opts.run_uuid
retval['run_uuid'] = run_uuid
retval['return_code'] = generate_reports(
subject_list, str(output_dir), str(work_dir), run_uuid)
subject_list, output_dir, work_dir, run_uuid,
packagename='fmriprep')
return retval

# Build main workflow
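Taken together, the run.py changes make the error exit the default (errno = 1) and only clear it after the workflow finishes cleanly, while report generation moves into the finally block and now uses generate_reports from niworkflows.reports (with packagename='fmriprep') instead of the removed fmriprep.viz.reports. A condensed sketch of the resulting exit-status behavior, with the workflow run and report generation stubbed out as placeholders:

    # Condensed sketch only; run_workflow and make_reports stand in for
    # fmriprep_wf.run(**plugin_settings) and niworkflows' generate_reports.
    import sys

    def run_and_exit(run_workflow, make_reports):
        errno = 1  # default is error exit unless otherwise set
        try:
            run_workflow()
            errno = 0  # reached only when the workflow executed cleanly
        finally:
            failed_reports = make_reports()  # subjects whose report failed
            sys.exit(int((errno + failed_reports) > 0))
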
115 changes: 115 additions & 0 deletions fmriprep/utils/sentry.py
@@ -0,0 +1,115 @@
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Stripped out routines for Sentry"""
import re
from niworkflows.utils.misc import read_crashfile
import sentry_sdk

CHUNK_SIZE = 16384
# Group common events with pre-specified fingerprints
KNOWN_ERRORS = {
'permission-denied': [
"PermissionError: [Errno 13] Permission denied"
],
'memory-error': [
"MemoryError",
"Cannot allocate memory",
"Return code: 134",
],
'reconall-already-running': [
"ERROR: it appears that recon-all is already running"
],
'no-disk-space': [
"[Errno 28] No space left on device",
"[Errno 122] Disk quota exceeded"
],
'segfault': [
"Segmentation Fault",
"Segfault",
"Return code: 139",
],
'potential-race-condition': [
"[Errno 39] Directory not empty",
"_unfinished.json",
],
'keyboard-interrupt': [
"KeyboardInterrupt",
],
}


def process_crashfile(crashfile):
"""Parse the contents of a crashfile and submit sentry messages"""
crash_info = read_crashfile(str(crashfile))
with sentry_sdk.push_scope() as scope:
scope.level = 'fatal'

# Extract node name
node_name = crash_info.pop('node').split('.')[-1]
scope.set_tag("node_name", node_name)

# Massage the traceback, extract the gist
traceback = crash_info.pop('traceback')
# last line is probably most informative summary
gist = traceback.splitlines()[-1]
exception_text_start = 1
for line in traceback.splitlines()[1:]:
if not line[0].isspace():
break
exception_text_start += 1

exception_text = '\n'.join(
traceback.splitlines()[exception_text_start:])

# Extract inputs, if present
inputs = crash_info.pop('inputs', None)
if inputs:
scope.set_extra('inputs', dict(inputs))

# Extract any other possible metadata in the crash file
for k, v in crash_info.items():
strv = list(_chunks(str(v)))
if len(strv) == 1:
scope.set_extra(k, strv[0])
else:
for i, chunk in enumerate(strv):
scope.set_extra('%s_%02d' % (k, i), chunk)

fingerprint = ''
issue_title = '{}: {}'.format(node_name, gist)
for new_fingerprint, error_snippets in KNOWN_ERRORS.items():
for error_snippet in error_snippets:
if error_snippet in traceback:
fingerprint = new_fingerprint
issue_title = new_fingerprint
break
if fingerprint:
break

message = issue_title + '\n\n'
message += exception_text[-(8192 - len(message)):]
if fingerprint:
sentry_sdk.add_breadcrumb(message=fingerprint, level='fatal')
else:
# remove file paths
fingerprint = re.sub(r"(/[^/ ]*)+/?", '', message)
# remove words containing numbers
fingerprint = re.sub(r"([a-zA-Z]*[0-9]+[a-zA-Z]*)+", '', fingerprint)
# adding the return code if it exists
for line in message.splitlines():
if line.startswith("Return code"):
fingerprint += line
break

scope.fingerprint = [fingerprint]
sentry_sdk.capture_message(message, 'fatal')


def _chunks(string, length=CHUNK_SIZE):
"""
Splits a string into smaller chunks
>>> list(_chunks('some longer string.', length=3))
['som', 'e l', 'ong', 'er ', 'str', 'ing', '.']
"""
return (string[i:i + length]
for i in range(0, len(string), length))
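A usage sketch for the new module, mirroring the call site added in fmriprep/cli/run.py above; the directory, subject list, and run identifier below are placeholders:

    # Placeholders for illustration; the real values come from the CLI run.
    from pathlib import Path
    from fmriprep.utils.sentry import process_crashfile

    output_dir = Path('/out')            # hypothetical derivatives root
    subject_list = ['01', '02']          # hypothetical participant labels
    run_uuid = '20190101-000000_0000'    # hypothetical run identifier

    crashfolders = [output_dir / 'fmriprep' / 'sub-{}'.format(s) / 'log' / run_uuid
                    for s in subject_list]
    for crashfolder in crashfolders:
        for crashfile in crashfolder.glob('crash*.*'):
            process_crashfile(crashfile)  # one Sentry message per crash file
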
8 changes: 0 additions & 8 deletions fmriprep/viz/__init__.py

This file was deleted.
