4 | 4 | """
5 | 5 | Base module variables
6 | 6 | """
7 | | -
8 | 7 | from ._version import get_versions
9 | 8 | __version__ = get_versions()['version']
10 | 9 | del get_versions
11 | 10 |
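The three lines above (old 8-10, new 7-9) follow the standard Versioneer idiom: import the generated helper, keep only the version string as ``__version__``, then delete the helper so it does not linger in the package namespace. A minimal sketch, not part of the diff, of what the helper returns (the key names are the ones Versioneer typically generates):

    # Not from the repository: inspect the Versioneer record before it is deleted.
    # Only the 'version' entry is re-exported as fmriprep.__version__.
    from fmriprep._version import get_versions

    info = get_versions()
    print(sorted(info))     # typically ['date', 'dirty', 'error', 'full-revisionid', 'version']
    print(info['version'])  # the same string exposed as fmriprep.__version__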
12 | 11 | __author__ = 'The CRN developers'
13 | 12 | __copyright__ = 'Copyright 2018, Center for Reproducible Neuroscience, Stanford University'
14 | | -__credits__ = ['Craig Moodie', 'Ross Blair', 'Oscar Esteban', 'Chris Gorgolewski',
15 | | -               'Shoshana Berleant', 'Christopher J. Markiewicz', 'Russell A. Poldrack']
| 13 | +__credits__ = ('Contributors: please check the ``.zenodo.json`` file at the top-level folder '
| 14 | +               'of the repository')
16 | 15 | __license__ = '3-clause BSD'
17 | | -__maintainer__ = 'Ross Blair'
18 | | -
| 16 | +__maintainer__ = 'Oscar Esteban'
| 17 | +
19 | 18 | __status__ = 'Prototype'
20 | 19 | __url__ = 'https://github.com/poldracklab/fmriprep'
21 | 20 | __packagename__ = 'fmriprep'
22 | | -__description__ = ("FMRIprep is a functional magnetic resonance image pre-processing pipeline "
23 | | -                   "that is designed to provide an easily accessible, state-of-the-art interface "
24 | | -                   "that is robust to differences in scan acquisition protocols and that requires "
25 | | -                   "minimal user input, while providing easily interpretable and comprehensive "
26 | | -                   "error and output reporting.")
| 21 | +__description__ = """\
| 22 | +fMRIPrep is a robust and easy-to-use pipeline for preprocessing of diverse fMRI data.
| 23 | +The transparent workflow dispenses with manual intervention, thereby ensuring the reproducibility
| 24 | +of the results."""
27 | 25 | __longdesc__ = """\
28 | 26 | Preprocessing of functional MRI (fMRI) involves numerous steps to clean and standardize
29 | | -data before statistical analysis.
30 | | -Generally, researchers create ad hoc preprocessing workflows for each new dataset,
31 | | -building upon a large inventory of tools available for each step.
32 | | -The complexity of these workflows has snowballed with rapid advances in MR data
33 | | -acquisition and image processing techniques.
34 | | -FMRIPrep is an analysis-agnostic tool that addresses the challenge of robust and
| 27 | +the data before statistical analysis.
| 28 | +Generally, researchers create ad hoc preprocessing workflows for each dataset,
| 29 | +building upon a large inventory of available tools.
| 30 | +The complexity of these workflows has snowballed with rapid advances in
| 31 | +acquisition and processing.
| 32 | +fMRIPrep is an analysis-agnostic tool that addresses the challenge of robust and
35 | 33 | reproducible preprocessing for task-based and resting fMRI data.
36 | | -FMRIPrep automatically adapts a best-in-breed workflow to the idiosyncrasies of
37 | | -virtually any dataset, ensuring high-quality preprocessing with no manual intervention,
38 | | -while providing easily interpretable and comprehensive error and output reporting.
39 | | -It performs basic preprocessing steps (coregistration, normalization, unwarping, noise
40 | | -component extraction, segmentation, skullstripping etc.) providing outputs that can be
41 | | -easily submitted to a variety of group level analyses, including task-based or resting-state
42 | | -fMRI, graph theory measures, surface or volume-based statistics, etc.
| 34 | +fMRIPrep automatically adapts a best-in-breed workflow to the idiosyncrasies of
| 35 | +virtually any dataset, ensuring high-quality preprocessing without manual intervention.
| 36 | +fMRIPrep robustly produces high-quality results on diverse fMRI data.
| 37 | +Additionally, fMRIPrep introduces less uncontrolled spatial smoothness than observed
| 38 | +with commonly used preprocessing tools.
| 39 | +fMRIPrep equips neuroscientists with an easy-to-use and transparent preprocessing
| 40 | +workflow, which can help ensure the validity of inference and the interpretability
| 41 | +of results.
43 | 42 |
44 | 43 | The workflow is based on `Nipype <https://nipype.readthedocs.io>`_ and encompasses a large
45 | 44 | set of tools from well-known neuroimaging packages, including

52 | 51 | preprocessing, and will be updated as newer and better neuroimaging software becomes
53 | 52 | available.
54 | 53 |
55 | | -This tool allows you to easily do the following:
| 54 | +fMRIPrep performs basic preprocessing steps (coregistration, normalization, unwarping, noise
| 55 | +component extraction, segmentation, skullstripping etc.) providing outputs that can be
| 56 | +easily submitted to a variety of group level analyses, including task-based or resting-state
| 57 | +fMRI, graph theory measures, surface or volume-based statistics, etc.
| 58 | +fMRIPrep allows you to easily do the following:
56 | 59 |
57 | 60 | * Take fMRI data from *unprocessed* (only reconstructed) to ready for analysis.
58 | 61 | * Implement tools from different software packages.

63 | 66 | * Automate and parallelize processing steps, which provides a significant speed-up from
64 | 67 |   typical linear, manual processing.
65 | 68 |
66 | | -FMRIPrep has the potential to transform fMRI research by equipping
67 | | -neuroscientists with a high-quality, robust, easy-to-use and transparent preprocessing workflow
68 | | -which can help ensure the validity of inference and the interpretability of their results.
69 | | -
70 | | -[Pre-print doi:`10.1101/306951 <https://doi.org/10.1101/306951>`_]
| 69 | +[Nat Meth doi:`10.1038/s41592-018-0235-4 <https://doi.org/10.1038/s41592-018-0235-4>`_]
71 | 70 | [Documentation `fmriprep.org <https://fmriprep.readthedocs.io>`_]
72 | 71 | [Software doi:`10.5281/zenodo.852659 <https://doi.org/10.5281/zenodo.852659>`_]
73 | 72 | [Support `neurostars.org <https://neurostars.org/tags/fmriprep>`_]
85 | 84 | ]
86 | 85 |
87 | 86 | REQUIRES = [
88 | | -    'numpy',
89 | | -    'lockfile',
90 | | -    'future',
91 | | -    'scikit-learn',
92 | | -    'matplotlib>=2.2.0',
93 | | -    'nilearn',
94 | | -    'sklearn',
| 87 | +    'grabbit==0.2.3',
| 88 | +    'indexed_gzip>=0.8.8',
95 | 89 |     'nibabel>=2.2.1',
| 90 | +    'nilearn',
| 91 | +    'nipype>=1.1.6',
| 92 | +    'nitime',
| 93 | +    'niworkflows>=0.5.1,<0.5.2',
| 94 | +    'numpy',
96 | 95 |     'pandas',
97 | | -    'grabbit==0.2.3',
98 | 96 |     'pybids==0.6.5',
99 | | -    'nitime',
100 | | -    'nipype>=1.1.6',
101 | | -    'niworkflows==0.5.1',
102 | | -    'tedana>=0.0.5',
103 | | -    'statsmodels',
104 | | -    'seaborn',
105 | | -    'indexed_gzip>=0.8.8',
106 | | -    'scikit-image',
107 | | -    'versioneer',
108 | 97 |     'pyyaml',
| 98 | +    'scikit-image',
| 99 | +    'statsmodels',
| 100 | +    'tedana>=0.0.5',
109 | 101 | ]
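Besides alphabetizing the list, the diff tightens the version specifiers: ``niworkflows==0.5.1`` becomes the bounded range ``>=0.5.1,<0.5.2``, which accepts patch releases of the 0.5.1 series while excluding 0.5.2, and ``pybids==0.6.5`` stays exactly pinned. A small sketch, not part of the repository, using the ``packaging`` library (which also appears in the 'doc' extras below) shows what these specifiers admit:

    # Illustration only: evaluate the pinned ranges used in REQUIRES.
    from packaging.specifiers import SpecifierSet

    niworkflows_pin = SpecifierSet('>=0.5.1,<0.5.2')
    print(niworkflows_pin.contains('0.5.1'))    # True  -- lower bound included
    print(niworkflows_pin.contains('0.5.1.1'))  # True  -- patch releases allowed
    print(niworkflows_pin.contains('0.5.2'))    # False -- upper bound excluded

    pybids_pin = SpecifierSet('==0.6.5')
    print(pybids_pin.contains('0.6.5'))         # True  -- exact pin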
110 | 102 |
| 103 | +
111 | 104 | LINKS_REQUIRES = [
112 | 105 | ]
113 | 106 |
114 | 107 | TESTS_REQUIRES = [
115 | | -    "mock",
116 | 108 |     "codecov",
117 | 109 |     "pytest",
118 | 110 | ]
119 | 111 |
120 | 112 | EXTRA_REQUIRES = {
| 113 | +    'datalad': ['datalad'],
121 | 114 |     'doc': [
| 115 | +        'nbsphinx',
| 116 | +        'packaging',
| 117 | +        'pydot>=1.2.3',
| 118 | +        'pydotplus',
122 | 119 |         'sphinx>=1.5.3',
123 | | -        'sphinx_rtd_theme',
124 | 120 |         'sphinx-argparse',
125 | | -        'pydotplus',
126 | | -        'pydot>=1.2.3',
127 | | -        'packaging',
128 | | -        'nbsphinx',
| 121 | +        'sphinx_rtd_theme',
129 | 122 |     ],
130 | | -    'tests': TESTS_REQUIRES,
131 | 123 |     'duecredit': ['duecredit'],
132 | | -    'datalad': ['datalad'],
133 | 124 |     'resmon': ['psutil>=5.4.0'],
134 | 125 |     'sentry': ['sentry-sdk>=0.5.3'],
| 126 | +    'tests': TESTS_REQUIRES,
135 | 127 | }
136 | 128 | EXTRA_REQUIRES['docs'] = EXTRA_REQUIRES['doc']
137 | 129 |
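For orientation, these module-level constants are normally consumed by the package's setup script. The following is a hypothetical sketch of that pattern under standard setuptools conventions, not a quote of the repository's actual ``setup.py`` (the ``fmriprep.__about__`` import path is an assumption):

    # Hypothetical setup.py excerpt: how REQUIRES and related constants are
    # typically wired into setuptools. The imported names are defined above.
    from setuptools import setup, find_packages
    from fmriprep.__about__ import (  # assumed location of the constants above
        __version__, __description__, __longdesc__, __url__, __license__,
        __packagename__, REQUIRES, LINKS_REQUIRES, TESTS_REQUIRES, EXTRA_REQUIRES,
    )

    setup(
        name=__packagename__,
        version=__version__,
        description=__description__,
        long_description=__longdesc__,
        url=__url__,
        license=__license__,
        packages=find_packages(),
        install_requires=REQUIRES,        # hard runtime dependencies
        dependency_links=LINKS_REQUIRES,  # legacy direct links; empty here
        tests_require=TESTS_REQUIRES,
        extras_require=EXTRA_REQUIRES,    # e.g. pip install fmriprep[doc,sentry]
    )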