   pull_request:
   schedule:
     - cron: '0 0 * * 0'
+  # Allow job to be triggered manually from GitHub interface
+  workflow_dispatch:
+
+# Force pytest to use color
+env:
+  FORCE_COLOR: true
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
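The new workflow_dispatch trigger also makes the suite runnable by hand. A minimal sketch with the GitHub CLI (the workflow filename test.yml is an assumption, not shown in this diff):

    # Hypothetical manual trigger; substitute the actual workflow file name
    gh workflow run test.yml --ref master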
@@ -18,7 +24,20 @@ defaults:
     shell: bash -el {0}
 
 jobs:
-  build-linux:
+  build-package:
+    name: Build & inspect package
+    runs-on: ubuntu-latest
+    permissions:
+      attestations: write
+      id-token: write
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - uses: hynek/build-and-inspect-python-package@v2
+
+  test:
     if: "!contains(github.event.head_commit.message, '[skip ci]') && (github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'nipreps/sdcflows')"
     runs-on: ubuntu-latest
     env:
@@ -30,11 +49,16 @@ jobs:
       AFNI_IMSAVE_WARNINGS: NO
       AFNI_TTATLAS_DATASET: /opt/afni/atlases
       AFNI_PLUGINPATH: /opt/afni/plugins
-      ANTSPATH: /opt/ants
     strategy:
-      max-parallel: 5
+      max-parallel: 6
       matrix:
         python-version: ["3.9", "3.10", "3.11", "3.12"]
+        marks: ["not slow"]
+        include:
+          - python-version: "3.9"
+            marks: "slow and not veryslow"
+          - python-version: "3.12"
+            marks: "veryslow"
 
     steps:
       - uses: actions/cache@v4
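This matrix now expands to six jobs, in line with max-parallel: 6: the base product runs the fast suite ("not slow") on all four Python versions, and the two include entries add one slow-tier job each. The marks values feed pytest's -m option through the MARKS variable in the test step further down, so the effective selections are roughly as follows (a sketch; it assumes slow and veryslow are markers registered in sdcflows' pytest configuration):

    # Fast tests on Python 3.9-3.12
    pytest -m "not slow" sdcflows
    # Slow, but not very slow, tests on Python 3.9 only
    pytest -m "slow and not veryslow" sdcflows
    # Very slow tests on Python 3.12 only
    pytest -m "veryslow" sdcflows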
@@ -66,19 +90,6 @@ jobs:
             tcsh @update.afni.binaries -package linux_ubuntu_16_64 -bindir ${AFNI_HOME}
           fi
 
-      - uses: actions/cache@v4
-        with:
-          path: /opt/ants
-          key: ants-v1
-          restore-keys: |
-            ants-v1
-      - name: Install ANTS
-        run: |
-          if [[ ! -d "${ANTSPATH}" ]]; then
-            sudo mkdir -p $ANTSPATH
-            curl -sSL "https://dl.dropbox.com/s/gwf51ykkk5bifyj/ants-Linux-centos6_x86_64-v2.3.4.tar.gz" | sudo tar -xzC $ANTSPATH --strip-components 1
-          fi
-
       - name: Git settings (pacify DataLad)
         run: |
           git config --global user.name 'NiPreps Bot'
@@ -89,11 +100,11 @@ jobs:
           auto-update-conda: true
           auto-activate-base: true
           python-version: ${{ matrix.python-version }}
-          channels: anaconda, https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/,conda-forge
+          channels: https://fsl.fmrib.ox.ac.uk/fsldownloads/fslconda/public/,conda-forge
       - uses: actions/cache@v4
         id: conda
         env:
-          CACHE_NUM: v4
+          CACHE_NUM: v5
         with:
           path: |
             ~/conda_pkgs_dir
@@ -105,9 +116,9 @@ jobs:
         run: |
           conda install git-annex=*=alldep* pip
           pip install datalad datalad-osf
-      - name: Install fsl
+      - name: Install fsl and ANTs
         run: |
-          conda install fsl-fugue fsl-topup
+          conda install fsl-fugue fsl-topup ants
       - uses: actions/checkout@v4
       - name: Install dependencies
         timeout-minutes: 5
@@ -128,7 +139,7 @@ jobs:
       - uses: actions/cache@v4
         with:
           path: ${{ env.TEST_DATA_HOME }}
-          key: data-cache-v1
+          key: data-cache-v2
           restore-keys: |
             data-cache-
       - name: Install test data
@@ -139,46 +150,85 @@ jobs:
           # ds001600
           datalad install -r https://github.com/nipreps-data/ds001600.git
           datalad update -r --merge -d ds001600/
-          datalad get -r -d ds001600/ ds001600/sub-1/
+          datalad get -r -J 2 -d ds001600/ ds001600/sub-1/
 
           # HCP/sub-101006
           datalad install -r https://github.com/nipreps-data/HCP101006.git
           datalad update -r --merge -d HCP101006/
-          datalad get -r -d HCP101006
+          datalad get -r -J 2 -d HCP101006 HCP101006/*
 
           # ds001771
           datalad install -r https://github.com/nipreps-data/ds001771.git
           datalad update -r --merge -d ds001771/
-          datalad get -r -d ds001771/ ds001771/sub-36/*
-          datalad get -r -d ds001771/derivatives ds001771/derivatives/openneuro/sub-36/*
+          datalad get -r -J 2 -d ds001771/ ds001771/sub-36/*
+          datalad get -r -J 2 -d ds001771/derivatives ds001771/derivatives/openneuro/sub-36/*
 
           # ds000054
           datalad install -r https://github.com/nipreps-data/ds000054.git
           datalad update --merge -d ds000054/
           datalad get -r -d ds000054/ ds000054/sub-100185/*
+          datalad get -r -J 2 -d ds000054/ ds000054/derivatives/smriprep-0.6/sub-100185/anat/
 
           # ds000206
           datalad install -r https://github.com/nipreps-data/ds000206.git
           datalad update -r --merge -d ds000206/
-          datalad get -r -d ds000206/ ds000206/sub-05/
+          datalad get -r -J 2 -d ds000206/ ds000206/sub-05/
+
+          # Brain extraction tests
+          datalad install -r https://gin.g-node.org/nipreps-data/brain-extraction-tests
+          datalad update --merge -d brain-extraction-tests/
+          datalad get -r -J 2 -d brain-extraction-tests brain-extraction-tests/*
+
+          # HCPH pilot
+          datalad install -r https://github.com/nipreps-data/hcph-pilot_fieldmaps.git
+          datalad update -r --merge -d hcph-pilot_fieldmaps/
+          datalad get -r -J 2 -d hcph-pilot_fieldmaps/ hcph-pilot_fieldmaps/*
+
+      - name: Set FreeSurfer variables
+        run: |
+          echo "FREESURFER_HOME=$HOME/.cache/freesurfer" >> $GITHUB_ENV
+          echo "FS_LICENSE=$HOME/.cache/freesurfer/license.txt" >> $GITHUB_ENV
 
       - name: Install FreeSurfer's mri_robust_template
         env:
           MRI_ROBUST_TEMPLATE: sx2n7/providers/osfstorage/5e825301d0e35400ebb481f2
         run: |
           curl https://files.osf.io/v1/resources/$MRI_ROBUST_TEMPLATE?direct > mri_robust_template
           sudo install mri_robust_template /usr/local/bin
-          mkdir -p $HOME/.cache/freesurfer/
-          echo "b2VzdGViYW5Ac3RhbmZvcmQuZWR1CjMwNzU2CiAqQ1MzYkJ5VXMxdTVNCiBGU2kvUGJsejJxR1V3Cg==" | base64 -d > $HOME/.cache/freesurfer/license.txt
+          mkdir -p $(dirname $FS_LICENSE)
+          echo "b2VzdGViYW5Ac3RhbmZvcmQuZWR1CjMwNzU2CiAqQ1MzYkJ5VXMxdTVNCiBGU2kvUGJsejJxR1V3Cg==" | base64 -d > $FS_LICENSE
 
       - name: Run pytest with coverage
         run: |
-          export LD_LIBRARY_PATH=/usr/lib/fsl/5.0:$LD_LIBRARY_PATH
-          export PATH=$ANTSPATH:${AFNI_HOME}:/usr/lib/fsl/5.0:$PATH
-          pytest -v --cov sdcflows --cov-report xml:cov.xml --doctest-modules -n auto sdcflows
+          export PATH=${AFNI_HOME}:$PATH
+          export FSLDIR=${CONDA_PREFIX}
+          pytest -v --cov sdcflows --cov-report xml:cov.xml --doctest-modules -n auto sdcflows \
+            --durations=20 --durations-min=10 -m "$MARKS"
+        env:
+          MARKS: ${{ matrix.marks }}
 
       - uses: codecov/codecov-action@v4
         with:
           file: cov.xml
           token: ${{ secrets.CODECOV_TOKEN }}
         if: ${{ always() }}
+
+  publish:
+    name: Publish released package to pypi.org
+    environment: release-pypi
+    if: github.event.action == 'published'
+    runs-on: ubuntu-latest
+    needs: [build-package, test]
+    permissions:
+      attestations: write
+      id-token: write
+
+    steps:
+      - name: Download packages built by build-and-inspect-python-package
+        uses: actions/download-artifact@v4
+        with:
+          name: Packages
+          path: dist
+
+      - name: Upload package to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
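With build-and-inspect-python-package producing the artifacts and the publish job granted id-token: write and attestations: write, this pipeline appears set up for PyPI Trusted Publishing with build provenance. As a rough sketch of how a consumer might then check a released artifact (assuming attestations are actually generated for the release, and gh >= 2.49):

    # Hypothetical check: verify a downloaded wheel's GitHub-signed provenance
    gh attestation verify dist/*.whl --owner nipreps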