 from os.path import join, dirname
 from warnings import warn
 
-import sqlite3
-
 from .. import config, logging
 from ..utils.filemanip import (
     copyfile, simplify_list, ensure_list,
-    get_related_files, related_filetype_sets)
+    get_related_files)
 from ..utils.misc import human_order_sorted, str2bool
 from .base import (
     TraitedSpec, traits, Str, File, Directory, BaseInterface, InputMultiPath,
-    isdefined, OutputMultiPath, DynamicTraitedSpec, Undefined, BaseInterfaceInputSpec)
-
-have_pybids = True
-try:
-    import bids
-except ImportError:
-    have_pybids = False
-
-if have_pybids:
-    try:
-        from bids import layout as bidslayout
-    except ImportError:
-        from bids import grabbids as bidslayout
-
-try:
-    import pyxnat
-except:
-    pass
-
-try:
-    import paramiko
-except:
-    pass
-
-try:
-    import boto
-    from boto.s3.connection import S3Connection, OrdinaryCallingFormat
-except:
-    pass
+    isdefined, OutputMultiPath, DynamicTraitedSpec, Undefined, BaseInterfaceInputSpec,
+    LibraryBaseInterface)
 
 iflogger = logging.getLogger('nipype.interface')
 
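The module-level try/except imports removed above give way to per-interface declarations that LibraryBaseInterface consumes. A rough sketch of that pattern, using the _pkg / imports attribute names introduced in this diff (the mixin below is illustrative, not nipype's actual base class):

import pkgutil
import warnings


class OptionalLibraryMixin(object):
    # Subclasses declare the package they wrap plus any extra imports,
    # mirroring the _pkg / imports attributes added to the interfaces below.
    _pkg = None          # e.g. 'boto'
    imports = ()         # e.g. ('botocore',)

    def check_libraries(self):
        wanted = [p for p in (self._pkg,) + tuple(self.imports) if p]
        missing = [p for p in wanted if pkgutil.find_loader(p) is None]
        if missing:
            warnings.warn('Missing optional libraries: %s' % ', '.join(missing))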
@@ -536,8 +507,6 @@ def _fetch_bucket(self, bucket_name):
         '''
 
         # Import packages
-        import logging
-
         try:
             import boto3
             import botocore
@@ -607,7 +576,6 @@ def _upload_to_s3(self, bucket, src, dst):
 
         # Import packages
         import hashlib
-        import logging
         import os
 
         from botocore.exceptions import ClientError
@@ -849,7 +817,7 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
         desc='Information to plug into template')
 
 
-class S3DataGrabber(IOBase):
+class S3DataGrabber(LibraryBaseInterface, IOBase):
     """ Generic datagrabber module that wraps around glob in an
         intelligent way for neuroimaging tasks to grab files from
         Amazon S3
@@ -865,6 +833,8 @@ class S3DataGrabber(IOBase):
     input_spec = S3DataGrabberInputSpec
     output_spec = DynamicTraitedSpec
     _always_run = True
+    _pkg = 'boto'
+    imports = ('botocore',)
 
     def __init__(self, infields=None, outfields=None, **kwargs):
         """
@@ -919,6 +889,7 @@ def _add_output_traits(self, base):
     def _list_outputs(self):
         # infields are mandatory, however I could not figure out how to set 'mandatory' flag dynamically
         # hence manual check
+        import boto
         if self._infields:
             for key in self._infields:
                 value = getattr(self.inputs, key)
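With boto now imported inside _list_outputs, a hedged usage sketch of S3DataGrabber; bucket names and paths are placeholders, and only the input names follow the interface spec:

from nipype.interfaces.io import S3DataGrabber

grabber = S3DataGrabber(infields=['subj_id'], outfields=['anat'])
grabber.inputs.anon = True                      # public bucket, no credentials
grabber.inputs.bucket = 'my-open-bucket'        # placeholder
grabber.inputs.bucket_path = 'some/dataset/'    # placeholder
grabber.inputs.local_directory = '/tmp/s3_cache'
grabber.inputs.sort_filelist = True
grabber.inputs.template = '%s/anat.nii.gz'
grabber.inputs.template_args = {'anat': [['subj_id']]}
grabber.inputs.subj_id = 'sub-01'
# boto/botocore are only imported once _list_outputs actually runs:
# results = grabber.run()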
@@ -1035,6 +1006,7 @@ def _list_outputs(self):
     # Takes an s3 address and downloads the file to a local
     # directory, returning the local path.
     def s3tolocal(self, s3path, bkt):
+        import boto
         # path formatting
         if not os.path.split(self.inputs.local_directory)[1] == '':
             self.inputs.local_directory += '/'
@@ -1817,7 +1789,7 @@ class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
     cache_dir = Directory(desc='Cache directory')
 
 
-class XNATSource(IOBase):
+class XNATSource(LibraryBaseInterface, IOBase):
     """ Generic XNATSource module that wraps around the pyxnat module in
         an intelligent way for neuroimaging tasks to grab files and data
         from an XNAT server.
@@ -1852,6 +1824,7 @@ class XNATSource(IOBase):
     """
     input_spec = XNATSourceInputSpec
     output_spec = DynamicTraitedSpec
+    _pkg = 'pyxnat'
 
     def __init__(self, infields=None, outfields=None, **kwargs):
         """
@@ -1901,6 +1874,7 @@ def _add_output_traits(self, base):
     def _list_outputs(self):
         # infields are mandatory, however I could not figure out
         # how to set 'mandatory' flag dynamically, hence manual check
+        import pyxnat
 
         cache_dir = self.inputs.cache_dir or tempfile.gettempdir()
 
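Net effect of the XNATSource hunks, sketched below; the comments describe the intended behaviour of the deferred checks, not exact messages:

# Importing the io module no longer needs pyxnat installed:
from nipype.interfaces.io import XNATSource

xnat = XNATSource(infields=['project'])
# A missing 'pyxnat' (declared via _pkg) can be flagged by
# LibraryBaseInterface before use, and the real "import pyxnat"
# only happens inside _list_outputs at run time.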
@@ -2034,16 +2008,18 @@ def __setattr__(self, key, value):
             super(XNATSinkInputSpec, self).__setattr__(key, value)
 
 
-class XNATSink(IOBase):
+class XNATSink(LibraryBaseInterface, IOBase):
     """ Generic datasink module that takes a directory containing a
         list of nifti files and provides a set of structured output
         fields.
     """
     input_spec = XNATSinkInputSpec
+    _pkg = 'pyxnat'
 
     def _list_outputs(self):
         """Execute this module.
         """
+        import pyxnat
 
         # setup XNAT connection
         cache_dir = self.inputs.cache_dir or tempfile.gettempdir()
@@ -2202,7 +2178,7 @@ class SQLiteSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec):
     table_name = Str(mandatory=True)
 
 
-class SQLiteSink(IOBase):
+class SQLiteSink(LibraryBaseInterface, IOBase):
     """ Very simple frontend for storing values into SQLite database.
 
         .. warning::
@@ -2222,6 +2198,7 @@ class SQLiteSink(IOBase):
 
     """
     input_spec = SQLiteSinkInputSpec
+    _pkg = 'sqlite3'
 
     def __init__(self, input_names, **inputs):
 
@@ -2233,6 +2210,7 @@ def __init__(self, input_names, **inputs):
     def _list_outputs(self):
         """Execute this module.
         """
+        import sqlite3
         conn = sqlite3.connect(
             self.inputs.database_file, check_same_thread=False)
         c = conn.cursor()
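For orientation, a hedged SQLiteSink sketch in the style of its docstring example; database, table, and column names are placeholders:

from nipype.interfaces.io import SQLiteSink

sql = SQLiteSink(input_names=['subject_id', 'some_measurement'])
sql.inputs.database_file = 'results.db'        # placeholder
sql.inputs.table_name = 'experiment_results'   # placeholder
sql.inputs.subject_id = 'sub-01'
sql.inputs.some_measurement = 5.0
# sqlite3 is imported inside _list_outputs, so a missing/broken sqlite3
# only surfaces when the node runs:
# sql.run()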
@@ -2333,7 +2311,7 @@ class SSHDataGrabberInputSpec(DataGrabberInputSpec):
         desc='If set SSH commands will be logged to the given file')
 
 
-class SSHDataGrabber(DataGrabber):
+class SSHDataGrabber(LibraryBaseInterface, DataGrabber):
     """ Extension of DataGrabber module that downloads the file list and
         optionally the files from a SSH server. The SSH operation must
         not need user and password so an SSH agent must be active in
@@ -2397,6 +2375,7 @@ class SSHDataGrabber(DataGrabber):
     input_spec = SSHDataGrabberInputSpec
     output_spec = DynamicTraitedSpec
     _always_run = False
+    _pkg = 'paramiko'
 
     def __init__(self, infields=None, outfields=None, **kwargs):
         """
@@ -2411,11 +2390,6 @@ def __init__(self, infields=None, outfields=None, **kwargs):
         See class examples for usage
 
         """
-        try:
-            paramiko
-        except NameError:
-            warn("The library paramiko needs to be installed"
-                 " for this module to run.")
         if not outfields:
             outfields = ['outfiles']
         kwargs = kwargs.copy()
@@ -2490,11 +2464,7 @@ def _get_files_over_ssh(self, template):
         return outfiles
 
     def _list_outputs(self):
-        try:
-            paramiko
-        except NameError:
-            raise ImportError("The library paramiko needs to be installed"
-                              " for this module to run.")
+        import paramiko
 
         if len(self.inputs.ssh_log_to_file) > 0:
             paramiko.util.log_to_file(self.inputs.ssh_log_to_file)
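The hunk above reduces the paramiko guard to the plain deferred-import idiom; in isolation:

def _list_outputs(self):
    # A missing paramiko now surfaces here as a standard ImportError,
    # replacing the removed module-level try/except and the manual
    # NameError check above.
    import paramiko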
@@ -2574,6 +2544,7 @@ def _list_outputs(self):
         return outputs
 
     def _get_ssh_client(self):
+        import paramiko
         config = paramiko.SSHConfig()
         config.parse(open(os.path.expanduser('~/.ssh/config')))
         host = config.lookup(self.inputs.hostname)
@@ -2765,7 +2736,7 @@ class BIDSDataGrabberInputSpec(DynamicTraitedSpec):
                               'ignore derivatives/, sourcedata/, etc.)')
 
 
-class BIDSDataGrabber(IOBase):
+class BIDSDataGrabber(LibraryBaseInterface, IOBase):
 
     """ BIDS datagrabber module that wraps around pybids to allow arbitrary
     querying of BIDS datasets.
@@ -2798,6 +2769,7 @@ class BIDSDataGrabber(IOBase):
     input_spec = BIDSDataGrabberInputSpec
     output_spec = DynamicTraitedSpec
     _always_run = True
+    _pkg = 'bids'
 
     def __init__(self, infields=None, **kwargs):
         """
@@ -2815,7 +2787,12 @@ def __init__(self, infields=None, **kwargs):
         }
 
         # If infields is empty, use all BIDS entities
-        if infields is None and have_pybids:
+        if infields is None:
+            # Version resilience
+            try:
+                from bids import layout as bidslayout
+            except ImportError:
+                from bids import grabbids as bidslayout
             bids_config = join(dirname(bidslayout.__file__), 'config', 'bids.json')
             bids_config = json.load(open(bids_config, 'r'))
             infields = [i['name'] for i in bids_config['entities']]
@@ -2830,18 +2807,16 @@ def __init__(self, infields=None, **kwargs):
 
         self.inputs.trait_set(trait_change_notify=False, **undefined_traits)
 
-    def _run_interface(self, runtime):
-        if not have_pybids:
-            raise ImportError(
-                "The BIDSEventsGrabber interface requires pybids."
-                " Please make sure it is installed.")
-        return runtime
-
     def _list_outputs(self):
+        # Version resilience
+        try:
+            from bids import BIDSLayout
+        except ImportError:
+            from bids.grabbids import BIDSLayout
         exclude = None
         if self.inputs.strict:
             exclude = ['derivatives/', 'code/', 'sourcedata/']
-        layout = bidslayout.BIDSLayout(self.inputs.base_dir, exclude=exclude)
+        layout = BIDSLayout(self.inputs.base_dir, exclude=exclude)
 
         # If infield is not given nm input value, silently ignore
         filters = {}
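Finally, a hedged sketch of how the version-resilient pybids import plays out for a caller; the dataset path and entity value are placeholders:

from nipype.interfaces.io import BIDSDataGrabber

bg = BIDSDataGrabber()            # 'bids' availability declared via _pkg
bg.inputs.base_dir = 'ds005/'     # placeholder BIDS dataset
bg.inputs.subject = '01'
# Newer pybids exposes bids.BIDSLayout; older releases only ship
# bids.grabbids.BIDSLayout.  _list_outputs tries the former and falls
# back to the latter, so both are supported.
# results = bg.run()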